import unittest
import sys
sys.path.append('../src')

from islandgame import *
from service_console import *
from service_player import *
from service_log import *
from service_screen import *
from iofactory import FakeIO
from tiles import *


class TestGame(unittest.TestCase):

    def test_golden_initialization(self):
        """
        When the game is initiated and the user enters valid input,
        should generate a board with all players
        """
        # Arrange
        bus = IslandBus()
        messageFactory = MessageFactory()
        playerFactory = PlayerFactory()
        game = IslandGame(bus, messageFactory)
        island = Island()
        pDeck = PlayerDeck()
        fio = FakeIO()
        tiles = Tiles()
        fio.callStack = [
            StackItem(1, "What is your name? ", 'Alex\n'),
            StackItem(2, "How many players will be playing? ", '2\n'),
            StackItem(3, "Choose a player type: ", 'Diver\n'),
            StackItem(4, "Choose a player type: ", 'Engineer\n')]
        cs = ConsoleService(bus, fio)
        ss = ScreenService(bus, island, fio, tiles)
        ps = PlayerService(bus, playerFactory, tiles)
        ls = LogService(bus, fio)

        # Act
        game.play()
        board = island.generate_board(tiles.tiles)

        # Assert
        self.assertTrue(len(board) > 0)
        self.assertTrue(Constant.PlayerType["Diver"] in board)
        self.assertTrue(Constant.PlayerType["Engineer"] in board)
Deep in the woods of... well, where on earth were we?! There was a beach, a beautiful crowd of people and a lovely atmosphere to play in: HAMERFEST, you have a new lover. Want to listen to some of that bleep bleep and hmmff hmmff? Check it out!
import numpy as np

from solver import Solver


class LogisticRegression(Solver):
    """
    This uses the generalized linear model with iteratively reweighted
    least squares to calculate linear coefficients (parameters). This
    class is abstract and must be extended by the underlying distribution
    to determine the exact gradient and method for applying parameters.

    Inspired by Matlab code: http://www.cs.cmu.edu/~ggordon/IRLS-example/
    """

    epsilon = 1e-10
    max_iterations = 500
    ridge = 1e-5

    def __init__(self):
        Solver.__init__(self)

    def calculate_parameters(self, x, y):
        # dimensions
        n, m = np.shape(x)
        # iteratively reweighted least squares
        i = 0
        params = np.zeros((m, 1))
        old_exp_y = -np.ones(np.shape(y))
        while i < self.max_iterations:
            # count iteration
            i += 1
            # calculate current prediction and its derivative
            adj_y = np.dot(x, params)
            exp_y = 1 / (1 + np.exp(-adj_y))
            deriv = exp_y * (1 - exp_y)
            w_adj_y = (deriv * adj_y + (y - exp_y))  # * w
            weights = np.diag(deriv.flatten())  # * w
            try:
                params = np.dot(
                    np.dot(
                        np.linalg.inv(
                            np.dot(np.dot(x.T, weights), x) + self.ridge),
                        x.T),
                    w_adj_y)
            except np.linalg.LinAlgError:
                print("Warning: Singular matrix")
                return params
            if np.sum(np.abs(exp_y - old_exp_y)) < n * self.epsilon:
                return params
            old_exp_y = exp_y
        # todo
        print("Warning: Does not converge")
        return params

    def apply_parameters(self, x, params):
        return 1 / (1 + np.exp(-np.dot(x, params)))
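A minimal usage sketch for the class above, on synthetic data. This is an assumption-laden illustration, not part of the original module: it assumes Solver.__init__ takes no required arguments, and the seed, sample size and true parameters are arbitrary. It shows the expected shapes: x is (n, m) with a bias column, y is (n, 1).

if __name__ == '__main__':
    rng = np.random.default_rng(0)
    # 100 samples, bias column plus two features
    x = np.hstack([np.ones((100, 1)), rng.normal(size=(100, 2))])
    true_params = np.array([[0.5], [2.0], [-1.0]])
    probs = 1 / (1 + np.exp(-np.dot(x, true_params)))
    y = (probs > rng.random((100, 1))).astype(float)  # Bernoulli labels

    model = LogisticRegression()
    params = model.calculate_parameters(x, y)
    print(model.apply_parameters(x, params)[:5])  # predicted probabilities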
on international order and multilateralism. I focused primarily on my visit to Venezuela from 26 November to 4 December 2017. When the Human Rights Council appointed me as the first independent expert on the promotion of a democratic and equitable international order in 2012, I felt overwhelmed by the magnitude of the task, but at the same time very honoured to be given the opportunity to demonstrate the added value of Human Rights Council Resolution 18/6, showing in a concrete and tangible way that human rights are indeed interdependent and interrelated, and addressing fundamental issues that have frequently been avoided because they were perceived as inopportune or not “politically correct”. Six years later I have written seven thematic reports to the Council and six to the General Assembly, and I am currently drafting my mission report on Venezuela and Ecuador. My final report, A/HRC/37/63, contains 23 Principles of International Order that should help governments, inter-governmental organizations and civil society advance toward a more just and peaceful world, where the United Nations Charter is recognized as the World Constitution and the International Court of Justice as the World Constitutional Court. My final report summarizes the lessons learned from the mandate thus far and formulates recommendations for further thematic reports, e.g. on the impact on the international order of the activities and behind-closed-doors decisions of private groups and organizations like the World Economic Forum, the G7, the G20, the Bilderberg Group, the Trilateral Commission, the Council on Foreign Relations, etc. Also important will be to measure the impact on the international order of credit rating agencies like Moody’s and Standard and Poor’s: private enterprises that influence world finances while operating without transparency or accountability and lacking democratic legitimacy. Among the achievements of the last six years I would like to highlight my country mission to Venezuela and Ecuador, undertaken with a view to examining the impact of alternative economic models on the international order, and the possibility of achieving enhanced respect for economic, social and cultural rights through the implementation of such models, supported by international solidarity. Early in my preparations for the mission, and in the course of studying the mainstream narratives on Venezuela and Ecuador, it became apparent that some powerful countries strongly oppose “experiments” like the Revolución Bolivariana in Venezuela and the Revolución Ciudadana in Ecuador, and do not hesitate to conduct an economic war against those countries with the declared intention of making those experiments fail. This contravenes Chapter 4, article 19, of the OAS Charter and GA Resolution 2625 on Friendly Relations, both of which prohibit such economic pressures on sovereign States. I feel honoured to have been the first UN Special Procedures mandate holder since 1996 to have carried out a mission to Venezuela, which I conducted in strict compliance with United Nations mission guidelines and the code of conduct. Among other things, the mission contributed to increased cooperation of UN agencies with the Venezuelan government and to the release of a number of detainees. I made a considerable effort to see members of the opposition, the National Assembly, Fedecameras, business leaders, university professors, representatives of the Church, and non-governmental organizations critical of the government, including Amnesty International and Provea.
I sought and received ample documentation from all stakeholders, including persons suffering from the scarcity of medicines, relatives of detained persons, electoral commissions and government officials. Before travelling to Venezuela and Ecuador, I conscientiously studied the 2017 report of the Office of the High Commissioner for Human Rights on Venezuela, as well as the concluding observations on the examination of Venezuela’s reports by the Human Rights Committee and the Committee on Economic, Social and Cultural Rights. I studied the reports of the Inter-American Commission on Human Rights, Amnesty International, Human Rights Watch and other organizations, including numerous Venezuela-based NGOs. I also studied the pertinent responses of the Venezuelan government. In my opinion, every mission by a UN rapporteur should be constructive rather than confrontational; it should formulate pragmatic recommendations and not engage in needless rhetoric. With regard to my mission to Venezuela, my methodology was to listen to all stakeholders, study all relevant documentation and statistics, including from FAO, WHO and CEPAL, and to try to arrive at a balanced view of the situation and devise a coherent strategy for solving the pressing problems of scarcity of food and medicines, galloping inflation and general insecurity. Our priority was, and must be, how to help the Venezuelan people. The United Nations has a responsibility to advance an international order that is more peaceful, democratic and equitable. As rapporteur, I have always seen my mandate as result-oriented. I have been guided by the fundamental rule “audiatur et altera pars”, a habit that I practiced for decades as Deputy-Chief of the Communications Branch and subsequently Chief of the Petitions Department at OHCHR. I have always been committed to a culture of dialogue, convinced that States have the responsibility to protect their own populations and that only States can effectively improve the lives of persons under their jurisdiction. I am persuaded that States will act for human rights when they recognize that it is in their own interest and in the interest of their peoples to respect human dignity and to ensure that human rights are better served. Having studied the detailed responses given by the Venezuelan government to communications transmitted by OHCHR, the Inter-American Commission on Human Rights and other institutions, I have the impression that these responses have been largely ignored, something that would have been unthinkable in the procedure under the Optional Protocol to the International Covenant on Civil and Political Rights. While responses by governments may not always be complete and may not always be convincing, they must be taken into account sine ira et studio. If responses are deemed insufficient, more dialogue becomes necessary, and ultimately solutions can be found. Personally, I do not see the function of a mandate-holder as playing judge and jury, grandstanding and condemning States. More important is to try to mediate, to listen to the victims and to confront the State with their grievances. I consider the all too popular policy of “naming and shaming” ineffective, because the government being “named and shamed” may not recognize the moral authority of the “namer and shamer”. Insulting a head of State is hardly a promising strategy. Far more fruitful are quiet diplomacy and a good faith offer of advisory services and technical assistance.
During my mission I told my interlocutors that I was coming to listen and to learn, in the hope of being able to help, and that I remained available for mediation. I explained that I am not a super-rapporteur and cannot encroach on the areas of competence of the working group on arbitrary detention, the rapporteur on freedom of expression, the rapporteur on the right to peaceful assembly and association, the rapporteur on human rights defenders, etc. Nevertheless, out of respect for the many who welcomed the visit of a UN rapporteur, and out of compassion for all victims, I did receive their documentation and listened to their grievances. I promised to forward their concerns to the pertinent rapporteurs and working groups, which I did forthwith, and continue to do whenever I receive additional information, for instance, this very week. From NGOs and opposition politicians I received lists of persons in detention. I thought it appropriate to take the opportunity to communicate to the Venezuelan authorities my desire that they reconsider the cases in question and release as many detainees as possible in a manner consistent with the rule of law. I was pleased to learn that on 23 December the Government decided to release 80 persons, including many whose cases I had brought to the attention of the government, for instance the engineer Roberto Picon, whose wife and son I had met in Caracas. The focus of my visit to two ALBA countries was to examine measures taken to recast government priorities toward economic, social and cultural rights, toward a more equitable distribution of wealth, the elimination of illiteracy, the expansion of free education from primary schools to universities, the reduction of extreme poverty, the construction of affordable housing and the distribution of subsidized food and medicines. Bearing in mind that unilateral coercive measures, financial blockades, induced inflation, international criminal rings involved in the contraband of foods and medicines, and narcotrafficking all impact the international order, I endeavoured to study these phenomena and their impact on the economic crisis in Venezuela and the growing scarcity of certain foods, medicines and items of personal hygiene. In the mainstream media and in statements and press releases of the Office of the High Commissioner for Human Rights, and also of some of my fellow Special Procedures mandate-holders, we learn about the problems of hunger, disease, scarcity and insecurity. Those are precisely the problems that we in the United Nations must endeavour to solve. In order to solve them, we must inquire into the causes of these problems. We read that the socialist model has failed and that the problems are exclusively attributable to incompetence and corruption. But where is the empirical evidence to back up such general statements? Any investigative journalist, scientist or truth seeker can find at least some empirical evidence showing that the sanctions imposed by Obama and Trump, as well as sanctions imposed by Canada and the European Union, the economic war waged against the Venezuelan government since 1998, the closing of Venezuelan bank accounts in many countries, the financial blockade, etc. have significantly aggravated the Venezuelan crisis. An article published this week by Mark Weisbrot in US News and World Report touches on these issues.
Much more detailed is the analysis by Professor Pasqualina Curcio of the University of Caracas, who documents in her book The Visible Hand of the Market how the non-conventional war against the Venezuelan government is hurting the Venezuelan people, in a manner not unlike the non-conventional war waged against the Salvador Allende government from 1970 to 1973, when he was toppled by the coup d’état of General Augusto Pinochet. The book by Peter Kornbluh, The Pinochet File, based on declassified US documents, shows how Richard Nixon told Henry Kissinger in 1970 that an alternative economic model in Latin America would not be tolerated; hence he ordered “to make the Chilean economy scream”. Henry Kissinger was particularly concerned about Allende’s program as a “precedent” for Marxist-like measures in other countries. A more equitable distribution of wealth, nationally and internationally, has never been part of the neoliberal agenda. One of my main concerns during the visit was to observe the situation on the ground. My methodology was aimed at objectivity; the ultimate aim was to formulate constructive proposals in the spirit of international solidarity, as formulated by my colleague Virginia Dandan in her Declaration on the right to international solidarity. I was particularly sensitive to the fact that, as the first rapporteur to visit Venezuela in 21 years, I should encourage the government to invite other rapporteurs. Indeed, the United Nations can and should offer advisory services and technical assistance so as to help the government tackle the complex problems it is facing. I am referring now to the imperative to defend the independence of rapporteurs. Already six weeks before the beginning of my mission to Venezuela and Ecuador, a defamation campaign against me was launched by critics of my mission, and considerable pressure was brought upon me in an attempt to intimidate me. I was subjected to constant ad hominem attacks and my credibility was put into question – notwithstanding the fact that I have again and again proven my independence, having written twelve reports to the Council and General Assembly; notwithstanding the fact that I had been Secretary of the Human Rights Committee and Chief of Petitions, and am the author of numerous books, including the handbook United Nations Human Rights Committee Caselaw (co-authored with Jakob Möller) and many encyclopedia articles, including two on the Office of the UN High Commissioner for Human Rights and others on Nelson Mandela, Simon Wiesenthal, Kenneth Roth and Aryeh Neier, founder of Human Rights Watch. Six weeks before my trip I received letters and emails from NGOs essentially telling me not to go – because I was not the “pertinent” rapporteur; they argued that, out of solidarity with the other rapporteurs who had asked for invitations before me, I should condition my acceptance on the acceptance by Venezuela of visits by the other rapporteurs. I received emails essentially dictating to me what should be in my report. I was subjected to multiple insults and intimidations. My mission was labelled “a fake investigation” even before I had landed in Caracas. During the visit to Venezuela an NGO launched a Facebook and Twitter campaign against my visit, in which I was insulted in terms that I cannot repeat. Following my visit to Venezuela this kind of mobbing continued. I have signalled this to the Office and to the Coordinating Committee, but as of today nothing has been done to defend my honour and reputation.
Indeed, if the independence and credibility of a rapporteur are attacked, this also affects the independence and credibility of the whole system of Special Procedures. Rapporteurs must be able to conduct their investigations according to the code of conduct and not be subjected to defamation and intimidation. Otherwise rapporteurs may choose the easy way and practice self-censorship, making only “safe” statements that will not offend anyone. Had I wanted applause, had I been an opportunist, I could have gone along with the NGOs and mainstream media. But this would have violated not only the code of conduct, but also my own conscience. Coming back to the priority of helping the Venezuelan people overcome the economic crisis, I wish to conclude with a simple statement. Those who shout “humanitarian crisis” should first look for the causes thereof, and to the extent that they themselves are contributing to the crisis through sanctions and an economic war, they are estopped and lack moral authority. Whoever wants to help the Venezuelan people should prevail upon their governments to lift all unilateral coercive measures and put an end to the economic war. I had welcomed the two-year negotiating process between the Venezuelan government and the opposition in the Dominican Republic, a noble process initiated by the former Spanish Prime Minister Jose Luis Rodriguez Zapatero and accompanied by representatives of six Latin American and Caribbean countries. It was most regrettable that after a compromise text had been reached, at the last moment and to everyone’s surprise, the opposition refused to sign. Be that as it may, I call on all Venezuelans to continue the dialogue and to participate in the forthcoming elections of May 2018. I call upon the United Nations and the Carter Centre to send observers to the elections. Boycotting these elections would be undemocratic and contrary to the interests of the Venezuelan people. I have not yet completed drafting the report, which will be presented to the Council by my successor in September 2018. I welcome all here present to send me pertinent information and documentation, which I shall endeavour to incorporate into my report.
from .sqrl_conv import sqrl_base64_encode, sqrl_base64_decode
from .sqrl_url import SqrlUrl
from .sqrl_crypto import *


def sqrl_query(imk, sks, server):
    # Get site specific keys
    idk, ssk = sqrl_get_idk_for_site(imk, sks)

    client = b'ver=1\r\n'
    client += b'cmd=query\r\n'
    client += b'idk=%s\r\n' % sqrl_base64_encode(idk)
    client += b'opt=cps~suk\r\n'
    print('client', client)
    client = sqrl_base64_encode(client)

    print('server', server)
    server = sqrl_base64_encode(server)

    ids = sqrl_sign(ssk, client + server)
    print('ids', ids)

    form = {'client': client,
            'server': server,
            'ids': sqrl_base64_encode(ids)}
    return form


def sqrl_ident(ilk, imk, sks, server, sin, create_suk):
    # Get site specific keys
    idk, ssk = sqrl_get_idk_for_site(imk, sks)

    client = b'ver=1\r\n'
    client += b'cmd=ident\r\n'
    client += b'idk=%s\r\n' % sqrl_base64_encode(idk)
    if sin:
        ins = sqrl_hmac(EnHash(ssk), sin)
        client += b'ins=%s\r\n' % sqrl_base64_encode(ins)
    if create_suk:
        suk, vuk = sqrl_idlock_keys(ilk)
        client += b'suk=%s\r\n' % sqrl_base64_encode(suk)
        client += b'vuk=%s\r\n' % sqrl_base64_encode(vuk)
    client += b'opt=cps~suk\r\n'  # TODO: Not always true?
    client = sqrl_base64_encode(client)

    ids = sqrl_sign(ssk, client + server)

    form = {'client': client,
            'server': server,
            'ids': sqrl_base64_encode(ids)}
    return form


def sqrl_disable(ilk, imk, sks, server, sin, create_suk):
    # Get site specific keys
    idk, ssk = sqrl_get_idk_for_site(imk, sks)

    client = b'ver=1\r\n'
    client += b'cmd=disable\r\n'
    client += b'idk=%s\r\n' % sqrl_base64_encode(idk)
    if sin:
        ins = sqrl_hmac(EnHash(ssk), sin)
        client += b'ins=%s\r\n' % sqrl_base64_encode(ins)
    if create_suk:  # TODO: Does this even make any sense?
        suk, vuk = sqrl_idlock_keys(ilk)
        client += b'suk=%s\r\n' % sqrl_base64_encode(suk)
        client += b'vuk=%s\r\n' % sqrl_base64_encode(vuk)
    client += b'opt=cps~suk\r\n'  # TODO: Not always true?
    client = sqrl_base64_encode(client)

    ids = sqrl_sign(ssk, client + server)

    form = {'client': client,
            'server': server,
            'ids': sqrl_base64_encode(ids)}
    return form
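A minimal usage sketch of the round-trip these helpers were written for. Everything here is a hypothetical placeholder, not part of this module: the key material, the nut URL, the endpoint, and the use of `requests` as transport. Note the asymmetry the code above implies: sqrl_query base64-encodes `server` itself, while sqrl_ident signs `server` as given, so it must be passed pre-encoded.

import requests

imk = b'<identity master key>'            # placeholder key material
ilk = b'<identity lock key>'              # placeholder key material
sks = b'example.com'                      # placeholder site key string
endpoint = 'https://example.com/sqrl'     # placeholder endpoint

# Step 1: query (raw server value; sqrl_query encodes it internally)
form = sqrl_query(imk, sks, server=b'sqrl://example.com/sqrl?nut=...')
reply = requests.post(endpoint, data=form)

# Step 2: ident, echoing the server's reply back pre-encoded
form = sqrl_ident(ilk, imk, sks, server=sqrl_base64_encode(reply.content),
                  sin=None, create_suk=True)
requests.post(endpoint, data=form)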
Ultra-realistic, multi-jointed swimbaits have been around for quite some time. I got a chance to play with the six inch, “Green ‘Gill” coloured model from Got ‘Em Coach Tackle recently and had lots of fun with it. Right along with the bait’s natural appearance and swimming motion are a handful of smart design features. This is a tough little lure that’s also pretty versatile. Top-end hardware was the first thing that struck me about the Green ‘Gill. Though I’m sure it will be a hit with bass guys, the bait’s hooks, rings and paint finish certainly seem over-engineered. You could easily throw this lure with confidence for either of the Esox species. Right off the bat, I liked the fact that this bait may look pretty, but it’s built very tough. Way too many lures out there (ultra-realistic ones, especially) put all the emphasis on a flashy package without any guts. Super sharp, black nickel trebles and small, heavy split rings pretty much ensure you’ll hook fish solidly and be able to muscle them with no fear. In sloppy weeds or wood, this is a definite ‘must,’ particularly with no-stretch line. The tail hook hanger is actually a ball bearing swivel that’s been molded into the body. There’s no chance of any leverage between the sets of hooks. That’s a smart wrinkle, and I’m surprised more manufacturers don’t apply it. The bait’s paint finish resists hook rash well, so far. And in the clear water I used it in, it struck a great balance between subtle realism and attraction. The natural bristle tail is really cool. It catches light differently than the rest of the body, and appears thin and delicate underwater, just like a fish’s tail in the wild. Even without an internal rattle chamber, you’ll still get great, natural sound and vibration from the bait. Body segments click and clack together, and it also throws a lot of thump when straight-reeled. I was impressed by the lure’s stability overall. Other than simply swimming it along using a straight retrieve or long, smooth pulls, I was also able to tap, twitch and jerk it. It didn’t roll over or foul. The first afternoon I tried the Green ‘Gill, I experimented with it outside a shallow cove filled with cabbage weeds. The lure ripped through the slop and recovered well. Not only did it shed weeds, it also never tangled or stopped thumping away. As a twitchbait, it acts a lot like a Musky Innovations Shallow Invader, a bait that I use a lot with good luck. A fat little smallmouth hammered the lure just outside the weeds on my first afternoon, after a series of hard snaps and pauses. You can get it walking left to right, up and down and all over the place. On a pause, the lure sinks horizontally, about one foot per second. It’s very realistic, and certainly has more to offer than just plain ‘swimming’ it. The lure stays down, too. As soon as I let off the retrieve, I could easily tick the tops of deeper, fringe weeds out in 12 to 13 feet of water. For fish in late summer and into fall, this is a swimbait that you can fish deep, if that’s what’s needed. I rigged mine on an 8’ casting rod with 80lb braid and a light, solid wire leader. Dropping down to 50 or 65 pound line and a more supple fluorocarbon leader would bring the bait to life even more. The lure is so stable you could troll it, too. I’ve seen firsthand the way our lakes are changing. From The Great Lakes to The Kawarthas to my home waters of Georgian Bay, we’re living and fishing in a time of steady, visible change.
One of the biggest things I see is water that’s weedier, clearer and home to more and more panfish species. Where I used to see perch and rock bass, I see a lot more sunfish. Right before shooting the little video that accompanies this write-up, we watched a loose clump of 4-6 inch pumpkinseeds milling around the boat. At the end of the day, baits that imitate this forage and baits that are tailored to fishing clearing water are definitely going to factor into more and more fishermen’s approaches. I was out walleye fishing in the dead of night just last weekend and had about a ten pound giant cough up a small sunfish in the net. While rummaging through the shallows at night, panfish were clearly a food source. Got ‘Em Coach Tackle offers a wide range of hues and species-specific looks with their swimbaits, including crappie and other shades of sunfish. The Green ‘Gill looks great in the water I fish. There’s probably a colour that will work where you fish, too. The experiment will continue with these cool little baits! It’s never a bad idea to support new, smaller tackle companies. They’re the bedrock of the industry, and where a lot of innovations catch fire. To see this nasty little critter in action, check out the video below, and keep an eye on the Bushey Angle Facebook Page for updates on how I’m doing with it. There’s still plenty of summer left, and things will really heat up down the stretch into September. Be safe out there and enjoy it all, wherever you happen to be wetting a line. For more information on Got ‘Em Coach Tackle, check out their website. JP Bushey is a multi-species, multi-season fisherman living in Barrie, Ontario. North-Central Ontario’s ‘big water’ is where he spends most of his time, from his home waters of Georgian Bay to The Great Lakes, Lake Nipissing and The French River. JP’s been a freelance fishing contributor for over fifteen years, and enjoys helping people to improve their fishing through his articles, speaking engagements and on-the-water instruction.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A patch to tweepy for storing original json in Status.

File: tweepy_patch.py
Author: SpaceLis
Email: Wen.Li@tudelft.nl
GitHub: http://github.com/spacelis
Description:
"""

import types
import sys
from json import dumps


def stack(cls, method, mock):
    """ Patching a method in class. """
    mockee = getattr(cls, method)

    def wrapper(s, *args, **kwargs):
        """ wrapper """
        return mock(s, mockee, *args, **kwargs)

    bak = '__patched__' + method
    setattr(cls, bak, mockee)
    setattr(cls, method, types.MethodType(wrapper, cls))


def preserve_origin(_, mockee, api, json):
    """ Preserving the original json object. """
    s = mockee(api, json)
    setattr(s, '_raw', dumps(json))
    return s


def patchStatus():
    """ Patching the tweepy Status objects to make it store the raw json. """
    if 'tweepy.models' in sys.modules:
        stack(sys.modules['tweepy.models'].Status, 'parse', preserve_origin)


class ResourceKeeper(object):
    """ A resource keeper that reports whether there is resource left;
    taking fails once all resource has been consumed.
    """

    def __init__(self, limit):
        """ Init resource keeper.

        :limit: A positive number means finite resource while 0 or negative
                means infinite.
        """
        self.limit = limit
        self.left = limit

    def take(self, num=1, integrity=False):
        """ Take resource.

        :num: The number of units to take away.
        :integrity: If True, succeed only when all `num` units are available.
        :returns: Whether the take succeeded.
        """
        assert num >= 0
        if not self.limit:
            return True
        if integrity:
            ret = (self.left - num >= 0)
        else:
            ret = (self.left > 0)
        if self.left > 0:
            self.left -= num
        return ret

    def empty(self):
        """ :returns: Whether all resource has been consumed. """
        return self.left <= 0


def iter_scoll(api, limit, *args, **kwargs):
    """ Iterating through pages of Twitter API.

    :api: The api to use from Tweepy
    :limit: The max number of tweets to scroll back in time.
    :*args: Args for the tweepy API.
    :**kwargs: Key word args for the tweepy API.
    :yields: A status object from Tweepy.
    """
    _has_more = True
    res = ResourceKeeper(limit)
    if 'count' not in kwargs:
        kwargs['count'] = 100
    while _has_more:
        _has_more = False
        for s in api(*args, **kwargs):
            _has_more = True
            max_id = s.id
            if not res.take():
                break
            yield s
        if res.empty() or not _has_more:
            break
        kwargs['max_id'] = max_id - 1
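A minimal usage sketch for the patch above, assuming tweepy is installed; the `auth` handler and the screen name are hypothetical placeholders, and `user_timeline` is the standard tweepy pagination call this helper was written against.

import tweepy
import tweepy_patch

# auth is a hypothetical, already-configured tweepy auth handler
api = tweepy.API(auth)
tweepy_patch.patchStatus()  # patch after tweepy.models has been imported

# scroll back through at most 500 statuses of a timeline
for status in tweepy_patch.iter_scoll(api.user_timeline, 500,
                                      screen_name='twitter'):
    print(status._raw)  # the preserved original json string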
Camille Triebsch is an Operations Associate for OPCM. She assists all teams in many aspects of the day-to-day operations of the firm. Previously, she worked as a Regional Program Manager at Team IMPACT. Camille received her B.A. in Sociology from UCLA.
from PyQt5 import QtCore, QtNetwork
import logging
import json

from config import Settings

logger = logging.getLogger(__name__)


class ApiBase(QtCore.QObject):

    def __init__(self, route):
        QtCore.QObject.__init__(self)
        self.url = QtCore.QUrl(Settings.get('api') + route)
        self.manager = QtNetwork.QNetworkAccessManager()
        self.manager.finished.connect(self.onRequestFinished)
        self.handlers = {}

    # query arguments like filter=login==Rhyza
    def request(self, queryDict, responseHandler):
        query = QtCore.QUrlQuery()
        for key, value in queryDict.items():
            query.addQueryItem(key, str(value))
        url = QtCore.QUrl(self.url)
        url.setQuery(query)
        request = QtNetwork.QNetworkRequest(url)
        request.setRawHeader(b'User-Agent', b"FAF Client")
        request.setRawHeader(b'Content-Type', b'application/vnd.api+json')
        reply = self.manager.get(request)
        self.handlers[reply] = responseHandler

    def onRequestFinished(self, reply):
        if reply.error() != QtNetwork.QNetworkReply.NoError:
            logger.error("API request error: %s", reply.error())
        else:
            message_bytes = reply.readAll().data()
            message = json.loads(message_bytes.decode('utf-8'))
            included = self.parseIncluded(message)
            meta = self.parseMeta(message)
            result = self.parseData(message, included)
            if len(meta) > 0:
                self.handlers[reply](result, meta)
            else:
                self.handlers[reply](result)
        self.handlers.pop(reply)
        reply.deleteLater()

    def parseIncluded(self, message):
        result = {}
        relationships = []
        if "included" in message:
            for inc_item in message["included"]:
                if not inc_item["type"] in result:
                    result[inc_item["type"]] = {}
                if "attributes" in inc_item:
                    result[inc_item["type"]][inc_item["id"]] = \
                        inc_item["attributes"]
                if "relationships" in inc_item:
                    for key, value in inc_item["relationships"].items():
                        relationships.append(
                            (inc_item["type"], inc_item["id"], key, value))
            message.pop('included')
        # resolve relationships
        for r in relationships:
            result[r[0]][r[1]][r[2]] = self.parseData(r[3], result)
        return result

    def parseData(self, message, included):
        if "data" in message:
            if isinstance(message["data"], list):
                result = []
                for data in message["data"]:
                    result.append(self.parseSingleData(data, included))
                return result
            elif isinstance(message["data"], dict):
                return self.parseSingleData(message["data"], included)
            else:
                logger.error("error in response: %s", message)
        if "included" in message:
            logger.error("unexpected 'included' in message: %s", message)
        return {}

    def parseSingleData(self, data, included):
        result = {}
        try:
            if data["type"] in included and data["id"] in included[data["type"]]:
                result = included[data["type"]][data["id"]]
            result["id"] = data["id"]
            if "type" not in result:
                result["type"] = data["type"]
            if "attributes" in data:
                for key, value in data["attributes"].items():
                    result[key] = value
            if "relationships" in data:
                for key, value in data["relationships"].items():
                    result[key] = self.parseData(value, included)
        except Exception:
            logger.error("error parsing %s", data)
        return result

    def parseMeta(self, message):
        result = {}
        if "meta" in message:
            result["meta"] = message["meta"]
        return result
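A minimal usage sketch, assuming the client's config module provides an 'api' base URL and a Qt event loop is running; the '/players' route, the filter value and the handler name are hypothetical.

from PyQt5 import QtWidgets


def on_players(result, meta=None):
    # result is a dict or list of dicts flattened from the JSON-API payload
    print(result)

app = QtWidgets.QApplication([])
api = ApiBase('/players')  # hypothetical route
api.request({'filter': 'login==Rhyza'}, on_players)
app.exec_()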
About Windows drivers: this page discusses what Windows drivers are and how to properly identify and download them. A driver is a small but essential piece of software written for a specific operating system, such as Microsoft Windows 10 or Windows XP. The operating system uses the driver to communicate with a hardware device such as a printer, sound card, video card or network adapter. Microsoft provides drivers for several USB device classes approved by the USB-IF; these drivers and their installation files are included in Windows, in the \Windows\System32\DriverStore\FileRepository folder. Individual drivers may also be available freely by manually visiting device manufacturer websites.
Device Manager is the place to start when troubleshooting hardware problems, and it can be found in Windows 10, 8, 7, Vista and XP. If a device is not listed, or if devices appear under the “Unknown devices” or “Other devices” group (usually with a yellow exclamation mark), the most likely cause is a missing or incorrect device driver. A common example is the “Base System Device” entry: after a clean install of Windows 7 on an Acer laptop, two such items listed under Other Devices turned out to be the card reader (SD, xD, etc.); once the updated driver had been downloaded from the Acer website and installed, “Base System Device” disappeared from Device Manager. The same kind of fix has been reported on Dell laptops and on a Sony Vaio VGN-CS26G, where an installer from a different model number worked perfectly. As a rule, though, use only the drivers for your exact laptop model and version of Windows.
To install a driver manually (for instance when software such as VCDS fails to install it automatically): power off the device and disconnect it from your computer, double-click the driver download to extract it, follow the directions on the installation program screen (selecting your language if a language option is given), reboot your computer, then reconnect the device and power it on. If a recent change broke something, System Restore in Windows 10, 8/8.1, 7, Vista or XP will 'undo' changes to important areas of the operating system.
For wireless adapters, 802.11n, an addition to the 802.11 family of standards, may require an updated WLAN driver to add support for systems like Windows 10, 8.1, Vista and XP. Driver update utilities such as DriverFinder, DriverMax and Driver Whiz automate the search for current drivers, saving hours of looking for out-of-date downloads. Driver Magician offers device driver backup, restoration, update and removal: it identifies all the hardware in the system, extracts the associated drivers from the hard disk and backs them up to a location of your choice. And if Windows 10 won't remove your Bluetooth devices, there's no need to panic; uninstalling hidden Bluetooth devices in Device Manager is one of a few known fixes.
Some background: Windows XP is a personal computer operating system produced by Microsoft as part of the Windows NT family. Development began in the late 1990s as "Neptune", an operating system built on the Windows NT kernel; it was released to manufacturing on August 24, 2001 and broadly released for retail sale on October 25, 2001. The Windows Driver Model (WDM), also known at one point as the Win32 Driver Model, is a framework for device drivers that was introduced with Windows 98 to replace VxD, which was used on older versions of Windows such as Windows 95 and Windows 3.1, as well as the Windows NT Driver Model. Finally, note that libusb-win32 is now in bug-fix-only maintenance mode; new projects should migrate to the libusb-1.0 API and use libusb for Windows. For sharing hardware, USB-over-Ethernet software such as USB Network Gate enables work with remote USB devices over Ethernet (Internet/LAN/WAN) as if they were plugged into your own machine.
# -*- coding: utf-8 -*-

# Copyright (c) 2012 - 2015 Detlev Offenbach <detlev@die-offenbachs.de>

"""
Module implementing the IRC configuration page.
"""

from __future__ import unicode_literals

from .ConfigurationPageBase import ConfigurationPageBase
from .Ui_IrcPage import Ui_IrcPage

import Preferences


class IrcPage(ConfigurationPageBase, Ui_IrcPage):
    """
    Class implementing the IRC configuration page.
    """
    TimeFormats = ["hh:mm", "hh:mm:ss", "h:mm ap", "h:mm:ss ap"]
    DateFormats = ["yyyy-MM-dd", "dd.MM.yyyy", "MM/dd/yyyy",
                   "yyyy MMM. dd", "dd MMM. yyyy", "MMM. dd, yyyy"]

    def __init__(self):
        """
        Constructor
        """
        super(IrcPage, self).__init__()
        self.setupUi(self)
        self.setObjectName("IrcPage")

        self.timeFormatCombo.addItems(IrcPage.TimeFormats)
        self.dateFormatCombo.addItems(IrcPage.DateFormats)

        # set initial values

        # timestamps
        self.timestampGroup.setChecked(Preferences.getIrc("ShowTimestamps"))
        self.showDateCheckBox.setChecked(
            Preferences.getIrc("TimestampIncludeDate"))
        self.timeFormatCombo.setCurrentIndex(
            self.timeFormatCombo.findText(Preferences.getIrc("TimeFormat")))
        self.dateFormatCombo.setCurrentIndex(
            self.dateFormatCombo.findText(Preferences.getIrc("DateFormat")))

        # colours
        self.initColour("NetworkMessageColour", self.networkButton,
                        Preferences.getIrc, byName=True)
        self.initColour("ServerMessageColour", self.serverButton,
                        Preferences.getIrc, byName=True)
        self.initColour("ErrorMessageColour", self.errorButton,
                        Preferences.getIrc, byName=True)
        self.initColour("TimestampColour", self.timestampButton,
                        Preferences.getIrc, byName=True)
        self.initColour("HyperlinkColour", self.hyperlinkButton,
                        Preferences.getIrc, byName=True)
        self.initColour("ChannelMessageColour", self.channelButton,
                        Preferences.getIrc, byName=True)
        self.initColour("OwnNickColour", self.ownNickButton,
                        Preferences.getIrc, byName=True)
        self.initColour("NickColour", self.nickButton,
                        Preferences.getIrc, byName=True)
        self.initColour("JoinChannelColour", self.joinButton,
                        Preferences.getIrc, byName=True)
        self.initColour("LeaveChannelColour", self.leaveButton,
                        Preferences.getIrc, byName=True)
        self.initColour("ChannelInfoColour", self.infoButton,
                        Preferences.getIrc, byName=True)

        # notifications
        self.notificationsGroup.setChecked(
            Preferences.getIrc("ShowNotifications"))
        self.joinLeaveCheckBox.setChecked(Preferences.getIrc("NotifyJoinPart"))
        self.messageCheckBox.setChecked(Preferences.getIrc("NotifyMessage"))
        self.ownNickCheckBox.setChecked(Preferences.getIrc("NotifyNick"))

        # IRC text colors (IrcColor0 ... IrcColor15)
        for i in range(16):
            self.initColour(
                "IrcColor{0}".format(i),
                getattr(self, "ircColor{0}Button".format(i)),
                Preferences.getIrc, byName=True)

        # Automatic User Information Lookup
        self.whoGroup.setChecked(Preferences.getIrc("AutoUserInfoLookup"))
        self.whoUsersSpinBox.setValue(Preferences.getIrc("AutoUserInfoMax"))
        self.whoIntervalSpinBox.setValue(
            Preferences.getIrc("AutoUserInfoInterval"))

        # Markers
        self.markWhenHiddenCheckBox.setChecked(
            Preferences.getIrc("MarkPositionWhenHidden"))
        self.initColour("MarkerLineForegroundColour",
                        self.markerForegroundButton,
                        Preferences.getIrc, byName=True)
        self.initColour("MarkerLineBackgroundColour",
                        self.markerBackgroundButton,
                        Preferences.getIrc, byName=True)

        # Shutdown
        self.confirmShutdownCheckBox.setChecked(
            Preferences.getIrc("AskOnShutdown"))

    def save(self):
        """
        Public slot to save the IRC configuration.
        """
        # timestamps
        Preferences.setIrc("ShowTimestamps", self.timestampGroup.isChecked())
        Preferences.setIrc(
            "TimestampIncludeDate", self.showDateCheckBox.isChecked())
        Preferences.setIrc("TimeFormat", self.timeFormatCombo.currentText())
        Preferences.setIrc("DateFormat", self.dateFormatCombo.currentText())

        # notifications
        Preferences.setIrc(
            "ShowNotifications", self.notificationsGroup.isChecked())
        Preferences.setIrc(
            "NotifyJoinPart", self.joinLeaveCheckBox.isChecked())
        Preferences.setIrc("NotifyMessage", self.messageCheckBox.isChecked())
        Preferences.setIrc("NotifyNick", self.ownNickCheckBox.isChecked())

        # Automatic User Information Lookup
        Preferences.setIrc("AutoUserInfoLookup", self.whoGroup.isChecked())
        Preferences.setIrc("AutoUserInfoMax", self.whoUsersSpinBox.value())
        Preferences.setIrc(
            "AutoUserInfoInterval", self.whoIntervalSpinBox.value())

        # Markers
        Preferences.setIrc(
            "MarkPositionWhenHidden", self.markWhenHiddenCheckBox.isChecked())

        # Shutdown
        Preferences.setIrc(
            "AskOnShutdown", self.confirmShutdownCheckBox.isChecked())

        # colours
        self.saveColours(Preferences.setIrc)


def create(dlg):
    """
    Module function to create the configuration page.

    @param dlg reference to the configuration dialog
    @return reference to the instantiated page (ConfigurationPageBase)
    """
    page = IrcPage()
    return page
Right now, somewhere in the world, there is a university ‘Introduction to Philosophy’ course where the professor, thinking themselves quite clever, is posing the following ethical dilemma: If you could go back in time, would you kill Hitler before he ever came to power? This question has become almost a stock component of philosophical speculation, and of course, it has its own interesting history. The ‘Kill Baby Hitler Conundrum’ was actually first articulated in a science-fiction short story published in 1941. Yes, 1941. Before anyone knew the full extent of the evil, Hitler’s wibbly-wobbly, time-travel-induced death was already a matter for speculation. The story, aptly, is called ‘I Killed Hitler,’ and it appeared in that year’s Weird Tales. It was written by a man named Roger Sherman Hoar, and it takes a somewhat unexpected turn. In the story, an American painter called up for the draft goes back in time to kill a young Hitler at the age of eleven. Time travel being what it is, things don’t work out quite as expected. By the end of the story, Hitler’s assassin has himself, through a series of twists and turns, assumed the dictator’s place. The story, it seems, is a cautionary tale against seeing the past as a realm through which we can rectify the flaws of the present. Unfortunately, time travel as envisioned by much of science fiction is an impossibility. We do know that time can be bent, altered, and in certain conditions can appear very different than we are used to. Yet one constant remains: we cannot, under any condition, change the past. Our fascination with time travel may seem far from where we last left Moses, mid-speech. Yet the words that begin this week’s parashah have a great deal in common with the Oedipal twist of 1941’s ‘I Killed Hitler.’ The first verse, Deuteronomy 11:26, reads: “Look! Today I am putting in front of you blessing and curse.” From today, we have before us a blessing and a curse. Now of course, as Moses goes on to explain, the promised blessing is secured through observing God’s laws, and the curse comes as a result of shunning them. In this way, what Moses is telling us is that we stand at a fork in the road. We can go one way, towards God and Torah and the Jewish community, and find blessing in it. We can go another way, and find curses in it. Now, is it really fair to say that everything in life is either a blessing or a curse? Is there no middle ground? We make choices every day; some are small and seemingly insignificant and some are incredibly important. Does each choice yield either blessing or curse, with no mix in between? Every year when I come across this verse it seems like a very oversimplified duality. Blessing or curse. Holy or profane. But perhaps there’s more to it. Seforno paraphrases the verse this way: Re’eh - Behold - pay good attention, so that you will not behave like most others, who relate to everything half-heartedly, always trying to find middle ground. Rather, I set before you this day a blessing and a curse, two opposite extremes. The blessing is an extreme in that it provides you with more than you need, whereas the curse is another extreme, making sure that you have less than your basic needs. You have the choice of both before you; all you have to do is make a choice. Seforno suggests that the extremes presented to us are a call to never indulge in mediocrity. We must not only be good, we must be extremely good. Perhaps this is a lot to ask; but, after all, doesn’t the Torah often ask quite a bit of us?
So this verse which opens our parashah seems to have two lessons for us. The first, I think, is this: the choices before us are before us, meaning, in front of us. In Hebrew the word for before/in front of is lifné, a contraction of el (to) and panim (face). Thus to say that the choice between blessing and curse is before us means that it is one which we will face. From this moment, to the next, to tomorrow, to next week, to next year, and perhaps to our next lifetime - we will be faced with choice after choice, drawing us toward either blessing or curse. The second lesson we can learn from this is that to live a life of blessing requires commitment. We cannot be half-blessed or half-cursed. Although we live in a world in which we often aim to see nuance and subtlety, some things may actually be binaries. Perhaps we must choose one: blessing or curse. Seforno reminded us that we have to pay attention, we have to be careful, we have to look at the path in front of us in order to know which it is. That struggle, to discern which path will lead us toward blessing, although difficult, is far better than always trying to find a middle ground. It is harder and potentially more hazardous, but, as we are promised, it may be far more rewarding as well. The Midrash [Sifré 53:1:2 / Tanhuma Re’eh 3] makes what I think is a beautiful analogy to help us understand this choice with which we are faced. Imagine the following scene with me: you are standing on a woodland path, walking forward. It is the rare British summer day where everything is peaceful and warm and sunny. As you walk, the path you are on reaches a fork. First, look to the left: there you see the path unencumbered, cleared of all debris. A bit beyond where you can see, it bends. Now look at the fork on the right: there you can see that the path has lots of thorn-bushes, some of which have overgrown the banks and intrude on the path. It looks like it would be much slower going, and potentially hazardous with all the debris blocking the way. Yet it, too, curves away from you beyond your sight. You have to make a choice based on what you can see. What do you do? The midrash goes on to tell us that these two paths are just like the blessing and the curse we read about. However, the critical information is the very part that is absent. It explains that our left-hand path, the one that begins totally clear, actually turns that corner into very dangerous terrain. Beyond the bend, the brambles and thorns that you cannot see are far worse than what’s visible on the right-hand path. Meanwhile, that right-hand path, which appears hazardous and difficult, curves around the bend to a beautiful and clear ramble through some lovely countryside. Standing at the crossroads, at the fork, as we were, we cannot know where the path will go; we can only know what is in front of us. Yet what the Torah is asking of us is precisely that: to see beyond what appears and to choose the path of blessing. The path that leads towards blessing may be more difficult - it may involve commitments and restrictions and limitations that feel like thorns, but just around the bend it leads to somewhere beautiful. Meanwhile, the path which seems easy, which asks little of us, may lead us into danger and oblivion. Our Sages tell us in the Talmud [Bavli Tamid 32a] that one who is wise is ‘one who can see the future.’ They don’t mean clairvoyance, of course! They mean that wisdom is about being able to see beyond what is just in front of us.
Wisdom is being able to look at the forked paths with which we are faced and know that the one that begins in brambles may end in a serene and clear journey. The first word of our parashah tells us to re’eh; it is a command - “Look!” “See!” “Behold!” Look - you have before you a choice, between blessing and curse. Between the easy road and the hard road. The only choice we can make is to move forward, on one or the other. It is not just the only choice we can make, it is the only choice there is to make. We cannot go backwards. We cannot retreat into the forest from which we came or go back in time and remove a great evil from the world. We cannot, as the protagonist of ‘I Killed Hitler’ discovers, alter the path or the choices we have already made. All we can do is move forward, fork after fork, looking at what is before us and choosing: blessing or curse, blessing or curse, blessing or curse. Look. Pay attention to your life. Every moment in it is profoundly mixed. Every moment contains a blessing and a curse. Everything depends on our seeing our lives with clear eyes, seeing the potential blessing in each moment as well as the potential curse, choosing the former, forswearing the latter … We learn a number of things from this. We learn that this business of choosing good over evil, life over death, is precisely a matter of life and death. Our lives quite literally depend on it.
import sublime, sublime_plugin, re, threading

miva_error_status_key = 'miva_pos_calculator_error'

class MvtPosCalculatorCommand( sublime_plugin.TextCommand ):
    def run( self, edit ):
        selections = self.view.sel()

        for selection in selections:
            # get region from start of file to first point in selection
            search_region = sublime.Region( 0, selection.a )

            # get actual text of region
            search_text = self.view.substr( search_region )

            # check what language you are in
            is_mvt = self.view.match_selector( selection.a, 'text.mvt' )
            is_mv = self.view.match_selector( selection.a, 'text.mv' )

            # find all matches of "open" tags
            if ( is_mvt ):
                open_tags = re.findall( r'(?i)(<)(mvt:)(foreach)\b', search_text )
                close_tags = re.findall( r'(?i)(<\/)(mvt:)(foreach)\b', search_text )
            elif ( is_mv ):
                open_tags = re.findall( r'(?i)(<Mv)(FOR|FOREACH)\b', search_text )
                close_tags = re.findall( r'(?i)(<\/Mv)(FOR|FOREACH)\b', search_text )
            else:
                continue

            # calculate the difference between the number of open and closed tags
            open_close_difference = len( open_tags ) - len( close_tags )

            # check the difference
            if ( open_close_difference >= 1 ):
                # generate output string
                output = 'l.pos' + str( open_close_difference )

                # Replace the Variable selection with the generated l.posX
                self.view.replace( edit, selection, output )
            else:
                # Output error message
                self.view.set_status( miva_error_status_key, 'No valid <mvt:foreach> tags detected' )
                threading.Timer( 3, self.view.erase_status, args=[miva_error_status_key] ).start()
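Sublime Text derives a command's name from its class by snake-casing and dropping the Command suffix, so the class above is exposed as mvt_pos_calculator. A quick way to try it, assuming the plugin file has been saved under Packages/User so Sublime has loaded it, is from the built-in console (View > Show Console):

# Run the command against the active view from Sublime's Python console.
view.run_command('mvt_pos_calculator')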
What Makes A Merchant Cash Advance A Trustworthy Funding Option?

At Synergy Merchants, we understand that many Canadian business owners may be hesitant to enrol in a merchant cash advance program. We're aware that the hesitancy simply comes from a lack of understanding about how merchant cash advances work. Business owners are used to bank loans - and they are just as used to being turned down by their banks when they apply for them. This is why we make the process of securing a merchant cash advance so easy for Canadian business owners. Because of how easy it is, however, it often carries a "too good to be true" stigma. This is why our team of licensed funding specialists works so hard to put our clients' minds at ease. It's important that they trust us. And it's also important that they trust the process of receiving and paying back a merchant cash advance.

Why should you trust Synergy Merchants for your business funding options? What we have to offer is a product of great quality. Not only will you be able to secure money for your business much more quickly and easily than if you were to apply to your bank for a business loan, but you'll find that our program is much better for your business in the long run.

Firstly, we will not put you through the wringer the way your bank would. You are not required to put up any collateral. We will not do a credit check, and you won't even necessarily have to divulge what you need the money for. We advance you cash based on your monthly credit card and debit card sales. Reviewing your monthly sales is all we truly need to do in order to approve your business for the money.

Secondly, you will not have to worry about an accruing interest rate or monthly minimum payments. Instead of interest, you are charged a one-time fee that is collected, along with the rest of your cash advance, through a small percentage of your future credit card and debit card sales. There is no such thing as being late on a payment, since it's your sales that determine how much is paid back over time.

Why should you trust Synergy Merchants over a "big name" company? Big name businesses aren't necessarily better businesses. Take the food industry, for example. Some of the most popular restaurants in the world serve the food that is worst for your health. You know the ones. You can buy meals from them at practically any time of day or night without leaving the comfort of your car. And while drive-thru-equipped fast food restaurants continue to dominate their marketplace, there are certainly far better places to enjoy a meal. You get the point. Brand names are often trusted in spite of the fact that what they offer may be of subpar quality. And when it comes to financing your business for the purpose of its growth, things aren't much different. The well-known banks are bound to put you through a long and drawn-out process before you know whether or not you'll be approved for your business loan. And, as you may have already experienced, many business owners have their applications declined.
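As a rough illustration of the repayment mechanics described above - a one-time fee in place of accruing interest, collected as a small percentage of future card sales - here is a sketch in Python. Every figure in it (the advance, the fee, the holdback rate, the sample sales) is hypothetical and not drawn from Synergy Merchants' actual terms:

# Illustrative merchant cash advance repayment model; all numbers hypothetical.
advance = 20000.00        # cash advanced to the merchant
factor_fee = 0.15         # one-time fee, here 15% of the advance
holdback_rate = 0.10      # share of each day's card sales remitted

total_owed = advance * (1 + factor_fee)

daily_card_sales = [1800.00, 2400.00, 950.00, 3100.00]  # sample days
remitted = 0.0
for day, sales in enumerate(daily_card_sales, start=1):
    payment = sales * holdback_rate
    remitted += payment
    remaining = max(total_owed - remitted, 0)
    print("Day %d: sales=%.2f remit=%.2f remaining=%.2f" % (day, sales, payment, remaining))
# A slow sales day simply remits less; there is no fixed monthly minimum.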
import os
from setuptools import setup

with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
    README = readme.read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name='nirvaris-profile',
    version='3.0',
    packages=['n_profile'],
    include_package_data=True,
    license='MIT License',  # example license
    description='A simple Django app using django auth with custom UI',
    long_description=README,
    url='https://github.com/nirvaris/nirvaris-profile',
    author='Nirvaris',
    author_email='contact@nirvaris.com',
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',  # example license
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        # Replace these appropriately if you are stuck on Python 2.
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
)
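Since the distribution ships the Django app n_profile, wiring it into a project presumably follows the standard Django pattern. A minimal sketch of the relevant settings, assuming the app needs nothing beyond installation alongside django.contrib.auth:

# settings.py - minimal sketch; 'n_profile' is the package name from setup() above.
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'n_profile',
]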
As you know, the BCD Initiative advocates for a safe, inclusive, and community-oriented transformation in the Bend Central District. We aren't just here to talk about the community vision - we want to show you how public investment has catalyzed real changes in other cities around Oregon. Join us on Thursday, March 21st at Deschutes Brewery Taproom for a panel discussion on the basics of Urban Renewal Areas and Tax Increment Financing, and hear from practitioners in the field about successful projects across Oregon.

The landowner/developer has revised the proposed site plan from what was presented last spring. Now the Platypus Pub building would be demolished, and the proposal is to build one 7,000 sq. ft. drive-thru building and a parking lot on the corner of Third Street and NE Lafayette. You can learn more about the proposal by clicking here, following the link for "Public Viewer for Planning Permits," and searching for PZ-18-0233. Since many people were unable to make it to the informational meeting held by the developer due to heavy snowfall and closures, we suggest writing in to ask the city to elevate the application to a Type II Decision so that there will be a Public Hearing where the city and developer can consider more public input. Send comments to kswenson@bendoregon.gov.

The Bend Urban Renewal Agency (BURA) Advisory Board will meet monthly over the next two years while a new Urban Renewal Feasibility Study is completed. The Advisory Board will provide recommendations to BURA relating to the potential use of urban renewal for the central area of Bend to accomplish the goals and policies outlined in the Bend Comprehensive Plan.

Give Input on Transit Planning! Cascades East Transit (CET) is developing a Regional Transit Master Plan and needs your input to prioritize near-term public transportation projects. Light refreshments and activities for children will be provided. Stay up-to-date on the planning process by visiting CETTransitPlan.com. To request information in an alternative format or to have an interpreter on site during the open house events, please call Rachel Zakem at 541-504-3310 or send an email to rzakem@coic.org.
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'Document.popular_count'
        db.add_column(u'documents_document', 'popular_count',
                      self.gf('django.db.models.fields.IntegerField')(default=0),
                      keep_default=False)

        # Adding field 'Document.share_count'
        db.add_column(u'documents_document', 'share_count',
                      self.gf('django.db.models.fields.IntegerField')(default=0),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Document.popular_count'
        db.delete_column(u'documents_document', 'popular_count')

        # Deleting field 'Document.share_count'
        db.delete_column(u'documents_document', 'share_count')

    models = {
        u'actstream.action': {
            'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
            'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
            'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': u"orm['contenttypes.ContentType']"}),
            'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
            'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 6, 21, 8, 48, 27, 489164)'}),
            'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 6, 21, 8, 48, 27, 495334)'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 6, 21, 8, 48, 27, 494806)'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'base.contactrole': {
            'Meta': {'unique_together': "(('contact', 'resource', 'role'),)", 'object_name': 'ContactRole'},
            'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['people.Profile']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.ResourceBase']"}),
            'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['people.Role']"})
        },
        u'base.resourcebase': {
            'Meta': {'object_name': 'ResourceBase'},
            'abstract': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'bbox_x0': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
            'bbox_x1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
            'bbox_y0': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
            'bbox_y1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.TopicCategory']", 'null': 'True', 'blank': 'True'}),
            'constraints_other': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'constraints_use': ('django.db.models.fields.CharField', [], {'default': "'copyright'", 'max_length': '255'}),
            'contacts': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['people.Profile']", 'through': u"orm['base.ContactRole']", 'symmetrical': 'False'}),
            'csw_anytext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'csw_insert_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'csw_mdsource': ('django.db.models.fields.CharField', [], {'default': "'local'", 'max_length': '256'}),
            'csw_schema': ('django.db.models.fields.CharField', [], {'default': "'http://www.isotc211.org/2005/gmd'", 'max_length': '64'}),
            'csw_type': ('django.db.models.fields.CharField', [], {'default': "'dataset'", 'max_length': '32'}),
            'csw_typename': ('django.db.models.fields.CharField', [], {'default': "'gmd:MD_Metadata'", 'max_length': '32'}),
            'csw_wkt_geometry': ('django.db.models.fields.TextField', [], {'default': "'POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))'"}),
            'data_quality_statement': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'date_type': ('django.db.models.fields.CharField', [], {'default': "'publication'", 'max_length': '255'}),
            'distribution_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'distribution_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'edition': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'keywords_region': ('django.db.models.fields.CharField', [], {'default': "'USA'", 'max_length': '3'}),
            'language': ('django.db.models.fields.CharField', [], {'default': "'eng'", 'max_length': '3'}),
            'maintenance_frequency': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'metadata_uploaded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'metadata_xml': ('django.db.models.fields.TextField', [], {'default': '\'<gmd:MD_Metadata xmlns:gmd="http://www.isotc211.org/2005/gmd"/>\'', 'null': 'True', 'blank': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'purpose': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'spatial_representation_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'srid': ('django.db.models.fields.CharField', [], {'default': "'EPSG:4326'", 'max_length': '255'}),
            'supplemental_information': ('django.db.models.fields.TextField', [], {'default': "u'No information provided'"}),
            'temporal_extent_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'temporal_extent_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'thumbnail': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.Thumbnail']", 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'})
        },
        u'base.thumbnail': {
            'Meta': {'object_name': 'Thumbnail'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'thumb_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'thumb_spec': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'version': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True'})
        },
        u'base.topiccategory': {
            'Meta': {'ordering': "('name',)", 'object_name': 'TopicCategory'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'documents_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layers_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'maps_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'documents.document': {
            'Meta': {'object_name': 'Document', '_ormbases': [u'base.ResourceBase']},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
            'doc_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'extension': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'popular_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'resourcebase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['base.ResourceBase']", 'unique': 'True', 'primary_key': 'True'}),
            'share_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'people.profile': {
            'Meta': {'object_name': 'Profile'},
            'area': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
            'delivery': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'fax': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'position': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'profile': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'profile'", 'unique': 'True', 'null': 'True', 'to': u"orm['auth.User']"}),
            'voice': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        },
        u'people.role': {
            'Meta': {'object_name': 'Role'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'value': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
        },
        u'taggit.tag': {
            'Meta': {'object_name': 'Tag'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
        },
        u'taggit.taggeditem': {
            'Meta': {'object_name': 'TaggedItem'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
        }
    }

    complete_apps = ['documents']
Secretary of State for the Environment, Food and Rural Affairs, Michael Gove, has appointed The Rt Hon the Lord Blencathra as Deputy Chair of Natural England (NE) until the end of his term as an NE Board Member on 11 March 2021. This follows the announcement of his appointment to the NE Board in March 2018.

Non-executive board members of Natural England have collective responsibility for the strategic direction and overall performance of the organisation. They make sure that Natural England is properly and effectively managed and provide stewardship for the public funds entrusted to the organisation.

Lord Blencathra will continue to receive remuneration at the rate of £328 per day, although there will be an increase to his current time commitment of 36 days per annum.

Lord Blencathra is a Member of the House of Lords, where he chairs a Select Committee, and has been appointed to the Council of Europe. He was formerly Minister of State at the Home Office and a Minister at the Department of the Environment in the 1990s. At the Earth Summit in Rio in 1992 he led for the UK and launched the Darwin Initiative. He formerly represented England's largest rural constituency as the Member of Parliament for Penrith and The Border. He joined the Natural England Board in March 2018. Lord Blencathra has multiple sclerosis and uses a wheelchair.
from ...base_types import UMOGOutputNode
import bpy
import numpy as np
import math
from mathutils import Vector


class DissolveLimitedNode(bpy.types.Node, UMOGOutputNode):
    bl_idname = "umog_DissolveLimitedNode"
    bl_label = "Dissolve Limited"
    assignedType = "Object"

    delimitOptions = bpy.props.EnumProperty(
        items=(
            ('NORMAL', 'Normal', 'Delimit by face directions.'),
            # 'MATERIAL' had a stray trailing space in the identifier, which
            # could never match Blender's delimit enum; fixed here.
            ('MATERIAL', 'Material', 'Delimit by face material.'),
            ('SEAM', 'Seam', 'Delimit by edge seams.'),
            ('SHARP', 'Sharp', 'Delimit by sharp edges.'),
            ('UV', 'UV', 'Delimit by UV coordinates.')),
        name="Delimit Operation",
        default={"NORMAL"},
        options={"ENUM_FLAG"})

    def draw(self, layout):
        layout.prop(self, "delimitOptions", text="Delimit Operation")

    def create(self):
        self.newInput(self.assignedType, "Object")
        self.newInput("VertexGroup", "Vertex Group")
        self.newInput("Float", "Angle Limit", value=0.001, minValue=0.0, maxValue=180)
        self.newInput("Boolean", "All Boundaries")

        socket = self.newOutput(self.assignedType, "Output")
        socket.display.refreshableIcon = False
        socket.display.packedIcon = False

        socket = self.newOutput("VertexGroup", "Vertex Group")
        socket.display.refreshableIcon = False
        socket.display.packedIcon = False

        self.width = 200

    def refresh(self):
        if self.inputs[0].value == '':
            self.inputs[1].value = ''
            self.inputs[1].object = ''
        else:
            self.inputs[1].object = self.inputs[0].value

        self.outputs[0].value = self.inputs[0].value
        self.outputs[0].refresh()
        self.outputs[1].value = self.inputs[1].value
        self.outputs[1].refresh()

    def execute(self, refholder):
        # Select either the whole object or just the vertex group, then
        # run the limited-dissolve operator in edit mode.
        if self.inputs[1].value == '':
            self.inputs[0].setSelected()
            overrideContext = self.inputs[0].setViewEditMode(selectAll='SELECT')
        else:
            self.inputs[1].setSelected()
            overrideContext = self.inputs[1].select()

        angleLimit = math.radians(self.inputs[2].value)
        boundaries = self.inputs[3].value
        bpy.ops.mesh.dissolve_limited(angle_limit=angleLimit,
                                      use_dissolve_boundaries=boundaries,
                                      delimit=self.delimitOptions)
        self.inputs[0].setViewObjectMode()

    def write_keyframe(self, refholder, frame):
        pass

    def preExecute(self, refholder):
        pass

    def postBake(self, refholder):
        pass
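UMOG presumably registers its node classes through its own add-on machinery, but outside that framework a node class like this one would be registered with the standard bpy calls. A bare-bones sketch, not UMOG's actual registration code:

# Minimal sketch of standard Blender class registration; UMOG's real
# add-on machinery may handle this differently.
def register():
    bpy.utils.register_class(DissolveLimitedNode)

def unregister():
    bpy.utils.unregister_class(DissolveLimitedNode)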
# -*-coding:UTF-8 -*
from events.event import Event
from missions.mission import Mission


class PositioningMission(Mission):
    def __init__(self, robot, can, ui):
        # explicit class name avoids the super(self.__class__) recursion pitfall
        super(PositioningMission, self).__init__(robot, can, ui)
        self.state = -1

    def start(self):
        if self.state == -1:
            self.create_timer(500)
            self.missions["threshold"].activate(1, False)
            self.missions["threshold"].activate(2, False)
            self.missions["threshold"].activate(8, False)
            self.state += 1

    def process_event(self, e):
        # Simple state machine: each state waits for one event, then advances.
        if self.state == 0:
            if e.name == "timer":
                self.state = 2
                self.missions["speed"].start(-20)
        elif self.state == 2:
            if e.name == "bump" and e.state == "close":
                self.state += 1
                self.create_timer(700)
            elif e.name == "bump" and e.pos == "alim" \
                    and e.state == "close":
                self.missions["speed"].start(-20)
        elif self.state == 3:
            if e.name == "timer":
                self.state += 1
                self.missions["speed"].stop(self)
        elif self.state == 4:
            if e.name == "speed" and e.type == "done":
                self.state += 1
                self.missions["forward"].start(self, 1800)
        elif self.state == 5:
            if e.name == "forward" and e.type == "done":
                self.state += 1
                self.missions["rotate"].start(self, 9000)
        elif self.state == 6:
            if e.name == "rotate" and e.type == "done":
                self.state += 1
                self.missions["speed"].start(-20)
        elif self.state == 7:
            if e.name == "bump" and e.state == "close":
                self.state += 0.5
                self.create_timer(700)
        elif self.state == 7.5:
            if e.name == "timer":
                self.state += 0.5
                self.missions["speed"].stop(self)
        elif self.state == 8:
            if e.name == "speed" and e.type == "done":
                self.state += 2
                self.missions["forward"].start(self, 6000)
        elif self.state == 10:
            if e.name == "forward" and e.type == "done":
                self.state += 1
                self.logger.info("Petit waiting for Gros to get into position")
        elif self.state == 11:
            if (e.name == "robot" and e.type == "ready") \
                    or (e.name == "bump" and e.state == "close"):
                self.state += 1
                self.missions["forward"].start(self, -3100)
        elif self.state == 12:
            if e.name == "forward" and e.type == "done":
                self.state = 0
                self.missions["threshold"].activate(1, True)
                self.missions["threshold"].activate(2, True)
                self.missions["threshold"].activate(8, True)
                self.logger.info("Petit in position!")
                self.send_event(Event("positioning", "done"))
"Romantic bedroom decoration ideas for newly married couples best" is one of the many collections of pictures on this website. It is posted in the category Bedroom Decorating Ideas on awanshop.co, was published on 01-08-2018 by admin, and has been viewed 154,164 times. We hope you can find what you need here. We always try to show each picture in HD resolution, or at least as a clear image. It can be useful inspiration for those who seek images in specific categories, which you can find on this site. We hope all the pictures displayed here inspire you. Thank you for visiting. Image source: bestdecorhub.com. Related galleries in the categories Bed Decoration Ideas, Great Bedroom Ideas, Ideas For The Bedroom, Interior Room Decoration, and The Best Bedroom Design draw on images from bestdecorhub.com, orlandomallguide.com, stayholdinthai.com, and irwinrealestatepros.com.
## @file
# This file is used to be the main entrance of EOT tool
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#

##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os, time, glob
import Common.EdkLogger as EdkLogger
import Eot.EotGlobalData as EotGlobalData
from optparse import OptionParser
from Common.StringUtils import NormPath
from Common import BuildToolError
from Common.Misc import GuidStructureStringToGuidString
from collections import OrderedDict as sdict
from Eot.Parser import *
from Eot.InfParserLite import EdkInfParser
from Common.StringUtils import GetSplitValueList
from Eot import c
from Eot import Database
from array import array
from Eot.Report import Report
from Common.BuildVersion import gBUILD_VERSION
from Eot.Parser import ConvertGuid
from Common.LongFilePathSupport import OpenLongFilePath as open
import struct
import uuid
import copy
import codecs
from GenFds.AprioriSection import DXE_APRIORI_GUID, PEI_APRIORI_GUID

gGuidStringFormat = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X"
gIndention = -4

class Image(array):
    _HEADER_ = struct.Struct("")
    _HEADER_SIZE_ = _HEADER_.size

    def __new__(cls, *args, **kwargs):
        return array.__new__(cls, 'B')

    def __init__(self, ID=None):
        if ID is None:
            self._ID_ = str(uuid.uuid1()).upper()
        else:
            self._ID_ = ID
        self._BUF_ = None
        self._LEN_ = None
        self._OFF_ = None
        self._SubImages = sdict()  # {offset: Image()}
        array.__init__(self)

    def __repr__(self):
        return self._ID_

    def __len__(self):
        Len = array.__len__(self)
        for Offset in self._SubImages.keys():
            Len += len(self._SubImages[Offset])
        return Len

    def _Unpack(self):
        self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
        return len(self)

    def _Pack(self, PadByte=0xFF):
        raise NotImplementedError

    def frombuffer(self, Buffer, Offset=0, Size=None):
        self._BUF_ = Buffer
        self._OFF_ = Offset
        # we may need the Size information in advance if it's given
        self._LEN_ = Size
        self._LEN_ = self._Unpack()

    def empty(self):
        del self[0:]

    def GetField(self, FieldStruct, Offset=0):
        return FieldStruct.unpack_from(self, Offset)

    def SetField(self, FieldStruct, Offset, *args):
        # check if there's enough space
        Size = FieldStruct.size
        if Size > len(self):
            self.extend([0] * (Size - len(self)))
        FieldStruct.pack_into(self, Offset, *args)

    def _SetData(self, Data):
        if len(self) < self._HEADER_SIZE_:
            self.extend([0] * (self._HEADER_SIZE_ - len(self)))
        else:
            del self[self._HEADER_SIZE_:]
        self.extend(Data)

    def _GetData(self):
        if len(self) > self._HEADER_SIZE_:
            return self[self._HEADER_SIZE_:]
        return None

    Data = property(_GetData, _SetData)

## CompressedImage() class
#
# A class for Compressed Image
#
class CompressedImage(Image):
    # UncompressedLength = 4-byte
    # CompressionType = 1-byte
    _HEADER_ = struct.Struct("1I 1B")
    _HEADER_SIZE_ = _HEADER_.size

    _ORIG_SIZE_ = struct.Struct("1I")
    _CMPRS_TYPE_ = struct.Struct("4x 1B")

    def __init__(self, CompressedData=None, CompressionType=None, UncompressedLength=None):
        Image.__init__(self)
        if UncompressedLength is not None:
            self.UncompressedLength = UncompressedLength
        if CompressionType is not None:
            self.CompressionType = CompressionType
        if CompressedData is not None:
            self.Data = CompressedData

    def __str__(self):
        global gIndention
        S = "algorithm=%s uncompressed=%x" % (self.CompressionType, self.UncompressedLength)
        for Sec in self.Sections:
            S += '\n' + str(Sec)
        return S

    def _SetOriginalSize(self, Size):
        self.SetField(self._ORIG_SIZE_, 0, Size)

    def _GetOriginalSize(self):
        return self.GetField(self._ORIG_SIZE_)[0]

    def _SetCompressionType(self, Type):
        self.SetField(self._CMPRS_TYPE_, 0, Type)

    def _GetCompressionType(self):
        return self.GetField(self._CMPRS_TYPE_)[0]

    def _GetSections(self):
        try:
            TmpData = DeCompress('Efi', self[self._HEADER_SIZE_:])
            DecData = array('B')
            DecData.fromstring(TmpData)
        except:
            TmpData = DeCompress('Framework', self[self._HEADER_SIZE_:])
            DecData = array('B')
            DecData.fromstring(TmpData)

        SectionList = []
        Offset = 0
        while Offset < len(DecData):
            Sec = Section()
            try:
                Sec.frombuffer(DecData, Offset)
                Offset += Sec.Size
                # the section is aligned to 4-byte boundary
            except:
                break
            SectionList.append(Sec)
        return SectionList

    UncompressedLength = property(_GetOriginalSize, _SetOriginalSize)
    CompressionType = property(_GetCompressionType, _SetCompressionType)
    Sections = property(_GetSections)

## Ui() class
#
# A class for Ui
#
class Ui(Image):
    _HEADER_ = struct.Struct("")
    _HEADER_SIZE_ = 0

    def __init__(self):
        Image.__init__(self)

    def __str__(self):
        return self.String

    def _Unpack(self):
        # keep header in this Image object
        self.empty()
        self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
        return len(self)

    def _GetUiString(self):
        return codecs.utf_16_decode(self[0:-2].tostring())[0]

    String = property(_GetUiString)

## Depex() class
#
# A class for Depex
#
class Depex(Image):
    _HEADER_ = struct.Struct("")
    _HEADER_SIZE_ = 0

    _GUID_ = struct.Struct("1I2H8B")
    _OPCODE_ = struct.Struct("1B")

    _OPCODE_STRING_ = {
        0x00 : "BEFORE",
        0x01 : "AFTER",
        0x02 : "PUSH",
        0x03 : "AND",
        0x04 : "OR",
        0x05 : "NOT",
        0x06 : "TRUE",
        0x07 : "FALSE",
        0x08 : "END",
        0x09 : "SOR"
    }

    _NEXT_ = {
        -1   : _OPCODE_,    # first one in depex must be an opcode
        0x00 : _GUID_,      # "BEFORE"
        0x01 : _GUID_,      # "AFTER"
        0x02 : _GUID_,      # "PUSH"
        0x03 : _OPCODE_,    # "AND"
        0x04 : _OPCODE_,    # "OR"
        0x05 : _OPCODE_,    # "NOT"
        0x06 : _OPCODE_,    # "TRUE"
        0x07 : _OPCODE_,    # "FALSE"
        0x08 : None,        # "END"
        0x09 : _OPCODE_,    # "SOR"
    }

    def __init__(self):
        Image.__init__(self)
        self._ExprList = []

    def __str__(self):
        global gIndention
        gIndention += 4
        Indention = ' ' * gIndention
        S = '\n'
        for T in self.Expression:
            if T in self._OPCODE_STRING_:
                S += Indention + self._OPCODE_STRING_[T]
                if T not in [0x00, 0x01, 0x02]:
                    S += '\n'
            else:
                S += ' ' + gGuidStringFormat % T + '\n'
        gIndention -= 4
        return S

    def _Unpack(self):
        # keep header in this Image object
        self.empty()
        self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
        return len(self)

    def _GetExpression(self):
        if self._ExprList == []:
            Offset = 0
            CurrentData = self._OPCODE_
            while Offset < len(self):
                Token = CurrentData.unpack_from(self, Offset)
                Offset += CurrentData.size
                if len(Token) == 1:
                    Token = Token[0]
                    if Token in self._NEXT_:
                        CurrentData = self._NEXT_[Token]
                    else:
                        CurrentData = self._GUID_
                else:
                    CurrentData = self._OPCODE_
                self._ExprList.append(Token)
                if CurrentData is None:
                    break
        return self._ExprList

    Expression = property(_GetExpression)

## FirmwareVolume() class
#
# A class for Firmware Volume
#
class FirmwareVolume(Image):
    # Read FvLength, Attributes, HeaderLength, Checksum
    _HEADER_ = struct.Struct("16x 1I2H8B 1Q 4x 1I 1H 1H")
    _HEADER_SIZE_ = _HEADER_.size

    _FfsGuid = "8C8CE578-8A3D-4F1C-9935-896185C32DD3"

    _GUID_ = struct.Struct("16x 1I2H8B")
    _LENGTH_ = struct.Struct("16x 16x 1Q")
    _SIG_ = struct.Struct("16x 16x 8x 1I")
    _ATTR_ = struct.Struct("16x 16x 8x 4x 1I")
    _HLEN_ = struct.Struct("16x 16x 8x 4x 4x 1H")
    _CHECKSUM_ = struct.Struct("16x 16x 8x 4x 4x 2x 1H")

    def __init__(self, Name=''):
        Image.__init__(self)
        self.Name = Name
        self.FfsDict = sdict()
        self.OrderedFfsDict = sdict()
        self.UnDispatchedFfsDict = sdict()
        self.ProtocolList = sdict()

    def CheckArchProtocol(self):
        for Item in EotGlobalData.gArchProtocolGuids:
            if Item.lower() not in EotGlobalData.gProtocolList:
                return False
        return True

    def ParseDepex(self, Depex, Type):
        List = None
        if Type == 'Ppi':
            List = EotGlobalData.gPpiList
        if Type == 'Protocol':
            List = EotGlobalData.gProtocolList
        DepexStack = []
        DepexList = []
        DepexString = ''
        FileDepex = None
        CouldBeLoaded = True
        for Index in range(0, len(Depex.Expression)):
            Item = Depex.Expression[Index]
            if Item == 0x00:
                Index = Index + 1
                Guid = gGuidStringFormat % Depex.Expression[Index]
                if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08:
                    return (True, 'BEFORE %s' % Guid, [Guid, 'BEFORE'])
            elif Item == 0x01:
                Index = Index + 1
                Guid = gGuidStringFormat % Depex.Expression[Index]
                if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08:
                    return (True, 'AFTER %s' % Guid, [Guid, 'AFTER'])
            elif Item == 0x02:
                Index = Index + 1
                Guid = gGuidStringFormat % Depex.Expression[Index]
                if Guid.lower() in List:
                    DepexStack.append(True)
                    DepexList.append(Guid)
                else:
                    DepexStack.append(False)
                    DepexList.append(Guid)
                continue
            elif Item == 0x03 or Item == 0x04:
                DepexStack.append(eval(str(DepexStack.pop()) + ' ' + Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop())))
                DepexList.append(str(DepexList.pop()) + ' ' + Depex._OPCODE_STRING_[Item].upper() + ' ' + str(DepexList.pop()))
            elif Item == 0x05:
                DepexStack.append(eval(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop())))
                DepexList.append(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexList.pop()))
            elif Item == 0x06:
                DepexStack.append(True)
                DepexList.append('TRUE')
                DepexString = DepexString + 'TRUE' + ' '
            elif Item == 0x07:
                DepexStack.append(False)
                DepexList.append('False')
                DepexString = DepexString + 'FALSE' + ' '
            elif Item == 0x08:
                if Index != len(Depex.Expression) - 1:
                    CouldBeLoaded = False
                else:
                    CouldBeLoaded = DepexStack.pop()
            else:
                CouldBeLoaded = False
        if DepexList != []:
            DepexString = DepexList[0].strip()
        return (CouldBeLoaded, DepexString, FileDepex)

    def Dispatch(self, Db=None):
        if Db is None:
            return False
        self.UnDispatchedFfsDict = copy.copy(self.FfsDict)
        # Find PeiCore, DxeCore, PeiPriori, DxePriori first
        FfsSecCoreGuid = None
        FfsPeiCoreGuid = None
        FfsDxeCoreGuid = None
        FfsPeiPrioriGuid = None
        FfsDxePrioriGuid = None
        for FfsID in list(self.UnDispatchedFfsDict.keys()):
            Ffs = self.UnDispatchedFfsDict[FfsID]
            if Ffs.Type == 0x03:
                FfsSecCoreGuid = FfsID
                continue
            if Ffs.Type == 0x04:
                FfsPeiCoreGuid = FfsID
                continue
            if Ffs.Type == 0x05:
                FfsDxeCoreGuid = FfsID
                continue
            if Ffs.Guid.lower() == PEI_APRIORI_GUID.lower():
                FfsPeiPrioriGuid = FfsID
                continue
            if Ffs.Guid.lower() == DXE_APRIORI_GUID.lower():
                FfsDxePrioriGuid = FfsID
                continue

        # Parse SEC_CORE first
        if FfsSecCoreGuid is not None:
            self.OrderedFfsDict[FfsSecCoreGuid] = self.UnDispatchedFfsDict.pop(FfsSecCoreGuid)
            self.LoadPpi(Db, FfsSecCoreGuid)

        # Parse PEI first
        if FfsPeiCoreGuid is not None:
            self.OrderedFfsDict[FfsPeiCoreGuid] = self.UnDispatchedFfsDict.pop(FfsPeiCoreGuid)
            self.LoadPpi(Db, FfsPeiCoreGuid)
            if FfsPeiPrioriGuid is not None:
                # Load PEIM described in priori file
                FfsPeiPriori = self.UnDispatchedFfsDict.pop(FfsPeiPrioriGuid)
                if len(FfsPeiPriori.Sections) == 1:
                    Section = FfsPeiPriori.Sections.popitem()[1]
                    if Section.Type == 0x19:
                        GuidStruct = struct.Struct('1I2H8B')
                        Start = 4
                        while len(Section) > Start:
                            Guid = GuidStruct.unpack_from(Section[Start : Start + 16])
                            GuidString = gGuidStringFormat % Guid
                            Start = Start + 16
                            if GuidString in self.UnDispatchedFfsDict:
                                self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
                                self.LoadPpi(Db, GuidString)

        self.DisPatchPei(Db)

        # Parse DXE then
        if FfsDxeCoreGuid is not None:
            self.OrderedFfsDict[FfsDxeCoreGuid] = self.UnDispatchedFfsDict.pop(FfsDxeCoreGuid)
            self.LoadProtocol(Db, FfsDxeCoreGuid)
            if FfsDxePrioriGuid is not None:
                # Load PEIM described in priori file
                FfsDxePriori = self.UnDispatchedFfsDict.pop(FfsDxePrioriGuid)
                if len(FfsDxePriori.Sections) == 1:
                    Section = FfsDxePriori.Sections.popitem()[1]
                    if Section.Type == 0x19:
                        GuidStruct = struct.Struct('1I2H8B')
                        Start = 4
                        while len(Section) > Start:
                            Guid = GuidStruct.unpack_from(Section[Start : Start + 16])
                            GuidString = gGuidStringFormat % Guid
                            Start = Start + 16
                            if GuidString in self.UnDispatchedFfsDict:
                                self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
                                self.LoadProtocol(Db, GuidString)

        self.DisPatchDxe(Db)

    def LoadProtocol(self, Db, ModuleGuid):
        SqlCommand = """select GuidValue from Report
                        where SourceFileFullPath in
                        (select Value1 from Inf where BelongsToFile =
                        (select BelongsToFile from Inf
                        where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
                        and Model = %s)
                        and ItemType = 'Protocol' and ItemMode = 'Produced'""" \
                        % (ModuleGuid, 5001, 3007)
        RecordSet = Db.TblReport.Exec(SqlCommand)
        for Record in RecordSet:
            SqlCommand = """select Value2 from Inf where BelongsToFile =
                            (select DISTINCT BelongsToFile from Inf
                            where Value1 =
                            (select SourceFileFullPath from Report
                            where GuidValue like '%s' and ItemMode = 'Callback'))
                            and Value1 = 'FILE_GUID'""" % Record[0]
            CallBackSet = Db.TblReport.Exec(SqlCommand)
            if CallBackSet != []:
                EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid
            else:
                EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid

    def LoadPpi(self, Db, ModuleGuid):
        SqlCommand = """select GuidValue from Report
                        where SourceFileFullPath in
                        (select Value1 from Inf where BelongsToFile =
                        (select BelongsToFile from Inf
                        where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
                        and Model = %s)
                        and ItemType = 'Ppi' and ItemMode = 'Produced'""" \
                        % (ModuleGuid, 5001, 3007)
        RecordSet = Db.TblReport.Exec(SqlCommand)
        for Record in RecordSet:
            EotGlobalData.gPpiList[Record[0].lower()] = ModuleGuid

    def DisPatchDxe(self, Db):
        IsInstalled = False
        ScheduleList = sdict()
        for FfsID in list(self.UnDispatchedFfsDict.keys()):
            CouldBeLoaded = False
            DepexString = ''
            FileDepex = None
            Ffs = self.UnDispatchedFfsDict[FfsID]
            if Ffs.Type == 0x07:
                # Get Depex
                IsFoundDepex = False
                for Section in Ffs.Sections.values():
                    # Find Depex
                    if Section.Type == 0x13:
                        IsFoundDepex = True
                        CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Protocol')
                        break
                    if Section.Type == 0x01:
                        CompressSections = Section._SubImages[4]
                        for CompressSection in CompressSections.Sections:
                            if CompressSection.Type == 0x13:
                                IsFoundDepex = True
                                CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Protocol')
                                break
                            if CompressSection.Type == 0x02:
                                NewSections = CompressSection._SubImages[4]
                                for NewSection in NewSections.Sections:
                                    if NewSection.Type == 0x13:
                                        IsFoundDepex = True
                                        CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Protocol')
                                        break

                # Not find Depex
                if not IsFoundDepex:
                    CouldBeLoaded = self.CheckArchProtocol()
                    DepexString = ''
                    FileDepex = None

                # Append New Ffs
                if CouldBeLoaded:
                    IsInstalled = True
                    NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
                    NewFfs.Depex = DepexString
                    if FileDepex is not None:
                        ScheduleList.insert(FileDepex[1], FfsID, NewFfs, FileDepex[0])
                    else:
                        ScheduleList[FfsID] = NewFfs
                else:
                    self.UnDispatchedFfsDict[FfsID].Depex = DepexString

        for FfsID in ScheduleList.keys():
            NewFfs = ScheduleList.pop(FfsID)
            FfsName = 'UnKnown'
            self.OrderedFfsDict[FfsID] = NewFfs
            self.LoadProtocol(Db, FfsID)
            SqlCommand = """select Value2 from Inf
                            where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and lower(Value2) = lower('%s') and Model = %s)
                            and Model = %s and Value1='BASE_NAME'""" % (FfsID, 5001, 5001)
            RecordSet = Db.TblReport.Exec(SqlCommand)
            if RecordSet != []:
                FfsName = RecordSet[0][0]

        if IsInstalled:
            self.DisPatchDxe(Db)

    def DisPatchPei(self, Db):
        IsInstalled = False
        for FfsID in list(self.UnDispatchedFfsDict.keys()):
            CouldBeLoaded = True
            DepexString = ''
            FileDepex = None
            Ffs = self.UnDispatchedFfsDict[FfsID]
            if Ffs.Type == 0x06 or Ffs.Type == 0x08:
                # Get Depex
                for Section in Ffs.Sections.values():
                    if Section.Type == 0x1B:
                        CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Ppi')
                        break
                    if Section.Type == 0x01:
                        CompressSections = Section._SubImages[4]
                        for CompressSection in CompressSections.Sections:
                            if CompressSection.Type == 0x1B:
                                CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Ppi')
                                break
                            if CompressSection.Type == 0x02:
                                NewSections = CompressSection._SubImages[4]
                                for NewSection in NewSections.Sections:
                                    if NewSection.Type == 0x1B:
                                        CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Ppi')
                                        break

                # Append New Ffs
                if CouldBeLoaded:
                    IsInstalled = True
                    NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
                    NewFfs.Depex = DepexString
                    self.OrderedFfsDict[FfsID] = NewFfs
                    self.LoadPpi(Db, FfsID)
                else:
                    self.UnDispatchedFfsDict[FfsID].Depex = DepexString

        if IsInstalled:
            self.DisPatchPei(Db)

    def __str__(self):
        global gIndention
        gIndention += 4
        FvInfo = '\n' + ' ' * gIndention
        FvInfo += "[FV:%s] file_system=%s size=%x checksum=%s\n" % (self.Name, self.FileSystemGuid, self.Size, self.Checksum)
        FfsInfo = "\n".join([str(self.FfsDict[FfsId]) for FfsId in self.FfsDict])
        gIndention -= 4
        return FvInfo + FfsInfo

    def _Unpack(self):
        Size = self._LENGTH_.unpack_from(self._BUF_, self._OFF_)[0]
        self.empty()
        self.extend(self._BUF_[self._OFF_:self._OFF_ + Size])

        # traverse the FFS
        EndOfFv = Size
        FfsStartAddress = self.HeaderSize
        LastFfsObj = None
        while FfsStartAddress < EndOfFv:
            FfsObj = Ffs()
            FfsObj.frombuffer(self, FfsStartAddress)
            FfsId = repr(FfsObj)
            if ((self.Attributes & 0x00000800) != 0 and len(FfsObj) == 0xFFFFFF) \
                or ((self.Attributes & 0x00000800) == 0 and len(FfsObj) == 0):
                if LastFfsObj is not None:
                    LastFfsObj.FreeSpace = EndOfFv - LastFfsObj._OFF_ - len(LastFfsObj)
            else:
                if FfsId in self.FfsDict:
                    EdkLogger.error("FV", 0, "Duplicate GUID in FFS",
                                    ExtraData="\t%s @ %s\n\t%s @ %s" \
                                    % (FfsObj.Guid, FfsObj.Offset,
                                       self.FfsDict[FfsId].Guid, self.FfsDict[FfsId].Offset))
                self.FfsDict[FfsId] = FfsObj
                if LastFfsObj is not None:
                    LastFfsObj.FreeSpace = FfsStartAddress - LastFfsObj._OFF_ - len(LastFfsObj)

            FfsStartAddress += len(FfsObj)
            #
            # align to next 8-byte aligned address: A = (A + 8 - 1) & (~(8 - 1))
            # The next FFS must be at the latest next 8-byte aligned address
            #
            FfsStartAddress = (FfsStartAddress + 7) & (~7)
            LastFfsObj = FfsObj

    def _GetAttributes(self):
        return self.GetField(self._ATTR_, 0)[0]

    def _GetSize(self):
        return self.GetField(self._LENGTH_, 0)[0]

    def _GetChecksum(self):
        return self.GetField(self._CHECKSUM_, 0)[0]

    def _GetHeaderLength(self):
        return self.GetField(self._HLEN_, 0)[0]

    def _GetFileSystemGuid(self):
        return gGuidStringFormat % self.GetField(self._GUID_, 0)

    Attributes = property(_GetAttributes)
    Size = property(_GetSize)
    Checksum = property(_GetChecksum)
    HeaderSize = property(_GetHeaderLength)
    FileSystemGuid = property(_GetFileSystemGuid)

## GuidDefinedImage() class
#
# A class for GUID Defined Image
#
class GuidDefinedImage(Image):
    _HEADER_ = struct.Struct("1I2H8B 1H 1H")
    _HEADER_SIZE_ = _HEADER_.size

    _GUID_ = struct.Struct("1I2H8B")
    _DATA_OFFSET_ = struct.Struct("16x 1H")
    _ATTR_ = struct.Struct("18x 1H")

    CRC32_GUID = "FC1BCDB0-7D31-49AA-936A-A4600D9DD083"
    TIANO_COMPRESS_GUID = 'A31280AD-481E-41B6-95E8-127F4C984779'
    LZMA_COMPRESS_GUID = 'EE4E5898-3914-4259-9D6E-DC7BD79403CF'

    def __init__(self, SectionDefinitionGuid=None, DataOffset=None, Attributes=None, Data=None):
        Image.__init__(self)
        if SectionDefinitionGuid is not None:
            self.SectionDefinitionGuid = SectionDefinitionGuid
        if DataOffset is not None:
            self.DataOffset = DataOffset
        if Attributes is not None:
            self.Attributes = Attributes
        if Data is not None:
            self.Data = Data

    def __str__(self):
        S = "guid=%s" % (gGuidStringFormat % self.SectionDefinitionGuid)
        for Sec in self.Sections:
            S += "\n" + str(Sec)
        return S

    def _Unpack(self):
        # keep header in this Image object
        self.empty()
        self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
        return len(self)

    def _SetAttribute(self, Attribute):
        self.SetField(self._ATTR_, 0, Attribute)

    def _GetAttribute(self):
        return self.GetField(self._ATTR_)[0]

    def _SetGuid(self, Guid):
        self.SetField(self._GUID_, 0, Guid)

    def _GetGuid(self):
        return self.GetField(self._GUID_)

    def _SetDataOffset(self, Offset):
        self.SetField(self._DATA_OFFSET_, 0, Offset)

    def _GetDataOffset(self):
        return self.GetField(self._DATA_OFFSET_)[0]

    def _GetSections(self):
        SectionList = []
        Guid = gGuidStringFormat % self.SectionDefinitionGuid
        if Guid == self.CRC32_GUID:
            # skip the CRC32 value, we don't do CRC32 verification here
            Offset = self.DataOffset - 4
            while Offset < len(self):
                Sec = Section()
                try:
                    Sec.frombuffer(self, Offset)
                    Offset += Sec.Size
                    # the section is aligned to 4-byte boundary
                    Offset = (Offset + 3) & (~3)
                except:
                    break
                SectionList.append(Sec)
        elif Guid == self.TIANO_COMPRESS_GUID:
            try:
                # skip the header
                Offset = self.DataOffset - 4
                TmpData = DeCompress('Framework', self[self.Offset:])
                DecData = array('B')
                DecData.fromstring(TmpData)
                Offset = 0
                while Offset < len(DecData):
                    Sec = Section()
                    try:
                        Sec.frombuffer(DecData, Offset)
                        Offset += Sec.Size
                        # the section is aligned to 4-byte boundary
                        Offset = (Offset + 3) & (~3)
                    except:
                        break
                    SectionList.append(Sec)
            except:
                pass
        elif Guid == self.LZMA_COMPRESS_GUID:
            try:
                # skip the header
                Offset = self.DataOffset - 4
                TmpData = DeCompress('Lzma', self[self.Offset:])
                DecData = array('B')
                DecData.fromstring(TmpData)
                Offset = 0
                while Offset < len(DecData):
                    Sec = Section()
                    try:
                        Sec.frombuffer(DecData, Offset)
                        Offset += Sec.Size
                        # the section is aligned to 4-byte boundary
                        Offset = (Offset + 3) & (~3)
                    except:
                        break
                    SectionList.append(Sec)
            except:
                pass

        return SectionList

    Attributes = property(_GetAttribute, _SetAttribute)
    SectionDefinitionGuid = property(_GetGuid, _SetGuid)
    DataOffset = property(_GetDataOffset, _SetDataOffset)
    Sections = property(_GetSections)

## Section() class
#
# A class for Section
#
class Section(Image):
    _TypeName = {
        0x00 : "<unknown>",
        0x01 : "COMPRESSION",
        0x02 : "GUID_DEFINED",
        0x10 : "PE32",
        0x11 : "PIC",
        0x12 : "TE",
        0x13 : "DXE_DEPEX",
        0x14 : "VERSION",
        0x15 : "USER_INTERFACE",
        0x16 : "COMPATIBILITY16",
        0x17 : "FIRMWARE_VOLUME_IMAGE",
        0x18 : "FREEFORM_SUBTYPE_GUID",
        0x19 : "RAW",
        0x1B : "PEI_DEPEX"
    }

    _SectionSubImages = {
        0x01 : CompressedImage,
        0x02 : GuidDefinedImage,
        0x17 : FirmwareVolume,
        0x13 : Depex,
        0x1B : Depex,
        0x15 : Ui
    }

    # Size = 3-byte
    # Type = 1-byte
    _HEADER_ = struct.Struct("3B 1B")
    _HEADER_SIZE_ = _HEADER_.size

    # SubTypeGuid
    # _FREE_FORM_SUBTYPE_GUID_HEADER_ = struct.Struct("1I2H8B")
    _SIZE_ = struct.Struct("3B")
    _TYPE_ = struct.Struct("3x 1B")

    def __init__(self, Type=None, Size=None):
        Image.__init__(self)
        self._Alignment = 1
        if Type is not None:
            self.Type = Type
        if Size is not None:
            self.Size = Size

    def __str__(self):
        global gIndention
        gIndention += 4
        SectionInfo = ' ' * gIndention
        if self.Type in self._TypeName:
            SectionInfo += "[SECTION:%s] offset=%x size=%x" % (self._TypeName[self.Type], self._OFF_, self.Size)
        else:
            SectionInfo += "[SECTION:%x<unknown>] offset=%x size=%x " % (self.Type, self._OFF_, self.Size)
        for Offset in self._SubImages.keys():
            SectionInfo += ", " + str(self._SubImages[Offset])
        gIndention -= 4
        return SectionInfo

    def _Unpack(self):
        self.empty()
        Type, = self._TYPE_.unpack_from(self._BUF_, self._OFF_)
        Size1, Size2, Size3 = self._SIZE_.unpack_from(self._BUF_, self._OFF_)
        Size = Size1 + (Size2 << 8) + (Size3 << 16)

        if Type not in self._SectionSubImages:
            # no need to extract sub-image, keep all in this Image object
            self.extend(self._BUF_[self._OFF_ : self._OFF_ + Size])
        else:
            # keep header in this Image object
            self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._HEADER_SIZE_])
            #
            # use new Image object to represent payload, which may be another kind
            # of image such as PE32
            #
            PayloadOffset = self._HEADER_SIZE_
            PayloadLen = self.Size - self._HEADER_SIZE_
            Payload = self._SectionSubImages[self.Type]()
            Payload.frombuffer(self._BUF_, self._OFF_ + self._HEADER_SIZE_, PayloadLen)
            self._SubImages[PayloadOffset] = Payload

        return Size

    def _SetSize(self, Size):
        Size1 = Size & 0xFF
        Size2 = (Size & 0xFF00) >> 8
        Size3 = (Size & 0xFF0000) >> 16
        self.SetField(self._SIZE_, 0, Size1, Size2, Size3)

    def _GetSize(self):
        Size1, Size2, Size3 = self.GetField(self._SIZE_)
        return Size1 + (Size2 << 8) + (Size3 << 16)

    def _SetType(self, Type):
        self.SetField(self._TYPE_, 0, Type)

    def _GetType(self):
        return self.GetField(self._TYPE_)[0]

    def _GetAlignment(self):
        return self._Alignment

    def _SetAlignment(self, Alignment):
        self._Alignment = Alignment
        AlignmentMask = Alignment - 1
        # section alignment is actually for payload, so we need to add header size
        PayloadOffset = self._OFF_ + self._HEADER_SIZE_
        if (PayloadOffset & (~AlignmentMask)) == 0:
            return
        NewOffset = (PayloadOffset + AlignmentMask) & (~AlignmentMask)
        while (NewOffset - PayloadOffset) < self._HEADER_SIZE_:
            NewOffset += self._Alignment

    def tofile(self, f):
        self.Size = len(self)
        Image.tofile(self, f)
        for Offset in self._SubImages:
            self._SubImages[Offset].tofile(f)

    Type = property(_GetType, _SetType)
    Size = property(_GetSize, _SetSize)
    Alignment = property(_GetAlignment, _SetAlignment)

## Ffs() class
#
# A class for Ffs Section
#
class Ffs(Image):
    _FfsFormat = "24B%(payload_size)sB"
    # skip IntegrityCheck
    _HEADER_ = struct.Struct("1I2H8B 2x 1B 1B 3B 1B")
    _HEADER_SIZE_ = _HEADER_.size

    _NAME_ = struct.Struct("1I2H8B")
    _INT_CHECK_ = struct.Struct("16x 1H")
    _TYPE_ = struct.Struct("18x 1B")
    _ATTR_ = struct.Struct("19x 1B")
    _SIZE_ = struct.Struct("20x 3B")
    _STATE_ = struct.Struct("23x 1B")

    FFS_ATTRIB_FIXED = 0x04
    FFS_ATTRIB_DATA_ALIGNMENT = 0x38
    FFS_ATTRIB_CHECKSUM = 0x40

    _TypeName = {
        0x00 : "<unknown>",
        0x01 : "RAW",
        0x02 : "FREEFORM",
        0x03 : "SECURITY_CORE",
        0x04 : "PEI_CORE",
        0x05 : "DXE_CORE",
        0x06 : "PEIM",
        0x07 : "DRIVER",
        0x08 : "COMBINED_PEIM_DRIVER",
        0x09 : "APPLICATION",
        0x0A : "SMM",
        0x0B : "FIRMWARE_VOLUME_IMAGE",
        0x0C : "COMBINED_SMM_DXE",
        0x0D : "SMM_CORE",
        0x0E : "MM_STANDALONE",
        0x0F : "MM_CORE_STANDALONE",
        0xc0 : "OEM_MIN",
        0xdf : "OEM_MAX",
        0xe0 : "DEBUG_MIN",
        0xef : "DEBUG_MAX",
        0xf0 : "FFS_MIN",
        0xff : "FFS_MAX",
        0xf0 : "FFS_PAD",  # duplicate 0xf0 key kept as in the original source
    }

    def __init__(self):
        Image.__init__(self)
        self.FreeSpace = 0
        self.Sections = sdict()
        self.Depex = ''
        self.__ID__ = None

    def __str__(self):
        global gIndention
        gIndention += 4
        Indention = ' ' * gIndention
        FfsInfo = Indention
        FfsInfo += "[FFS:%s] offset=%x size=%x guid=%s free_space=%x alignment=%s\n" % \
                   (Ffs._TypeName[self.Type], self._OFF_, self.Size, self.Guid, self.FreeSpace, self.Alignment)
        SectionInfo = '\n'.join([str(self.Sections[Offset]) for Offset in self.Sections.keys()])
        gIndention -= 4
        return FfsInfo + SectionInfo + "\n"

    def __len__(self):
        return self.Size

    def __repr__(self):
        return self.__ID__

    def _Unpack(self):
        Size1, Size2, Size3 = self._SIZE_.unpack_from(self._BUF_, self._OFF_)
        Size = Size1 + (Size2 << 8) + (Size3 << 16)
        self.empty()
        self.extend(self._BUF_[self._OFF_ : self._OFF_ + Size])

        # Pad FFS may use the same GUID. We need to avoid it.
        if self.Type == 0xf0:
            self.__ID__ = str(uuid.uuid1()).upper()
        else:
            self.__ID__ = self.Guid

        # Traverse the SECTION. RAW and PAD do not have sections
        if self.Type not in [0xf0, 0x01] and Size > 0 and Size < 0xFFFFFF:
            EndOfFfs = Size
            SectionStartAddress = self._HEADER_SIZE_
            while SectionStartAddress < EndOfFfs:
                SectionObj = Section()
                SectionObj.frombuffer(self, SectionStartAddress)
                #f = open(repr(SectionObj), 'wb')
                #SectionObj.Size = 0
                #SectionObj.tofile(f)
                #f.close()
                self.Sections[SectionStartAddress] = SectionObj
                SectionStartAddress += len(SectionObj)
                SectionStartAddress = (SectionStartAddress + 3) & (~3)

    def Pack(self):
        pass

    def SetFreeSpace(self, Size):
        self.FreeSpace = Size

    def _GetGuid(self):
        return gGuidStringFormat % self.Name

    def _SetName(self, Value):
        # Guid1, Guid2, Guid3, Guid4, Guid5, Guid6, Guid7, Guid8, Guid9, Guid10, Guid11
        self.SetField(self._NAME_, 0, Value)

    def _GetName(self):
        # Guid1, Guid2, Guid3, Guid4, Guid5, Guid6, Guid7, Guid8, Guid9, Guid10, Guid11
        return self.GetField(self._NAME_)

    def _SetSize(self, Size):
        Size1 = Size & 0xFF
        Size2 = (Size & 0xFF00) >> 8
        Size3 = (Size & 0xFF0000) >> 16
        self.SetField(self._SIZE_, 0, Size1, Size2, Size3)

    def _GetSize(self):
        Size1, Size2, Size3 = self.GetField(self._SIZE_)
        return Size1 + (Size2 << 8) + (Size3 << 16)

    def _SetType(self, Type):
        self.SetField(self._TYPE_, 0, Type)

    def _GetType(self):
        return self.GetField(self._TYPE_)[0]

    def _SetAttributes(self, Value):
        self.SetField(self._ATTR_, 0, Value)

    def _GetAttributes(self):
        return self.GetField(self._ATTR_)[0]

    def _GetFixed(self):
        if (self.Attributes & self.FFS_ATTRIB_FIXED) != 0:
            return True
        return False

    def _GetCheckSum(self):
        if (self.Attributes & self.FFS_ATTRIB_CHECKSUM) != 0:
            return True
        return False

    def _GetAlignment(self):
        return (self.Attributes & self.FFS_ATTRIB_DATA_ALIGNMENT) >> 3

    def _SetState(self, Value):
        self.SetField(self._STATE_, 0, Value)

    def _GetState(self):
        return self.GetField(self._STATE_)[0]

    Name = property(_GetName, _SetName)
    Guid = property(_GetGuid)
    Type = property(_GetType, _SetType)
    Size = property(_GetSize, _SetSize)
    Attributes = property(_GetAttributes, _SetAttributes)
    Fixed = property(_GetFixed)
    Checksum = property(_GetCheckSum)
    Alignment = property(_GetAlignment)
    State = property(_GetState, _SetState)

## MultipleFv() class
#
# A class for Multiple FV
#
class MultipleFv(FirmwareVolume):
    def __init__(self, FvList):
        FirmwareVolume.__init__(self)
        self.BasicInfo = []
        for FvPath in FvList:
            Fd = None
            FvName = os.path.splitext(os.path.split(FvPath)[1])[0]
            if FvPath.strip():
                Fd = open(FvPath, 'rb')
            Buf = array('B')
            try:
                Buf.fromfile(Fd, os.path.getsize(FvPath))
            except EOFError:
                pass

            Fv = FirmwareVolume(FvName)
            Fv.frombuffer(Buf, 0, len(Buf))

            self.BasicInfo.append([Fv.Name, Fv.FileSystemGuid, Fv.Size])
            self.FfsDict.update(Fv.FfsDict)

## Class Eot
#
# This class is used to define Eot main entrance
#
# @param object: Inherited from object class
#
class Eot(object):
    ## The constructor
    #
    # @param self: The object pointer
    #
    def __init__(self, CommandLineOption=True, IsInit=True, SourceFileList=None, \
                 IncludeDirList=None, DecFileList=None, GuidList=None, LogFile=None,
                 FvFileList="", MapFileList="", Report='Report.html', Dispatch=None):
        # Version and Copyright
        self.VersionNumber = ("0.02" + " " + gBUILD_VERSION)
        self.Version = "%prog Version " + self.VersionNumber
        self.Copyright = "Copyright (c) 2008 - 2018, Intel Corporation All rights reserved."
        self.Report = Report

        self.IsInit = IsInit
        self.SourceFileList = SourceFileList
        self.IncludeDirList = IncludeDirList
        self.DecFileList = DecFileList
        self.GuidList = GuidList
        self.LogFile = LogFile
        self.FvFileList = FvFileList
        self.MapFileList = MapFileList
        self.Dispatch = Dispatch

        # Check workspace environment
        if "EFI_SOURCE" not in os.environ:
            if "EDK_SOURCE" not in os.environ:
                pass
            else:
                EotGlobalData.gEDK_SOURCE = os.path.normpath(os.getenv("EDK_SOURCE"))
        else:
            EotGlobalData.gEFI_SOURCE = os.path.normpath(os.getenv("EFI_SOURCE"))
            EotGlobalData.gEDK_SOURCE = os.path.join(EotGlobalData.gEFI_SOURCE, 'Edk')

        if "WORKSPACE" not in os.environ:
            EdkLogger.error("EOT", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
                            ExtraData="WORKSPACE")
        else:
            EotGlobalData.gWORKSPACE = os.path.normpath(os.getenv("WORKSPACE"))

        EotGlobalData.gMACRO['WORKSPACE'] = EotGlobalData.gWORKSPACE
        EotGlobalData.gMACRO['EFI_SOURCE'] = EotGlobalData.gEFI_SOURCE
        EotGlobalData.gMACRO['EDK_SOURCE'] = EotGlobalData.gEDK_SOURCE

        # Parse the options and args
        if CommandLineOption:
            self.ParseOption()

        if self.FvFileList:
            for FvFile in GetSplitValueList(self.FvFileList, ' '):
                FvFile = os.path.normpath(FvFile)
                if not os.path.isfile(FvFile):
                    EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % FvFile)
                EotGlobalData.gFV_FILE.append(FvFile)
        else:
            EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "The fv file list of target platform was not specified")

        if self.MapFileList:
            for MapFile in GetSplitValueList(self.MapFileList, ' '):
                MapFile = os.path.normpath(MapFile)
                if not os.path.isfile(MapFile):
                    EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % MapFile)
                EotGlobalData.gMAP_FILE.append(MapFile)

        # Generate source file list
        self.GenerateSourceFileList(self.SourceFileList, self.IncludeDirList)

        # Generate guid list of dec file list
        self.ParseDecFile(self.DecFileList)

        # Generate guid list from GUID list file
        self.ParseGuidList(self.GuidList)

        # Init Eot database
        EotGlobalData.gDb = Database.Database(Database.DATABASE_PATH)
        EotGlobalData.gDb.InitDatabase(self.IsInit)

        # Build ECC database
        self.BuildDatabase()

        # Parse Ppi/Protocol
        self.ParseExecutionOrder()

        # Merge Identifier tables
        self.GenerateQueryTable()

        # Generate report database
        self.GenerateReportDatabase()

        # Load Fv Info
        self.LoadFvInfo()

        # Load Map Info
        self.LoadMapInfo()

        # Generate Report
        self.GenerateReport()

        # Convert log file
        self.ConvertLogFile(self.LogFile)

        # DONE
        EdkLogger.quiet("EOT FINISHED!")

        # Close Database
        EotGlobalData.gDb.Close()

    ## ParseDecFile() method
    #
    # parse DEC file and get all GUID names with GUID values as {GuidName : GuidValue}
    # The Dict is stored in EotGlobalData.gGuidDict
    #
    # @param self: The object pointer
    # @param DecFileList: A list of all DEC files
    #
    def ParseDecFile(self, DecFileList):
        if DecFileList:
            path = os.path.normpath(DecFileList)
            lfr = open(path, 'rb')
            for line in lfr:
                path = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
                if os.path.exists(path):
                    dfr = open(path, 'rb')
                    for line in dfr:
                        line = CleanString(line)
                        list = line.split('=')
                        if len(list) == 2:
                            EotGlobalData.gGuidDict[list[0].strip()] = GuidStructureStringToGuidString(list[1].strip())

    ## ParseGuidList() method
    #
    # Parse Guid list and get all GUID names with GUID values as {GuidName : GuidValue}
    # The Dict is stored in EotGlobalData.gGuidDict
    #
    # @param self: The object pointer
    # @param GuidList: A list of all GUID and its value
    #
    def ParseGuidList(self, GuidList):
        Path = os.path.join(EotGlobalData.gWORKSPACE,
GuidList) if os.path.isfile(Path): for Line in open(Path): if Line.strip(): (GuidName, GuidValue) = Line.split() EotGlobalData.gGuidDict[GuidName] = GuidValue ## ConvertLogFile() method # # Parse a real running log file to get real dispatch order # The result is saved to old file name + '.new' # # @param self: The object pointer # @param LogFile: A real running log file name # def ConvertLogFile(self, LogFile): newline = [] lfr = None lfw = None if LogFile: lfr = open(LogFile, 'rb') lfw = open(LogFile + '.new', 'wb') for line in lfr: line = line.strip() line = line.replace('.efi', '') index = line.find("Loading PEIM at ") if index > -1: newline.append(line[index + 55 : ]) continue index = line.find("Loading driver at ") if index > -1: newline.append(line[index + 57 : ]) continue for line in newline: lfw.write(line + '\r\n') if lfr: lfr.close() if lfw: lfw.close() ## GenerateSourceFileList() method # # Generate a list of all source files # 1. Search the file list one by one # 2. Store inf file name with source file names under it like # { INF file name: [source file1, source file2, ...]} # 3. Search the include list to find all .h files # 4. Store source file list to EotGlobalData.gSOURCE_FILES # 5. Store INF file list to EotGlobalData.gINF_FILES # # @param self: The object pointer # @param SourceFileList: A list of all source files # @param IncludeFileList: A list of all include files # def GenerateSourceFileList(self, SourceFileList, IncludeFileList): EdkLogger.quiet("Generating source files list ... ") mSourceFileList = [] mInfFileList = [] mDecFileList = [] mFileList = {} mCurrentInfFile = '' mCurrentSourceFileList = [] if SourceFileList: sfl = open(SourceFileList, 'r') for line in sfl: line = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip())) if line[-2:].upper() == '.C' or line[-2:].upper() == '.H': if line not in mCurrentSourceFileList: mCurrentSourceFileList.append(line) mSourceFileList.append(line) EotGlobalData.gOP_SOURCE_FILES.write('%s\n' % line) if line[-4:].upper() == '.INF': if mCurrentInfFile != '': mFileList[mCurrentInfFile] = mCurrentSourceFileList mCurrentSourceFileList = [] mCurrentInfFile = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line)) EotGlobalData.gOP_INF.write('%s\n' % mCurrentInfFile) if mCurrentInfFile not in mFileList: mFileList[mCurrentInfFile] = mCurrentSourceFileList # Get all include files from packages if IncludeFileList: ifl = open(IncludeFileList, 'rb') for line in ifl: if not line.strip(): continue newline = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip())) for Root, Dirs, Files in os.walk(str(newline)): for File in Files: FullPath = os.path.normpath(os.path.join(Root, File)) if FullPath not in mSourceFileList and File[-2:].upper() == '.H': mSourceFileList.append(FullPath) EotGlobalData.gOP_SOURCE_FILES.write('%s\n' % FullPath) if FullPath not in mDecFileList and File.upper().find('.DEC') > -1: mDecFileList.append(FullPath) EotGlobalData.gSOURCE_FILES = mSourceFileList EotGlobalData.gOP_SOURCE_FILES.close() EotGlobalData.gINF_FILES = mFileList EotGlobalData.gOP_INF.close() ## GenerateReport() method # # Generate final HTML report # # @param self: The object pointer # def GenerateReport(self): EdkLogger.quiet("Generating report file ... 
") Rep = Report(self.Report, EotGlobalData.gFV, self.Dispatch) Rep.GenerateReport() ## LoadMapInfo() method # # Load map files and parse them # # @param self: The object pointer # def LoadMapInfo(self): if EotGlobalData.gMAP_FILE != []: EdkLogger.quiet("Parsing Map file ... ") EotGlobalData.gMap = ParseMapFile(EotGlobalData.gMAP_FILE) ## LoadFvInfo() method # # Load FV binary files and parse them # # @param self: The object pointer # def LoadFvInfo(self): EdkLogger.quiet("Parsing FV file ... ") EotGlobalData.gFV = MultipleFv(EotGlobalData.gFV_FILE) EotGlobalData.gFV.Dispatch(EotGlobalData.gDb) for Protocol in EotGlobalData.gProtocolList: EotGlobalData.gOP_UN_MATCHED_IN_LIBRARY_CALLING.write('%s\n' %Protocol) ## GenerateReportDatabase() method # # Generate data for the information needed by report # 1. Update name, macro and value of all found PPI/PROTOCOL GUID # 2. Install hard coded PPI/PROTOCOL # # @param self: The object pointer # def GenerateReportDatabase(self): EdkLogger.quiet("Generating the cross-reference table of GUID for Ppi/Protocol ... ") # Update Protocol/Ppi Guid SqlCommand = """select DISTINCT GuidName from Report""" RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) for Record in RecordSet: GuidName = Record[0] GuidMacro = '' GuidMacro2 = '' GuidValue = '' # Find guid value defined in Dec file if GuidName in EotGlobalData.gGuidDict: GuidValue = EotGlobalData.gGuidDict[GuidName] SqlCommand = """update Report set GuidMacro = '%s', GuidValue = '%s' where GuidName = '%s'""" %(GuidMacro, GuidValue, GuidName) EotGlobalData.gDb.TblReport.Exec(SqlCommand) continue # Search defined Macros for guid name SqlCommand ="""select DISTINCT Value, Modifier from Query where Name like '%s'""" % GuidName GuidMacroSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) # Ignore NULL result if not GuidMacroSet: continue GuidMacro = GuidMacroSet[0][0].strip() if not GuidMacro: continue # Find Guid value of Guid Macro SqlCommand ="""select DISTINCT Value from Query2 where Value like '%%%s%%' and Model = %s""" % (GuidMacro, MODEL_IDENTIFIER_MACRO_DEFINE) GuidValueSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) if GuidValueSet != []: GuidValue = GuidValueSet[0][0] GuidValue = GuidValue[GuidValue.find(GuidMacro) + len(GuidMacro) :] GuidValue = GuidValue.lower().replace('\\', '').replace('\r', '').replace('\n', '').replace('l', '').strip() GuidValue = GuidStructureStringToGuidString(GuidValue) SqlCommand = """update Report set GuidMacro = '%s', GuidValue = '%s' where GuidName = '%s'""" %(GuidMacro, GuidValue, GuidName) EotGlobalData.gDb.TblReport.Exec(SqlCommand) continue # Update Hard Coded Ppi/Protocol SqlCommand = """select DISTINCT GuidValue, ItemType from Report where ModuleID = -2 and ItemMode = 'Produced'""" RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) for Record in RecordSet: if Record[1] == 'Ppi': EotGlobalData.gPpiList[Record[0].lower()] = -2 if Record[1] == 'Protocol': EotGlobalData.gProtocolList[Record[0].lower()] = -2 ## GenerateQueryTable() method # # Generate two tables improve query performance # # @param self: The object pointer # def GenerateQueryTable(self): EdkLogger.quiet("Generating temp query table for analysis ... 
") for Identifier in EotGlobalData.gIdentifierTableList: SqlCommand = """insert into Query (Name, Modifier, Value, Model) select Name, Modifier, Value, Model from %s where (Model = %s or Model = %s)""" \ % (Identifier[0], MODEL_IDENTIFIER_VARIABLE, MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION) EotGlobalData.gDb.TblReport.Exec(SqlCommand) SqlCommand = """insert into Query2 (Name, Modifier, Value, Model) select Name, Modifier, Value, Model from %s where Model = %s""" \ % (Identifier[0], MODEL_IDENTIFIER_MACRO_DEFINE) EotGlobalData.gDb.TblReport.Exec(SqlCommand) ## ParseExecutionOrder() method # # Get final execution order # 1. Search all PPI # 2. Search all PROTOCOL # # @param self: The object pointer # def ParseExecutionOrder(self): EdkLogger.quiet("Searching Ppi/Protocol ... ") for Identifier in EotGlobalData.gIdentifierTableList: ModuleID, ModuleName, ModuleGuid, SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, Enabled = \ -1, '', '', -1, '', '', '', '', '', '', '', '', 0 SourceFileID = Identifier[0].replace('Identifier', '') SourceFileFullPath = Identifier[1] Identifier = Identifier[0] # Find Ppis ItemMode = 'Produced' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.InstallPpi', '->InstallPpi', 'PeiInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode) ItemMode = 'Produced' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.ReInstallPpi', '->ReInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 2) SearchPpiCallFunction(Identifier, SourceFileID, SourceFileFullPath, ItemMode) ItemMode = 'Consumed' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.LocatePpi', '->LocatePpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode) SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Ppi', ItemMode) ItemMode = 'Callback' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.NotifyPpi', '->NotifyPpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode) # Find Protocols ItemMode = 'Produced' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.InstallProtocolInterface', '.ReInstallProtocolInterface', '->InstallProtocolInterface', '->ReInstallProtocolInterface', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 1) SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.InstallMultipleProtocolInterfaces', '->InstallMultipleProtocolInterfaces', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 2) 
SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode) ItemMode = 'Consumed' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.LocateProtocol', '->LocateProtocol', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 0) SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.HandleProtocol', '->HandleProtocol', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 1) SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode) ItemMode = 'Callback' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.RegisterProtocolNotify', '->RegisterProtocolNotify', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 0) SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode) # Hard Code EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gEfiSecPlatformInformationPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gEfiNtLoadAsDllPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gNtPeiLoadFileGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiNtAutoScanPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gNtFwhPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiNtThunkPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiPlatformTypePpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiFrequencySelectionCpuPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiCachePpiGuid', '', '', '', 0) EotGlobalData.gDb.Conn.commit() ## BuildDatabase() methoc # # Build the database for target # # @param self: The object pointer # def BuildDatabase(self): # Clean report table EotGlobalData.gDb.TblReport.Drop() EotGlobalData.gDb.TblReport.Create() # Build database if self.IsInit: self.BuildMetaDataFileDatabase(EotGlobalData.gINF_FILES) EdkLogger.quiet("Building database for source code ...") c.CreateCCodeDB(EotGlobalData.gSOURCE_FILES) EdkLogger.quiet("Building database for source code done!") EotGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EotGlobalData.gDb) ## BuildMetaDataFileDatabase() method # # Build the database for meta data files # # @param self: The object pointer # @param Inf_Files: A list for all INF files # def BuildMetaDataFileDatabase(self, Inf_Files): EdkLogger.quiet("Building database for meta data files ...") for InfFile in Inf_Files: if not InfFile: continue EdkLogger.quiet("Parsing %s ..." 
% str(InfFile))
            EdkInfParser(InfFile, EotGlobalData.gDb, Inf_Files[InfFile], '')

        EotGlobalData.gDb.Conn.commit()
        EdkLogger.quiet("Building database for meta data files done!")

    ## ParseOption() method
    #
    # Parse command line options
    #
    # @param self: The object pointer
    #
    def ParseOption(self):
        (Options, Target) = self.EotOptionParser()

        # Set log level
        self.SetLogLevel(Options)

        if Options.FvFileList:
            self.FvFileList = Options.FvFileList

        if Options.MapFileList:
            self.MapFileList = Options.MapFileList

        if Options.SourceFileList:
            self.SourceFileList = Options.SourceFileList

        if Options.IncludeDirList:
            self.IncludeDirList = Options.IncludeDirList

        if Options.DecFileList:
            self.DecFileList = Options.DecFileList

        if Options.GuidList:
            self.GuidList = Options.GuidList

        if Options.LogFile:
            self.LogFile = Options.LogFile

        if Options.keepdatabase:
            self.IsInit = False

    ## SetLogLevel() method
    #
    # Set current log level of the tool based on args
    #
    # @param self: The object pointer
    # @param Option: The option list including log level setting
    #
    def SetLogLevel(self, Option):
        if Option.verbose is not None:
            EdkLogger.SetLevel(EdkLogger.VERBOSE)
        elif Option.quiet is not None:
            EdkLogger.SetLevel(EdkLogger.QUIET)
        elif Option.debug is not None:
            EdkLogger.SetLevel(Option.debug + 1)
        else:
            EdkLogger.SetLevel(EdkLogger.INFO)

    ## EotOptionParser() method
    #
    # Use the standard Python module optparse to parse the command line options of this tool.
    #
    # @param self: The object pointer
    #
    # @retval Opt  An optparse.Values object containing the parsed options
    # @retval Args Target of build command
    #
    def EotOptionParser(self):
        Parser = OptionParser(description=self.Copyright, version=self.Version,
                              prog="Eot.exe", usage="%prog [options]")
        Parser.add_option("-m", "--makefile filename", action="store", type="string", dest='MakeFile',
                          help="Specify a makefile for the platform.")
        Parser.add_option("-c", "--dsc filename", action="store", type="string", dest="DscFile",
                          help="Specify a dsc file for the platform.")
        Parser.add_option("-f", "--fv filename", action="store", type="string", dest="FvFileList",
                          help="Specify fv file list, quoted by \"\".")
        Parser.add_option("-a", "--map filename", action="store", type="string", dest="MapFileList",
                          help="Specify map file list, quoted by \"\".")
        Parser.add_option("-s", "--source files", action="store", type="string", dest="SourceFileList",
                          help="Specify source file list by a file")
        Parser.add_option("-i", "--include dirs", action="store", type="string", dest="IncludeDirList",
                          help="Specify include dir list by a file")
        Parser.add_option("-e", "--dec files", action="store", type="string", dest="DecFileList",
                          help="Specify dec file list by a file")
        Parser.add_option("-g", "--guid list", action="store", type="string", dest="GuidList",
                          help="Specify guid file list by a file")
        Parser.add_option("-l", "--log filename", action="store", type="string", dest="LogFile",
                          help="Specify real execution log file")
        Parser.add_option("-k", "--keepdatabase", action="store_true", type=None,
                          help="The existing Eot database will not be cleaned except report information if this option is specified.")
        Parser.add_option("-q", "--quiet", action="store_true", type=None,
                          help="Disable all messages except FATAL ERRORS.")
        Parser.add_option("-v", "--verbose", action="store_true", type=None,
                          help="Turn on verbose output with informational messages printed, "
                               "including library instances selected, final dependency expression, "
                               "and warning messages, etc.")
        Parser.add_option("-d", "--debug", action="store", type="int",
                          help="Enable debug messages at specified level.")

        (Opt, Args) = Parser.parse_args()

        return (Opt, Args)

##
#
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.
#
if __name__ == '__main__':
    # Initialize the log system
    EdkLogger.Initialize()
    EdkLogger.IsRaiseError = False
    EdkLogger.quiet(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")

    StartTime = time.clock()
    # use a distinct name so the Eot class itself is not shadowed
    EotApp = Eot(CommandLineOption=False,
                 SourceFileList=r'C:\TestEot\Source.txt',
                 GuidList=r'C:\TestEot\Guid.txt',
                 FvFileList=r'C:\TestEot\FVRECOVERY.Fv')
    FinishTime = time.clock()

    BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime))))
    EdkLogger.quiet("\n%s [%s]" % (time.strftime("%H:%M:%S, %b.%d %Y", time.localtime()), BuildDuration))
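Both the Section and Ffs classes above read and write the UEFI 3-byte little-endian length field with the same byte arithmetic (see their _GetSize/_SetSize methods). Here is a minimal, self-contained sketch of that encoding; the helper names are illustrative and not part of the tool:

    import struct

    # UEFI FFS/section headers store the length as a 3-byte little-endian
    # integer, exactly as Section._SetSize/_GetSize do above.
    _SIZE_ = struct.Struct("3B")

    def pack_size3(buf, offset, size):
        # split into low, middle, and high bytes
        _SIZE_.pack_into(buf, offset, size & 0xFF, (size >> 8) & 0xFF, (size >> 16) & 0xFF)

    def unpack_size3(buf, offset=0):
        s1, s2, s3 = _SIZE_.unpack_from(buf, offset)
        return s1 + (s2 << 8) + (s3 << 16)

    buf = bytearray(3)
    pack_size3(buf, 0, 0x123456)
    assert unpack_size3(buf) == 0x123456   # round-trips as 56 34 12 on the wire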
A long butterfly spread is a neutral position used when a trader believes the price of an underlying will stay within a relatively tight range.

Setup: the spread is typically built in a 1-2-1 ratio (buy 1 ITM option, sell 2 ATM options, buy 1 OTM option).

Max Profit: the distance between the short strike and either long strike, less the debit paid.

At tastytrade, we tend to buy Call or Put Butterfly spreads to take advantage of an underlying stock that barely moves. This is a low probability trade, but we use the strategy when implied volatility is high, since the butterfly spread then trades cheaper. It trades cheaper in that environment because the price of the in-the-money option consists primarily of intrinsic value, so selling the two ATM options covers a higher percentage of the cost of the two long options.

When do we close Butterflies? Since achieving maximum profit on a Butterfly is highly unlikely, the profit target on this position is generally lower; a reasonable target on a Long Butterfly is 25-50% of the maximum profit.

When do we manage Butterflies? Long Butterfly spreads are low probability, low risk trades. For this reason, losses generally aren't managed.
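To make the max-profit arithmetic concrete, here is a minimal sketch of a long call butterfly at expiration; the 95/100/105 strikes and the 1.50 debit are assumed for illustration, not market data:

    # Long call butterfly at expiration: +1 call @95, -2 calls @100, +1 call @105.
    # Strikes and the 1.50 debit are assumed for illustration only.
    def butterfly_payoff(price, k_low=95.0, k_mid=100.0, k_high=105.0, debit=1.50):
        call = lambda strike: max(price - strike, 0.0)
        return call(k_low) - 2 * call(k_mid) + call(k_high) - debit

    # Max profit occurs when the underlying pins the middle (short) strike:
    # wing width minus debit = (100 - 95) - 1.50 = 3.50.
    for p in (90, 95, 100, 105, 110):
        print("underlying=%d payoff=%.2f" % (p, butterfly_payoff(p)))

Pinning the short strike yields 3.50 (the 5-point wing width less the debit), while outside the wings the loss is capped at the debit paid, matching the rules of thumb above.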
#!/usr/bin/python
# A very simple process to combine the floating base estimate
# with the kinematics and output the combined message
# input: POSE_BODY and ATLAS_STATE, output: EST_ROBOT_STATE
#
# currently this is only used for Thor Mang
import os
import sys
import time

import lcm
import numpy.random as random

home_dir = os.getenv("HOME")
sys.path.append(home_dir + "/drc/software/build/lib/python2.7/site-packages")
sys.path.append(home_dir + "/drc/software/build/lib/python2.7/dist-packages")

from pronto.robot_state_t import robot_state_t

########################################################################################
def timestamp_now():
    return int(time.time() * 1000000)

# hyq joint ordering:
joint_name_list = ['lf_haa_joint', 'lf_hfe_joint', 'lf_kfe_joint',
                   'rf_haa_joint', 'rf_hfe_joint', 'rf_kfe_joint',
                   'lh_haa_joint', 'lh_hfe_joint', 'lh_kfe_joint',
                   'rh_haa_joint', 'rh_hfe_joint', 'rh_kfe_joint',
                   'ptu_pan', 'ptu_tilt']

def send_state():
    o = robot_state_t()
    o.utime = timestamp_now()
    o.num_joints = len(joint_name_list)
    o.joint_name = joint_name_list
    # random joint angles in [-0.5, 0.5) for testing
    o.joint_position = random.rand(o.num_joints) - 0.5
    o.joint_velocity = [0] * o.num_joints
    o.joint_effort = [0] * o.num_joints
    # fixed base pose: one metre up, identity orientation
    o.pose.translation.x = 0
    o.pose.translation.y = 0
    o.pose.translation.z = 1
    o.pose.rotation.w = 1
    o.pose.rotation.x = 0
    o.pose.rotation.y = 0
    o.pose.rotation.z = 0
    lc.publish("EST_ROBOT_STATE", o.encode())

####################################################################
lc = lcm.LCM()
print "started"
send_state()
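To sanity-check the publisher above, a minimal companion listener can decode EST_ROBOT_STATE. This is a sketch that assumes the same sys.path setup for the pronto bindings; the fields it prints are the ones the publisher sets:

    # Minimal LCM listener for the message published above (sketch).
    import lcm
    from pronto.robot_state_t import robot_state_t

    def on_state(channel, data):
        msg = robot_state_t.decode(data)
        print "%s: %d joints, base z=%.2f" % (channel, msg.num_joints, msg.pose.translation.z)

    lc = lcm.LCM()
    lc.subscribe("EST_ROBOT_STATE", on_state)
    while True:
        lc.handle()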
Want a total makeover of your house? Want the colors of your house to exude your personality? You can find quality painters in Australia to do a good job for you for as little as $30 per hour. They take care of both interior and exterior finishes, and do a great job when it comes to laying down the successive coats of paint. Most residential painters are qualified professionals who work full time and are available to cater to all your expectations. Your imagination is the beginning of their experience.

You may have had it with the old, beat-up state of your house and want a change, since appearance is everything. This is the point at which you should contact residential painters who combine a special set of skills with the right toolkit to achieve the look you have in mind. They will also give careful consideration to your wishes so that the work is done to your specification.

To make sure the finished product meets your desired level of satisfaction, Australian residential painters are equipped with the best, most readily available tools, which ensures the work produced is durable and of very good quality. They provide high-quality services at an affordable cost, can always give you an overall quotation for the job, and can adjust to accommodate your budget depending on the scope of work to be done. Before hiring a residential painter, consult a color palette advisor, who will shed light on the whole process and give you the information you need to follow its progress.

Creativity is also a key quality among residential painters. Employers often fear that painters are afraid of heights, but this should not be among your worries: most residential painters are not sole proprietors but work for registered companies, which assures you of a high level of professionalism. With that in mind, residential painters can paint roofs or any other elevated surfaces with ease and agility.

Some details are often ignored when acquiring the services of residential painters, but this should not be the case with you. First, ensure that the painters you select have a good work ethic and carry public liability insurance. Also have your property thoroughly tested for lead paint, and learn the dangers and implications of lead-based paint so that the coating can be removed safely. Lastly, don't be one of the Australians who waste paint: 100 million litres of paint are bought in Australia every year, and 5% of this goes to waste. Get the services of residential painters in Perth, and be a part of the transformation!
import hmac import json import urllib import hashlib import requests from urllib import parse from datetime import datetime class LocalBitcoin: baseurl = 'https://localbitcoins.com' def __init__(self, hmac_auth_key, hmac_auth_secret, debug = False): self.hmac_auth_key = hmac_auth_key self.hmac_auth_secret = hmac_auth_secret self.debug = debug """ Returns public user profile information """ def getAccountInfo(self, username): return self.sendRequest('/api/account_info/' + username + '/', '', 'get') """ Returns recent notifications. """ def getNotifications(self): return self.sendRequest('/api/notifications/', '', 'get') """ Return the information of the currently logged in user (the owner of authentication token). """ def getMyself(self): return self.sendRequest('/api/myself/', '', 'get') """ Checks the given PIN code against the user's currently active PIN code. You can use this method to ensure the person using the session is the legitimate user. """ def checkPinCode(self, code): return self.sendRequest('/api/pincode/', {'code': code}, 'post') """ Return open and active contacts """ def getDashboard(self): return self.sendRequest('/api/dashboard/', '', 'get') """ Return released (successful) contacts """ def getDashboardReleased(self): return self.sendRequest('/api/dashboard/released/', '', 'get') """ Return canceled contacts """ def getDashboardCanceled(self): return self.sendRequest('/api/dashboard/canceled/', '', 'get') """ Return closed contacts, both released and canceled """ def getDashboardClosed(self): return self.sendRequest('/api/dashboard/closed/', '', 'get') """ Releases the escrow of contact specified by ID {contact_id}. On success there's a complimentary message on the data key. """ def contactRelease(self, contact_id): return self.sendRequest('/api/contact_release/' + contact_id + '/', '', 'post') """ Releases the escrow of contact specified by ID {contact_id}. On success there's a complimentary message on the data key. """ def contactReleasePin(self, contact_id, pincode): return self.sendRequest('/api/contact_release_pin/' + contact_id + '/', {'pincode': pincode}, 'post') """ Reads all messaging from the contact. Messages are on the message_list key. On success there's a complimentary message on the data key. attachment_* fields exist only if there is an attachment. """ def getContactMessages(self, contact_id): return self.sendRequest('/api/contact_messages/' + contact_id + '/', '', 'get') """ Marks a contact as paid. It is recommended to access this API through /api/online_buy_contacts/ entries' action key. """ def markContactAsPaid(self, contact_id): return self.sendRequest('/api/contact_mark_as_paid/' + contact_id + '/', '', 'get') """ Post a message to contact """ def postMessageToContact(self, contact_id, message, document=None): return self.sendRequest('/api/contact_message_post/' + contact_id + '/', {'msg': message}, 'post') """ Starts a dispute with the contact, if possible. You can provide a short description using topic. This helps support to deal with the problem. """ def startDispute(self, contact_id, topic = None): topic = '' if topic != None: topic = {'topic': topic} return self.sendRequest('/api/contact_dispute/' + contact_id + '/', topic, 'post') """ Cancels the contact, if possible """ def cancelContact(self, contact_id): return self.sendRequest('/api/contact_cancel/' + contact_id + '/', '', 'post') """ Attempts to fund an unfunded local contact from the seller's wallet. 
""" def fundContact(self, contact_id): return self.sendRequest('/api/contact_fund/' + contact_id + '/', '', 'post') """ Attempts to create a contact to trade bitcoins. Amount is a number in the advertisement's fiat currency. Returns the API URL to the newly created contact at actions.contact_url. Whether the contact was able to be funded automatically is indicated at data.funded. Only non-floating LOCAL_SELL may return unfunded, all other trade types either fund or fail. """ def createContact(self, contact_id, ammount, message = None): post = '' if message == None: post = {'ammount': ammount} else: post = {'ammount': ammount, 'message': message} return self.sendRequest('/api/contact_create/' + contact_id + '/', post, 'post') """ Gets information about a single contact you are involved in. Same fields as in /api/contacts/. """ def getContactInfo(self, contact_id): return self.sendRequest('/api/contact_info/' + contact_id + '/', '', 'get') """ contacts is a comma-separated list of contact IDs that you want to access in bulk. The token owner needs to be either a buyer or seller in the contacts, contacts that do not pass this check are simply not returned. A maximum of 50 contacts can be requested at a time. The contacts are not returned in any particular order. """ def getContactsInfo(self, contacts): return self.sendRequest('/api/contact_info/', {'contacts': contacts}, 'get') """ Returns maximum of 50 newest trade messages. Messages are ordered by sending time, and the newest one is first. The list has same format as /api/contact_messages/, but each message has also contact_id field. """ def getRecentMessages(self): return self.sendRequest('/api/recent_messages/', '', 'get') """ Gives feedback to user. Possible feedback values are: trust, positive, neutral, block, block_without_feedback as strings. You may also set feedback message field with few exceptions. Feedback block_without_feedback clears the message and with block the message is mandatory. """ def postFeedbackToUser(self, username, feedback, message = None): post = {'feedback': feedback} if message != None: post = {'feedback': feedback, 'msg': message} return self.sendRequest('/api/feedback/' + username + '/', post, 'post') """ Gets information about the token owner's wallet balance. """ def getWallet(self): return self.sendRequest('/api/wallet/', '', 'get') """ Same as /api/wallet/ above, but only returns the message, receiving_address_list and total fields. (There's also a receiving_address_count but it is always 1: only the latest receiving address is ever returned by this call.) Use this instead if you don't care about transactions at the moment. """ def getWalletBallance(self): return self.sendRequest('/api/wallet-balance/', '', 'get') """ Sends amount bitcoins from the token owner's wallet to address. Note that this API requires its own API permission called Money. On success, this API returns just a message indicating success. It is highly recommended to minimize the lifetime of access tokens with the money permission. Call /api/logout/ to make the current token expire instantly. """ def walletSend(self, ammount, address): return self.sendRequest('/api/wallet-send/', {'ammount': ammount, 'address': address}, 'post') """ As above, but needs the token owner's active PIN code to succeed. Look before you leap. You can check if a PIN code is valid without attempting a send with /api/pincode/. 
Security concern: To get any security beyond the above API, do not retain the PIN code beyond a reasonable user session, a few minutes at most. If you are planning to save the PIN code anyway, please save some headache and get the real no-pin-required money permission instead. """ def walletSendWithPin(self, ammount, address, pincode): return self.sendRequest('/api/wallet-send-pin/', {'ammount': ammount, 'address': address, 'pincode': pincode}, 'post') """ Gets an unused receiving address for the token owner's wallet, its address given in the address key of the response. Note that this API may keep returning the same (unused) address if called repeatedly. """ def getWalletAddress(self): return self.sendRequest('/api/wallet-addr/', '', 'post') """ Expires the current access token immediately. To get a new token afterwards, public apps will need to reauthenticate, confidential apps can turn in a refresh token. """ def logout(self): return self.sendRequest('/api/logout/', '', 'post') """ Lists the token owner's all ads on the data key ad_list, optionally filtered. If there's a lot of ads, the listing will be paginated. Refer to the ad editing pages for the field meanings. List item structure is like so: """ def getOwnAds(self): return self.sendRequest('/api/ads/', '', 'post') """ This endpoint lets you edit an ad given the ad id and all the required fiends as designated by the API. If you just want to update the equation there is a better endpoint for that, this one takes a lot of LBC resources. """ def editAd(self, ad_id, lat, bank_name, price_equation, lon, countrycode, opening_hours, msg, max_amount, track_max_amount, visible): return self.sendRequest('/api/ad/' + ad_id + '/', {'lat': lat,'bank_name': bank_name,'price_equation': price_equation,'lon': lon,'countrycode': countrycode, 'opening_hours': opening_hours, 'msg': msg, 'max_amount': max_amount, 'track_max_amount': track_max_amount, 'visible': visible}, 'post') """ Creates a new invoice under the LBC merchant services page. """ def newInvoice(self, currency, amount, description): return self.sendRequest('/api/merchant/new_invoice/', {'currency': currency, 'amount': amount, 'description': description,}, 'post') """ Marks a users id as verified based on an open contact id. """ def markIdentityVerified(self, contact_id): return self.sendRequest('/api/contact_mark_identified/' + contact_id + '/', '', 'post') """ Get all the details of an ad based on its ID, can be any ad. """ def getAd(self, ad_id): return self.sendRequest('/api/ad-get/' + ad_id + '/', '', 'get') """ Change an ad's pricing equation to something else. """ def changeEquation(self, ad_id, equation): return self.sendRequest('/api/ad-equation/{ad_id}/'.format(ad_id=ad_id), {'price_equation': equation}, 'post') """ Main driver. """ def sendRequest(self, endpoint, params, method): params_encoded = '' if params != '': params_encoded = parse.urlencode(params) if method == 'get': params_encoded = '?' 
+ params_encoded now = datetime.utcnow() epoch = datetime.utcfromtimestamp(0) delta = now - epoch nonce = int(delta.total_seconds() * 1000) message = str(nonce) + self.hmac_auth_key + endpoint + params_encoded signature = hmac.new(bytes(self.hmac_auth_secret, 'latin-1'), msg = bytes(message , 'latin-1'), digestmod = hashlib.sha256).hexdigest().upper() headers = {} headers['Apiauth-key'] = self.hmac_auth_key headers['Apiauth-Nonce'] = str(nonce) headers['Apiauth-Signature'] = signature if method == 'get': response = requests.get(self.baseurl + endpoint, headers = headers, params = params) else: response = requests.post(self.baseurl + endpoint, headers = headers, data = params) if self.debug == True: print('REQUEST: ' + self.baseurl + endpoint) print('PARAMS: ' + str(params)) print('METHOD: ' + method) print('RESPONSE: ' + response.text) return json.loads(response.text)['data']
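A minimal usage sketch for the wrapper above. The key and secret strings are placeholders for real LocalBitcoins HMAC credentials, and the shape of the returned 'data' payloads is an assumption about the remote API, so the example just prints them whole:

    # Placeholders only -- substitute real HMAC credentials issued by LocalBitcoins.
    lbc = LocalBitcoin('YOUR_HMAC_AUTH_KEY', 'YOUR_HMAC_AUTH_SECRET', debug=True)

    # Every call returns the decoded 'data' object from the JSON response.
    print(lbc.getMyself())

    # Wallet summary (note the wrapper spells this method 'getWalletBallance').
    print(lbc.getWalletBallance())

Note that sendRequest returns json.loads(response.text)['data'] unconditionally, so a request the API rejects will surface as a KeyError rather than a descriptive error.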
Florence has been a center for women artists for five centuries. Who were the city's most dynamic female protagonists? They continue to inspire us today. Do you think that many of Artemisia’s female protagonists look alike? They are often self portraits. Elisabeth Chaplin is the artist with the most works in Florence’s collections. Siries flourished in eighteenth-century Florence where she was a portraitist for Tuscan nobility. A representative of the troubadour style, De Fauveau championed a revival for medieval art. Pincherle loved experimenting and had a passion for using dramatic color in her works. This Florentine painter and poetess was known for teaching other women. A major exponent of Magic Realism. Mum’s the word for Tuscan artist Amalia Ciardi Duprè. Carla Accardi is Italy’s Grand Dame of Abstractionism. Cuban art in Florence, with Amelia Peláez, a visual arts pioneer in Latin America. Lazzari ‘measures up’ as an exponent of avant-garde Rationalism. A main player in Cuba’s ‘Geometric Revolution’. A Tuscan painter who resisted the currents of the post-Macchiaioli and the post-Impressionist styles. Undoubtedly one of the most important women painting in Italy in the 1900s. English painter Costa is 'genius loci' at Florence's Il Palmerino. A strong drawing hand and the heart of a colorist. A painter whose attention to detail makes for a tactile visual experience. Ever-political, a sculptress of protest, struggle… and hope.
# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot import config from buildbot.process import buildstep from buildbot.status.results import FAILURE from buildbot.status.results import RETRY from buildbot.status.results import SUCCESS from buildbot.steps.source import svn from buildbot.steps.transfer import _FileReader from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.properties import ConstantRenderable from twisted.internet import error from twisted.python.reflect import namedModule from twisted.trial import unittest class TestSVN(sourcesteps.SourceStepMixin, unittest.TestCase): svn_st_xml = """<?xml version="1.0"?> <status> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry path="svn_external_path/unversioned_file1"> <wc-status props="none" item="unversioned"> </wc-status> </entry> <entry path="svn_external_path/unversioned_file2"> <wc-status props="none" item="unversioned"> </wc-status> </entry> </target> </status> """ svn_st_xml_corrupt = """<?xml version="1.0"?> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry path="svn_external_path/unversioned_file"> <wc-status props="none" item="unversioned"> </wc-status> </entry> </target> </status> """ svn_st_xml_empty = """<?xml version="1.0"?> <status> <target path="."> </target> </status>""" svn_info_stdout_xml = """<?xml version="1.0"?> <info> <entry kind="dir" path="." revision="100"> <url>http://svn.red-bean.com/repos/test</url> <repository> <root>http://svn.red-bean.com/repos/test</root> <uuid>5e7d134a-54fb-0310-bd04-b611643e5c25</uuid> </repository> <wc-info> <schedule>normal</schedule> <depth>infinity</depth> </wc-info> <commit revision="90"> <author>sally</author> <date>2003-01-15T23:35:12.847647Z</date> </commit> </entry> </info>""" svn_info_stdout_xml_nonintegerrevision = """<?xml version="1.0"?> <info> <entry kind="dir" path="." 
revision="a10"> <url>http://svn.red-bean.com/repos/test</url> <repository> <root>http://svn.red-bean.com/repos/test</root> <uuid>5e7d134a-54fb-0310-bd04-b611643e5c25</uuid> </repository> <wc-info> <schedule>normal</schedule> <depth>infinity</depth> </wc-info> <commit revision="a10"> <author>sally</author> <date>2003-01-15T23:35:12.847647Z</date> </commit> </entry> </info>""" def setUp(self): return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def patch_slaveVersionIsOlderThan(self, result): self.patch(svn.SVN, 'slaveVersionIsOlderThan', lambda x, y, z: result) def test_no_repourl(self): self.assertRaises(config.ConfigErrors, lambda: svn.SVN()) def test_incorrect_mode(self): self.assertRaises(config.ConfigErrors, lambda: svn.SVN(repourl='http://svn.local/app/trunk', mode='invalid')) def test_incorrect_method(self): self.assertRaises(config.ConfigErrors, lambda: svn.SVN(repourl='http://svn.local/app/trunk', method='invalid')) def test_corrupt_xml(self): self.setupStep(svn.SVN(repourl='http://svn.local/app/trunk')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_corrupt) + 0, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_revision_noninteger(self): svnTestStep = svn.SVN(repourl='http://svn.local/app/trunk') self.setupStep(svnTestStep) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml_nonintegerrevision) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', 'a10', 'SVN') d = self.runStep() def _checkType(): revision = self.step.getProperty('got_revision') self.assertRaises(ValueError, lambda: int(revision)) d.addCallback(lambda _: _checkType()) return d def test_revision_missing(self): """Fail if 'revision' tag isnt there""" svn_info_stdout = self.svn_info_stdout_xml.replace('entry', 'Blah') svnTestStep = svn.SVN(repourl='http://svn.local/app/trunk') self.setupStep(svnTestStep) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") 
+ 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=svn_info_stdout) + 0, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_mode_incremental(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_timeout(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', timeout=1, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_renderable(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk'), mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') 
return self.runStep() def test_mode_incremental_repourl_canonical(self): self.setupStep( svn.SVN(repourl='http://svn.local/trunk/test app', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/trunk/test%20app</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_not_updatable(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental',)) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_retry(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental', retry=(0, 1))) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_not_updatable_svninfo_mismatch(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', # expecting ../trunk/app stdout="""<?xml version="1.0"?><url>http://svn.local/branch/foo/app</url>""") + 0, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', 
command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_win32path(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.build.path_module = namedModule("ntpath") self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file=r'wkdir\.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file=r'wkdir\.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) return self.runStep() def test_mode_incremental_preferLastChangedRev(self): """Give the last-changed rev if 'preferLastChangedRev' is set""" self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', preferLastChangedRev=True, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', 
stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '90', 'SVN') return self.runStep() def test_mode_incremental_preferLastChangedRev_butMissing(self): """If 'preferLastChangedRev' is set, but missing, fall back to the regular revision value.""" svn_info_stdout = self.svn_info_stdout_xml.replace('commit', 'Blah') self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', preferLastChangedRev=True, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=svn_info_stdout) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clobber(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clobber_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='fresh', depth='infinite')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--depth', 
'infinite']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_retry(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='fresh', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='fresh', depth='infinite'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_keep_on_purge(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', keep_on_purge=['svn_external_path/unversioned_file1'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 
0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file2'], 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_not_updatable(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 
'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_not_updatable_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_old_rmdir(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.patch_slaveVersionIsOlderThan(True) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': 'wkdir/svn_external_path/unversioned_file1', 'logEnviron': True}) + 0, Expect('rmdir', {'dir': 'wkdir/svn_external_path/unversioned_file2', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_new_rmdir(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.patch_slaveVersionIsOlderThan(False) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file1', 
'wkdir/svn_external_path/unversioned_file2'], 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_copy_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', 'source', 'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) 
self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_patch(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', dict(dir=['wkdir/svn_external_path/unversioned_file1', 'wkdir/svn_external_path/unversioned_file2'], logEnviron=True)) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', 'source', 'wkdir']) + 0, Expect('downloadFile', dict(blocksize=16384, maxsize=None, reader=ExpectRemoteRef(_FileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=16384, maxsize=None, reader=ExpectRemoteRef(_FileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_timeout(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', timeout=1, mode='full', method='export')) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', timeout=1, command=['svn', 'export', 'source', 'wkdir']) + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, 
Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', '--revision', '100', 'source', 'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_auth(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export', username='svn_username', password='svn_password')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX')]) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX')]) + 0, ExpectShell(workdir='', command=['svn', 'export', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX'), 'source', 'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_with_env(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'], env={'abc': '123'})) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version'], env={'abc': '123'}) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_logEnviron(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'], 
logEnviron=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version'], logEnviron=False) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=False)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=False)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], logEnviron=False) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml'], logEnviron=False) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_command_fails(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 1, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_bogus_svnversion(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><entry kind="dir" path="/a/b/c" revision="1"><url>http://svn.local/app/trunk</url></entry>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout='1x0y0') + 0, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_rmdir_fails_clobber(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 1, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() 
def test_rmdir_fails_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 1, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_cpdir_fails_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True}) + 1, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_rmdir_fails_purge(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', keep_on_purge=['svn_external_path/unversioned_file1'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file2'], 'logEnviron': True}) + 1, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_slave_connection_lost(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, status_text=["update", "exception", "slave", "lost"]) return self.runStep() def test_empty_password(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', '', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', '', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', 
command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) return self.runStep() def test_omit_password(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) return self.runStep() # # svn.SVN.svnUriCanonicalize() test method factory # # given input string and expected result create a test method that # will call svn.SVN.svnUriCanonicalize() with the input and check # that expected result is returned # # @param input: test input # @param exp: expected result # def _makeSUCTest(input, exp): return lambda self: self.assertEqual( svn.SVN.svnUriCanonicalize(input), exp) class TestGetUnversionedFiles(unittest.TestCase): def test_getUnversionedFiles_does_not_list_externals(self): svn_st_xml = """<?xml version="1.0"?> <status> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry path="svn_external_path/unversioned_file"> <wc-status props="none" item="unversioned"> </wc-status> </entry> </target> </status> """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEquals(["svn_external_path/unversioned_file"], unversioned_files) def test_getUnversionedFiles_does_not_list_missing(self): svn_st_xml = """<?xml version="1.0"?> <status> <target path="."> <entry path="missing_file"> <wc-status props="none" item="missing"></wc-status> </entry> </target> </status> """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEquals([], unversioned_files) def test_getUnversionedFiles_corrupted_xml(self): svn_st_xml_corrupt = """<?xml version="1.0"?> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry path="svn_external_path/unversioned_file"> <wc-status props="none" item="unversioned"> </wc-status> </entry> </target> </status> """ self.assertRaises(buildstep.BuildStepFailed, lambda: list(svn.SVN.getUnversionedFiles(svn_st_xml_corrupt, []))) def test_getUnversionedFiles_no_path(self): svn_st_xml = """<?xml version="1.0"?> <status> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry> <wc-status props="none" item="unversioned"> </wc-status> </entry> </target> </status> """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEquals([], unversioned_files) def test_getUnversionedFiles_no_item(self): svn_st_xml = """<?xml version="1.0"?> <status> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry 
path="svn_external_path/unversioned_file"> <wc-status props="none"> </wc-status> </entry> </target> </status> """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEquals(["svn_external_path/unversioned_file"], unversioned_files) test_svnUriCanonicalize_empty = _makeSUCTest( "", "") test_svnUriCanonicalize_canonical = _makeSUCTest( "http://foo.com/bar", "http://foo.com/bar") test_svnUriCanonicalize_lc_scheme = _makeSUCTest( "hTtP://foo.com/bar", "http://foo.com/bar") test_svnUriCanonicalize_trailing_dot = _makeSUCTest( "http://foo.com./bar", "http://foo.com/bar") test_svnUriCanonicalize_lc_hostname = _makeSUCTest( "http://foO.COm/bar", "http://foo.com/bar") test_svnUriCanonicalize_lc_hostname_with_user = _makeSUCTest( "http://Jimmy@fOO.Com/bar", "http://Jimmy@foo.com/bar") test_svnUriCanonicalize_lc_hostname_with_user_pass = _makeSUCTest( "http://Jimmy:Sekrit@fOO.Com/bar", "http://Jimmy:Sekrit@foo.com/bar") test_svnUriCanonicalize_trailing_slash = _makeSUCTest( "http://foo.com/bar/", "http://foo.com/bar") test_svnUriCanonicalize_trailing_slash_scheme = _makeSUCTest( "http://", "http://") test_svnUriCanonicalize_trailing_slash_hostname = _makeSUCTest( "http://foo.com/", "http://foo.com") test_svnUriCanonicalize_trailing_double_slash = _makeSUCTest( "http://foo.com/x//", "http://foo.com/x") test_svnUriCanonicalize_double_slash = _makeSUCTest( "http://foo.com/x//y", "http://foo.com/x/y") test_svnUriCanonicalize_slash = _makeSUCTest( "/", "/") test_svnUriCanonicalize_dot = _makeSUCTest( "http://foo.com/x/./y", "http://foo.com/x/y") test_svnUriCanonicalize_dot_dot = _makeSUCTest( "http://foo.com/x/../y", "http://foo.com/y") test_svnUriCanonicalize_double_dot_dot = _makeSUCTest( "http://foo.com/x/y/../../z", "http://foo.com/z") test_svnUriCanonicalize_dot_dot_root = _makeSUCTest( "http://foo.com/../x/y", "http://foo.com/x/y") test_svnUriCanonicalize_quote_spaces = _makeSUCTest( "svn+ssh://user@host:123/My Stuff/file.doc", "svn+ssh://user@host:123/My%20Stuff/file.doc") test_svnUriCanonicalize_remove_port_80 = _makeSUCTest( "http://foo.com:80/bar", "http://foo.com/bar") test_svnUriCanonicalize_dont_remove_port_80 = _makeSUCTest( "https://foo.com:80/bar", "https://foo.com:80/bar") # not http test_svnUriCanonicalize_remove_port_443 = _makeSUCTest( "https://foo.com:443/bar", "https://foo.com/bar") test_svnUriCanonicalize_dont_remove_port_443 = _makeSUCTest( "svn://foo.com:443/bar", "svn://foo.com:443/bar") # not https test_svnUriCanonicalize_remove_port_3690 = _makeSUCTest( "svn://foo.com:3690/bar", "svn://foo.com/bar") test_svnUriCanonicalize_dont_remove_port_3690 = _makeSUCTest( "http://foo.com:3690/bar", "http://foo.com:3690/bar") # not svn test_svnUriCanonicalize_dont_remove_port_other = _makeSUCTest( "https://foo.com:2093/bar", "https://foo.com:2093/bar") test_svnUriCanonicalize_quote_funny_chars = _makeSUCTest( "http://foo.com/\x10\xe6%", "http://foo.com/%10%E6%25") test_svnUriCanonicalize_overquoted = _makeSUCTest( "http://foo.com/%68%65%6c%6c%6f%20%77%6f%72%6c%64", "http://foo.com/hello%20world")
Mama P booked a day off work in the Easter Hols and we headed up to The Bridal Outlet in Northallerton. There’s no appointment necessary, so we rocked up at about 11am and had no time to wait at all (though there was quite a queue forming when I’d finished, so if you head up there, it definitely pays to go early). When it’s your turn they’ll take the dresses into the changing room and call you in. (You do get to try on more than 3, but those are just the starting point, for you and your consultant to get an idea of what you do/don’t like). Browsing through all the dresses was probably the hardest part – they’re all in protective translucent bags, and although you can see the top part clearly, you can’t see the skirt, so it’s a struggle to imagine what it’ll be like actually on. But it was fun regardless!

I don’t want to give away any details about dresses I tried on (yet), but maybe in a year or so when I’m actually married! Main reason being, I don’t want anyone (particularly Ed) to have any inkling of what I’ll be wearing. So that’s where the description of the experience ends, soz! But I can tell you that the dress I bought was the FIRST DRESS I picked up off the rack. And I love that it ended up working like that, coz it was the same for my mum 😍

I didn’t go with the intention of buying my dress that day; as I said, I went expecting to use it as a fact-finding mission, to find out what I did/didn’t like, or what did/didn’t suit me…but I fell in love with my dress. And the second time it was on, I knew it was the one. The feeling really is as incredible as people say. It was a very surreal experience; I couldn’t believe I was stood in a wedding dress – it really is a pinch-yourself moment. If I’m honest, I think I’m going to struggle to comprehend that I’m getting married right up until the big day itself, maybe even after! At the risk of sounding super cringey, our whole relationship has been fairytale-like and I still can’t believe how lucky I am (for those who know us, it was a LONG time coming) 😂 So trying to get my head around the fact I’m going to be Mrs Leigh in less than a year is WEIRD.

…and it’s been hanging up in my old bedroom at Mum and Dad’s ever since! And yes, I have sneaked a peek at it virtually every time I’ve popped round there in the last 3 months!

It was such a special day, and every part of it was perfect. I got to share the precious experience with my wonderful Mama P, and that was just incredible – honestly, I can’t put into words just HOW incredible it was; I’m so lucky to have such an amazing relationship with her. I’d never have had it any other way, but she deserves the shout-out coz I love her so damn much. The choice of dresses at The Bridal Factory was top notch, the customer service was impeccable and the help given by my consultant was amazing – honest, informative, helpful…Jamie was a star. I would highly recommend the Outlet to anyone in the North wanting a friendly, magical wedding dress experience. And until the next time I get my dress on, I get my fix being judgmental of all the dresses that come on my TV screen!
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
'''
   Copyright 2014-2015 Teppo Perä

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
'''
from pytraits.core.errors import SingletonError


class Singleton(type):
    """
    Turn the class to an immutable singleton.

    >>> class Example(object, metaclass=Singleton):
    ...     pass
    ...
    >>> a = Example()
    >>> b = Example()
    >>> id(a) == id(b)
    True

    Having your instance as a singleton is faster than creating from scratch

    >>> import timeit
    >>> class MySingleton(object, metaclass=Singleton):
    ...     def __init__(self):
    ...         self._store = dict(one=1, two=2, three=3, four=4)
    ...
    >>> class NonSingleton(object):
    ...     def __init__(self):
    ...         self._store = dict(one=1, two=2, three=3, four=4)
    ...
    >>> timeit.timeit(NonSingleton) > timeit.timeit(MySingleton)
    True

    >>> MySingleton().new_item = False
    Traceback (most recent call last):
        ...
    errors.SingletonError: Singletons are immutable
    """
    def __call__(self, *args, **kwargs):
        try:
            # Return the already-built instance, if there is one.
            return self.__instance
        except AttributeError:
            def immutable_object(*args):
                raise SingletonError()

            # Build the one and only instance, then freeze the class by
            # routing attribute/item assignment to immutable_object.
            self.__instance = super(Singleton, self).__call__(*args, **kwargs)
            self.__setitem__ = immutable_object
            self.__setattr__ = immutable_object
            return self.__instance


if __name__ == "__main__":
    import doctest
    doctest.testmod()
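A quick usage sketch of the metaclass above; the Config class and its attribute are invented for illustration, and SingletonError is assumed importable exactly as in the module header.

from pytraits.core.errors import SingletonError

class Config(object, metaclass=Singleton):
    def __init__(self):
        self.value = 42    # runs once, before the class is frozen

a, b = Config(), Config()
assert a is b              # the second call returns the cached instance

try:
    a.value = 0            # the overridden __setattr__ now blocks mutation
except SingletonError:
    print("mutation rejected, as expected")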
Democrats are getting a little giddy in their attempt to impeach President Trump (over absolutely nothing) because they do not like him. Sorry to be the one to tell you, but Trump isn’t going to be impeached. In fact, I am going to go out on a limb here and say that he actually has a great chance of being elected for another four years in 2020!

Representative Al Green introduced four articles of impeachment on Wednesday, but stopped short of “forcing a vote.” He wants to give his colleagues and the public time to review the articles before he pushes for a vote. Green announced his intention on the House floor to offer the impeachment articles as a privileged resolution, which under House rules allows any member to force a vote within two legislative days. The GOP presiding officer moved to consider Green’s resolution less than an hour later, but Green didn’t show up, allowing the window for consideration to pass for now. Green could have forced a procedural vote to challenge the GOP’s ruling, which would have served as the first vote in Congress related to impeaching Trump.

The Hill reports that the articles state that Trump “has undermined the integrity of his office with impunity” and allegedly “brought disrepute on the presidency”; however, nowhere do they point out that Trump has actually committed a crime. Isn’t that kinda what you need to impeach the president?

Instead, the articles state that Trump is “fueling an alt-right hate machine” that is “causing immediate injury to American society;” brought “shame and dishonor to the office of the presidency by associating the majesty and dignity of the presidency with causes rooted in white supremacy;” engaged in deceit by making the false claim that three to five million people voted illegally in the 2016 presidential election; and encouraged police brutality during a July speech to law enforcement.
#!/usr/bin/env python3
# -*- coding:UTF-8 -*-
"""Convert a JSON file to a CSV file to be imported on AlumnForce website

This is the reciprocal of convert_csv_to_json.py.
"""
import argparse
import csv
import json
import sys

from csv_format import ALUMNFORCE_FIELDS

# Map "json field name" -> (csv field name, codec, column rank)
JSON_TO_CSV_FIELDS = dict(
    (x[1], (x[0], x[2], i)) for i, x in enumerate(ALUMNFORCE_FIELDS))
assert len(ALUMNFORCE_FIELDS) == len(JSON_TO_CSV_FIELDS)


class AlumnForceDataJ2C(object):
    """Data extracted from a JSON to produce data imported on AlumnForce website"""
    def __init__(self):
        self.fields = set()
        # content is a list of dicts "json field"->value
        self.content = []

    @classmethod
    def import_json_file(cls, json_file_path):
        """Create AlumnForce data from a JSON file"""
        with open(json_file_path, 'r') as json_stream:
            return cls.import_json_stream(json_stream)

    @classmethod
    def import_json_stream(cls, json_file):
        """Create AlumnForce data from a JSON stream"""
        data = cls()
        for record in json.load(json_file):
            flat_record = data.flatten_json_fields(record)
            for record_val in flat_record:
                if record_val[0] not in data.fields:
                    data.fields.add(record_val[0])
            data.content.append(dict(flat_record))
        return data

    @classmethod
    def flatten_json_fields(cls, json_record, prefixkey=None):
        result = []
        for key, value in json_record.items():
            fullkey = (prefixkey + key) if prefixkey else key
            field_properties = JSON_TO_CSV_FIELDS.get(fullkey)
            if field_properties is not None:
                if field_properties[1] is not None:
                    # Encode the JSON value to CSV
                    value = field_properties[1].encode(value)
                result.append((fullkey, value))
            elif isinstance(value, dict):
                # sub-dict: recurse with a dotted prefix
                result += cls.flatten_json_fields(value, fullkey + '.')
            else:
                raise ValueError("Unknown json field %r" % fullkey)
        return result

    def csv_dump(self, csv_file, **kwargs):
        """Dump all the CSV data"""
        # Sort the fields by their rank in ALUMNFORCE_FIELDS
        columns = sorted(self.fields, key=lambda f: JSON_TO_CSV_FIELDS[f][2])
        writer = csv.writer(csv_file, delimiter=',', quotechar='"',
                            escapechar='\\', quoting=csv.QUOTE_MINIMAL)
        writer.writerow((JSON_TO_CSV_FIELDS[f][0] for f in columns))
        for row in self.content:
            writer.writerow(row.get(f) for f in columns)


def main():
    # Note: this script converts JSON to CSV (the description in the
    # original said the reverse).
    parser = argparse.ArgumentParser(description="Convert AF JSON to CSV")
    parser.add_argument('file', nargs='?',
                        help="JSON file to read (or standard input)")
    parser.add_argument('-o', '--output', type=str,
                        help="CSV file to write (or standard output)")
    args = parser.parse_args()

    if args.file:
        data = AlumnForceDataJ2C.import_json_file(args.file)
    else:
        data = AlumnForceDataJ2C.import_json_stream(sys.stdin)

    if args.output and args.output != '-':
        with open(args.output, 'w') as fcsv:
            data.csv_dump(fcsv)
    else:
        data.csv_dump(sys.stdout)


if __name__ == '__main__':
    main()
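The script leans entirely on the ALUMNFORCE_FIELDS table from csv_format.py. The sketch below shows the shape this code implies for that table, each entry being (csv column name, json dotted name, optional codec), plus the derived lookup built at the top of the script; the field names here are invented, and the real table may differ.

# Assumed entry shape: (csv_column_name, json_dotted_name, codec_or_None)
EXAMPLE_FIELDS = [
    ("Last name", "name.last", None),
    ("First name", "name.first", None),
]
# Derived lookup: json name -> (csv name, codec, column rank)
EXAMPLE_LOOKUP = dict(
    (x[1], (x[0], x[2], i)) for i, x in enumerate(EXAMPLE_FIELDS))
assert EXAMPLE_LOOKUP["name.first"] == ("First name", None, 1)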
“WE NEED to hear the voices of the poor.” It is an oft-used sound-bite. The problems begin when we don’t like what the poor are saying.

For the most part, the Church of England has reacted to the election of Donald Trump (News, 11 November) and the UK’s vote to leave the EU (News, 1 July) (the “Trump-Brexit phenomenon”) by jumping on to the middle-class Establishment bandwagon of outrage and horror. As if set to auto-pilot, the C of E has joined in with those who are decrying the collapse of the liberal consensus and bemoaning a new mood of division in our public life. But, before we shout, we need to pay proper attention to the voices of those whose votes have caused this revolution, whether or not we like what we hear.

On both sides of the Atlantic, there has been an almighty cry of anger from a dispossessed and marginalised working class — the so-called “victims of globalisation”. Such people feel frozen out of the post-crash economy, their wages shrinking in real terms while the rich get ever richer. They are routinely accused of xenophobia, or worse, when they express concerns about changes imposed on their communities by those who live far away. In the UK, they feel abandoned by the institutions that were formed to represent them: austerity-stricken local government, the Labour Party, and the demutualised building societies.

If the C of E was still adequately present in areas of deprivation, it would not have been surprised at the revolution in popular politics that this anger caused (Comment, 1 July). But it has become so disconnected from many of these communities that it no longer hears what they are saying, let alone amplifies their voices to the nation. And, until the Church re-invests in urban ministry, places the best leaders in the most deprived parishes, and returns to the estates it has abandoned, these voices will continue to go unheard.

THE Church’s agenda is being set not by the poor, but by academia, the moneyed elites, and certain sections of the secular media. It is their preoccupations that dictate the terms of the Church’s debate, and that pose the questions that it expends its energy on answering. We then listen to the poor on condition that what they say backs up our own pre-conceived arguments. They have become for us an illustration, or a theological idea — anything other than people.

An example is the debate on human sexuality. This is indeed an important debate, but it has come to dominate the Church’s agenda to an extraordinary extent, pushing almost everything else to the bottom of the list. By prioritising this one issue to such an extent, we risk failing to hear other cries of pain.

When Pope Francis said that he wanted a “poor Church for the poor”, he was not just encouraging rich Christians to give a little more to the local foodbank, or to fund a few more top-down charitable projects, many of which bolster pre-existing relationships of power and collude with innately unjust power structures. Instead, he was calling the Church to allow its agenda to be set by the voices of the dispossessed and forgotten, not by the powerful. We may not always like what we hear. But if we want to answer with the Good News the questions people are asking, rather than those that we find it convenient to answer, it is something that we must do.
LORD GLASMAN, a pioneer of the community-organising movement and an architect of the Living Wage, argues, from a strong evidence base, that to understand the self-identity and concerns of most working people in this country, we need to focus on three things: family, place, and work. The Established Church has historically had a great deal to say about these areas of life, but has now fallen strangely silent.

Across many communities, extended family life remains very strong. For all its frustrations, it is where most people find support, self-identity, and purpose. But too many Anglicans seem embarrassed to stand up for the sanctity of the family. This is often motivated by a laudable desire not to exclude minorities. But the danger is that the Church is failing to address or uphold an area of life that is a core preoccupation for the majority of people.

Place includes not just local community but the nation. It is dangerous to read too many detailed conclusions from the EU referendum, but a constant refrain of the Leave campaign which resonated with voters was the need to “take back control of our country”. It was less an anti-immigration vote than a patriotic vote from people who were fed up with having pride in their nation, its flag, and its armed forces misrepresented as intolerance or racism.

All too often, middle-class clergy squirm nervously during Remembrance Sunday, and excise any hymns that hint of nationalism. But surely an Established Church has a part to play in finding a new and unifying national narrative that is patriotic as well as tolerant and inclusive. We have a lot to learn from our extraordinary armed-forces chaplains, whose work is too often forgotten by the wider Church.

The centrality of work is evidenced by the frustration that many working-class communities feel towards those perceived to be taking unfair advantage of the benefits system, and by the intense anger of those who, despite working hard, are still unable to feed their children. But, again, the Church is failing to make a stand for the dignity of work and for justice in the workplace. One rarely hears a sermon on work.

…that they should change their minds. For example, listening hard to why someone voted for Brexit, and seeking to understand the complex range of factors behind his or her decision, does not mean that one automatically has to agree with Brexit.

…frustration at structures and institutions that have abandoned them, and at a middle-class culture that misunderstands or misrepresents their heartfelt concerns. If, as Christians, we can re-engage, listen to the questions, and offer some answers, we will not just be playing our part in re-unifying a nation. We may find that people also start listening afresh to the gospel that we proclaim.

The Rt Revd Philip North is the Bishop of Burnley, in the diocese of Blackburn.
""" Model to store script data. """ import json import datetime from django.db import models from django.contrib.auth.models import User from tagging.fields import TagField import tagging import autoslug from nodetree import script class JsonTextField(models.TextField): def to_python(self, value): return value def validate(self, value, *args, **kwargs): super(models.TextField, self).validate(value, *args, **kwargs) try: json.loads(value) except ValueError: raise models.exceptions.ValidationError("Data must be valid JSON") class Preset(models.Model): user = models.ForeignKey(User, related_name="presets") tags = tagging.fields.TagField() name = models.CharField(max_length=100, unique=True) slug = autoslug.AutoSlugField(populate_from="name", unique=True) description = models.TextField(blank=True) public = models.BooleanField(default=True) created_on = models.DateField(editable=False) updated_on = models.DateField(editable=False, null=True, blank=True) data = JsonTextField() profile = models.ForeignKey("Profile", related_name="presets", null=True, blank=True) def __unicode__(self): """ String representation. """ return self.name def save(self): if not self.id: self.created_on = datetime.datetime.now() else: self.updated_on = datetime.datetime.now() super(Preset, self).save() def get_absolute_url(self): """URL to view an object detail""" return "/presets/show/%s/" % self.slug def get_update_url(self): """url to update an object detail""" return "/presets/edit/%s/" % self.slug def get_delete_url(self): """url to update an object detail""" return "/presets/delete/%s/" % self.slug @classmethod def get_list_url(cls): """URL to view the object list""" return "/presets/list/" @classmethod def get_create_url(cls): """URL to create a new object""" return "/presets/create/" TEST_PROFILE = { "must_exist" : [ { "attr": "stage", "value": "recognize", "unique": False, }, { "attr": "stage", "value": "input", "unique": True, }, ], } class Profile(models.Model): """Preset profile. This defines a class of presets to which the information in the preset must conform.""" name = models.CharField(max_length=255) slug = autoslug.AutoSlugField(populate_from="name", unique=True) tags = tagging.fields.TagField() description = models.TextField(blank=True) created_on = models.DateField(editable=False) updated_on = models.DateField(editable=False, null=True, blank=True) data = JsonTextField() def __unicode__(self): """ String representation. 
""" return self.name def save(self): if not self.id: self.created_on = datetime.datetime.now() else: self.updated_on = datetime.datetime.now() super(Profile, self).save() def validate_preset(self, data): this = json.loads(self.data) tree = script.Script(data) errors = [] for name, preds in this.iteritems(): for pred in preds: perrors = self.validate_predicate(name, pred, tree) if perrors: errors.extend(perrors) return errors def validate_predicate(self, name, pred, tree): errors = [] if name == "must_exist": attr = pred.get("attr") value = pred.get("value") unique = pred.get("unique") nodes = tree.get_nodes_by_attr(attr, value) if not nodes: errors.append("A node with attr '%s'='%s' must exist" % (attr, value)) elif len(nodes) > 1: errors.append("Node with attr '%s'='%s' must be unique" % (attr, value)) return errors def get_absolute_url(self): """URL to view an object detail""" return "/profiles/show/%s/" % self.slug def get_update_url(self): """url to update an object detail""" return "/profiles/edit/%s/" % self.slug def get_delete_url(self): """url to update an object detail""" return "/profiles/delete/%s/" % self.slug @classmethod def get_list_url(cls): """URL to view the object list""" return "/profiles/list/" @classmethod def get_create_url(cls): """URL to create a new object""" return "/profiles/create/"
KELLY REALTY I, LLC is an entity registered in MADISON County with company number 4384939. KELLY REALTY I, LLC is located at 7323 Lake View Court, Hamilton, New York, 13346. The company was incorporated on April 6, 2013, and its current status is active.
""" cd %HOME% mkdir tmp cd tmp wget http://www.woogerworks.com/files/cockatrice.weeklybuilds/Cockatrice-WindowsClient.exe Cockatrice-WindowsClient.exe """ def install_ansicon(): """ lets windows console display ansii References: http://www.liferay.com/web/igor.spasic/blog/-/blogs/enable-ansi-colors-in-windows-command-prompt """ #http://adoxa.altervista.org/ansicon/dl.php?f=ansicon pass def install_cockatrice(): cockatrice_url = 'http://www.woogerworks.com/files/cockatrice.weeklybuilds/Cockatrice-WindowsClient.exe' import utool as ut fpath = ut.grab_file_url(cockatrice_url) # run setup script ut.cmd(fpath) # press enter a few times import win32com.client as w32 shell = w32.Dispatch("WScript.Shell") shell.AppActivate('Cockatrice Setup') shell.SendKeys("{ENTER}") shell.SendKeys("{ENTER}") shell.SendKeys("{ENTER}") shell.SendKeys("{ENTER}") # need msvcp120.dll #https://www.microsoft.com/en-us/download/details.aspx?id=40784 #import win32gui #import win32api #import win32con #def window_handle(Title): # hwnd = win32gui.FindWindowEx(0, 0, 0, Title) # return hwnd #def click_btn(hwnd, Button): # hbutton = win32gui.FindWindowEx(hwnd, 0, "Button", Button) # if hbutton != 0: # win32api.PostMessage(hbutton, win32con.WM_LBUTTONDOWN, 0, 0) # win32api.PostMessage(hbutton, win32con.WM_LBUTTONUP, 0, 0) # return True # return None #click_btn(hwnd, "&Install") #window_title = 'Cockatrice Setup' #hwnd = win32gui.FindWindowEx(0, 0, 0, window_title) #assert hwnd != 0 #btnHnd= win32gui.FindWindowEx(hwnd, 0 , "Button", "Cancel") #print(btnHnd) #btnHnd= win32gui.FindWindowEx(hwnd, 0 , "Button", "Next") #print(btnHnd) #btnHnd= win32gui.FindWindowEx(hwnd, 0 , "Button", "") #button_name = 'Next' #hbutton = win32gui.FindWindowEx(hwnd, 0, "Button", button_name) #assert hbutton != 0, 'could not find button'
What is Dental Bonding? Dental bonding is a temporary treatment for restoring cracked teeth, decayed teeth and gaps between the teeth, and falls under cosmetic dentistry. It is the least expensive of all the tooth decay treatment procedures. Dental bonding uses a composite resin prepared to fill in and restore a decayed tooth, change the color of a tooth, reshape a tooth or make minor cosmetic improvements. A thin coat of plastic material is applied to the front surface of the tooth, and the cosmetic dentist then applies the prepared bonding material, shaping, sculpting and coloring it as required. A high-intensity light is used to harden the plastic, and the surface is then polished to a perfect shape. Bonding is also used to improve the appearance of a discolored tooth.
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os

from .tools import *
from .utils import *


class DatabaseError(Exception):
    def __init__(self, content):
        Exception.__init__(self, content)


if os.path.exists("blog.db"):
    db = Connection("blog.db")
else:
    raise DatabaseError("Database file not found!")


def get_page(id):
    page = db.get("SELECT * FROM pages WHERE id = ?", id)
    return page


def get_all_pages():
    pages = db.query("SELECT * FROM pages")
    return pages


def get_article(id):
    article = db.get("SELECT * FROM articles WHERE id = ?;", id)
    return article


def get_articles(page, post_per_page):
    articles = db.query("SELECT * FROM articles ORDER BY id DESC LIMIT ?, ?;",
                        (page - 1) * post_per_page, post_per_page)
    return articles


def get_all_articles():
    articles = db.query("SELECT * FROM articles ORDER BY id DESC;")
    return articles


def get_article_count():
    count = db.query('''SELECT COUNT(*) AS count FROM articles''')
    return count[0].count


def get_tag_articles(tag_name):
    sql = """SELECT * FROM articles AS a
             INNER JOIN tags AS t ON a.id = t.article_id
             WHERE t.name = ? ORDER BY id DESC;"""
    articles = db.query(sql, tag_name)
    return articles


def get_all_tags():
    tags = db.query("SELECT name, COUNT(name) AS num FROM tags GROUP BY name ORDER BY num DESC;")
    return tags


def create_page(**kwargs):
    # at most 5 static pages are allowed
    count = db.query('''SELECT COUNT(*) AS count FROM pages''')
    if count[0].count < 5:
        sql = '''INSERT INTO pages (title, content) VALUES (?,?);'''
        id = db.execute(sql, kwargs["title"], kwargs["content"])
        return id
    else:
        return False


def update_page(id, **kwargs):
    sql = '''UPDATE pages SET title=?, content=? WHERE id=?;'''
    db.execute(sql, kwargs["title"], kwargs["content"], id)
    return True


def delete_page(id):
    db.execute("DELETE FROM pages WHERE id=?;", id)
    return True


def create_article(**kwargs):
    sql = '''INSERT INTO articles (title, content, tag, datetime) VALUES (?,?,?,?);'''
    id = db.execute(sql, kwargs["title"], kwargs["content"], kwargs["tags"],
                    get_datetime())
    tags = [tag.strip() for tag in kwargs["tags"].split(",")]
    for tag in tags:
        db.execute("INSERT INTO tags (name, article_id) VALUES (?,?);", tag, id)
    return id


def update_article(id, **kwargs):
    # drop the old tag rows and re-insert the new tag list
    db.execute("DELETE FROM tags WHERE article_id=?;", id)
    sql = '''UPDATE articles SET title=?, content=?, tag=? WHERE id=?;'''
    db.execute(sql, kwargs["title"], kwargs["content"], kwargs["tags"], id)
    tags = [tag.strip() for tag in kwargs["tags"].split(",")]
    for tag in tags:
        db.execute("INSERT INTO tags (name, article_id) VALUES (?,?);", tag, id)
    return True


def delete_article(id):
    db.execute("DELETE FROM articles WHERE id=?;", id)
    db.execute("DELETE FROM tags WHERE article_id=?;", id)
    return True


def update_token(username, token):
    sql = '''UPDATE admin_config SET token=? WHERE username=?;'''
    db.execute(sql, token, username)
    return True


def verify_user(username, password_md5):
    information = db.get("SELECT * FROM admin_config WHERE username = ?;", username)
    if information and information.password == password_md5:
        return True
    else:
        return False


def verify_token(username, token):
    information = db.get("SELECT * FROM admin_config WHERE username = ?;", username)
    if information.token == token:
        return True
    else:
        return False


def change_password(username, n_password_md5):
    sql = '''UPDATE admin_config SET password=? WHERE username=?;'''
    db.execute(sql, n_password_md5, username)
    return True
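
# A minimal usage sketch (hypothetical values; assumes blog.db already
# contains the articles/tags/pages/admin_config tables used above):
#
#     article_id = create_article(title="Hello", content="First post.",
#                                 tags="python, blog")
#     for article in get_tag_articles("python"):
#         print article.title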
This chapter examines the local conditions, traditions, and forms of urban settlement in Cyprus during the Iron Age. It explains that, almost to the very end of the Middle Bronze Age, Cyprus remained a closed rural society, even though it was by then completely surrounded by Mediterranean urban states; only around 1100 BC did new social and economic structures begin to dictate the establishment and development of new population and power centers. The archaeological evidence of 800–600 BC stands as testimony to the culmination of a long process of social evolution and urbanization.
#!/usr/bin/env python

# Hello queue example using Python and RabbitMQ
# Copyright (c) 2014, Jesse J. Cook
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
#
# * Neither the name of the {organization} nor the names of its
#   contributors may be used to endorse or promote products derived from
#   this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pika
import logging
import time


def cb(ch, method, properties, body):
    print " [x] Received %r" % body
    # simulate work: one second per '.' in the message body
    time.sleep(body.count('.'))
    print " [x] Done"
    ch.basic_ack(delivery_tag=method.delivery_tag)

logging.basicConfig()
conn = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
chan = conn.channel()
chan.queue_declare(queue='task_queue', durable=True)
# hand out at most one unacknowledged message at a time
chan.basic_qos(prefetch_count=1)
chan.basic_consume(cb, queue='task_queue')
print ' [*] Waiting for messages. To exit press CTRL+C'
try:
    chan.start_consuming()
except KeyboardInterrupt:
    print 'Consumption terminated'
conn.close()
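
# A matching publisher sketch (an addition for illustration, not part of the
# original file): it sends persistent messages to the same durable
# 'task_queue' so the consumer above can acknowledge them one at a time.
#
#     import sys
#     import pika
#
#     conn = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
#     chan = conn.channel()
#     chan.queue_declare(queue='task_queue', durable=True)
#     msg = ' '.join(sys.argv[1:]) or "Hello World."
#     chan.basic_publish(exchange='',
#                        routing_key='task_queue',
#                        body=msg,
#                        # delivery_mode=2 marks the message persistent
#                        properties=pika.BasicProperties(delivery_mode=2))
#     print " [x] Sent %r" % msg
#     conn.close()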
Hotel Solutions completed a Hotel Futures Study for Bedford in 2014, an update of a study previously undertaken in 2009. The study sought to assess the future potential for hotel development in Bedford Borough to inform the content of the emerging Local Plan to 2032 in relation to policies on hotel development. In line with national trends and the post-recession recovery, the study found that the performance of Bedford's hotels had turned a corner in 2014, boosted by increased activity in the film production sector as well as the revitalisation of the construction industry. Bedford is set to see substantial levels of growth – 19,000 new homes, 18,000 new jobs and a population increase of 33,000. Much of this is focused on the Growth Area to the South West of the town, and in sectors that are productive for hotel room nights. Significant improvements to transport and other infrastructure will support this growth and create new opportunities for development, including potential hotel sites close to drivers of demand both within the town centre and around its major arterial roads and business parks. In terms of potential in Bedford town itself, the study identified scope for boutique hotels through the conversion of suitable characterful properties or the re-positioning of existing hotels. Potential was also identified for new budget hotels in the town centre. In addition, the business parks along the A421, where much of the corporate market demand will come from, were identified as having potential for upper-tier budget or 3 star hotel development. This was a Borough-wide study that also took in a significant rural area with other demand drivers, presenting a range of potential hotel development opportunities. These included country house hotels in Rural North Bedford, given suitable properties for conversion; the opening of hotels on golf courses; and hotel accommodation associated with watersports centres, wedding venues and corporate activity centres. Post-study, Bedford has seen active interest from a number of hotel operators and developers in both town centre and out-of-centre locations, and the Council is using its own land holdings and planning tools to influence and steer hotel development. A 100-bedroom Premier Inn budget hotel is currently planned for the Riverside North mixed-use development scheme in the town centre. 'The report fully met our brief, was of a high quality, and was produced within agreed timescales. We have commissioned Hotel Solutions in the past and would be happy to do so again'.
from collections import namedtuple

import tensorflow as tf


class TimedNTM(object):
    """Timed Neural Turing Machine

    Inspired by paper:
        https://arxiv.org/pdf/1410.5401.pdf
    Implementation inspired by:
        https://github.com/carpedm20/NTM-tensorflow/blob/master/ntm_cell.py

    See our paper for details: https://arxiv.org/abs/1811.11707
    """

    def __init__(self, attn_shift_range, sparse_attention, name):
        """Construct the `TimedNTM`.

        Args:
            attn_shift_range: Python int.
                A time range within which to attend to the memory by location
            sparse_attention: Python bool.
                If `True` use sparsemax instead of softmax for probs
            name: Name to use when creating ops.
        """

        # interpolation gate
        self.name = "timed_ntm_" + name
        self._inter_gate = tf.layers.Dense(
            units=1, activation=tf.sigmoid, name=self.name + "/inter_gate"
        )
        # if use sparsemax instead of softmax for probs
        self._sparse_attention = sparse_attention
        if sparse_attention:
            # sparsemax doesn't support inf
            self._inf = float(5000)
        else:
            self._inf = float("inf")

        # shift weighting if range is provided
        if attn_shift_range:
            self._shift_weight = tf.layers.Dense(
                units=2 * attn_shift_range + 1,
                activation=tf.nn.softmax,
                name=self.name + "/shift_weight",
            )
        else:
            self._shift_weight = None

        # sharpening parameter
        self._gamma_sharp = tf.layers.Dense(
            units=1,
            activation=lambda a: tf.nn.softplus(a) + 1,
            bias_initializer=tf.constant_initializer(1),
            name=self.name + "/gamma_sharp",
        )

    def __call__(self, attn_inputs, scores, scores_state, mask):
        # apply exponential moving average with interpolation gate weight
        # to scores from previous time which are equal to probs at this point
        # different from original NTM where it is applied after softmax
        i_g = self._inter_gate(attn_inputs)

        # scores limited by time
        scores = tf.concat(
            [i_g * scores[:, :-1] + (1 - i_g) * scores_state, scores[:, -1:]], 1
        )
        next_scores_state = scores

        if mask is not None:
            # apply mask to scores
            if self._shift_weight is not None:
                # rearrange scores to make them continuous for convolution
                scores = tf.map_fn(
                    self._rearrange_fn, [scores, mask], dtype=scores.dtype
                )
            else:
                scores = tf.where(mask > 0, scores, -self._inf * tf.ones_like(scores))

        # create probabilities for attention
        if self._sparse_attention:
            probs = tf.contrib.sparsemax.sparsemax(scores)
        else:
            probs = tf.nn.softmax(scores)

        if self._shift_weight is not None:
            s_w = self._shift_weight(attn_inputs)

            # we want to go back in time during convolution
            conv_probs = tf.reverse(probs, axis=[1])

            # prepare probs for tf.nn.depthwise_conv2d
            # [in_width, in_channels=batch]
            conv_probs = tf.transpose(conv_probs, [1, 0])
            # [batch=1, in_height=1, in_width=time+1, in_channels=batch]
            conv_probs = conv_probs[tf.newaxis, tf.newaxis, :, :]

            # [filter_height=1, filter_width=2*attn_shift_range+1,
            #  in_channels=batch, channel_multiplier=1]
            conv_s_w = tf.transpose(s_w, [1, 0])
            conv_s_w = conv_s_w[tf.newaxis, :, :, tf.newaxis]

            # perform 1d convolution
            # [batch=1, out_height=1, out_width=time+1, out_channels=batch]
            conv_probs = tf.nn.depthwise_conv2d_native(
                conv_probs, conv_s_w, [1, 1, 1, 1], "SAME"
            )
            conv_probs = conv_probs[0, 0, :, :]
            conv_probs = tf.transpose(conv_probs, [1, 0])

            probs = tf.reverse(conv_probs, axis=[1])

            if mask is not None:
                # arrange probs back to their original time order
                probs = tf.map_fn(
                    self._arrange_back_fn, [probs, mask], dtype=probs.dtype
                )

        # sharpening
        g_sh = self._gamma_sharp(attn_inputs)
        powed_probs = tf.pow(probs, g_sh)
        probs = powed_probs / (tf.reduce_sum(powed_probs, 1, keepdims=True) + 1e-32)

        return probs, next_scores_state

    def _rearrange_fn(self,
list_tensor_1d_mask_1d): """Rearranges tensor_1d to put all the values where mask_1d=1 to the right and where mask_1d=0 to the left and sets them to -infinity""" tensor_1d, mask_1d = list_tensor_1d_mask_1d partitioned_tensor = tf.dynamic_partition(tensor_1d, mask_1d, 2) partitioned_tensor[0] = -self._inf * tf.ones_like(partitioned_tensor[0]) return tf.concat(partitioned_tensor, 0) @staticmethod def _arrange_back_fn(list_tensor_1d_mask_1d): """Arranges back tensor_1d to restore original order modified by `_rearrange_fn` according to mask_1d: - number of 0s in mask_1d values on the left are set to their corresponding places where mask_1d=0, - number of 1s in mask_1d values on the right are set to their corresponding places where mask_1d=1""" tensor_1d, mask_1d = list_tensor_1d_mask_1d mask_indices = tf.dynamic_partition( tf.range(tf.shape(tensor_1d)[0]), mask_1d, 2 ) mask_sum = tf.reduce_sum(mask_1d, axis=0) partitioned_tensor = [ tf.zeros_like(tensor_1d[:-mask_sum]), tensor_1d[-mask_sum:], ] return tf.dynamic_stitch(mask_indices, partitioned_tensor) def _compute_time_attention( attention_mechanism, attn_inputs, attention_state, # time is added to calculate time attention time, timed_ntm, time_mask, ignore_mask, attention_layer, ): """Computes the attention and alignments limited by time for a given attention_mechanism. Modified helper method from tensorflow.""" scores, _ = attention_mechanism(attn_inputs, state=attention_state) # take only scores from current and past times timed_scores = scores[:, : time + 1] timed_scores_state = attention_state[:, :time] # get mask for past times timed_time_mask = time_mask[:, :time] if ignore_mask is not None: timed_time_mask *= 1 - ignore_mask[:, :time] # set mask for current time to 1 timed_time_mask = tf.concat([timed_time_mask, tf.ones_like(time_mask[:, :1])], 1) # pass these scores to NTM probs, next_scores_state = timed_ntm( attn_inputs, timed_scores, timed_scores_state, timed_time_mask ) # concatenate probs with zeros to get new alignments zeros = tf.zeros_like(scores) # remove current time from attention alignments = tf.concat([probs[:, :-1], zeros[:, time:]], 1) # Reshape from [batch_size, memory_time] to [batch_size, 1, memory_time] expanded_alignments = tf.expand_dims(alignments, 1) # Context is the inner product of alignments and values along the # memory time dimension. # alignments shape is # [batch_size, 1, memory_time] # attention_mechanism.values shape is # [batch_size, memory_time, memory_size] # the batched matmul is over memory_time, so the output shape is # [batch_size, 1, memory_size]. # we then squeeze out the singleton dim. 
    context = tf.matmul(expanded_alignments, attention_mechanism.values)
    context = tf.squeeze(context, [1])

    if attention_layer is not None:
        attention = attention_layer(tf.concat([attn_inputs, context], 1))
    else:
        attention = context

    # return current time to attention
    alignments = tf.concat([probs, zeros[:, time + 1 :]], 1)
    next_attention_state = tf.concat([next_scores_state, zeros[:, time + 1 :]], 1)
    return attention, alignments, next_attention_state


# noinspection PyProtectedMember
class TimeAttentionWrapperState(
    namedtuple(
        "TimeAttentionWrapperState",
        tf.contrib.seq2seq.AttentionWrapperState._fields
        + ("all_time_masks", "all_cell_states"),
    )
):  # added
    """Modified from tensorflow's tf.contrib.seq2seq.AttentionWrapperState
    see there for description of the parameters

    Additional fields:
        - `all_time_masks`: A mask applied to a memory
          that filters certain time steps
        - `all_cell_states`: All states of the wrapped `RNNCell`
          at all the previous time steps.
    """

    def clone(self, **kwargs):
        """Copied from tensorflow's tf.contrib.seq2seq.AttentionWrapperState
        see there for description of the parameters"""

        def with_same_shape(old, new):
            """Check and set new tensor's shape."""
            if isinstance(old, tf.Tensor) and isinstance(new, tf.Tensor):
                return tf.contrib.framework.with_same_shape(old, new)
            return new

        return tf.contrib.framework.nest.map_structure(
            with_same_shape,
            self,
            super(TimeAttentionWrapperState, self)._replace(**kwargs),
        )


class TimeAttentionWrapper(tf.contrib.seq2seq.AttentionWrapper):
    """Custom AttentionWrapper that takes into account time
    when calculating attention.
    Attention is calculated before calling rnn cell.

    Modified from tensorflow's tf.contrib.seq2seq.AttentionWrapper.

    See our paper for details: https://arxiv.org/abs/1811.11707
    """

    def __init__(
        self,
        cell,
        attention_mechanism,
        sequence_len,
        attn_shift_range=0,
        sparse_attention=False,
        attention_layer_size=None,
        alignment_history=False,
        rnn_and_attn_inputs_fn=None,
        ignore_mask=None,
        cell_input_fn=None,
        index_of_attn_to_copy=None,
        likelihood_fn=None,
        tensor_not_to_copy=None,
        output_attention=False,
        initial_cell_state=None,
        name=None,
        attention_layer=None,
    ):
        """Construct the `TimeAttentionWrapper`.
        See the super class for the original arguments description.

        Additional args:
            sequence_len: Python integer.
                Maximum length of the sequence, used to create
                appropriate TensorArray for all cell states
                in TimeAttentionWrapperState
            attn_shift_range: Python integer (`0` by default).
                A time range within which to attend to the memory
                by location in Neural Turing Machine.
            sparse_attention: Python bool.
                A flag to use sparsemax (if `True`) instead of
                softmax (if `False`, default) for probabilities
            rnn_and_attn_inputs_fn: (optional) A `callable`.
                A function that creates rnn inputs and attention
                inputs tensors.
            ignore_mask: (optional) Boolean Tensor.
                Determines which time steps to ignore in attention
            index_of_attn_to_copy: (optional) Python integer.
                An index of attention mechanism that picks
                which part of attention tensor to use for copying to output,
                the default is `None`, which turns off copying mechanism.
                Copy inspired by: https://arxiv.org/pdf/1603.06393.pdf
            likelihood_fn: (optional) A `callable`.
                A method to perform likelihood calculation to
                filter time step in copy mechanism.
                Returns a tuple of binary likelihood and likelihood
            tensor_not_to_copy: (optional) A Tensor.
                A tensor, which shouldn't be copied from previous time steps

        Modified args:
            output_attention: Python bool.
                If `True`, the output at each time step is the concatenated
                cell outputs, attention values and additional values
                described in `additional_output_size()`, used in
                copy mechanism.
        """
        super(TimeAttentionWrapper, self).__init__(
            cell,
            attention_mechanism,
            attention_layer_size,
            alignment_history,
            cell_input_fn,
            output_attention,
            initial_cell_state,
            name,
            attention_layer,
        )
        self._sequence_len = sequence_len

        if not isinstance(attn_shift_range, list):
            # attn_shift_range might not be a list
            attn_shift_range = [attn_shift_range]
        self._timed_ntms = [TimedNTM(attn_shift_range[0], sparse_attention, name="0")]
        if self._is_multi:
            # if there are several attention mechanisms,
            # create additional TimedNTMs for them
            if len(attn_shift_range) == 1:
                # original attn_shift_range might not be a list
                attn_shift_range *= len(attention_mechanism)
            elif len(attn_shift_range) != len(attention_mechanism):
                raise ValueError(
                    "If provided, `attn_shift_range` must contain exactly one "
                    "integer per attention_mechanism, saw: {} vs {}"
                    "".format(len(attn_shift_range), len(attention_mechanism))
                )
            for i in range(1, len(attention_mechanism)):
                self._timed_ntms.append(
                    TimedNTM(attn_shift_range[i], sparse_attention, name=str(i))
                )

        if rnn_and_attn_inputs_fn is None:
            rnn_and_attn_inputs_fn = self._default_rnn_and_attn_inputs_fn
        else:
            if not callable(rnn_and_attn_inputs_fn):
                raise TypeError(
                    "`rnn_and_attn_inputs_fn` must be callable, saw type: {}"
                    "".format(type(rnn_and_attn_inputs_fn).__name__)
                )
        self._rnn_and_attn_inputs_fn = rnn_and_attn_inputs_fn

        if ignore_mask is None:
            # ignore_mask is optional: attend to all time steps by default
            self._ignore_mask = [None] * len(self._timed_ntms)
        elif not isinstance(ignore_mask, list):
            self._ignore_mask = [tf.cast(ignore_mask, tf.int32)]
        else:
            self._ignore_mask = [tf.cast(i_m, tf.int32) for i_m in ignore_mask]

        self._index_of_attn_to_copy = index_of_attn_to_copy

        self._likelihood_fn = likelihood_fn
        self._tensor_not_to_copy = tensor_not_to_copy

    @staticmethod
    def _default_rnn_and_attn_inputs_fn(inputs, cell_state):
        if isinstance(cell_state, tf.contrib.rnn.LSTMStateTuple):
            return inputs, tf.concat([inputs, cell_state.h], -1)
        else:
            return inputs, tf.concat([inputs, cell_state], -1)

    @staticmethod
    def additional_output_size():
        """Number of additional outputs:

        likelihoods: attn_likelihood, state_likelihood
        debugging info: current_time_prob,
                        bin_likelihood_not_to_copy, bin_likelihood_to_copy

        **Method should be static**
        """
        return 2 + 3

    @property
    def output_size(self):
        if self._output_attention:
            if self._index_of_attn_to_copy is not None:
                # output both raw rnn cell_output and
                # cell_output with copied attention
                # together with attention vector itself
                # and additional output
                return (
                    2 * self._cell.output_size
                    + self._attention_layer_size
                    + self.additional_output_size()
                )
            else:
                return self._cell.output_size + self._attention_layer_size
        else:
            return self._cell.output_size

    @property
    def state_size(self):
        """The `state_size` property of `TimeAttentionWrapper`.

        Returns:
            A `TimeAttentionWrapperState` tuple containing shapes
            used by this object.
""" # use AttentionWrapperState from superclass state_size = super(TimeAttentionWrapper, self).state_size all_cell_states = self._cell.state_size return TimeAttentionWrapperState( cell_state=state_size.cell_state, time=state_size.time, attention=state_size.attention, alignments=state_size.alignments, attention_state=state_size.attention_state, alignment_history=state_size.alignment_history, all_time_masks=self._sequence_len, all_cell_states=all_cell_states, ) def zero_state(self, batch_size, dtype): """Modified from tensorflow's zero_state see there for description of the parameters""" # use AttentionWrapperState from superclass zero_state = super(TimeAttentionWrapper, self).zero_state(batch_size, dtype) with tf.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]): # store time masks all_time_masks = tf.TensorArray( tf.int32, size=self._sequence_len + 1, dynamic_size=False, clear_after_read=False, ).write(0, tf.zeros([batch_size, self.state_size.all_time_masks], tf.int32)) # store all cell states into a tensor array to allow # copy mechanism to go back in time if isinstance(self._cell.state_size, tf.contrib.rnn.LSTMStateTuple): all_cell_states = tf.contrib.rnn.LSTMStateTuple( tf.TensorArray( dtype, size=self._sequence_len + 1, dynamic_size=False, clear_after_read=False, ).write(0, zero_state.cell_state.c), tf.TensorArray( dtype, size=self._sequence_len + 1, dynamic_size=False, clear_after_read=False, ).write(0, zero_state.cell_state.h), ) else: all_cell_states = tf.TensorArray( dtype, size=0, dynamic_size=False, clear_after_read=False ).write(0, zero_state.cell_state) return TimeAttentionWrapperState( cell_state=zero_state.cell_state, time=zero_state.time, attention=zero_state.attention, alignments=zero_state.alignments, attention_state=zero_state.attention_state, alignment_history=zero_state.alignment_history, all_time_masks=all_time_masks, all_cell_states=all_cell_states, ) def call(self, inputs, state): """Perform a step of attention-wrapped RNN. The order has changed: - Step 1: Calculate attention inputs based on the previous cell state and current inputs - Step 2: Score the output with `attention_mechanism`. - Step 3: Calculate the alignments by passing the score through the `normalizer` and limit them by time. - Step 4: Calculate the context vector as the inner product between the alignments and the attention_mechanism's values (memory). - Step 5: Calculate the attention output by concatenating the cell output and context through the attention layer (a linear layer with `attention_layer_size` outputs). - Step 6: Mix the `inputs` and `attention` output via `cell_input_fn` to get cell inputs. - Step 7: Call the wrapped `cell` with these cell inputs and its previous state. - Step 8: (optional) Maybe copy output and cell state from history Args: inputs: (Possibly nested tuple of) Tensor, the input at this time step. state: An instance of `TimeAttentionWrapperState` containing tensors from the previous time step. Returns: A tuple `(attention_or_cell_output, next_state)`, where: - `attention_or_cell_output` depending on `output_attention`. - `next_state` is an instance of `TimeAttentionWrapperState` containing the state calculated at this time step. Raises: TypeError: If `state` is not an instance of `TimeAttentionWrapperState`. """ if not isinstance(state, TimeAttentionWrapperState): raise TypeError( "Expected state to be instance of " "TimeAttentionWrapperState. 
" "Received type {} instead.".format(type(state)) ) # Step 1: Calculate attention based on # the previous output and current input cell_state = state.cell_state rnn_inputs, attn_inputs = self._rnn_and_attn_inputs_fn(inputs, cell_state) cell_batch_size = attn_inputs.shape[0].value or tf.shape(attn_inputs)[0] error_message = ( "When applying AttentionWrapper %s: " % self.name + "Non-matching batch sizes between the memory " "(encoder output) and the query (decoder output). " "Are you using " "the BeamSearchDecoder? " "You may need to tile your memory input via " "the tf.contrib.seq2seq.tile_batch function with argument " "multiple=beam_width." ) with tf.control_dependencies( self._batch_size_checks(cell_batch_size, error_message) ): attn_inputs = tf.identity(attn_inputs, name="checked_attn_inputs") if self._is_multi: previous_attention_state = state.attention_state previous_alignment_history = state.alignment_history else: previous_attention_state = [state.attention_state] previous_alignment_history = [state.alignment_history] all_alignments = [] all_attentions = [] all_attention_states = [] maybe_all_histories = [] prev_time_masks = self._read_from_tensor_array(state.all_time_masks, state.time) prev_time_mask = prev_time_masks[:, -1, :] for i, attention_mechanism in enumerate(self._attention_mechanisms): # Steps 2 - 5 are performed inside `_compute_time_attention` (attention, alignments, next_attention_state) = _compute_time_attention( attention_mechanism, attn_inputs, previous_attention_state[i], # time is added to calculate time attention state.time, self._timed_ntms[i], # provide boolean masks, to ignore some time steps prev_time_mask, self._ignore_mask[i], self._attention_layers[i] if self._attention_layers else None, ) alignment_history = ( previous_alignment_history[i].write(state.time, alignments) if self._alignment_history else () ) all_attention_states.append(next_attention_state) all_alignments.append(alignments) all_attentions.append(attention) maybe_all_histories.append(alignment_history) attention = tf.concat(all_attentions, 1) # Step 6: Mix the `inputs` and `attention` output via # `cell_input_fn` to get cell inputs. cell_inputs = self._cell_input_fn(rnn_inputs, attention) # Step 7: Call the wrapped `cell` with these cell inputs and # its previous state. 
cell_output, next_cell_state = self._cell(cell_inputs, cell_state) prev_all_cell_states = state.all_cell_states time_mask = tf.concat( [ prev_time_mask[:, : state.time], tf.ones_like(prev_time_mask[:, :1]), prev_time_mask[:, state.time + 1 :], ], 1, ) if self._index_of_attn_to_copy is not None: # Step 8: Maybe copy output and cell state from history # get relevant previous outputs from history attn_to_copy = all_attentions[self._index_of_attn_to_copy] # copy them to current output cell_output_with_attn = cell_output + attn_to_copy memory_probs = self._get_memory_probs(all_alignments, state.time) # check that we do not pay attention to `tensor_not_to_copy` bin_likelihood_not_to_copy, _ = self._likelihood_fn( cell_output_with_attn, self._tensor_not_to_copy ) # recalculate probs memory_probs *= 1 - bin_likelihood_not_to_copy history_alignments = self._history_alignments(memory_probs) # get previous output from the history prev_output = self._prev_output( cell_output_with_attn, history_alignments, state.time ) # check that current output is close to # the one in the history to which we pay attention to bin_likelihood_to_copy, _ = self._likelihood_fn( cell_output_with_attn, prev_output ) # recalculate probs memory_probs *= bin_likelihood_to_copy history_alignments = self._history_alignments(memory_probs) current_time_prob = history_alignments[:, -1:] # create additional likelihoods to maximize attn_likelihood = self._additional_likelihood( attn_to_copy, prev_output, current_time_prob ) state_likelihood = self._additional_likelihood( cell_output + tf.stop_gradient(attn_to_copy), prev_output, current_time_prob, ) # recalculate time_mask time_mask = self._apply_alignments_to_history( tf.cast(history_alignments, time_mask.dtype), prev_time_masks[:, :-1, :], time_mask, ) # recalculate new next_cell_state based on history_alignments next_cell_state = self._new_next_cell_state( prev_all_cell_states, next_cell_state, cell_output_with_attn, history_alignments, state.time, ) all_cell_states = self._all_cell_states( prev_all_cell_states, next_cell_state, state.time ) if self._output_attention: # concatenate cell outputs, attention, additional likelihoods # and copy_attn_debug output = tf.concat( [ cell_output_with_attn, cell_output, attention, # additional likelihoods attn_likelihood, state_likelihood, # copy_attn_debug bin_likelihood_not_to_copy, bin_likelihood_to_copy, current_time_prob, ], 1, ) else: output = cell_output_with_attn else: # do not waste resources on storing history all_cell_states = prev_all_cell_states if self._output_attention: output = tf.concat([cell_output, attention], 1) else: output = cell_output all_time_masks = state.all_time_masks.write(state.time + 1, time_mask) next_state = TimeAttentionWrapperState( time=state.time + 1, cell_state=next_cell_state, attention=attention, attention_state=self._item_or_tuple(all_attention_states), alignments=self._item_or_tuple(all_alignments), alignment_history=self._item_or_tuple(maybe_all_histories), all_time_masks=all_time_masks, all_cell_states=all_cell_states, ) return output, next_state # helper for TensorArray @staticmethod def _read_from_tensor_array(tensor_array, time): """TensorArray time reader""" return tf.transpose(tensor_array.gather(tf.range(0, time + 1)), [1, 0, 2]) # helper methods for copy mechanism def _get_memory_probs(self, all_alignments, time): """Helper method to get memory_probs from all_alignments""" memory_probs = tf.stop_gradient( all_alignments[self._index_of_attn_to_copy][:, :time] ) # binarize memory_probs only if 
        # max value is larger than margin=0.1
        memory_probs_max = tf.reduce_max(memory_probs, axis=1, keepdims=True)
        memory_probs_max = tf.where(
            memory_probs_max > 0.1, memory_probs_max, -memory_probs_max
        )

        return tf.where(
            tf.equal(memory_probs, memory_probs_max),
            tf.ones_like(memory_probs),
            tf.zeros_like(memory_probs),
        )

    @staticmethod
    def _history_alignments(memory_probs):
        """Helper method to apply binary mask to memory_probs"""

        current_time_prob = 1 - tf.reduce_sum(memory_probs, 1, keepdims=True)
        return tf.concat([memory_probs, current_time_prob], 1)

    @staticmethod
    def _apply_alignments_to_history(alignments, history_states, state):
        """Helper method to apply attention probabilities to rnn history

        copied from tf's `_compute_attention(...)`"""

        expanded_alignments = tf.stop_gradient(tf.expand_dims(alignments, 1))

        history_states = tf.concat([history_states, tf.expand_dims(state, 1)], 1)

        # Context is the inner product of alignments and values along the
        # memory time dimension.
        # expanded_alignments shape is
        #   [batch_size, 1, memory_time]
        # history_states shape is
        #   [batch_size, memory_time, memory_size]
        # the batched matmul is over memory_time, so the output shape is
        #   [batch_size, 1, memory_size].
        # we then squeeze out the singleton dim.
        return tf.squeeze(tf.matmul(expanded_alignments, history_states), [1])

    def _prev_output(self, state, alignments, time):
        """Helper method to get previous output from memory"""

        # get all previous outputs from appropriate
        # attention mechanism's memory limited by current time
        prev_outputs = tf.stop_gradient(
            self._attention_mechanisms[self._index_of_attn_to_copy].values[:, :time, :]
        )

        # multiply by alignments to get one vector from one time step
        return self._apply_alignments_to_history(alignments, prev_outputs, state)

    def _additional_likelihood(self, output, prev_output, current_time_prob):
        """Helper method to create additional likelihood to maximize"""

        _, likelihood = self._likelihood_fn(output, tf.stop_gradient(prev_output))
        return tf.where(current_time_prob < 0.5, likelihood, tf.ones_like(likelihood))

    def _new_hidden_state(self, prev_all_cell_states, new_state, alignments, time):
        """Helper method to look into rnn history"""

        # reshape to (batch, time, memory_time) and
        # do not include current time because
        # we do not want to pay attention to it,
        # but we need to read it instead of
        # adding conditional flow if time == 0
        prev_cell_states = self._read_from_tensor_array(prev_all_cell_states, time)[
            :, :-1, :
        ]

        return self._apply_alignments_to_history(
            alignments, prev_cell_states, new_state
        )

    def _new_next_cell_state(
        self, prev_all_cell_states, next_cell_state, new_cell_output, alignments, time
    ):
        """Helper method to recalculate new next_cell_state"""

        if isinstance(next_cell_state, tf.contrib.rnn.LSTMStateTuple):
            next_cell_state_c = self._new_hidden_state(
                prev_all_cell_states.c, next_cell_state.c, alignments, time
            )
            next_cell_state_h = self._new_hidden_state(
                prev_all_cell_states.h, new_cell_output, alignments, time
            )
            return tf.contrib.rnn.LSTMStateTuple(next_cell_state_c, next_cell_state_h)
        else:
            # keep the argument order of `_new_hidden_state`:
            # (prev_all_cell_states, new_state, alignments, time)
            return self._new_hidden_state(
                prev_all_cell_states, new_cell_output, alignments, time
            )

    @staticmethod
    def _all_cell_states(prev_all_cell_states, next_cell_state, time):
        """Helper method to recalculate all_cell_states tensor array"""

        if isinstance(next_cell_state, tf.contrib.rnn.LSTMStateTuple):
            return tf.contrib.rnn.LSTMStateTuple(
                prev_all_cell_states.c.write(time + 1, next_cell_state.c),
                prev_all_cell_states.h.write(time + 1, next_cell_state.h),
            )
        else:
            return prev_all_cell_states.write(time + 1, next_cell_state)


class ChronoBiasLayerNormBasicLSTMCell(tf.contrib.rnn.LayerNormBasicLSTMCell):
    """Custom LayerNormBasicLSTMCell that allows chrono initialization
    of gate biases.

    See super class for description.

    See https://arxiv.org/abs/1804.11188 for details about
    chrono initialization
    """

    def __init__(
        self,
        num_units,
        forget_bias=1.0,
        input_bias=0.0,
        activation=tf.tanh,
        layer_norm=True,
        norm_gain=1.0,
        norm_shift=0.0,
        dropout_keep_prob=1.0,
        dropout_prob_seed=None,
        out_layer_size=None,
        reuse=None,
    ):
        """Initializes the basic LSTM cell

        Additional args:
            input_bias: float, The bias added to input gates.
            out_layer_size: (optional) integer, The number of units in
                the optional additional output layer.
        """
        super(ChronoBiasLayerNormBasicLSTMCell, self).__init__(
            num_units,
            forget_bias=forget_bias,
            activation=activation,
            layer_norm=layer_norm,
            norm_gain=norm_gain,
            norm_shift=norm_shift,
            dropout_keep_prob=dropout_keep_prob,
            dropout_prob_seed=dropout_prob_seed,
            reuse=reuse,
        )
        self._input_bias = input_bias
        self._out_layer_size = out_layer_size

    @property
    def output_size(self):
        return self._out_layer_size or self._num_units

    @property
    def state_size(self):
        return tf.contrib.rnn.LSTMStateTuple(self._num_units, self.output_size)

    @staticmethod
    def _dense_layer(args, layer_size):
        """Optional out projection layer"""
        proj_size = args.get_shape()[-1]
        dtype = args.dtype
        weights = tf.get_variable("kernel", [proj_size, layer_size], dtype=dtype)
        bias = tf.get_variable("bias", [layer_size], dtype=dtype)
        out = tf.nn.bias_add(tf.matmul(args, weights), bias)
        return out

    def call(self, inputs, state):
        """LSTM cell with layer normalization and recurrent dropout."""
        c, h = state
        args = tf.concat([inputs, h], 1)
        concat = self._linear(args)
        dtype = args.dtype

        i, j, f, o = tf.split(value=concat, num_or_size_splits=4, axis=1)
        if self._layer_norm:
            i = self._norm(i, "input", dtype=dtype)
            j = self._norm(j, "transform", dtype=dtype)
            f = self._norm(f, "forget", dtype=dtype)
            o = self._norm(o, "output", dtype=dtype)

        g = self._activation(j)
        if (not isinstance(self._keep_prob, float)) or self._keep_prob < 1:
            g = tf.nn.dropout(g, self._keep_prob, seed=self._seed)

        new_c = c * tf.sigmoid(f + self._forget_bias) + g * tf.sigmoid(
            i + self._input_bias
        )  # added input_bias

        # do not do layer normalization on the new c,
        # because there are no trainable weights
        # if self._layer_norm:
        #     new_c = self._norm(new_c, "state", dtype=dtype)

        new_h = self._activation(new_c) * tf.sigmoid(o)

        # added dropout to the hidden state h
        if (not isinstance(self._keep_prob, float)) or self._keep_prob < 1:
            new_h = tf.nn.dropout(new_h, self._keep_prob, seed=self._seed)

        # add postprocessing of the output
        if self._out_layer_size is not None:
            with tf.variable_scope("out_layer"):
                new_h = self._dense_layer(new_h, self._out_layer_size)

        new_state = tf.contrib.rnn.LSTMStateTuple(new_c, new_h)
        return new_h, new_state
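
# A minimal wiring sketch (an assumption for illustration, not part of the
# original module): `memory`, `inputs`, `batch_size` and `max_time` are
# hypothetical tensors/values supplied by the caller.
#
#     def example_build(memory, inputs, batch_size, max_time):
#         cell = ChronoBiasLayerNormBasicLSTMCell(num_units=128)
#         attn_mech = tf.contrib.seq2seq.BahdanauAttention(num_units=128,
#                                                          memory=memory)
#         wrapped = TimeAttentionWrapper(cell, attn_mech,
#                                        sequence_len=max_time,
#                                        attn_shift_range=2)
#         initial_state = wrapped.zero_state(batch_size, tf.float32)
#         # unroll over time; attention is limited to current and past steps
#         outputs, _ = tf.nn.dynamic_rnn(wrapped, inputs,
#                                        initial_state=initial_state)
#         return outputs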
Investors are turning to Tulum real estate for the Yucatan Peninsula's newest boom, with a special focus on Mexico land for purchase. Apart from investors' interest, reports of a strong Mexican economy for 2011 are also a good indicator. Recently Mexico's economic success was showcased by the country's President, Felipe Calderón, at the 15th Annual Latin American Conference with the Santander Group, one of the region's major banks. In his message, Calderón stated that Mexico has returned to the growth path, with a strong economy and armored finances, able to withstand any external turbulence and especially well placed to benefit from global economic recovery. How does this affect investment in land in Tulum? Mexico's continued growth means that large-scale foreign investment will continue to flow into the country, bringing more infrastructure and growing business opportunities, especially in tourism. Tulum's property boom will be directly tied to the success of tourism in the region and to the continued rise in services and infrastructure that foreign investment has brought. Combine this with an international airport already advancing through the preparation stages, and it adds up to great news for those who own land in Tulum. This was the precise message the President of Mexico stressed to international entrepreneurs and investors at the Santander conference. He also emphasized that Mexico's tremendous economic strength is supported by foreign reserves and available financial resources totaling almost 200 billion dollars, allowing the country to meet any challenge and provide conditions for advancement and new development. In this context, he said the country is better positioned than ever to benefit from global economic recovery, which, although only beginning, is clearly visible. In addition to a commitment to bring economic recovery to the average Mexican citizen and to unemployed families, the president also said that his government will improve conditions of legal certainty for those who live in or invest in Mexico. For those considering purchasing Tulum property, there could not be better news.
# -*- coding: utf-8 -*- ############################################################################################### # # MediaPortal for Dreambox OS # # Coded by MediaPortal Team (c) 2013-2017 # # This plugin is open source but it is NOT free software. # # This plugin may only be distributed to and executed on hardware which # is licensed by Dream Property GmbH. This includes commercial distribution. # In other words: # It's NOT allowed to distribute any parts of this plugin or its source code in ANY way # to hardware which is NOT licensed by Dream Property GmbH. # It's NOT allowed to execute this plugin and its source code or even parts of it in ANY way # on hardware which is NOT licensed by Dream Property GmbH. # # This applies to the source code as a whole as well as to parts of it, unless # explicitely stated otherwise. # # If you want to use or modify the code or parts of it, # you have to keep OUR license and inform us about the modifications, but it may NOT be # commercially distributed other than under the conditions noted above. # # As an exception regarding execution on hardware, you are permitted to execute this plugin on VU+ hardware # which is licensed by satco europe GmbH, if the VTi image is used on that hardware. # # As an exception regarding modifcations, you are NOT permitted to remove # any copy protections implemented in this plugin or change them for means of disabling # or working around the copy protections, unless the change has been explicitly permitted # by the original authors. Also decompiling and modification of the closed source # parts is NOT permitted. # # Advertising with this plugin is NOT allowed. # For other uses, permission from the authors is necessary. # ############################################################################################### from Plugins.Extensions.MediaPortal.plugin import _ from Plugins.Extensions.MediaPortal.resources.imports import * from Plugins.Extensions.MediaPortal.resources.choiceboxext import ChoiceBoxExt myagent = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0' BASE_NAME = "NaughtyAmerica.com" class naughtyamericaGenreScreen(MPScreen): def __init__(self, session): MPScreen.__init__(self, session, skin='MP_Plugin') self["actions"] = ActionMap(["MP_Actions"], { "ok" : self.keyOK, "0" : self.closeAll, "cancel" : self.keyCancel, "up" : self.keyUp, "down" : self.keyDown, "right" : self.keyRight, "left" : self.keyLeft, "yellow" : self.keyChangeCats }, -1) self['title'] = Label(BASE_NAME) self['ContentTitle'] = Label("Genre:") self['F3'] = Label(_("Mode")) self.keyLocked = True self.suchString = '' self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.catmode = 'category' self.onLayoutFinish.append(self.layoutFinished) def layoutFinished(self): self.genreliste = [] self.keyLocked = True url = "https://tour.naughtyamerica.com/fantasy/%s" % self.catmode getPage(url, agent=myagent).addCallback(self.genreData).addErrback(self.dataError) def genreData(self, data): parse = re.search('class="categories">(.*?)class="clear"', data, re.S) Cats = re.findall('href="(.*?fantasy\/(.*?))(?:\?nats.*?)".*?>(.*?)</a', parse.group(1), re.S) if Cats: for (Url, Title, TitleText) in Cats: if not "..." 
in TitleText: Title = TitleText else: Title = Title.replace('-porn-videos','').replace('-',' ').title() if not Title in ("VR Porn", "Virtual Reality"): Url = Url + "?page=" self.genreliste.append((decodeHtml(Title), Url, None)) self.genreliste.sort() self.genreliste.insert(0, ("Newest", 'https://tour.naughtyamerica.com/new-porn-videos?page=', None)) self.genreliste.insert(0, ("--- Search ---", "callSuchen", None)) self.ml.setList(map(self._defaultlistcenter, self.genreliste)) self.ml.moveToIndex(0) self.keyLocked = False self.showInfos() def keyOK(self): if self.keyLocked: return if not config.mediaportal.premiumize_use.value: message = self.session.open(MessageBoxExt, _("%s only works with enabled MP premiumize.me option (MP Setup)!" % BASE_NAME), MessageBoxExt.TYPE_INFO, timeout=10) return Name = self['liste'].getCurrent()[0][0] if Name == "--- Search ---": self.suchen() else: Link = self['liste'].getCurrent()[0][1] self.session.open(naughtyamericaFilmScreen, Link, Name) def SuchenCallback(self, callback = None, entry = None): if callback is not None and len(callback): self.suchString = callback Name = "--- Search ---" Link = self.suchString.replace(' ', '+') self.session.open(naughtyamericaFilmScreen, Link, Name) def keyChangeCats(self): rangelist = [['Category', 'category'], ['Who', 'who'], ['Where', 'where']] self.session.openWithCallback(self.ChangeCats, ChoiceBoxExt, title=_('Select Action'), list = rangelist) def ChangeCats(self, result): if result: self.catmode = result[1] self.layoutFinished() class naughtyamericaFilmScreen(MPScreen, ThumbsHelper): def __init__(self, session, Link, Name): self.Link = Link self.Name = Name MPScreen.__init__(self, session, skin='MP_PluginDescr') ThumbsHelper.__init__(self) self["actions"] = ActionMap(["MP_Actions"], { "ok" : self.keyOK, "0" : self.closeAll, "cancel" : self.keyCancel, "5" : self.keyShowThumb, "up" : self.keyUp, "down" : self.keyDown, "right" : self.keyRight, "left" : self.keyLeft, "nextBouquet" : self.keyPageUp, "prevBouquet" : self.keyPageDown, "green" : self.keyPageNumber }, -1) self['title'] = Label(BASE_NAME) self['ContentTitle'] = Label("Genre: %s" % self.Name) self['F2'] = Label(_("Page")) self['Page'] = Label(_("Page:")) self.keyLocked = True self.page = 1 self.lastpage = 1 self.filmliste = [] self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.onLayoutFinish.append(self.loadPage) def loadPage(self): self.keyLocked = True self['name'].setText(_('Please wait...')) self.filmliste = [] if re.match(".*?Search", self.Name): url = "https://tour.naughtyamerica.com/search?term=%s&page=%s" % (self.Link, str(self.page)) else: url = "%s%s" % (self.Link, str(self.page)) getPage(url, agent=myagent).addCallback(self.loadData).addErrback(self.dataError) def loadData(self, data): self.getLastPage(data, 'class="pagination(.*?)</div>', '.*(?:\/|>)(\d+)') parse = re.search('content-main(.*?)(id="fantasySideBar"|id="wrapper-footer)', data, re.S) Movies = re.findall('a\shref="(https?://tour.naughtyamerica.com.*?)"\stitle="(.*?)".*?img.*?src="(.*?)"(.*?)class="entry-date">(.*?)</p', parse.group(1), re.S) if Movies: for (Url, Title, Image, VrCheck, Date) in Movies: if not "vr-icon" in VrCheck: if Image.startswith('//'): Image = 'http:' + Image self.filmliste.append((decodeHtml(Title), Url, Image, Date)) if len(self.filmliste) == 0: self.filmliste.append((_('No videos found!'), '', None, '')) self.ml.setList(map(self._defaultlistleft, self.filmliste)) self.ml.moveToIndex(0) self.keyLocked = 
False self.th_ThumbsQuery(self.filmliste, 0, 1, 2, None, None, self.page, int(self.lastpage), mode=1) self.showInfos() def showInfos(self): title = self['liste'].getCurrent()[0][0] pic = self['liste'].getCurrent()[0][2] date = self['liste'].getCurrent()[0][3] self['handlung'].setText(date) self['name'].setText(title) CoverHelper(self['coverArt']).getCover(pic) def keyOK(self): if self.keyLocked: return Link = self['liste'].getCurrent()[0][1] get_stream_link(self.session).check_link(Link, self.play) def play(self, url): title = self['liste'].getCurrent()[0][0] self.session.open(SimplePlayer, [(title, url.replace('%2F','%252F').replace('%3D','%253D').replace('%2B','%252B'))], showPlaylist=False, ltype='naughtyamerica')
You are visiting the placeholder page for Shawn Carter. This page is here because someone used our placeholder utility to look for Shawn Carter. We created this page automatically in the hope that Shawn Carter would find it. If you are not Shawn Carter but are an alumnus of Gallatin High School, Gallatin, TN, register on this site for free now.
import numpy as np
import numpy.linalg as la
import scipy.linalg as sla
import timeit

# This file provides many different methods for solving Ax=b


# Economy QR decomposition: A = QR, so x = R^-1 Q^T b
def qrsolve(a, b):
    q, r = la.qr(a, mode='reduced')
    # Q has orthonormal columns, so its inverse is its transpose
    return np.dot(la.inv(r), np.dot(q.T, b))


# LU (non-pivot) decomposition
def lusolve(a, b):
    l, u = sla.lu(a, permute_l=True)
    return np.dot(np.dot(la.inv(u), la.inv(l)), b)


# Regular inverse solve
def solve(a, b):
    return np.dot(la.inv(a), b)


# Cholesky factorization solve (only works for symmetric positive
# definite matrices): A = U^T U, so solve U^T y = b, then U x = y
def chsolve(a, b):
    u = sla.cholesky(a)
    y = sla.solve_triangular(u.T, b, lower=True)
    return sla.solve_triangular(u, y)


# Polar factorization solve: A = UP with U unitary, so x = P^-1 U^T b
def psolve(a, b):
    u, p = sla.polar(a)
    return np.dot(la.inv(p), np.dot(u.T, b))


# Built-in solver
def pro(a, b):
    return la.solve(a, b)


a = np.random.random((10000, 10000))
b = np.random.random((10000, 1))

x = pro(a, b)
ax = np.dot(a, x)
sub = ax - b

qr = timeit.timeit("qrsolve(a,b)", setup="from __main__ import qrsolve, a, b", number=1)
lu = timeit.timeit("lusolve(a,b)", setup="from __main__ import lusolve, a, b", number=1)
reg = timeit.timeit("solve(a,b)", setup="from __main__ import solve, a, b", number=1)
pro = timeit.timeit("pro(a,b)", setup="from __main__ import pro, a, b", number=1)
pol = timeit.timeit("psolve(a,b)", setup="from __main__ import psolve, a, b", number=1)
print("QR:", qr, "\nLU:", lu, "\nReg:", reg, "\nProgrammed:", pro, "\nPolar:", pol)

# count residual entries that exceed the tolerance
count = 0
for i in sub:
    if abs(i) > 1e-4:
        print(i)
        count += 1
print(count)
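
# A small sanity check (a hypothetical addition): verify chsolve on a tiny
# symmetric positive definite system, where its assumptions actually hold.
a_spd = np.array([[4.0, 1.0], [1.0, 3.0]])
b_spd = np.array([[1.0], [2.0]])
x_spd = chsolve(a_spd, b_spd)
assert np.allclose(np.dot(a_spd, x_spd), b_spd)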
Developed by dietitians, our meals meet specific targets for calories, protein, sodium, fat, and cholesterol. Bateman Classics meals can be prepared quickly in either a conventional oven or a microwave. Whether you are serving five or a crowd of 500, our Classic meals give you the flexibility to meet your daily needs.
# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import unicode_literals from .completion import Person def test_person_suggests_on_all_variants_of_name(write_client): Person.init(using=write_client) Person(name="Honza Král", popularity=42).save(refresh=True) s = Person.search().suggest("t", "kra", completion={"field": "suggest"}) response = s.execute() opts = response.suggest.t[0].options assert 1 == len(opts) assert opts[0]._score == 42 assert opts[0]._source.name == "Honza Král"
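
# A minimal sketch of what the imported `Person` document might look like
# (an assumption -- the real definition lives in the `.completion` module):
# a completion field whose weight comes from `popularity`, which is why the
# suggester scores above come out equal to it.
#
#     from elasticsearch_dsl import Document, Completion, Keyword, Long
#
#     class Person(Document):
#         name = Keyword()
#         popularity = Long()
#         suggest = Completion()
#
#         class Index:
#             name = "test-suggest"
#
#         def clean(self):
#             # called by save(); index name variants with a weight
#             self.suggest = {"input": [self.name],
#                             "weight": self.popularity}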
The server pwclassic is a true classic of the golden era of Perfect World. You will find yourself in that good old atmosphere, with none of the thousands of grueling daily quests, no rebirths and new races, and no constant gear updates.
# Copyright 2015 Google
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#    http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re
import csv
import sys

script, logfile, outfile = sys.argv

IP_RE = re.compile('(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})')
TIMESTAMP_RE = re.compile('\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}')
STATUS_RE = re.compile('Completed (\d{3})')
RESOURCE_RE = re.compile('Started (GET|POST|PUT|DELETE) "(.*?)"')
PROCESSING_TIME_RE = re.compile(' (\d+)ms')

records = []
with open(logfile, 'r') as f:
    for line in f:
        line = line.strip()
        if not line:
            continue
        d = {}
        d['body'] = line
        match = IP_RE.search(line)
        d['ip_address'] = match.group(1) if match else None
        match = TIMESTAMP_RE.search(line)
        d['timestamp'] = match.group() if match else None
        match = STATUS_RE.search(line)
        # capture only the three-digit status code, not the whole match
        d['status'] = match.group(1) if match else None
        match = PROCESSING_TIME_RE.search(line)
        # capture only the number of milliseconds
        d['processing_time'] = match.group(1) if match else None
        match = RESOURCE_RE.search(line)
        d['verb'] = match.groups()[0] if match else None
        d['resource'] = match.groups()[1] if match else None
        records.append(d)

with open(outfile, 'wb') as csvfile:
    logwriter = csv.writer(csvfile)
    logwriter.writerow(records[0].keys())
    for record in records:
        logwriter.writerow(record.values())
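
# Hypothetical Rails-style input lines (an assumption about the log format)
# and the fields each one yields -- every log line produces its own record:
#
#   Started GET "/posts/1" for 10.0.0.1 at 2015-03-01T12:00:00
#     -> verb=GET, resource=/posts/1, ip_address=10.0.0.1,
#        timestamp=2015-03-01T12:00:00
#   Completed 200 OK in 52ms
#     -> status=200, processing_time=52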
Offering our commercial painting and decorating services across Glasgow and surrounding areas, including Barrhead, Newton Mearns, Bearsden, Milngavie, Cambuslang, East Kilbride and Hamilton. Whether you're looking for a painter and decorator to paint a large office space, a restaurant or bar, or hundreds of rooms in your hotel, HW UK Services Ltd are on hand. Call our painters and decorators in Glasgow on 07518 884171 or send us a message using the contact form. Breathing new life into a business with a fresh lick of paint can liven up the place for colleagues and customers alike, which is why HW UK Services Ltd are on hand to advise professionally on the design and colours used in your space and to complete the full job from start to finish. If you are the owner of any of the above and would like a full quote for your next painting and decorating project, please call us on 07518 884171. Why hire us to paint your commercial premises in Glasgow? We are great at what we do. Our motto is: Do it once. Do it right. Use the contact details below to speak with our painters and decorators and arrange a free quote. If you're ready to get started on your next paint job, no matter how big or small your business is, please call us on 07518 884171, email us at hwukservices@gmail.com or fill in our contact form. We'll look forward to hearing from you soon.
from numpy.testing import TestCase, run_module_suite
from numpy.testing import assert_equal, assert_almost_equal
# WARNING: numpy also has an fft object
from aubio import fvec, fft, cvec
from numpy import array, shape
from math import pi

class aubio_fft_test_case(TestCase):

    def test_members(self):
        f = fft()
        assert_equal (f.win_s, 1024)

    def test_output_dimensions(self):
        """ check the dimensions of output """
        win_s = 1024
        timegrain = fvec(win_s)
        f = fft(win_s)
        fftgrain = f (timegrain)
        assert_equal (fftgrain.norm, 0)
        assert_equal (shape(fftgrain.norm), (win_s/2+1,))
        assert_equal (fftgrain.phas, 0)
        assert_equal (shape(fftgrain.phas), (win_s/2+1,))

    def test_zeros(self):
        """ check the transform of zeros """
        win_s = 512
        timegrain = fvec(win_s)
        f = fft(win_s)
        fftgrain = f(timegrain)
        assert_equal ( fftgrain.norm == 0, True )
        assert_equal ( fftgrain.phas == 0, True )

    def test_impulse(self):
        """ check the transform of one impulse at a random place """
        from random import random
        from math import floor
        win_s = 256
        # indices must be integers; floor() returns a float
        i = int(floor(random()*win_s))
        impulse = pi * random()
        f = fft(win_s)
        timegrain = fvec(win_s)
        timegrain[i] = impulse
        fftgrain = f ( timegrain )
        #self.plot_this ( fftgrain.phas )
        assert_almost_equal ( fftgrain.norm, impulse, decimal = 6 )
        assert_equal ( fftgrain.phas <= pi, True)
        assert_equal ( fftgrain.phas >= -pi, True)

    def test_impulse_negative(self):
        """ check the transform of one negative impulse at index 0 """
        from random import random
        from math import floor
        win_s = 256
        i = 0
        impulse = -10.
        f = fft(win_s)
        timegrain = fvec(win_s)
        timegrain[i] = impulse
        fftgrain = f ( timegrain )
        #self.plot_this ( fftgrain.phas )
        assert_almost_equal ( fftgrain.norm, abs(impulse), decimal = 6 )
        if impulse < 0:
            # phase can be pi or -pi, as it is not unwrapped
            assert_almost_equal ( abs(fftgrain.phas[1:-1]) , pi, decimal = 6 )
            assert_almost_equal ( fftgrain.phas[0], pi, decimal = 6)
            assert_almost_equal ( fftgrain.phas[-1], pi, decimal = 6)
        else:
            assert_equal ( fftgrain.phas[1:-1] == 0, True)
            assert_equal ( fftgrain.phas[0] == 0, True)
            assert_equal ( fftgrain.phas[-1] == 0, True)
        # now check the resynthesis
        synthgrain = f.rdo ( fftgrain )
        #self.plot_this ( fftgrain.phas.T )
        assert_equal ( fftgrain.phas <= pi, True)
        assert_equal ( fftgrain.phas >= -pi, True)
        #self.plot_this ( synthgrain - timegrain )
        assert_almost_equal ( synthgrain, timegrain, decimal = 6 )

    def test_impulse_at_zero(self):
        """ check the transform of one impulse at index 0 """
        win_s = 1024
        impulse = pi
        f = fft(win_s)
        timegrain = fvec(win_s)
        timegrain[0] = impulse
        fftgrain = f ( timegrain )
        #self.plot_this ( fftgrain.phas )
        assert_equal ( fftgrain.phas[0], 0)
        assert_equal ( fftgrain.phas[1], 0)
        assert_almost_equal (fftgrain.norm[0], impulse, decimal = 6 )

    def test_rdo_before_do(self):
        """ check running fft.rdo before fft.do works """
        win_s = 1024
        f = fft(win_s)
        fftgrain = cvec(win_s)
        t = f.rdo( fftgrain )
        assert_equal ( t, 0 )

    def plot_this(self, this):
        from pylab import plot, show
        plot ( this )
        show ()

if __name__ == '__main__':
    from unittest import main
    main()
Internet addiction can be like any other addiction. It has a compulsive nature in which individuals start to have strong urges about hopping on the internet, checking their cell phones and using computers for non-essential purposes. With all the apps and social media sites immediately accessible on smartphones, this contributes to the pervasive nature of internet addiction. The researcher Dr. Bert Wilt found that there are often co-existing disorders in those who are addicted to the internet and cell phone apps. These conditions mostly include depression and anxiety disorders. Additionally, his study compared 25 individuals with internet addiction to 25 individuals who were addicted to alcohol. Surprisingly, the results show stark similarities. Internet addiction affects individuals who have high rates of comorbidity (additional or co-existing disorders) [3], accompanied by compulsive behaviors similar to an alcoholic who needs a drink. In a large-scale study at the University of Leeds, researchers found that people who spend large amounts of time on the internet are more likely to have symptoms of depression. In fact, researchers found evidence that internet users can develop a compulsive internet habit in which they start to replace real-life relationships with online social networking. This, as researchers found, can have a great impact on one's mental health. However, developing proper skills by becoming aware of these intrusive thoughts to hop onto the internet or an app, and responding appropriately, is a successful method used in The Truth Of Addiction Program. 1. Nadeau, L. (2008). A new addiction: Internet junkies. Université de Montréal's Department of Psychology. 2-3. Wilt, B. (2014). Internet addicts often suffer from additional disorders. Ruhr-Universitaet-Bochum.
from . import InstanceResource, ListResource
from .media import MediaList
from .util import normalize_dates, parse_date


class Message(InstanceResource):
    """ A Message instance.

    .. attribute:: sid
        A 34 character string that uniquely identifies this resource.

    .. attribute:: account_sid
        The unique id of the Account that sent or received this message.

    .. attribute:: from
        The phone number that initiated this message in E.164 format. For
        incoming messages, this will be the remote phone. For outgoing
        messages, this will be one of your Twilio phone numbers.

    .. attribute:: to
        The phone number that received the message in E.164 format. For
        incoming messages, this will be one of your Twilio phone numbers.
        For outgoing messages, this will be the remote phone.

    .. attribute:: date_created
        The date that this resource was created, given in RFC 2822 format.

    .. attribute:: date_updated
        The date that this resource was last updated, given in RFC 2822
        format.

    .. attribute:: date_sent
        The date that the SMS was sent, given in RFC 2822 format.

    .. attribute:: body
        The text body of the message, as a unicode string.

    .. attribute:: num_segments
        The number of SMS messages used to deliver the body specified.

    .. attribute:: num_media
        The number of media that are associated with the message. If
        num_media is 0, then the media and image subresource will not
        contain any images.

    .. attribute:: status
        The status of this message. Either queued, sending, sent, failed,
        or received.

    .. attribute:: direction
        The direction of this message. inbound for incoming messages,
        outbound-api for messages initiated via the REST API, outbound-call
        for messages initiated during a call or outbound-reply for messages
        initiated in response to an incoming message.

    .. attribute:: price
        The amount billed for the message, in the currency associated with
        the account.

    .. attribute:: price_unit
        The currency in which price is measured, in ISO 4217 format
        (e.g. USD, EUR, JPY).

    .. attribute:: api_version
        The version of the Twilio API used to process the message.

    .. attribute:: uri
        The URI for this resource, relative to https://api.twilio.com
    """
    subresources = [MediaList]

    def delete(self):
        """Delete this Message record from Twilio."""
        return self.parent.delete(self.sid)

    def redact(self):
        """Redact this Message's `body` field from Twilio while preserving
        the record itself and related metadata.
        """
        return self.parent.redact(self.sid)


class Messages(ListResource):

    name = "Messages"
    key = "messages"
    instance = Message

    def create(self, from_=None, **kwargs):
        """
        Create and send a Message.

        :param str to: The destination phone number.
        :param str `from_`: The phone number sending this message
            (must be a verified Twilio number)
        :param str body: The message you want to send,
            limited to 1600 characters.
        :param list media_url: A list of URLs of images to include in the
            message.
        :param status_callback: A URL that Twilio will POST to when your
            message is processed.
        :param str application_sid: The 34 character sid of the application
            Twilio should use to handle this message.
        """
        kwargs["from"] = from_
        return self.create_instance(kwargs)

    @normalize_dates
    def list(self, from_=None, before=None, after=None, date_sent=None, **kw):
        """
        Returns a page of :class:`Message` resources as a list.
        For paging information see :class:`ListResource`.

        :param to: Only show messages to this phone number.
        :param from_: Only show messages from this phone number.
        :param date after: Only list messages sent after this date.
        :param date before: Only list messages sent before this date.
        :param date date_sent: Only list messages sent on this date.
        """
        kw["From"] = from_
        kw["DateSent<"] = before
        kw["DateSent>"] = after
        kw["DateSent"] = parse_date(date_sent)
        return self.get_instances(kw)

    def update(self, sid, **kwargs):
        """
        Updates the message for the given sid.

        :param sid: The sid of the message to update.
        """
        return self.update_instance(sid, kwargs)

    def delete(self, sid):
        """Delete the specified Message record from Twilio."""
        return self.delete_instance(sid)

    def redact(self, sid):
        """Redact the specified Message record's Body field."""
        return self.update_instance(sid, {'Body': ''})
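For orientation, a minimal usage sketch of this resource through the 5.x-era twilio-python client it ships with; the credentials and phone numbers below are placeholders:

# A minimal sketch, assuming the classic twilio 5.x client; the account
# SID, auth token and numbers are placeholders.
from twilio.rest import TwilioRestClient

client = TwilioRestClient("ACxxxxxxxxxxxxxxxx", "your_auth_token")

# Messages.create() maps straight onto the POST parameters documented above
# (note the from_ -> "from" renaming done inside create()).
message = client.messages.create(to="+15551234567", from_="+15557654321",
                                 body="Hello from the Messages resource")
print(message.sid, message.status)

# Messages.list() turns before/after into the DateSent< / DateSent> filters.
for msg in client.messages.list(from_="+15557654321"):
    print(msg.date_sent, msg.body)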
Q: What will happen during my first visit to Peterson Dental? A: After making an appointment to see Dr. Peterson, you'll arrive to find a friendly and caring staff with many personal touches to keep you comfortable. You can enjoy magazines, television and coffee or bottled water in our lobby. We'll take a few digital x-rays. In our comfortable patient rooms, you'll have a seat in our massaging and heated dental chairs, and you can watch television or listen to music while our hygienist cleans your teeth. Dr. Peterson will visit with you for your dental exam. At Peterson Dental, we believe in taking a comprehensive approach to your dental care. This includes a personalized treatment plan, oral cancer screening, periodontal exam, home care technique assessment and more. Then we'll schedule your next visit with us. Our goal is to make your visit as comfortable and convenient as possible. If you have more questions about your first visit, or to schedule an appointment, please call us at (205) 699-1155. Q: How often should you visit the dentist? A: If your teeth and gums are healthy, you should visit the dentist every six months for a regular cleaning, x-rays and an exam by the dentist. If you are having dental problems, you may need an extra visit or two to get your teeth and gums back to health. Q: Why are regular visits important? A: Visiting the dentist regularly will not only keep your teeth and mouth healthy, it will also help keep the rest of your body healthy. Regular visits help prevent tooth decay and periodontal disease, which can lead to tooth and bone loss; they also prevent bad breath and help keep your teeth looking bright by removing surface stains. When you visit the dentist regularly, you can have a more attractive smile, which can increase your self-confidence. So many of our patients now say they love to smile for pictures because they love how their teeth look. Q: My teeth feel fine. Do I still need to see a dentist? A: If you never changed the oil in your car, what would happen? If you didn't wash your clothes, how would they look? Going to the dentist helps to maintain your teeth. But the best part is that your dentist can catch problems BEFORE they start hurting you. With so many advances in dentistry, you don't have to settle for stained, chipped, missing or misshapen teeth. Restoring your smile may be more affordable than you expect. Take a small step in the right direction and simply visit the dentist for a cleaning. Little steps over a long period of time can restore your smile to camera-ready before you know it! Q: I haven't been to the dentist in a long time. What should I expect? A: If you haven't been to the dentist in a long time, we may break up your cleaning into multiple visits to make you a little more comfortable. Sometimes it takes a little extra time to get your teeth and gums back to their healthiest. But don't worry, we'll take the time to explain things to you and help you get your smile back! Take a look at some of our Patient Testimonials to hear firsthand about our successful smile restorations. Q: What should I look for when choosing the right dentist for me? A: Choosing a dentist who "clicks" with you and your family is important, so you may wish to consider several dentists before making your final decision. All dentists in the State of Alabama attend rigorous schooling and must pass difficult testing before being allowed to practice in our state. Ask yourself: Was this visit convenient for me and my family? Did the dentist and staff take time to explain things to me? Did I feel comfortable and relaxed in the office?
Am I better equipped and motivated to take care of my teeth and gums?
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import os
import random


def estimate_density(DATA_PATH, feature_size):
    """Estimate the density of a sparse dataset by scanning the file 10
    times, each pass sampling roughly 1% of its lines at random."""
    if not os.path.exists(DATA_PATH):
        raise Exception("Data is not there!")
    density = []
    P = 0.01  # probability of sampling any given line
    for _ in range(10):
        num_non_zero = 0
        num_sample = 0
        with open(DATA_PATH) as f:
            for line in f:
                if random.random() < P:
                    # each whitespace-separated token after the label
                    # is one non-zero feature
                    num_non_zero += len(line.split(" ")) - 1
                    num_sample += 1
        density.append(num_non_zero * 1.0 / (feature_size * num_sample))
    return sum(density) / len(density)
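A quick, hypothetical usage sketch (the file name and the libsvm-style line format are assumptions; the estimate is noisy because only about 1% of lines are inspected per pass, so the file needs to be reasonably large):

# Hypothetical usage: build a libsvm-style file where every line carries a
# label plus two "index:value" pairs, i.e. 2 non-zero features out of 16.
with open("tiny.libsvm", "w") as f:
    for _ in range(10000):
        f.write("1 3:1.0 10:0.5\n")

# True density of every line is 2 / 16 = 0.125; the sampled estimate
# should land close to that.
print(estimate_density("tiny.libsvm", 16))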
Craving Soup Tonight? Join the Club. Literally. Like soup from a stone, each of those neighborhood flavors goes into the pot and the result is greater than the sum of its parts. Stand by the fireplace and eat a rich, hearty dish that’s all about improvisation. The 1976 Farm Winery Act was designed to entice grape growing for wine in the Empire State. Today the impacts of these laws can practically be seen from space. Chef Telepan grew up eating his own mom’s latkes. But here’s the thing: He’s Catholic. No cranberries across the pond, but a bird, plenty of butter and pie spiked with Scotch instead of Bourbon.
'''
This plugin implements a wrapper around the BAP plugin cwe_checker, which checks ELF executables
for several CWEs (Common Weakness Enumeration). Please refer to cwe_checker's implementation
for further information. Please note that these checks are heuristics and the checks are static.
This means that there are definitely false positives and false negatives. The objective of this
plugin is to find potentially interesting binaries that deserve a deep manual analysis or
intensive fuzzing.

As the plugin depends on BAP, it depends on BAP's lifting capabilities. Currently, BAP lifts
to the following architectures:
- Intel x86 (32 and 64 bits)
- ARM
- PowerPC
- Mips
'''
import json
import logging
from collections import defaultdict

from common_helper_process import execute_shell_command_get_return_code

from helperFunctions.docker import run_docker_container
from analysis.PluginBase import AnalysisBasePlugin

TIMEOUT_IN_SECONDS = 600  # 10 minutes
DOCKER_IMAGE = 'fkiecad/cwe_checker:latest'


class AnalysisPlugin(AnalysisBasePlugin):
    '''
    This class implements the FACT Python wrapper for the BAP plugin cwe_checker.
    '''
    NAME = 'cwe_checker'
    DESCRIPTION = 'This plugin checks ELF binaries for several CWEs (Common Weakness Enumeration) like '\
                  'CWE-243 (Creation of chroot Jail Without Changing Working Directory) and '\
                  'CWE-676 (Use of Potentially Dangerous Function). '\
                  'Due to the nature of static analysis, this plugin may run for a long time.'
    DEPENDENCIES = ['cpu_architecture', 'file_type']
    VERSION = '0.5.0'
    MIME_WHITELIST = ['application/x-executable', 'application/x-object', 'application/x-sharedlib']
    SUPPORTED_ARCHS = ['arm', 'x86', 'x64', 'mips', 'ppc']

    def __init__(self, plugin_administrator, config=None, recursive=True, timeout=TIMEOUT_IN_SECONDS + 30):
        self.config = config
        if not self._check_docker_installed():
            raise RuntimeError('Docker is not installed.')
        self._log_version_string()
        super().__init__(plugin_administrator, config=config, plugin_path=__file__,
                         recursive=recursive, timeout=timeout)

    @staticmethod
    def _check_docker_installed():
        _, return_code = execute_shell_command_get_return_code('docker -v')
        return return_code == 0

    def _log_version_string(self):
        output = self._run_cwe_checker_to_get_version_string()
        if output is None:
            logging.error('Could not get version string from cwe_checker.')
        else:
            logging.info('Version is {}'.format(str(output)))
        return output

    @staticmethod
    def _run_cwe_checker_to_get_version_string():
        return run_docker_container(DOCKER_IMAGE, timeout=60, command='--version')

    @staticmethod
    def _run_cwe_checker_in_docker(file_object):
        return run_docker_container(DOCKER_IMAGE, timeout=TIMEOUT_IN_SECONDS,
                                    command='/input --json --quiet',
                                    mount=('/input', file_object.file_path))

    @staticmethod
    def _parse_cwe_checker_output(output):
        tmp = defaultdict(list)
        j_doc = json.loads(output)
        for warning in j_doc:
            tmp[warning['name']] = tmp[warning['name']] + [warning, ]

        res = {}
        for key, values in tmp.items():
            tmp_list = []
            plugin_version = None
            for hit in values:
                tmp_list.append(hit['description'])
                if not plugin_version:
                    plugin_version = hit['version']
            res[key] = {'plugin_version': plugin_version, 'warnings': tmp_list}
        return res

    def _is_supported_arch(self, file_object):
        arch_type = file_object.processed_analysis['file_type']['full'].lower()
        return any(supported_arch in arch_type for supported_arch in self.SUPPORTED_ARCHS)

    def _do_full_analysis(self, file_object):
        output = self._run_cwe_checker_in_docker(file_object)
        if output is not None:
            try:
                cwe_messages = self._parse_cwe_checker_output(output)
                file_object.processed_analysis[self.NAME] = {'full': cwe_messages,
                                                             'summary': list(cwe_messages.keys())}
            except json.JSONDecodeError:
                logging.error('cwe_checker execution failed: {}\nUID: {}'.format(output, file_object.uid))
                file_object.processed_analysis[self.NAME] = {'summary': []}
        else:
            logging.error('Timeout or error during cwe_checker execution.\nUID: {}'.format(file_object.uid))
            file_object.processed_analysis[self.NAME] = {'summary': []}
        return file_object

    def process_object(self, file_object):
        '''
        This function handles only ELF executables. Otherwise it returns an empty dictionary.
        It calls the cwe_checker docker container.
        '''
        if not self._is_supported_arch(file_object):
            logging.debug('{}\'s arch is not supported ({})'.format(
                file_object.file_path,
                file_object.processed_analysis['cpu_architecture']['summary']))
            file_object.processed_analysis[self.NAME] = {'summary': []}
        else:
            file_object = self._do_full_analysis(file_object)
        return file_object
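To illustrate what _parse_cwe_checker_output() produces, a sketch with made-up cwe_checker JSON (the findings and addresses are illustrative; the parser only relies on the 'name', 'version' and 'description' keys used above):

# Made-up cwe_checker output; two hits for the same CWE get grouped.
sample = ('[{"name": "CWE676", "version": "0.1", '
          '"description": "Use of strcpy at 0x400812"},'
          ' {"name": "CWE676", "version": "0.1", '
          '"description": "Use of system at 0x400a40"}]')
print(AnalysisPlugin._parse_cwe_checker_output(sample))
# -> {'CWE676': {'plugin_version': '0.1',
#                'warnings': ['Use of strcpy at 0x400812',
#                             'Use of system at 0x400a40']}}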
Jazzhandz focuses primarily on theatre performance, including singing, acting and dancing. Using music and scenes from new musicals such as 'Wicked' and 'Matilda' and classics such as 'Fame' and 'The Sound of Music', the children get an all-round experience of musical theatre. There will be a showcase of the children's work and talent at the end of the school year, where every child gets their chance to shine and show off those Jazz Hands!!!
import cProfile


def maxProductPaths(nXm_matrix):
    """
    Returns the `maximum` path `product` from the top left to the bottom
    right by moving right and down in a 2D array of size n x m.
    """
    height = len(nXm_matrix)
    width = len(nXm_matrix[0])
    noop = 1
    grid = {}

    # create a grid system dictionary using key format of (h,w)
    for h in range(height):
        for w in range(width):
            grid[(h, w)] = nXm_matrix[h][w]

    # start from bottom-right and move your way up
    for h in reversed(range(height)):
        for w in reversed(range(width)):
            bottom = grid.get((h + 1, w), noop)
            right = grid.get((h, w + 1), noop)
            if not all(x == noop for x in (bottom, right)):
                grid[(h, w)] *= max(bottom, right)

    # print a matrix where (0,0) has the total max `product` number
    for h in range(height):
        row = [str(grid[(h, w)]).rjust(5) for w in range(width)]
        print '[{}]'.format(','.join(row))

    # to traverse the maximal product path, start from (0,0) and
    # work your way down. (take-home exercise)
    count = grid[(0, 0)]
    return count


def run_test():
    """ Test function. """
    d2_matrix = [
        [1, 6, 1],
        [3, 7, 2],
        [5, 0, 8],
        [1, 6, 1],
    ]
    count = maxProductPaths(d2_matrix)
    print "---------------------"
    print "Maximum Path Product = {}\n".format(count)


if __name__ == "__main__":
    # Run the code and profile it.
    cProfile.run('run_test()')
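For reference, a hand-computed trace of the bottom-up sweep on the sample matrix in run_test() (my own calculation, not output captured from the program):

# values          grid after the bottom-up sweep
# [1, 6, 1]       [672, 672, 16]
# [3, 7, 2]       [336, 112, 16]
# [5, 0, 8]       [ 30,   0,  8]
# [1, 6, 1]       [  6,   6,  1]
#
# grid[(0, 0)] ends up as 672, reached by the right/down path
# 1 -> 6 -> 7 -> 2 -> 8 -> 1 (product 672), so run_test() should print
# "Maximum Path Product = 672".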
Who'd be a martial artist, eh? Take Billy and Jimmy Lee, for instance. You might have thought that their unrivalled mastery of combat techniques would assure them a trouble-free life, but it's just not so. Perhaps their run of bad luck has something to do with Marion, their mutual girlfriend (an arrangement which perhaps should not be discussed in polite company). First of all, the girl was kidnapped by Mr Big, until the brothers Lee decided to rescue her and destroy the crime empire. Being a bit miffed at this, the evil crime boss shot Marion, leaving the Lee chaps to avenge her death and find someone capable of resurrecting her. In this, the third of the famous Double Dragon series of multi-level scrolling beat 'em ups, Marion has gone and got herself kidnapped again. This time, Billy and Jimmy have opted for the easy route - they're going to pay the ransom. The trouble is, the ransom just happens to be the fabled Rosetta Stones, mystical artefacts with great powers! Now the duo once again face many dangers as they trot around the globe in their search for the stones. It really does say a lot for computer dating agencies, doesn't it? Where did the programmers of this game go wrong? It's not the graphics - they're great, with loads of detailed and colourful sprites ninja-ing around thanks to the extra graphics chips contained in the cart. It's a shame they didn't see fit to do the same thing with the sound, though, because that's particularly weedy. Still, it's neither of these things that spoils the game. The control method is a little fiddly to start with, it must be said, but you do eventually get used to it. What does seal Double Dragon III's mediocrity is the stupid difficulty level. The enemies have a habit of just crowding around you and pummelling away. This makes the more complex moves almost impossible to perform and renders the simple moves completely useless. The whole thing becomes unnecessarily frustrating, and boring in a very short space of time. Given more lives or slightly less intelligent enemies this could have been outstanding, but as it is Double Dragon III is a very annoying and frustrating game. Arrrgghhh! I've played this game a lot over the last few months and it's taken me ages just to get off the first level! I just can't understand why the game's designers chose to make Double Dragon III so difficult. A good challenge increases a game's longevity, but this is just so tough you're instantly put off. It's a shame really, because this game has the best graphics of the Nintendo Double Dragon series. It also beats its predecessors in terms of depth as well. If you're a die-hard Double Dragon nut who completed the first two chapters of the series with incredible ease, I recommend this without hesitation. However, it's just too difficult and frustrating for the average games player. OOOOOOOOO, this game really didn't deserve that low score! I completed it within about 2 months, which I thought was great value for cash. The only thing I will agree with is that the controls are very sluggish, but once you adapt to them it's OK. I think the key to the game was to get the other characters on your side, so they could act as pseudo-lives, but honestly I didn't think it was that hard, I thought it was fine! Bimmy had a little weep when he got hold of a copy of this game, because they spelt his name wrong. And made his attacks play like poo. Try Turbo controllers, the ones with X and Y (and sometimes Z too) buttons. They make the game much easier than a normal controller. But the final queen will still be very difficult. Believe me, this game is absolutely amazing, and I compare it to the guitar in that they're both hard to learn at first, but once you get good you're opened up to a whole new world.
# Copyright 2016 Nexenta Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_log import log as logging
from oslo_serialization import jsonutils
import requests

from cinder import exception
from cinder.utils import retry

LOG = logging.getLogger(__name__)

TIMEOUT = 60


class NexentaJSONProxy(object):

    retry_exc_tuple = (requests.exceptions.ConnectionError,)

    def __init__(self, scheme, host, port, path, user, password, auto=False,
                 obj=None, method=None, session=None):
        if session:
            self.session = session
        else:
            self.session = requests.Session()
            self.session.auth = (user, password)
            self.session.headers.update({'Content-Type': 'application/json'})
        self.scheme = scheme.lower()
        self.host = host
        self.port = port
        self.path = path
        self.user = user
        self.password = password
        self.auto = auto
        self.obj = obj
        self.method = method

    def __getattr__(self, name):
        if not self.obj:
            obj, method = name, None
        elif not self.method:
            obj, method = self.obj, name
        else:
            obj, method = '%s.%s' % (self.obj, self.method), name
        return NexentaJSONProxy(self.scheme, self.host, self.port, self.path,
                                self.user, self.password, self.auto, obj,
                                method, self.session)

    @property
    def url(self):
        return '%s://%s:%s%s' % (self.scheme, self.host, self.port, self.path)

    def __hash__(self):
        return self.url.__hash__()

    def __repr__(self):
        return 'NMS proxy: %s' % self.url

    @retry(retry_exc_tuple, retries=6)
    def __call__(self, *args):
        data = jsonutils.dumps({
            'object': self.obj,
            'method': self.method,
            'params': args
        })
        LOG.debug('Sending JSON data: %s', data)
        r = self.session.post(self.url, data=data, timeout=TIMEOUT)
        response = r.json()
        LOG.debug('Got response: %s', response)
        if response.get('error') is not None:
            message = response['error'].get('message', '')
            raise exception.NexentaException(message)
        return response.get('result')
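A brief usage sketch of the attribute-chaining trick above; the NMS host, credentials and the volume.object_exists call are illustrative assumptions, not part of this module:

# Hypothetical usage; host, credentials and the NMS method name are made up.
nms = NexentaJSONProxy('http', '10.0.0.5', 2000, '/rest/nms', 'admin', 'secret')

# __getattr__ builds intermediate proxies:
#   nms.volume               -> obj='volume', method=None
#   nms.volume.object_exists -> obj='volume', method='object_exists'
# and calling the final proxy POSTs
#   {'object': 'volume', 'method': 'object_exists', 'params': ('tank/vol1',)}
# to the NMS REST endpoint, retrying on connection errors.
exists = nms.volume.object_exists('tank/vol1')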
Is your current BATHROOM just a magnet for dirt, grease, grime and mold? Our Torrance showroom has HUNDREDS of tile and bathroom ideas – you’re certain to find something you LOVE – and our prices are affordable. We’ll do your powder room in DAYS — guaranteed.
from suds.client import Client as SoapClient
from lxml import objectify

#import logging
#logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)


class SMS(object):

    def __init__(self, app=None):
        self.sender = None
        self.soapservice = None
        self.user = None
        self.password = None
        # note: the original template never closed <body>; the closing tag
        # is added here so the request is well-formed XML
        self.template = '<xmsrequest>' + \
                        '<userid>{userid}</userid>' + \
                        '<password>{password}</password>' + \
                        '<action>smssend</action>' + \
                        '<body>' + \
                        '<type>otm</type>' + \
                        '<message originator="{sender}">{message}</message>' + \
                        '<recipient>{number}</recipient>' + \
                        '</body>' + \
                        '</xmsrequest>'
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        """Set up this instance for use with *app*, if no app was passed
        to the constructor.
        """
        self.sender = app.config['SMS_SENDER']
        self.soapservice = app.config['SMS_SOAP_SERVICE']
        self.user = app.config['SMS_USERNAME']
        self.password = app.config['SMS_PASSWORD']

    def send(self, number, message):
        client = SoapClient(self.soapservice)
        response = client.service.XmsRequest(
            self.template.format(userid=self.user,
                                 password=self.password,
                                 sender=self.sender,
                                 message=message,
                                 number=number)
        )
        try:
            # objectify is a module, not a callable: fromstring() parses
            # the XML reply (the original called objectify() directly)
            result = objectify.fromstring(response)
            return str(result), result.get('mobile', None)
        except Exception:
            return 'error', response
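A minimal, hypothetical wiring example for the class above; the config keys are the ones init_app() reads, while the WSDL URL, credentials and number are placeholders:

from flask import Flask

app = Flask(__name__)
app.config.update(
    SMS_SENDER='MyApp',
    SMS_SOAP_SERVICE='https://example.com/xms?wsdl',  # placeholder WSDL
    SMS_USERNAME='user',
    SMS_PASSWORD='secret',
)

sms = SMS(app)
# send() returns (parsed_response_text, mobile) on success, or
# ('error', raw_response) when the reply cannot be parsed.
status, mobile = sms.send('+15551234567', 'Hello!')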
£8,500 / €10,200 / US$11,475 (the Euro and Dollar prices shown are approximate: the actual cost when paying with any currency other than GBP will be displayed on the PayPal checkout page). Oil on canvas, 16 x 20 in. (40.6 x 50.8 cm).
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import requests
import json
from bs4 import BeautifulSoup

import settings as uapi
from spider163 import settings
from spider163.utils import pysql
from spider163.utils import pylog
from terminaltables import AsciiTable


class Music:

    def __init__(self):
        self.__headers = uapi.header
        self.__url = uapi.music_url
        self.session = settings.Session()

    def views_capture(self):
        urls = self.session.query(pysql.Playlist163).filter(pysql.Playlist163.over == 'N').limit(10)
        for url in urls:
            print("Fetching the songs of playlist <<{}>> ...".format(url.title.encode("utf-8")))
            self.view_capture(url.link)
        for url in urls:
            self.session.query(pysql.Playlist163).filter(pysql.Playlist163.link == url.link).update({'over': 'Y'})
            self.session.commit()
        return urls.count()

    def view_capture(self, link):
        self.session.query(pysql.Playlist163).filter(pysql.Playlist163.link == link).update({'over': 'Y'})
        url = self.__url + str(link)
        s = requests.session()
        try:
            s = BeautifulSoup(s.get(url, headers=self.__headers).content, "html.parser")
            musics = json.loads(s.text)['result']['tracks']
            exist = 0
            for music in musics:
                name = music['name'].encode('utf-8')
                author = music['artists'][0]['name'].encode('utf-8')
                if pysql.single("music163", "song_id", (music['id'])) == True:
                    self.session.add(pysql.Music163(song_id=music['id'], song_name=name, author=author))
                    self.session.commit()
                    exist = exist + 1
                else:
                    pylog.log.info('{} : {} {}'.format("duplicate song", name, "skipping persistence"))
            print("The playlist contains {} songs; merged {} songs into the database \r\n".format(len(musics), exist))
        except Exception:
            pylog.log.error('{} : {}'.format("problem fetching playlist page", url))

    def get_playlist(self, playlist_id):
        self.view_capture(int(playlist_id))
        url = uapi.playlist_api.format(playlist_id)
        s = requests.session()
        s = BeautifulSoup(s.get(url, headers=self.__headers).content, "html.parser")
        playlist = json.loads(s.text)['result']
        print("<<" + playlist['name'].encode('utf-8') + ">>")
        author = playlist['creator']['nickname'].encode('utf-8')
        pc = str(playlist['playCount'])
        sc = str(playlist['subscribedCount'])
        rc = str(playlist['shareCount'])
        cc = str(playlist['commentCount'])
        print("Maintainer: {}  Plays: {}  Followers: {}  Shares: {}  Comments: {}".format(author, pc, sc, rc, cc))
        print("Description: {}".format(playlist['description'].encode('utf-8')))
        print("Tags: {}".format(",".join(playlist['tags']).encode("utf-8")))
        tb = [["ID", "Song", "Artist", "Album"]]
        for music in playlist['tracks']:
            artists = []
            for s in music['artists']:
                artists.append(s['name'])
            ms = music['name'].encode("utf-8")
            ar = ",".join(artists).encode("utf-8")
            ab = music['album']['name'].encode("utf-8")
            id = music['id']
            tb.append([id, ms, ar, ab])
        print(AsciiTable(tb).table)


if __name__ == "__main__":
    tmp = Music()
    print tmp.views_capture()
At TradingBeasts, we do our best to provide accurate price predictions for a wide range of digital coins like Birake. We update our predictions daily, working with historical data and using a combination of linear and polynomial regressions. No one can, however, predict cryptocurrency prices with total certainty, so it is crucial to understand that the following BIR price predictions serve merely as a suggestion of possible price development and are not intended to be used as investment advice. The Birake price is forecasted to reach $0.01 by the beginning of May 2019. For each month from May through October 2019, the model currently returns no expected maximum price, minimum price or end-of-month prediction (all shown as $N/A). Feel free to rate our Birake price predictions and their accuracy.
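As a rough illustration of the "combination of linear and polynomial regressions" mentioned above, a hand-rolled sketch with numpy; the synthetic price series, the polynomial degree and the 50/50 blend are all assumptions, not TradingBeasts' actual model:

import numpy as np

days = np.arange(30)                       # 30 days of price history
prices = 0.012 + 0.0001 * days + 0.001 * np.random.randn(30)  # synthetic BIR prices

lin = np.polyfit(days, prices, 1)          # linear fit
poly = np.polyfit(days, prices, 3)         # cubic fit

horizon = np.arange(30, 60)                # the next 30 days
forecast = 0.5 * np.polyval(lin, horizon) + 0.5 * np.polyval(poly, horizon)
print(forecast[-1])                        # naive "beginning of next month" figure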
import os
import re
import sys
import urllib2
import HTMLParser

import xbmcgui
import xbmcplugin
from t0mm0.common.addon import Addon
from t0mm0.common.addon import Addon as Addon2

addon = Addon('plugin.video.waldo', sys.argv)
_1CH = Addon2('plugin.video.1channel', sys.argv)

#BASE_Address = 'www.primewire.ag'
BASE_Address = _1CH.get_setting('domain').replace('http://', '')
if (_1CH.get_setting("enableDomain") == 'true') and (len(_1CH.get_setting("customDomain")) > 10):
    BASE_Address = _1CH.get_setting("customDomain").replace('http://', '')
if not BASE_Address.startswith('http'):
    BASE_URL = 'http://' + BASE_Address
else:
    # the replace() calls above should already have stripped the scheme,
    # but make sure BASE_URL is always defined
    BASE_URL = BASE_Address

display_name = 'PrimeWire'  #'1Channel'
#Label that will be displayed to the user representing this index
tag = 'PrimeWire'  #'1Channel'
#MUST be implemented. Unique 3 or 4 character string that will be used to
#identify this index

required_addons = []
#MUST be implemented. A list of strings indicating which addons are required to
#be installed for this index to be used.
#For example: required_addons = ['script.module.beautifulsoup', 'plugin.video.youtube']
#Currently, xbmc does not provide a way to require a specific version of an addon


def get_settings_xml():
    """
    Must be defined. This method should return XML which describes any Waldo
    specific settings you would like for your plugin. You should make sure
    that the ``id`` starts with your tag followed by an underscore.

    For example:

        xml = '<setting id="ExI_priority" '
        xml += 'type="number" label="Priority" default="100"/>\\n'
        return xml

    The settings category will be your plugin's :attr:`display_name`.

    Returns:
        A string containing XML which would be valid in
        ``resources/settings.xml`` or boolean False if none are required
    """
    return False


def get_browsing_options():
    #MUST be defined
    """
    Returns a list of dicts. Each dict represents a different method of
    browsing this index. The following keys MUST be provided:

    'name': Label to display to the user to represent this browsing method
    'function': A function (defined in this index) which will be executed
        when the user selects this browsing method. This function should
        describe and add the list items to the directory, and assume flow
        control from this point on.

    Once the user indicates the content they would like to search the
    providers for (usually via selecting a list item), plugin.video.waldo
    should be called with the following parameters (again usually via
    listitem):

        mode = 'GetAllResults'
        type = either 'movie', 'tvshow', 'season', or 'episode'
        title = The title string to look for
        year = The release year of the desired movie, or premiere date of
               the desired tv show.
        imdb = The imdb id of the movie or tvshow to find sources for
        tvdb = The tvdb id of the movie or tvshow to find sources for
        season = The season number for which to return results. If season
                 is supplied, but not episode, all results for that season
                 should be returned
        episode: The episode number for which to return results
    """
    option_1 = {'name': 'Tv Shows', 'function': 'BrowseListMenu',
                'kwargs': {'section': 'tv'}}
    option_2 = {'name': 'Movies', 'function': 'BrowseListMenu',
                'kwargs': {'section': 'movies'}}
    return [option_1, option_2]


def callback(params):
    """
    MUST be implemented. This method will be called when the user selects a
    listitem you created. It will be passed a dict of parameters you passed
    to the listitem's url.

    For example, the following listitem url:

        plugin://plugin.video.waldo/?mode=main&section=tv&api_key=1234

    Will call this function with:

        {'mode': 'main', 'section': 'tv', 'api_key': '1234'}
    """
    try:
        addon.log('%s was called with the following parameters: %s'
                  % (params.get('receiver', ''), params))
    except:
        pass
    sort_by = params.get('sort', None)
    section = params.get('section')
    if sort_by:
        GetFilteredResults(section, sort=sort_by)


def BrowseListMenu(section):
    #This must match the 'function' key of an option from get_browsing_options
    addon.add_directory({'section': section, 'sort': 'featured'},
                        {'title': 'Featured'},
                        img=art('featured.png'), fanart=art('fanart.png'))
    addon.add_directory({'section': section, 'sort': 'views'},
                        {'title': 'Most Popular'},
                        img=art('most_popular.png'), fanart=art('fanart.png'))
    addon.add_directory({'section': section, 'sort': 'ratings'},
                        {'title': 'Highly rated'},
                        img=art('highly_rated.png'), fanart=art('fanart.png'))
    addon.add_directory({'section': section, 'sort': 'release'},
                        {'title': 'Date released'},
                        img=art('date_released.png'), fanart=art('fanart.png'))
    addon.add_directory({'section': section, 'sort': 'date'},
                        {'title': 'Date added'},
                        img=art('date_added.png'), fanart=art('fanart.png'))
    addon.end_of_directory()


def art(filename):
    adn = Addon('plugin.video.1channel', sys.argv)
    THEME_LIST = ['mikey1234', 'Glossy_Black', 'PrimeWire']
    THEME = THEME_LIST[int(adn.get_setting('theme'))]
    THEME_PATH = os.path.join(adn.get_path(), 'art', 'themes', THEME)
    img = os.path.join(THEME_PATH, filename)
    return img


def GetFilteredResults(section=None, genre=None, letter=None, sort='alphabet', page=None):  #3000
    try:
        addon.log('Filtered results for Section: %s Genre: %s Letter: %s Sort: %s Page: %s'
                  % (section, genre, letter, sort, page))
    except:
        pass
    pageurl = BASE_URL + '/?'
    if section == 'tv':
        pageurl += 'tv'
    if genre:
        pageurl += '&genre=' + genre
    if letter:
        pageurl += '&letter=' + letter
    if sort:
        pageurl += '&sort=' + sort
    if page:
        pageurl += '&page=%s' % page
    if page:
        page = int(page) + 1
    else:
        page = 2
    html = GetURL(pageurl)
    r = re.search('number_movies_result">([0-9,]+)', html)
    if r:
        total = int(r.group(1).replace(',', ''))
    else:
        total = 0
    total_pages = total / 24
    total = min(total, 24)
    r = 'class="index_item.+?href="(.+?)" title="Watch (.+?)"?\(?([0-9]{4})?\)?"?>.+?src="(.+?)"'
    regex = re.finditer(r, html, re.DOTALL)
    resurls = []
    # map the section name to the waldo video type once, *before* the loop;
    # the original reassigned `section` inside the loop, which broke every
    # item after the first and the PageSelect context menu below
    vid_type = 'tvshow' if section == 'tv' else 'movie'
    for s in regex:
        resurl, title, year, thumb = s.groups()
        if resurl not in resurls:
            resurls.append(resurl)
            li_title = '%s (%s)' % (title, year)
            li = xbmcgui.ListItem(li_title, iconImage=thumb, thumbnailImage=thumb)
            queries = {'waldo_mode': 'GetAllResults', 'title': title,
                       'vid_type': vid_type}
            li_url = addon.build_plugin_url(queries)
            xbmcplugin.addDirectoryItem(int(sys.argv[1]), li_url, li,
                                        isFolder=True, totalItems=total)
    if html.find('> >> <') > -1:
        label = 'Skip to Page...'
        command = addon.build_plugin_url(
            {'mode': 'PageSelect', 'pages': total_pages, 'section': section,
             'genre': genre, 'letter': letter, 'sort': sort})
        command = 'RunPlugin(%s)' % command
        cm = [(label, command)]
        meta = {'title': 'Next Page >>'}
        addon.add_directory(
            {'mode': 'CallModule', 'receiver': 'PrimeWire',
             'ind_path': os.path.dirname(__file__), 'section': section,
             'genre': genre, 'letter': letter, 'sort': sort, 'page': page},
            meta, cm, True, art('nextpage.png'), art('fanart.png'),
            is_folder=True)
    addon.end_of_directory()


def GetURL(url, params=None, referrer=BASE_URL):
    try:
        addon.log('Fetching URL: %s' % url)
    except:
        pass
    USER_AGENT = ('User-Agent:Mozilla/5.0 (Windows NT 6.2; WOW64) '
                  'AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.56')
    if params:
        req = urllib2.Request(url, params)
    else:
        req = urllib2.Request(url)
    req.add_header('User-Agent', USER_AGENT)
    req.add_header('Host', BASE_Address)  #'www.primewire.ag'
    req.add_header('Referer', referrer)
    try:
        response = urllib2.urlopen(req, timeout=10)
        body = response.read()
        body = unicode(body, 'iso-8859-1')
        h = HTMLParser.HTMLParser()
        body = h.unescape(body)
    except Exception, e:
        try:
            addon.log('Failed to connect to %s: %s' % (url, e))
        except:
            pass
        return ''
    return body.encode('utf-8')
ummm how do u know wat iron tasted like? u lick the stuff? JOIN THE SLYFOX HOMIE KINGDOM!!
"""List directory contents SYNOPSIS: ls [<REMOTE PATH>] ... DESCRIPTION: List the files in given remote directory path - If the given element is not an accessible directory, the payload automatically considers the path's basename as a regex pattern, it allows to list files which match a specific pattern only, for example: "ls /tmp/*.txt", will list only .txt files. - Ending the argument string with a path separator (for example, '/tmp/' instead of '/tmp') explicitly indicates that the given path is the exact directory location you want to list, so it disables the pattern feature mentionned above. NOTE: If the plugin receives multiple arguments, each one will be listed in the given order. WARNING: The 'ls' plugin gives permission informations about each listed file, in unix drwxrwxrwx mode. If the permission informations are not available, then the payload tries to provide basic permission informations in drwx mode, which indicates the file rights relative to the current user. EXAMPLES: > ls - List any element in the current directory > ls ~ - List any element in the user's home directory > ls .. /home - List the path above the current working directory - After that, list the '/home' directory. > ls D:\\*.ini - List any element in D:\\ whose names end with '.ini' MAINTAINERS: nil0x42 <http://goo.gl/kb2wf> Wannes Rombouts <https://github.com/wapiflapi> """ import sys from ui.color import colorize, decolorize from api import plugin from api import server from api import environ for path in plugin.argv[1:] or [environ['PWD']]: absolute_path = server.path.abspath(path) lister = server.payload.Payload("payload.php") lister['TARGET'] = absolute_path lister['SEPARATOR'] = "/" lister['PARSE'] = 1 if absolute_path == environ['HOME'] or path.endswith(environ['PATH_SEP']): lister['PARSE'] = 0 try: response = lister.send() except server.payload.PayloadError as e: if e.args[0] == 'nodir': sys.exit("cannot access %s: No such file or directory." % (path)) if e.args[0] == 'noright': sys.exit("cannot open %s: Permission denied." % (path)) if e.args[0] == 'nomatch': sys.exit("cannot find %s: No matching elements." % (path)) target, regex, lines = response[0], response[1], response[2] # if at least one owner/group is not '?', use unix-like formatter if any((x[2] + x[3]) != '??' for x in lines): rows_hdr = ["Mode", "Owner", "Group", "Size", "Last Modified", "Name"] rows = ([l[0], l[2], l[3], l[4], l[5], l[6]] for l in lines) # otherwise, use windows-like formatter else: rows_hdr = ["Mode", "Size", "Last Modified", "Name"] rows = ([x[1], x[4], x[5], x[6]] for x in lines) # format rows the right way rows = sorted(rows, key=(lambda elem: elem[-1])) rows.insert(0, rows_hdr) rows.insert(1, [("-" * len(elem)) for elem in rows_hdr]) # format and display output title header = "Listing: %s" % target if regex: header += " (matching r'%s')" % colorize("%White", regex) print("\n" + header + "\n" + ("=" * len(decolorize(header))) + "\n") widths = [max(map(len, col)) for col in zip(*rows)] for i, row in enumerate(rows): if i > 0: if row[0].startswith('d'): row[-1] = colorize("%BoldBlue", row[-1]) elif not row[0].startswith('-'): row[-1] = colorize("%BoldPink", row[-1]) print(" ".join((val.ljust(width) for val, width in zip(row, widths)))) print()
We are asking for one leader to register their entire unit. Step 1: Add the number of youth and adults attending and give a unit contact. Step 2: Click on the merit badges and register Scouts for the classes. Continue to click on the badge to add additional names. Step 3: Check out in the cart.
from django.conf.urls import url

from . import views

# every pattern is anchored with ^ so that slugs cannot match mid-path
# (the original left most patterns unanchored)
urlpatterns = [
    url(r"^$", views.index_view, name="docs_home"),
    url(r"^new/$", views.new_article_view, name="new_article"),
    url(r"^list/$", views.list_articles_view, name="list_articles"),
    url(r"^(?P<article_slug>[\w-]+)/$", views.read_article_view, name="read_article"),
    url(r"^(?P<article_slug>[\w-]+)/save$", views.save_view, name="save_article"),
    url(r"^(?P<article_slug>[\w-]+)/save_revision$", views.save_history_view, name="save_history_article"),
    url(r"^(?P<article_slug>[\w-]+)/publish$", views.publish_view, name="publish_article"),
    url(r"^(?P<article_slug>[\w-]+)/unpublish$", views.unpublish_view, name="unpublish_article"),
    url(r"^(?P<article_slug>[\w-]+)/history$", views.article_history_view, name="article_history"),
    url(r"^(?P<article_slug>[\w-]+)/edit$", views.edit_article_view, name="edit_article"),
    url(r"^(?P<article_slug>[\w-]+)/r/(?P<revision_id>\d+)$", views.read_article_view, name="read_article_revision"),
]
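For context, a hypothetical sketch of the view these patterns point at; the Article model, its fields and the template path are assumptions, and only the URL kwargs come from the patterns above:

from django.shortcuts import get_object_or_404, render

from .models import Article  # assumed model


def read_article_view(request, article_slug, revision_id=None):
    article = get_object_or_404(Article, slug=article_slug)
    # "read_article_revision" reuses this view with an extra revision_id kwarg
    revision = (article.revisions.get(pk=revision_id)
                if revision_id is not None else None)
    return render(request, "docs/read_article.html",
                  {"article": article, "revision": revision})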
Level 1 Forex Intro - Currency Trading (evel1/currency-trading.aspx). The foreign exchange market (forex or FX for short) is one of the largest, most exciting, fastest-paced markets in the world, and it seems easier to understand than the stock market. Forex Walkthrough: it is important to keep up to date on macroeconomic and geopolitical events, and to know which news releases will affect the currencies you trade. Investopedia Forex PDF. Double Exponential Moving Averages Explained, Investopedia. It really is essential to know that there are hundreds, possibly millions, of robots for contracts for difference. The Forex Market, Investopedia Forex Walkthrough, Investopedia Forex Tutorials, Investopedia Forex Trading: click here and open the manual of the Plus500 software. Please ensure you fully understand the risks involved by reading our full risk warning. Forum: forex transaction strategies, MT5 forum introduction. The forex market is highly profitable and highly risky, taking profit simply from the relative movement of currencies; the rhythm of how the forex market works in many ways determines the outcome of the currency transactions made by its participants, the brokers' clients. Investopedia forex walkthrough PDF, Forex Brokers: whenever you dedicate money to trading, it is important to take it seriously. Entering the forex (FX) market for the first time basically means starting from square one. Qualified Foreign Investors, Investopedia Forex, July 27, 2017. What is foreign investment? Foreign investment comprises capital flows from one country to another, granting extensive ownership stakes in domestic companies and assets. The forex (FX) market has many similarities to the equity markets; however, there are some key differences. This article will show you those differences and help you get started in forex trading. There are many forex brokers to choose from, just as in any other market. Here are some things to look for. Day Trading Strategies For Beginners, by Justin Kuepper. FREE Forex Report - The 5 Things That Move The Currency Market (Contact Author | Biography). Filed Under: Active Trading, Technical Analysis. When people use the term "day trading", they mean the act of buying and selling a stock within the same day. Forex is an advanced trading arena where a newbie has no way to compete. Do you even have a trade plan, know your time frame, or have you determined your risk level? Do you even know what leverage is? Sign up for the analysis here; given the inherent risks of the current market infrastructure, actually owning Bitcoins in any quantity is only advisable for those who can afford it, or who have the knowledge both to acquire and to secure Bitcoins in large quantities. Trade forex well: download the best guide to binary options, Investopedia futures traders market, MT4 forex trading options, Investopedia forex walkthrough PDF, forex Asian session. Get the Investopedia forex step-by-step explanation PDF at the Forex Trading System site. 10/3/2016: The actual amount you put in should reflect your investment personality and your level of expertise in an asset or asset class, both of which are very subjective.
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-11 19:49
from __future__ import unicode_literals

import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('api', '0010_auto_20170502_1511'),
    ]

    operations = [
        migrations.CreateModel(
            name='dropoff_locations',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tripid', models.BigIntegerField(blank=True, default=0, null=True)),
                ('p_lat', models.FloatField(blank=True, default=0, null=True)),
                ('p_lng', models.FloatField(blank=True, default=0, null=True)),
                ('point', django.contrib.gis.db.models.fields.PointField(blank=True, null=True, srid=4326)),
                ('trip', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='api.trips')),
            ],
        ),
        migrations.CreateModel(
            name='pickup_locations',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tripid', models.BigIntegerField(blank=True, default=0, null=True)),
                ('p_lat', models.FloatField(blank=True, default=0, null=True)),
                ('p_lng', models.FloatField(blank=True, default=0, null=True)),
                ('point', django.contrib.gis.db.models.fields.PointField(blank=True, null=True, srid=4326)),
                ('trip', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='api.trips')),
            ],
        ),
    ]
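A hypothetical usage sketch for the models this migration creates; the import path, coordinates and radius are made up, while Point and D are the standard GeoDjango helpers:

from django.contrib.gis.geos import Point
from django.contrib.gis.measure import D

from api.models import pickup_locations  # assumed import path

here = Point(-122.4194, 37.7749, srid=4326)  # note: (lng, lat) order

# pickup points whose PointField lies within 500 m of `here`
# (distance lookups like this need a spatial backend such as PostGIS)
nearby = pickup_locations.objects.filter(point__distance_lte=(here, D(m=500)))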
Paulina Lakeshore Trail is a hiking trail in Deschutes County, Oregon, within Newberry National Volcanic Monument. The trail itself is 5.8 miles long and begins at 6,346 feet of altitude; traveling the entire route is 7.7 miles with a total elevation gain of 994 feet. There is parking near the trailhead. The Paulina Lake Lodge hotel, the Newberry Group Campground and Paulina Lake Campground camp sites, and the Paulina Lake Restaurant can be seen along the trail, as can piers, parking areas and wetlands. One of Oregon's best trails, Paulina Lakeshore Trail is located near Newberry National Volcanic Monument, OR. Trails' printable online topo maps offer shaded and unshaded reliefs, and aerial photos too. Use topographic maps to find elevation, print high-resolution maps, save a PNG, or just learn the topography around Paulina Lakeshore Trail. You can also get free latitude and longitude coordinates from the topographical map and set your GPS. Premium members can download or print any topo and cover more terrain when mapping your Paulina Lakeshore Trail route ahead of time.
# -*- coding: utf-8 -*- # # PySPED - Python libraries to deal with Brazil's SPED Project # # Copyright (C) 2010-2012 # Copyright (C) Aristides Caldeira <aristides.caldeira at tauga.com.br> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # PySPED - Bibliotecas Python para o # SPED - Sistema Público de Escrituração Digital # # Copyright (C) 2010-2012 # Copyright (C) Aristides Caldeira <aristides.caldeira arroba tauga.com.br> # # Este programa é um software livre: você pode redistribuir e/ou modificar # este programa sob os termos da licença GNU Affero General Public License, # publicada pela Free Software Foundation, em sua versão 3 ou, de acordo # com sua opção, qualquer versão posterior. # # Este programa é distribuido na esperança de que venha a ser útil, # porém SEM QUAISQUER GARANTIAS, nem mesmo a garantia implícita de # COMERCIABILIDADE ou ADEQUAÇÃO A UMA FINALIDADE ESPECÍFICA. Veja a # GNU Affero General Public License para mais detalhes. # # Você deve ter recebido uma cópia da GNU Affero General Public License # juntamente com este programa. Caso esse não seja o caso, acesse: # <http://www.gnu.org/licenses/> # from __future__ import (division, print_function, unicode_literals, absolute_import) from builtins import str from pysped.xml_sped import * from pysped.cte.leiaute import ESQUEMA_ATUAL_VERSAO_300 as ESQUEMA_ATUAL import os DIRNAME = os.path.dirname(__file__) class EmiOcc(XMLNFe): def __init__(self): super(EmiOcc, self).__init__() self.CNPJ = TagCaracter(nome='CNPJ', tamanho=[ 0, 14], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.cInt = TagCaracter(nome='cInt', tamanho=[ 1, 10], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.IE = TagCaracter(nome='IE', tamanho=[ 2, 14], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.UF = TagCaracter(nome='UF', tamanho=[ 2, 2] , raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.fone = TagInteiro(nome='fone', tamanho=[ 6, 14], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<emiOcc>' xml += self.CNPJ.xml xml += self.cInt.xml xml += self.IE.xml xml += self.UF.xml xml += self.fone.xml xml += '</emiOcc>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.CNPJ.xml = arquivo self.cInt.xml = arquivo self.IE.xml = arquivo self.UF.xml = arquivo self.fone.xml = arquivo xml = property(get_xml, set_xml) class Occ(XMLNFe): def __init__(self): super(Occ, self).__init__() self.serie = TagCaracter(nome='serie' , tamanho=[ 8, 8, 8], raiz='//occ', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.nOcc = TagInteiro(nome='nOcc' , tamanho=[ 1, 6], raiz='//occ', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.dEmi = TagData(nome='dEmi', raiz='//occ', 
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.emiOcc = EmiOcc()

    def get_xml(self):
        if not (self.nOcc.valor or self.dEmi.valor or self.emiOcc is not None):
            return ''

        xml = XMLNFe.get_xml(self)
        xml += '<occ>'
        xml += self.serie.xml
        xml += self.nOcc.xml
        xml += self.dEmi.xml
        xml += self.emiOcc.xml
        xml += '</occ>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.serie.xml = arquivo
            self.nOcc.xml = arquivo
            self.dEmi.xml = arquivo
            self.emiOcc.xml = arquivo

    xml = property(get_xml, set_xml)


# Modal rodoviário (road transport)
class Rodo(XMLNFe):
    def __init__(self):
        super(Rodo, self).__init__()
        self.RNTRC = TagCaracter(nome='RNTRC', tamanho=[8, 8, 8], raiz='//rodo',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.occ = []

    def get_xml(self):
        xml = XMLNFe.get_xml(self)
        xml += '<rodo>'
        xml += self.RNTRC.xml
        for o in self.occ:
            xml += o.xml
        xml += '</rodo>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.RNTRC.xml = arquivo
            self.occ = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/rodo/occ',
                                     Occ, sigla_ns='cte')

    xml = property(get_xml, set_xml)


class InfTotAP(XMLNFe):
    def __init__(self):
        super(InfTotAP, self).__init__()
        self.qTotProd = TagCaracter(nome='qTotProd', tamanho=[1, 1, 1], raiz='//infTotAP',
                                    namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.uniAP = TagCaracter(nome='uniAP', tamanho=[1, 4], raiz='//infTotAP',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)

    def get_xml(self):
        xml = XMLNFe.get_xml(self)
        xml += '<infTotAP>'
        xml += self.qTotProd.xml
        xml += self.uniAP.xml
        xml += '</infTotAP>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.qTotProd.xml = arquivo
            self.uniAP.xml = arquivo

    xml = property(get_xml, set_xml)


class Peri(XMLNFe):
    def __init__(self):
        super(Peri, self).__init__()
        self.nONU = TagCaracter(nome='nONU', tamanho=[4, 4, 4], raiz='//peri',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.qTotEmb = TagCaracter(nome='qTotEmb', tamanho=[1, 20], raiz='//peri',
                                   namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                                   obrigatorio=False)
        self.infTotAP = InfTotAP()

    def get_xml(self):
        if not (self.nONU.valor or self.qTotEmb.valor or self.infTotAP is not None):
            return ''

        xml = XMLNFe.get_xml(self)
        xml += '<peri>'
        xml += self.nONU.xml
        xml += self.qTotEmb.xml
        xml += self.infTotAP.xml
        xml += '</peri>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.nONU.xml = arquivo
            self.qTotEmb.xml = arquivo
            self.infTotAP.xml = arquivo

    xml = property(get_xml, set_xml)


class Tarifa(XMLNFe):
    def __init__(self):
        super(Tarifa, self).__init__()
        self.CL = TagCaracter(nome='CL', tamanho=[1, 1, 1], raiz='//tarifa',
                              namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.cTar = TagCaracter(nome='cTar', tamanho=[1, 4], raiz='//tarifa',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                                obrigatorio=False)
        self.vTar = TagDecimal(nome='vTar', tamanho=[1, 13, 1], decimais=[0, 2, 2],
                               raiz='//tarifa', namespace=NAMESPACE_CTE,
                               namespace_obrigatorio=False)

    def get_xml(self):
        xml = XMLNFe.get_xml(self)
        xml += '<tarifa>'
        xml += self.CL.xml
        xml += self.cTar.xml
        xml += self.vTar.xml
        xml += '</tarifa>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.CL.xml = arquivo
            self.cTar.xml = arquivo
            self.vTar.xml = arquivo

    xml = property(get_xml, set_xml)


class TagCInfManu(TagCaracter):
    def __init__(self, *args, **kwargs):
        super(TagCInfManu, self).__init__(*args, **kwargs)
        self.nome = 'cInfMan'
        self.tamanho = [2, 2]
        self.raiz = '//natCarga'


class NatCarga(XMLNFe):
    def __init__(self):
        super(NatCarga, self).__init__()
        self.xDime = TagCaracter(nome='xDime', tamanho=[5, 14], raiz='//natCarga',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                                 obrigatorio=False)
        self.cInfManu = []

    def get_xml(self):
        xml = XMLNFe.get_xml(self)
        xml += '<natCarga>'
        xml += self.xDime.xml
        for c in self.cInfManu:
            xml += c.xml
        xml += '</natCarga>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.xDime.xml = arquivo
            self.cInfManu = self.le_grupo(
                '//CTe/infCte/infCTeNorm/infModal/aereo/natCarga/cInfMan',
                TagCInfManu, sigla_ns='cte')

    xml = property(get_xml, set_xml)


# Modal aéreo (air transport)
class Aereo(XMLNFe):
    def __init__(self):
        super(Aereo, self).__init__()
        self.nMinu = TagInteiro(nome='nMin', tamanho=[9, 9, 9], raiz='//aereo',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                                obrigatorio=False)
        self.nOCA = TagInteiro(nome='nOCA', tamanho=[11, 11, 11], raiz='//aereo',
                               namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                               obrigatorio=False)
        self.dPrevAereo = TagData(nome='dPrevAereo', raiz='//aereo',
                                  namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.natCarga = NatCarga()
        self.tarifa = Tarifa()
        self.peri = []

    def get_xml(self):
        xml = XMLNFe.get_xml(self)
        xml += '<aereo>'
        xml += self.nMinu.xml
        xml += self.nOCA.xml
        xml += self.dPrevAereo.xml
        xml += self.natCarga.xml
        xml += self.tarifa.xml
        for p in self.peri:
            xml += p.xml
        xml += '</aereo>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.nMinu.xml = arquivo
            self.nOCA.xml = arquivo
            self.dPrevAereo.xml = arquivo
            self.natCarga.xml = arquivo
            self.tarifa.xml = arquivo
            self.peri = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aereo/peri',
                                      Peri, sigla_ns='cte')

    xml = property(get_xml, set_xml)


class InfNFeAquav(XMLNFe):
    def __init__(self):
        super(InfNFeAquav, self).__init__()
        self.chave = TagCaracter(nome='chave', tamanho=[44, 44],
                                 raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNFe',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.unidRat = TagDecimal(nome='unidRat', tamanho=[1, 3, 1], decimais=[0, 2, 2],
                                  raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNFe',
                                  obrigatorio=False)

    def get_xml(self):
        if not self.chave.valor:
            return ''

        xml = XMLNFe.get_xml(self)
        xml += '<infNFe>'
        xml += self.chave.xml
        xml += self.unidRat.xml
        xml += '</infNFe>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.chave.xml = arquivo
            self.unidRat.xml = arquivo

    xml = property(get_xml, set_xml)


class InfNFAquav(XMLNFe):
    def __init__(self):
        super(InfNFAquav, self).__init__()
        self.serie = TagCaracter(nome='serie', tamanho=[1, 3],
                                 raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNF',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.nDoc = TagCaracter(nome='nDoc', tamanho=[1, 20],
                                raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNF',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.unidRat = TagDecimal(nome='unidRat', tamanho=[1, 3, 1], decimais=[0, 2, 2],
                                  raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNF',
                                  obrigatorio=False)

    def get_xml(self):
        if not (self.serie.valor or self.nDoc.valor):
            return ''

        xml = XMLNFe.get_xml(self)
        xml += '<infNF>'
        xml += self.serie.xml
        xml += self.nDoc.xml
        xml += self.unidRat.xml
        xml += '</infNF>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.serie.xml = arquivo
            self.nDoc.xml = arquivo
            self.unidRat.xml = arquivo

    xml = property(get_xml, set_xml)


class InfDocAquav(XMLNFe):
    def __init__(self):
        super(InfDocAquav, self).__init__()
        self.infNF = []
        self.infNFe = []

    def get_xml(self):
        if len(self.infNF) == 0 and len(self.infNFe) == 0:
            return ''

        xml = XMLNFe.get_xml(self)
        xml += '<infDoc>'
        for inf in self.infNF:
            xml += inf.xml
        for infe in self.infNFe:
            xml += infe.xml
        xml += '</infDoc>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.infNF = self.le_grupo(
                '//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNF',
                InfNFAquav, sigla_ns='cte')
            self.infNFe = self.le_grupo(
                '//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNFe',
                InfNFeAquav, sigla_ns='cte')

    xml = property(get_xml, set_xml)


class Lacre(XMLNFe):
    def __init__(self):
        super(Lacre, self).__init__()
        self.nLacre = TagCaracter(nome='nLacre', tamanho=[1, 20], raiz='//lacre',
                                  namespace=NAMESPACE_CTE, namespace_obrigatorio=False)

    def get_xml(self):
        if not self.nLacre.valor:
            return ''

        xml = XMLNFe.get_xml(self)
        xml += '<lacre>'
        xml += self.nLacre.xml
        xml += '</lacre>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.nLacre.xml = arquivo

    xml = property(get_xml, set_xml)


class DetCont(XMLNFe):
    def __init__(self):
        super(DetCont, self).__init__()
        self.nCont = TagCaracter(nome='nCont', tamanho=[1, 20], raiz='//detCont',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.lacre = []
        # Avoid a name clash with InfDoc
        self.infDoc = InfDocAquav()

    def get_xml(self):
        if not self.nCont.valor:
            return ''

        xml = XMLNFe.get_xml(self)
        xml += '<detCont>'
        xml += self.nCont.xml
        for l in self.lacre:
            xml += l.xml
        xml += self.infDoc.xml
        xml += '</detCont>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.nCont.xml = arquivo
            self.infDoc.xml = arquivo
            self.lacre = self.le_grupo(
                '//CTe/infCte/infCTeNorm/infModal/aquav/detCont/lacre',
                Lacre, sigla_ns='cte')

    xml = property(get_xml, set_xml)


class Balsa(XMLNFe):
    def __init__(self):
        super(Balsa, self).__init__()
        self.xBalsa = TagCaracter(nome='xBalsa', tamanho=[1, 60], raiz='//balsa',
                                  namespace=NAMESPACE_CTE, namespace_obrigatorio=False)

    def get_xml(self):
        if not self.xBalsa.valor:
            return ''

        xml = XMLNFe.get_xml(self)
        xml += '<balsa>'
        xml += self.xBalsa.xml
        xml += '</balsa>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.xBalsa.xml = arquivo

    xml = property(get_xml, set_xml)


# Modal aquaviário (waterway transport)
class Aquav(XMLNFe):
    def __init__(self):
        super(Aquav, self).__init__()
        self.vPrest = TagDecimal(nome='vPrest', tamanho=[1, 13, 1], decimais=[0, 2, 2],
                                 raiz='//aquav', namespace=NAMESPACE_CTE,
                                 namespace_obrigatorio=False)
        self.vAFRMM = TagDecimal(nome='vAFRMM', tamanho=[1, 13, 1], decimais=[0, 2, 2],
                                 raiz='//aquav', namespace=NAMESPACE_CTE,
                                 namespace_obrigatorio=False)
        self.xNavio = TagCaracter(nome='xNavio', tamanho=[1, 60], raiz='//aquav',
                                  namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.balsa = []
        self.nViag = TagInteiro(nome='nViag', tamanho=[1, 10], raiz='//aquav',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                                obrigatorio=False)
        self.direc = TagCaracter(nome='direc', tamanho=[1, 1, 1], raiz='//aquav',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.irin = TagCaracter(nome='irin', tamanho=[1, 10], raiz='//aquav',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.detCont = []

    def get_xml(self):
        xml = XMLNFe.get_xml(self)
        xml += '<aquav>'
        xml += self.vPrest.xml
        xml += self.vAFRMM.xml
        xml += self.xNavio.xml
        for b in self.balsa:
            xml += b.xml
        for d in self.detCont:
            xml += d.xml
        xml += self.nViag.xml
        xml += self.direc.xml
        xml += self.irin.xml
        xml += '</aquav>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.vPrest.xml = arquivo
            self.vAFRMM.xml = arquivo
            self.xNavio.xml = arquivo
            self.nViag.xml = arquivo
            self.direc.xml = arquivo
            self.irin.xml = arquivo
            self.balsa = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aquav/balsa',
                                       Balsa, sigla_ns='cte')
            self.detCont = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aquav/detCont',
                                         DetCont, sigla_ns='cte')

    xml = property(get_xml, set_xml)


class EnderFerro(XMLNFe):
    def __init__(self):
        super(EnderFerro, self).__init__()
        self.xLgr = TagCaracter(nome='xLgr', tamanho=[2, 255], raiz='//enderFerro',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.nro = TagCaracter(nome='nro', tamanho=[1, 60], raiz='//enderFerro',
                               namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                               obrigatorio=False)
        self.xCpl = TagCaracter(nome='xCpl', tamanho=[1, 60], raiz='//enderFerro',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                                obrigatorio=False)
        self.xBairro = TagCaracter(nome='xBairro', tamanho=[2, 60], raiz='//enderFerro',
                                   namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                                   obrigatorio=False)
        self.cMun = TagInteiro(nome='cMun', tamanho=[7, 7, 7], raiz='//enderFerro',
                               namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.xMun = TagCaracter(nome='xMun', tamanho=[2, 60], raiz='//enderFerro',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.CEP = TagCaracter(nome='CEP', tamanho=[8, 8, 8], raiz='//enderFerro',
                               namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                               obrigatorio=False)
        self.UF = TagCaracter(nome='UF', tamanho=[2, 2], raiz='//enderFerro',
                              namespace=NAMESPACE_CTE, namespace_obrigatorio=False)

    def get_xml(self):
        xml = XMLNFe.get_xml(self)
        xml += '<enderFerro>'
        xml += self.xLgr.xml
        xml += self.nro.xml
        xml += self.xCpl.xml
        xml += self.xBairro.xml
        xml += self.cMun.xml
        xml += self.xMun.xml
        xml += self.CEP.xml
        xml += self.UF.xml
        xml += '</enderFerro>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.xLgr.xml = arquivo
            self.nro.xml = arquivo
            self.xCpl.xml = arquivo
            self.xBairro.xml = arquivo
            self.cMun.xml = arquivo
            self.xMun.xml = arquivo
            self.CEP.xml = arquivo
            self.UF.xml = arquivo

    xml = property(get_xml, set_xml)


class FerroEnv(XMLNFe):
    def __init__(self):
        super(FerroEnv, self).__init__()
        self.CNPJ = TagCaracter(nome='CNPJ', tamanho=[0, 14], raiz='//ferroEnv',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                                obrigatorio=False)
        self.cInt = TagCaracter(nome='cInt', tamanho=[1, 10], raiz='//ferroEnv',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                                obrigatorio=False)
        self.IE = TagCaracter(nome='IE', tamanho=[2, 14], raiz='//ferroEnv',
                              namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                              obrigatorio=False)
        self.xNome = TagCaracter(nome='xNome', tamanho=[2, 60], raiz='//ferroEnv',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.enderFerro = EnderFerro()

    def get_xml(self):
        if not self.CNPJ.valor:
            return ''

        xml = XMLNFe.get_xml(self)
        xml += '<ferroEnv>'
        xml += self.CNPJ.xml
        xml += self.cInt.xml
        xml += self.IE.xml
        xml += self.xNome.xml
        xml += self.enderFerro.xml
        xml += '</ferroEnv>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.CNPJ.xml = arquivo
            self.cInt.xml = arquivo
            self.IE.xml = arquivo
            self.xNome.xml = arquivo
            self.enderFerro.xml = arquivo

    xml = property(get_xml, set_xml)


class TrafMut(XMLNFe):
    def __init__(self):
        super(TrafMut, self).__init__()
        self.respFat = TagInteiro(nome='respFat', tamanho=[1, 1, 1], raiz='//trafMut',
                                  namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.ferrEmi = TagInteiro(nome='ferrEmi', tamanho=[1, 1, 1], raiz='//trafMut',
                                  namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.vFrete = TagDecimal(nome='vFrete', tamanho=[1, 13, 1], decimais=[0, 2, 2],
                                 raiz='//trafMut', namespace=NAMESPACE_CTE,
                                 namespace_obrigatorio=False)
        self.chCTeFerroOrigem = TagCaracter(nome='chCTeFerroOrigem', tamanho=[44, 44],
                                            raiz='//trafMut', namespace=NAMESPACE_CTE,
                                            namespace_obrigatorio=False, obrigatorio=False)
        self.ferroEnv = []

    def get_xml(self):
        if not (self.respFat.valor or self.ferrEmi.valor):
            return ''

        xml = XMLNFe.get_xml(self)
        xml += '<trafMut>'
        xml += self.respFat.xml
        xml += self.ferrEmi.xml
        xml += self.vFrete.xml
        xml += self.chCTeFerroOrigem.xml
        for f in self.ferroEnv:
            xml += f.xml
        xml += '</trafMut>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.respFat.xml = arquivo
            self.ferrEmi.xml = arquivo
            self.vFrete.xml = arquivo
            self.chCTeFerroOrigem.xml = arquivo
            self.ferroEnv = self.le_grupo(
                '//CTe/infCte/infCTeNorm/infModal/ferrov/trafMut/ferroEnv',
                FerroEnv, sigla_ns='cte')

    xml = property(get_xml, set_xml)


# Modal ferroviário (rail transport)
class Ferrov(XMLNFe):
    def __init__(self):
        super(Ferrov, self).__init__()
        self.tpTraf = TagInteiro(nome='tpTraf', tamanho=[1, 1, 1], raiz='//ferrov',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.trafMut = TrafMut()
        self.fluxo = TagCaracter(nome='fluxo', tamanho=[1, 10], raiz='//ferrov',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)

    def get_xml(self):
        xml = XMLNFe.get_xml(self)
        xml += '<ferrov>'
        xml += self.tpTraf.xml
        xml += self.trafMut.xml
        xml += self.fluxo.xml
        xml += '</ferrov>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.tpTraf.xml = arquivo
            self.trafMut.xml = arquivo
            self.fluxo.xml = arquivo

    xml = property(get_xml, set_xml)


# Modal dutoviário (pipeline transport)
class Duto(XMLNFe):
    def __init__(self):
        super(Duto, self).__init__()
        self.vTar = TagDecimal(nome='vTar', tamanho=[1, 9, 1], decimais=[0, 6, 6],
                               raiz='//duto', namespace=NAMESPACE_CTE,
                               namespace_obrigatorio=False, obrigatorio=False)
        self.dIni = TagData(nome='dIni', raiz='//duto', namespace=NAMESPACE_CTE,
                            namespace_obrigatorio=False)
        self.dFim = TagData(nome='dFim', raiz='//duto', namespace=NAMESPACE_CTE,
                            namespace_obrigatorio=False)

    def get_xml(self):
        xml = XMLNFe.get_xml(self)
        xml += '<duto>'
        xml += self.vTar.xml
        xml += self.dIni.xml
        xml += self.dFim.xml
        xml += '</duto>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.vTar.xml = arquivo
            self.dIni.xml = arquivo
            self.dFim.xml = arquivo

    xml = property(get_xml, set_xml)


class InfSeg(XMLNFe):
    def __init__(self):
        super(InfSeg, self).__init__()
        self.xSeg = TagCaracter(nome='xSeg', tamanho=[1, 30], raiz='//infSeg',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.CNPJ = TagCaracter(nome='CNPJ', tamanho=[0, 14], raiz='//infSeg',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False,
                                obrigatorio=False)

    def get_xml(self):
        xml = XMLNFe.get_xml(self)
        xml += '<infSeg>'
        xml += self.xSeg.xml
        xml += self.CNPJ.xml
        xml += '</infSeg>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.xSeg.xml = arquivo
            self.CNPJ.xml = arquivo

    xml = property(get_xml, set_xml)


class Seg(XMLNFe):
    def __init__(self):
        super(Seg, self).__init__()
        self.infSeg = InfSeg()
        self.nApol = TagCaracter(nome='nApol', tamanho=[1, 20], raiz='//seg',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.nAver = TagCaracter(nome='nAver', tamanho=[1, 20], raiz='//seg',
                                 namespace=NAMESPACE_CTE, namespace_obrigatorio=False)

    def get_xml(self):
        if not (self.nApol.valor or self.infSeg is not None):
            return ''

        xml = XMLNFe.get_xml(self)
        xml += '<seg>'
        xml += self.infSeg.xml
        xml += self.nApol.xml
        xml += self.nAver.xml
        xml += '</seg>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.infSeg.xml = arquivo
            self.nApol.xml = arquivo
            self.nAver.xml = arquivo

    xml = property(get_xml, set_xml)


# Modal multimodal
class Multimodal(XMLNFe):
    def __init__(self):
        super(Multimodal, self).__init__()
        self.COTM = TagCaracter(nome='COTM', tamanho=[1, 20], raiz='//multimodal',
                                namespace=NAMESPACE_CTE, namespace_obrigatorio=False)
        self.indNegociavel = TagInteiro(nome='indNegociavel', tamanho=[1, 1, 1],
                                        raiz='//multimodal', namespace=NAMESPACE_CTE,
                                        namespace_obrigatorio=False)
        self.seg = Seg()

    def get_xml(self):
        xml = XMLNFe.get_xml(self)
        xml += '<multimodal>'
        xml += self.COTM.xml
        xml += self.indNegociavel.xml
        xml += self.seg.xml
        xml += '</multimodal>'
        return xml

    def set_xml(self, arquivo):
        if self._le_xml(arquivo):
            self.COTM.xml = arquivo
            self.indNegociavel.xml = arquivo
            self.seg.xml = arquivo

    xml = property(get_xml, set_xml)
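A minimal usage sketch of the tag classes above; the attribute names come from the class definitions, but embedding the result in a full CTe document depends on the rest of the pysped module, so treat this as illustrative only:

# Build a road-modal group with one occurrence record and serialize it.
rodo = Rodo()
rodo.RNTRC.valor = '12345678'

occ = Occ()
occ.nOcc.valor = '123'
rodo.occ.append(occ)

print(rodo.xml)  # something like '<rodo><RNTRC>12345678</RNTRC><occ>...</occ></rodo>'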
Nigel is a very experienced director with extensive drama credits including CASUALTY, HOLLYOAKS and BALLYKISSANGEL. His highly distinctive short films and installations have screened all over the world; prizes include the Gold Plaque at Chicago and Best of the Fest at Edinburgh. He also directs commercials and documentaries and is currently developing a slate of feature films.
#!/usr/bin/env python

# Implement next permutation, which rearranges numbers into the
# lexicographically next greater permutation of numbers.
# If such arrangement is not possible, it must rearrange it as the lowest
# possible order (ie, sorted in ascending order).
# The replacement must be in-place, do not allocate extra memory.
#
# Here are some examples. Inputs are in the left-hand column and the
# corresponding outputs are in the right-hand column.
# 1,2,3 → 1,3,2
# 3,2,1 → 1,2,3
# 1,1,5 → 1,5,1
#
# Brute force: find all permutations, then find the next greater permutation;
# complexity O(n!)
#
# Essentially we need to find:
# 1. An index i indicating the first difference between the original
#    permutation and the next permutation: original[i] < next[i]
# 2. next[i+1:] should be the smallest permutation possible
# 3. index i should be the largest possible


class Solution(object):
    def nextPermutation(self, nums):
        """
        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        # Scan right to left for the largest i with a larger element
        # somewhere to its right.
        for i in range(len(nums) - 2, -1, -1):
            swapCandidates = [j for j in range(i + 1, len(nums)) if nums[j] > nums[i]]
            if len(swapCandidates) > 0:
                # Swap nums[i] with the smallest element to its right that
                # still exceeds it ...
                bestSwap = min(swapCandidates, key=lambda j: nums[j])
                nums[i], nums[bestSwap] = nums[bestSwap], nums[i]
                # ... then make the suffix as small as possible.
                nums[i + 1:] = sorted(nums[i + 1:])
                return
        # No such index: this is the largest permutation, so wrap around
        # to ascending order.
        nums.sort()
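A minimal check against the three examples from the problem statement:

if __name__ == "__main__":
    for nums in ([1, 2, 3], [3, 2, 1], [1, 1, 5]):
        shown = list(nums)
        Solution().nextPermutation(nums)
        print("{} -> {}".format(shown, nums))
        # [1, 2, 3] -> [1, 3, 2]
        # [3, 2, 1] -> [1, 2, 3]
        # [1, 1, 5] -> [1, 5, 1]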
In 2013, the Presidential IT Awards Committee selected 39 university and high school students from the State Engineering University of Armenia (Polytechnic), Yerevan State University (YSU), the American University of Armenia (AUA), Russian-Armenian (Slavonic) University (RAU), the European Regional Academy (ERA), the YSU Physics-Mathematical School, the SEUA Basic Specialized College, Quant College, Anania Shirakatsy College of Yerevan, Foton College of Gyumri, Evrika College of Vanadzor, the Mkhitar Sebastatsi Educomplex, and Ayb High School. By the decision of the commission, this year there was no winner in the Grand Prix nomination. The Committee screened 191 nominees: 111 university students and 80 high school students. To date, 280 university and high school students have been awarded the Presidential Educational Award in the IT sphere. “In the contemporary world, human knowledge and talent are the most expensive commodities. Today, you represent the talents of our nation, our nation’s wealth, the best part of our society. I have no doubt that in your person the future, the bright future of Armenia, is right here. I once again congratulate our award winners. I wish you success and a cloudless road full of new achievements, which will bring new victories to each of you and our entire nation,” President Serzh Sargsyan said during the awarding ceremony at the Presidential House on October 16, 2013.
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Nalu(CMakePackage):
    """Nalu: a generalized unstructured massively parallel low Mach flow code
       designed to support a variety of energy applications of interest (most
       notably Wind ECP) built on the Sierra Toolkit and Trilinos solver
       Tpetra/Epetra stack
    """

    homepage = "https://github.com/NaluCFD/Nalu"
    url = "https://github.com/NaluCFD/Nalu.git"

    version('master', git='https://github.com/NaluCFD/Nalu.git', branch='master')

    # Currently Nalu only builds static libraries; To be fixed soon
    depends_on('yaml-cpp+pic~shared')
    depends_on('trilinos~shared+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist+superlu+hdf5+zlib+pnetcdf@master')

    def cmake_args(self):
        spec = self.spec
        options = []

        options.extend([
            '-DTrilinos_DIR:PATH=%s' % spec['trilinos'].prefix,
            '-DYAML_DIR:PATH=%s' % spec['yaml-cpp'].prefix,
            '-DENABLE_INSTALL:BOOL=ON'
        ])

        return options
Not all trusts are created equal, especially when it comes to protection in a divorce. No client wants to leave an estate to a child only to find that the money is lost later in a divorce. The estate planning answer for many parents is to leave inheritances in trust for the benefit of their child. But trusts vary widely, and because this is a message many clients miss, planners must reinforce it. Most clients continue to use trusts that will provide modest protection—or perhaps none—if a child later divorces. There are far better options.

To understand the alternatives, the nuances of will and trust provisions have to be addressed. Advisors need to assess the fine print of every trust to see what it will really do. Poorly crafted trusts are the weak link in most clients' wealth transfer plans.

Before starting a conversation about trusts, it helps to understand a client's mind-set. Clients usually make two common mistakes when they plan wealth transfers via trusts: they insist on simplicity, and they insist on control. This is a classic estate planning double fault, and their kids will not only lose the point but also the money. Begin by explaining that simplicity should never outweigh protecting heirs. Planners can play a vital role in helping clients understand and gain comfort with the recommendations of the estate planner to achieve these protective goals. As for control, clients must understand that a properly structured trust is about providing an heir flexibility and security, not about setting limits.

Typical trust provisions look like this:

* The trust ends at some specified age, typically when the client feels the child will be mature enough to handle money. Many end at 25 or 30. Some divide up the principal and make distributions in segments, for example at ages 25 and 30, with the balance at 35.
* The child is named co-trustee at some age, say 21, so that the child can be in control of his or her financial future.
* Trustees can distribute money to maintain a child's standard of living. In tax jargon, this is often referred to as a health, education, maintenance and support standard.
* Children often have the right to make distributions to themselves to maintain a standard of living.

While there are no reliable statistics on trust terms, most estate planning lawyers would tell you the above approach has been the norm. That's too bad, because clients can get better results. The typical trust will accomplish what has historically been most parents' main goal: to protect a child from imprudence until that child attains the age and maturity to handle money. But this is only one of many goals. The reality is that distributing money outright at any age is often a mistake. What if a divorce occurs the year after the distribution? Not knowing when a divorce might occur suggests that trusts should be made to last as long as possible.

Naming a child as trustee or co-trustee is common, but it also vests the child with power over distributions that might undermine the protection the trust would otherwise afford if the child trustee divorces. If the child can make distributions to himself or herself during a marriage, might a court be inclined to force a child trustee to continue making distributions after a divorce?

Distributions to maintain a child's standard of living have considerable appeal to most clients. They want a safety net for a child to assure a reasonable lifestyle, and setting out in advance what seem like reasonable support distributions might allay parents' concerns about a child's potential to spend money on extravagances. But might a court interpret the requirement to maintain a child's standard of living as including the care of a minor child? What about child support? This could well be a hole in the trust, and trying to stem the bleeding of assets through this hole during a divorce will be a weak attempt to rectify what proper planning could have secured from the start.

A recent landmark case in New Jersey, Tannen v. Tannen, held that the wife's trust could not be touched to satisfy divorce claims. The wife's parents created the trust for her and named themselves and the wife as co-trustees. On appeal, the court held that no income could be imputed to the wife from the trust for purposes of determining alimony or child support in the divorce. Even though the trust's distribution provisions included both support distributions to maintain a standard of living and discretionary distributions to be determined by the trustee, the court treated the trust as a discretionary trust. Based on this and other factors, the court held that the wife did not have an income interest in the trust that she could enforce, and hence one the court could attribute to her in the divorce. The New Jersey Supreme Court affirmed. This case will likely influence similar challenges in other states, so it's important for planners to be aware of the ruling.

The wife was fortunate in how the court resolved the matter, and advisors should guide clients to avoid potentially close calls in their own planning. While the Tannen trust worked, the family still had to defend it through costly litigation. It's possible to do better at protecting wealth should an heir divorce.

Trusts can generally be classified by their distribution provisions as either support or discretionary trusts. The Tannen trust included standard-of-living support but also gave the trustees discretion, so it was really a mixture. Depending on the state involved, a court might reach a different result with a similar trust. The safer approach is always to design the trust as solely a discretionary trust, with no standard-of-living support or any other standard for distribution. The risk is that if the trust, as in the Tannen case, has a support standard, a court might interpret that standard as including the payment of a particular matrimonial obligation or other claim. In contrast, a pure discretionary trust, in which distributions are at the sole discretion of the trustee, is harder to treat as a resource. With no mandated standard for distributions, since all distributions are discretionary, what can the court force the trustee to do? Most clients, however, are loath to give that much discretion to a trustee, so the decision becomes a weighing of better divorce protection on one hand and greater certainty over distributions on the other. Ask any lawyer—in our litigious society, the recommendation will be to favor better divorce protection.

The message for advisors to get across to clients is that divorce is a major threat to wealth transfers. For many clients, it poses a greater risk than the estate tax. Unfortunately, too many clients leave assets outright to adult heirs instead of in trusts, or opt for trusts that do not afford the protections available with better trusts.
It's worth spending some extra time and effort to make sure clients understand the potential future consequences—for their children and heirs—of estate planning decisions. Source: SourceMedia and Financial Planning (02/2012).
# Copyright Aaron Smith 2009
#
# This file is part of Gity.
#
# Gity is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Gity is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gity. If not, see <http://www.gnu.org/licenses/>.

from _util import *

try:
    import re, os, subprocess, simplejson as json
except Exception, e:
    sys.stderr.write(str(e))
    exit(84)

command = ""

try:
    from _argv import *
    if not options.misc:
        raise Exception("Gitty Error: getting remote branches requires a remote.")
    remote = sanitize_str(options.misc[0])
    command = "%s %s %s" % (options.git, "ls-remote --heads", remote)
    rcode, stout, sterr = run_command(command)
    if server_hung_up(sterr): exit(85)
    if server_unreachable(sterr): exit(86)
    rcode_for_git_exit(rcode, sterr)
    lines = re.split("\n", stout)
    if len(lines) > 0: lines.pop()
    finals = []
    for line in lines:
        if line == "": continue
        # Keep only the ref path after the SHA, then take the branch name.
        a = re.search("\t[a-zA-Z0-9\/].*", line)
        l = a.group(0)
        b = l.split("/")[-1]
        finals.append(b)
    sys.stdout.write(json.dumps(finals))
    exit(0)
except Exception, e:
    sys.stderr.write("The get remote branches command threw this error: " + str(e))
    sys.stderr.write("\ncommand: %s\n" % command)
    log_gity_version(options.gityversion)
    log_gitv(options.git)
    exit(84)
Rilke shows us how poetry made the transition from romanticism and symbolism into true Modernism. This new selection, drawing primarily on his extremely rich middle period in the first decade of the twentieth century, and concluding with a selection from his late Sonnets to Orpheus, offers a clear, powerful, and contemporary Rilke. David Young's versions of the Duino Elegies have captured a significant readership over the last thirty years. He has also translated Günter Eich, Eugenio Montale, the T'ang dynasty poets, and Miroslav Holub.

that's not their own, mirrored from far away.
you feel how short the little life has been.
a touching blue delights itself in green.

Copyright © 1994 by Oberlin College. May not be reproduced without permission.
import numpy as np

from vesper.tests.test_case import TestCase
import vesper.util.time_frequency_analysis_utils as tfa_utils


'''
TODO: Add test cases for which window size differs from DFT size, and for
which window is not rectangular.
'''

'''
TODO: Given that we need to test FFTs and spectrograms implemented in
various programming languages, it might make sense to prepare a set of
test cases in a language-portable format like JSON that can be used by
test code in the different languages.
'''


class TimeFrequencyAnalysisUtilsTests(TestCase):

    def test_get_dft_analysis_data(self):
        cases = [
            (1000, 4, None, 4, [0, 250, 500])
        ]
        for sample_rate, window_size, dft_size, expected_dft_size, \
                expected_freqs in cases:
            expected_freqs = np.array(expected_freqs)
            actual_dft_size, actual_freqs = tfa_utils.get_dft_analysis_data(
                sample_rate, window_size, dft_size)
            self.assertEqual(actual_dft_size, expected_dft_size)
            self.assertTrue(np.array_equal(actual_freqs, expected_freqs))

    def test_get_dft_size(self):
        cases = [
            (1, 1), (2, 2), (3, 4), (4, 4), (5, 8), (6, 8), (7, 8), (8, 8),
            (9, 16)
        ]
        for window_size, expected in cases:
            actual = tfa_utils.get_dft_size(window_size)
            self.assertEqual(actual, expected)

    def test_get_dft_freqs(self):
        cases = [
            (1000, 1, [0]),
            (1000, 2, [0, 500]),
            (1000, 4, [0, 250, 500]),
            (2000, 8, [0, 250, 500, 750, 1000])
        ]
        for sample_rate, dft_size, expected in cases:
            expected = np.array(expected)
            actual = tfa_utils.get_dft_freqs(sample_rate, dft_size)
            self.assertTrue(np.array_equal(actual, expected))

    def test_get_dft_bin_num(self):
        cases = [
            ((0, 8000, 8), 0),
            ((4000, 8000, 8), 4),
            ((1000, 8000, 8), 1),
            ((499, 8000, 8), 0),
            ((501, 8000, 8), 1),
            ((11024.5, 22050., 8), 4)
        ]
        for args, expected in cases:
            actual = tfa_utils.get_dft_bin_num(*args)
            self.assertEqual(actual, expected)

    def test_get_num_analysis_records(self):
        cases = [
            (0, 8, 4, 0),
            (8, 8, 4, 1),
            (16, 8, 4, 3),
            (17, 8, 4, 3),
            (18, 8, 4, 3),
            (19, 8, 4, 3),
            (20, 8, 4, 4),
            (20, 8, 3, 5),
            (21, 8, 3, 5),
            (22, 8, 3, 5),
            (23, 8, 3, 6)
        ]
        for num_samples, window_size, hop_size, expected in cases:
            actual = tfa_utils.get_num_analysis_records(
                num_samples, window_size, hop_size)
            self.assertEqual(actual, expected)

    def test_get_num_analysis_records_errors(self):
        cases = [
            # record size zero
            (0, 0, 1),
            # hop size zero
            (0, 1, 0),
            # hop size exceeds record size
            (0, 1, 2)
        ]
        for args in cases:
            self._assert_raises(
                ValueError, tfa_utils.get_num_analysis_records, *args)

    def test_get_analysis_records_1d(self):
        """Tests `get_analysis_records` with 1-dimensional input."""
        samples = np.arange(8)
        cases = [
            # record size and hop size equal
            (1, 1, [[0], [1], [2], [3], [4], [5], [6], [7]]),
            (2, 2, [[0, 1], [2, 3], [4, 5], [6, 7]]),
            (3, 3, [[0, 1, 2], [3, 4, 5]]),
            (4, 4, [[0, 1, 2, 3], [4, 5, 6, 7]]),
            (5, 5, [[0, 1, 2, 3, 4]]),
            (8, 8, [[0, 1, 2, 3, 4, 5, 6, 7]]),
            # record size and hop size not equal
            (2, 1, [[0, 1], [1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [6, 7]]),
            (3, 2, [[0, 1, 2], [2, 3, 4], [4, 5, 6]]),
            (4, 2, [[0, 1, 2, 3], [2, 3, 4, 5], [4, 5, 6, 7]]),
            (4, 3, [[0, 1, 2, 3], [3, 4, 5, 6]]),
        ]
        self._test_get_analysis_records(samples, cases)

    def _test_get_analysis_records(self, samples, cases):
        for record_size, hop_size, expected in cases:
            expected = np.array(expected)
            actual = tfa_utils._get_analysis_records(
                samples, record_size, hop_size)
            self._assert_arrays_equal(actual, expected)

    def test_get_analysis_records_2d(self):
        """Tests `get_analysis_records` with 2-dimensional input."""
        samples = np.arange(8).reshape((2, 4))
        cases = [
            # record size and hop size equal
            (1, 1, [[[0], [1], [2], [3]], [[4], [5], [6], [7]]]),
            (2, 2, [[[0, 1], [2, 3]], [[4, 5], [6, 7]]]),
            (3, 3, [[[0, 1, 2]], [[4, 5, 6]]]),
            (4, 4, [[[0, 1, 2, 3]], [[4, 5, 6, 7]]]),
            # record size and hop size not equal
            (2, 1, [[[0, 1], [1, 2], [2, 3]], [[4, 5], [5, 6], [6, 7]]]),
            (3, 1, [[[0, 1, 2], [1, 2, 3]], [[4, 5, 6], [5, 6, 7]]]),
            (3, 2, [[[0, 1, 2]], [[4, 5, 6]]])
        ]
        self._test_get_analysis_records(samples, cases)

    def test_compute_spectrogram(self):
        # This tests that our spectrogram function produces the expected
        # output for an input comprising a single channel with a single
        # window's worth of cosine samples. We use a rectangular window
        # so the expected output spectrum has a particularly simple form.
        for num_channels in [1, 2]:
            for dft_size in [1, 2, 4, 8, 16]:
                if dft_size == 1:
                    hop_sizes = [1]
                else:
                    hop_sizes = [dft_size // 2, dft_size]
                for hop_size in hop_sizes:
                    for bin_num in range(dft_size // 2 + 1):
                        self._test_compute_spectrogram(
                            num_channels, dft_size, hop_size, bin_num)

    def _test_compute_spectrogram(
            self, num_channels, dft_size, hop_size, bin_num):
        num_samples = dft_size * 2
        samples = self._create_test_signal(
            num_channels, num_samples, dft_size, bin_num)
        window = np.ones(dft_size)
        spectra = tfa_utils.compute_spectrogram(
            samples, window, hop_size, dft_size)
        expected = self._get_expected_spectra(
            num_channels, num_samples, hop_size, dft_size, bin_num)
        self.assertTrue(np.allclose(spectra, expected))

    def _create_test_signal(
            self, num_channels, num_samples, dft_size, bin_num):
        phase_factor = 2 * np.pi * bin_num / dft_size
        samples = np.cos(phase_factor * np.arange(num_samples))
        if num_channels == 2:
            samples = np.stack((samples, np.ones(num_samples)))
        return samples

    def _get_expected_spectra(
            self, num_channels, num_samples, hop_size, dft_size, bin_num):
        num_spectra = tfa_utils.get_num_analysis_records(
            num_samples, dft_size, hop_size)
        spectrum = self._get_expected_spectrum(dft_size, bin_num)
        spectra = np.ones((num_spectra, 1)) * spectrum
        if num_channels == 2:
            spectrum = self._get_expected_spectrum(dft_size, 0)
            spectra_1 = np.ones((num_spectra, 1)) * spectrum
            spectra = np.stack((spectra, spectra_1))
        return spectra

    def _get_expected_spectrum(self, dft_size, bin_num):
        num_bins = dft_size // 2 + 1
        spectrum = np.zeros(num_bins)
        spectrum[bin_num] = dft_size ** 2
        if bin_num != 0 and bin_num != num_bins - 1:
            spectrum[bin_num] /= 4
        return spectrum.reshape((1, len(spectrum)))

    def test_scale_spectrogram(self):
        cases = [
            # empty
            (np.zeros((0, 1)), np.zeros((0, 1))),
            (np.zeros((0, 3)), np.zeros((0, 3))),
            # mono
            ([[1], [2]], [[1], [2]]),
            ([[1, 2], [3, 4]], [[.5, 1], [1.5, 2]]),
            ([[1, 2, 3]], [[.25, 1, .75]]),
            # stereo
            ([[[1], [2]], [[3], [4]]], [[[1], [2]], [[3], [4]]]),
            ([[[1, 2], [3, 4]], [[5, 6], [7, 8]]],
             [[[.5, 1], [1.5, 2]], [[2.5, 3], [3.5, 4]]]),
            ([[[1, 2, 3]], [[4, 5, 6]]], [[[.25, 1, .75]], [[1, 2.5, 1.5]]])
        ]
        for spectra, expected in cases:
            spectra = np.array(spectra, dtype='float64')
            expected = np.array(expected, dtype='float64')
            self._test_op(expected, tfa_utils.scale_spectrogram, spectra)

    def _test_op(self, expected, op, input, *args, **kwargs):

        # out of place, result allocated by op
        actual = op(input, *args, **kwargs)
        self.assertFalse(actual is input)
        self._assert_arrays_equal(actual, expected)

        # out of place, result preallocated
        actual = np.zeros_like(expected)
        kwargs_ = dict(kwargs, out=actual)
        actual = op(input, *args, **kwargs_)
        self.assertFalse(actual is input)
        self._assert_arrays_equal(actual, expected)

        # in place
        kwargs_ = dict(kwargs, out=input)
        actual = op(input, *args, **kwargs_)
        self.assertTrue(actual is input)
        self._assert_arrays_equal(actual, expected)

    def test_linear_to_log(self):
        minus_infinity = tfa_utils.SMALL_POWER_DB
        cases = [
            # empty
            (np.zeros((0, 1)), np.zeros((0, 1))),
            (np.zeros((0, 3)), np.zeros((0, 3))),
            # mono
            ([[0], [1], [10]], [[minus_infinity], [0], [10]]),
            ([[0, 1], [1, 10]], [[minus_infinity, 0], [0, 10]]),
            # stereo
            ([[[0, 1], [1, 10]], [[1, 10], [10, 100]]],
             [[[minus_infinity, 0], [0, 10]], [[0, 10], [10, 20]]])
        ]

        # default reference power
        for spectra, expected in cases:
            spectra = np.array(spectra, dtype='float64')
            expected = np.array(expected, dtype='float64')
            self._test_op(expected, tfa_utils.linear_to_log, spectra)

        # explicit reference power
        reference_power = 10
        reference_power_db = 10 * np.log10(reference_power)
        for spectra, expected in cases:
            spectra = np.array(spectra, dtype='float64')
            expected = np.array(expected, dtype='float64')
            expected[expected != minus_infinity] -= reference_power_db
            self._test_op(
                expected, tfa_utils.linear_to_log, spectra, reference_power)

    def test_log_to_linear(self):
        cases = [
            # empty
            (np.zeros((0, 1)), np.zeros((0, 1))),
            (np.zeros((0, 3)), np.zeros((0, 3))),
            # mono
            ([[-10], [0], [10]], [[.1], [1], [10]]),
            ([[-10, 0], [0, 10]], [[.1, 1], [1, 10]]),
            # stereo
            ([[[-10, 0], [0, 10]], [[0, 10], [10, 20]]],
             [[[.1, 1], [1, 10]], [[1, 10], [10, 100]]])
        ]

        # default reference power
        for spectra, expected in cases:
            spectra = np.array(spectra, dtype='float64')
            expected = np.array(expected, dtype='float64')
            self._test_op(expected, tfa_utils.log_to_linear, spectra)

        # explicit reference power
        reference_power = 10
        for spectra, expected in cases:
            spectra = np.array(spectra, dtype='float64')
            expected = np.array(expected, dtype='float64')
            expected *= reference_power
            self._test_op(
                expected, tfa_utils.log_to_linear, spectra, reference_power)
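The second TODO above suggests preparing test cases in a language-portable format; one hypothetical shape for such a JSON file, written from Python. The file name and key names are invented for illustration, and the case data mirrors test_get_dft_size and test_get_dft_freqs:

import json

cases = {
    # window_size -> expected DFT size (next power of two)
    'get_dft_size': [
        {'window_size': 3, 'expected': 4},
        {'window_size': 9, 'expected': 16},
    ],
    # (sample_rate, dft_size) -> expected bin center frequencies
    'get_dft_freqs': [
        {'sample_rate': 1000, 'dft_size': 4, 'expected': [0, 250, 500]},
    ],
}

with open('tfa_test_cases.json', 'w') as f:
    json.dump(cases, f, indent=2)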
Liz Lemon: night cheese connoisseur, eye roll aficionado and possibly one of the worst homemakers, ever. This week, we look at the holy grail of home-based everything, Good Housekeeping with our fearless heroine Liz guiding us through the appropriate reactions to some of their top headlines. Godspeed, Liz Lemon. Good Housekeeping? Pfft. I’m a Country Living kind of woman, thankyouverymuch. But in the interest of trying new things, I may be able to entertain this “Good Housekeeping” you speak of. 75% of my textversations are now held entirely in gifs and so I’m grateful for these posts.
# Coded by Walker Argendeli, Lumyo Capstone Group

from urlparse import urlparse, urljoin
from datetime import datetime, timedelta
import requests, json
from getpass import getpass
from enum import Enum, IntEnum
from collections import namedtuple

# class API:
#
#     def __init__(self, low_myo, timestamp, firmware_version):
#         super(DataCaptureListener.MyoProxy, self).__init__()
#         self._connect_time = None
#
#     @property
#     def connected(self):
#         with self.synchronized:
#             return (
#                 self._connect_time is not None and
#                 self._disconnect_time is None
#             )

_apiURL = "http://drewswinney.com:8080/api/"
_requestSuccessful = 200


def _apiParams(**params):
    return params


# It turns out that classes cannot have static function pointers;
# they'll be converted to unbound methods >< dumb
# class RequestMethod(Enum):
#     GET=requests.get
#     POST=requests.post
_RequestMethods = namedtuple('RequestMethod', ['GET', 'POST', 'PUT'])
_RequestMethod = {
    _RequestMethods.GET: requests.get,
    _RequestMethods.POST: requests.post,
    _RequestMethods.PUT: requests.put
}


def _apiRequest(reqURL, reqParams, reqMethod):
    response = reqMethod(reqURL, reqParams)  # auth=('user', 'pw')
    requestStatus = response.status_code
    if requestStatus != _requestSuccessful:
        raise RequestError("Request failed with error code " + str(requestStatus))
    return response


def _request(path, params, methodName):
    reqURL = urljoin(_apiURL, path)
    reqParams = params
    reqMethod = _RequestMethod[methodName]
    return _apiRequest(reqURL, reqParams, reqMethod)


# _dateTimeFormatString = '%Y-%m-%d %H:%M:%S:%f'  # TODO Support microseconds here
_dateTimeFormatString = '%Y-%m-%d %H:%M:%S'


def timeToSQL(pyTime):
    return pyTime.strftime(_dateTimeFormatString)


def timeFromSQL(sqlTime):
    return datetime.strptime(sqlTime, _dateTimeFormatString)


class RequestError(Exception):
    pass


class Connection:

    __loginAuthPath = "loginauth"

    def __init__(self, username=None, password=None):
        if not username:
            username = raw_input("Username: ")
        if not password:
            password = getpass()
        self.username = username
        self.__password = password  # TODO We should be using salted hashes for this ><
        self.loginID = None
        self.__connect()
        self.session = None

    # def __del__(self):
    #     if self.session:
    #         self.session.updateSession()

    class AuthError(RequestError):
        pass

    def __connect(self):
        loginAuthParams = _apiParams(username=self.username, password=self.__password)
        response = _request(Connection.__loginAuthPath, loginAuthParams,
                            _RequestMethods.POST)
        # if responseText == "NOTFOUND":
        try:
            self.loginID = int(response.text)
        except:
            raise Connection.AuthError(
                "Could not establish authenticated connection with API")

    @property
    def connected(self):
        return self.loginID is not None

    def openNewSession(self, sessionType, sessionStartTime=None, initialTimestamp=None):
        if not self.connected:
            if not self.__connect():
                raise Connection.AuthError(
                    "Can't open new session -- no valid authenticated connection to API")
        self.session = Session.openNewSession(
            self.loginID, sessionType, sessionStartTime, initialTimestamp)
        return self.session

    def openSession(self, sessionID):
        if not self.connected:
            if not self.__connect():
                raise Connection.AuthError(
                    "Can't open a session -- no valid authenticated connection to API")
        self.session = Session.openSession(sessionID, self.loginID)
        return self.session


class Session:

    __sessionPath = "session"

    class SessionType(IntEnum):
        SLEEP = 0
        PEDOMETRY = 1
        REP_COUNT = 2

    def __init__(self, loginID, sessionID, sessionType, sessionStartTime,
                 initialTimestamp=None, sessionEndTime=None, sessionQuality=None):
        self.loginID = loginID
        self.sessionID = sessionID
        self.sessionType = sessionType
        self.sessionStartTime = sessionStartTime
        self.sessionEndTime = sessionEndTime
        self.initialTimestamp = initialTimestamp
        self.sessionQuality = sessionQuality

    @classmethod
    def openNewSession(cls, loginID, sessionType, sessionStartTime=None,
                       initialTimestamp=None):
        if not sessionStartTime:
            sessionStartTime = datetime.now()
        sessionStartParam = timeToSQL(sessionStartTime)
        sessionTypeID = int(sessionType)
        sessionIDParams = _apiParams(loginID=loginID, sessionTypeID=sessionTypeID,
                                     sessionStartTime=sessionStartParam)
        response = _request(Session.__sessionPath, sessionIDParams,
                            _RequestMethods.POST)
        responseJSON = response.json()
        sessionID = responseJSON['id']
        return cls(loginID, sessionID, sessionType, sessionStartTime, initialTimestamp)

    @classmethod
    def openSession(cls, sessionID, loginID=None):
        path = Session.__sessionPath + "/" + str(sessionID)
        params = None
        response = _request(path, params, _RequestMethods.GET)
        responseJSON = response.json()
        sessionLoginID = responseJSON['loginID']
        if loginID is not None:
            if loginID != sessionLoginID:
                raise RequestError(
                    "Current loginID doesn't match the loginID of the requested session")
        sessionType = cls.SessionType(int(responseJSON['sessionTypeID']))
        sessionStartTime = timeFromSQL(responseJSON['sessionStartTime'])
        sessionEndTime = timeFromSQL(responseJSON['sessionEndTime'])
        sessionQuality = int(responseJSON['sessionQuality'])
        initialTimestamp = None  # TODO
        return cls(sessionLoginID, sessionID, sessionType, sessionStartTime,
                   initialTimestamp, sessionEndTime, sessionQuality)

    def updateSession(self):
        updateSessionPath = Session.__sessionPath + "/" + str(self.sessionID)
        sessionTypeID = int(self.sessionType)
        sessionStartTime = timeToSQL(self.sessionStartTime)
        sessionEndTime = timeToSQL(self.sessionEndTime)
        params = _apiParams(loginID=self.loginID, sessionTypeID=sessionTypeID,
                            sessionStartTime=sessionStartTime,
                            sessionEndTime=sessionEndTime,
                            sessionQuality=self.sessionQuality)
        response = _request(updateSessionPath, params, _RequestMethods.PUT)
        responseJSON = response.json()
        sessionLoginID = responseJSON['loginID']

    # TODO Encapsulate the below
    __allDPSubPath = "sbysessionid/"

    def getDatapoints(self, dpSubPath):
        path = dpSubPath + self.__allDPSubPath + str(self.sessionID)
        params = None
        response = _request(path, params, _RequestMethods.GET)
        return response.json()

    def getFirstTimestamp(self, dpSubPath, currTimestamp, timestampKey):
        datapoints = self.getDatapoints(dpSubPath)
        if isinstance(datapoints, list):
            if not datapoints:
                # If no entries
                firstTimestamp = currTimestamp
            else:
                # If multiple entries
                firstTimestamp = min(timeFromSQL(dp[timestampKey]) for dp in datapoints)
        else:
            # If 1 entry
            firstTimestamp = datapoints[timestampKey]
        return firstTimestamp

    def __genericDataPointParams(self, dpSubPath, timestamp, timestampKey):
        paramsDict = {'sessionID': self.sessionID}
        if self.initialTimestamp is None:
            # self.initialTimestamp = self.getFirstTimestamp(dpSubPath, timestamp, timestampKey)
            self.initialTimestamp = timestamp
        # Device timestamps are microsecond counters; convert the offset from
        # the first timestamp into a wall-clock time for the database.
        microsecondsDiff = timestamp - self.initialTimestamp
        timeDiff = timedelta(microseconds=microsecondsDiff)
        pyTimestamp = self.sessionStartTime + timeDiff
        sqlTime = timeToSQL(pyTimestamp)
        paramsDict[timestampKey] = sqlTime
        return paramsDict

    __emgPath = "emgdatapoint"

    def addEMGDataPoint(self, timestamp, emgData):
        paramsDict = self.__genericDataPointParams(
            Session.__emgPath, timestamp, 'emgpDateTime')
        for podNum, podData in enumerate(emgData, 1):
            paramsDict['emgpPod' + str(podNum)] = podData
        params = _apiParams(**paramsDict)
        response = _request(Session.__emgPath, params, _RequestMethods.POST)

    __gyroPath = "rotationdatapoint"

    def addGyroDataPoint(self, timestamp, gyroData):
        paramsDict = self.__genericDataPointParams(
            Session.__gyroPath, timestamp, 'rdpDateTime')
        paramsDict['rdpXRotation'] = gyroData[0]
        paramsDict['rdpYRotation'] = gyroData[1]
        paramsDict['rdpZRotation'] = gyroData[2]
        params = _apiParams(**paramsDict)
        response = _request(Session.__gyroPath, params, _RequestMethods.POST)

    __accelPath = "accelerationdatapoint"

    def addAccelDataPoint(self, timestamp, accelData):
        paramsDict = self.__genericDataPointParams(
            Session.__accelPath, timestamp, 'adpDateTime')
        paramsDict['adpXAcceleration'] = accelData[0]
        paramsDict['adpYAcceleration'] = accelData[1]
        paramsDict['adpZAcceleration'] = accelData[2]
        params = _apiParams(**paramsDict)
        response = _request(Session.__accelPath, params, _RequestMethods.POST)

    __orientPath = "orientationdatapoint"

    def addOrientDataPoint(self, timestamp, orientData):
        # TODO Decide what to do about quaternions vs Euler angles
        paramsDict = self.__genericDataPointParams(
            Session.__orientPath, timestamp, 'odpDateTime')
        paramsDict['odpXRotation'] = orientData[0]
        paramsDict['odpYRotation'] = orientData[1]
        paramsDict['odpZRotation'] = orientData[2]
        params = _apiParams(**paramsDict)
        response = _request(Session.__orientPath, params, _RequestMethods.POST)


def main():
    testUsername = "walker"
    testPassword = "walkersux"
    connection = Connection(testUsername, testPassword)
    session = connection.openNewSession(Session.SessionType.SLEEP)
    session.initialTimestamp = 179484700928
    dpTimestamp = 179488247655
    dpData = [-1.062011719, 0.30859375, 0.79296875]
    session.addAccelDataPoint(dpTimestamp, dpData)
    # TODO datapoint datatypes of Double(8,2)
    # accell, orient: signed double(10,9)
    # gyro: signed double(7, 4)
    # emg: signed smallint


if __name__ == "__main__":
    main()
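To make the timestamp arithmetic in __genericDataPointParams concrete, here is a worked example with the sample counter values from main(); the session start time below is an arbitrary placeholder:

from datetime import datetime, timedelta

session_start = datetime(2016, 4, 1, 12, 0, 0)  # placeholder sessionStartTime
initial_ts = 179484700928                       # first datapoint counter (microseconds)
dp_ts = 179488247655                            # a later datapoint counter

diff = timedelta(microseconds=dp_ts - initial_ts)  # 3.546727 seconds
print(session_start + diff)                        # 2016-04-01 12:00:03.546727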
Aubrey Bouck is a loving wife, and mother of 2. When she isn’t creating jewelry, she is often found outdoors, in her garden, or playing with her many pets. She is almost entirely self-taught, often inspired by other artists, and enjoys the challenge of learning new techniques. She hosts a YouTube channel, called Aubrey of Brandavir, where she enjoys teaching others the techniques she has learned, and sharing her creation processes. She also operates an Etsy storefront to sell her artwork. While her favorite medium is wire, she is currently teaching herself to use Metal Clay.
# -*- coding: utf-8 -*-

"""
.. module:: tests.test_config
   :synopsis: Tests for the Facio config module.
"""

import os
import six

from mock import MagicMock, patch, PropertyMock

from facio.config import ConfigurationFile, CommandLineInterface, Settings
from facio.exceptions import FacioException
from six import StringIO
from six.moves import configparser as ConfigParser
from textwrap import dedent

from . import BaseTestCase


class TestCommandLineInterface(BaseTestCase):

    def setUp(self):
        self._patch_clint([
            'facio.exceptions.puts',
        ])
        patcher = patch('facio.config.state.state',
                        new_callable=PropertyMock,
                        create=True)
        self.mock_state = patcher.start()
        self.mock_state.context_variables = {}
        self.addCleanup(patcher.stop)

    @patch('facio.config.docopt')
    @patch('facio.config.CommandLineInterface.validate_project_name')
    def test_project_name_should_be_validated(
            self, mock_validate, mock_docopt):
        mock_docopt.return_value = {
            '<project_name>': 'foo'
        }
        i = CommandLineInterface()
        i.start()
        mock_validate.assert_called_with('foo')

    def test_valid_project_name(self):
        valid_names = [
            'this_is_valid',
            'this1is_valid',
            'Thisisvalid']
        i = CommandLineInterface()
        for name in valid_names:
            i.validate_project_name(name)
            self.assertEqual(name, self.mock_state.project_name)
            self.assertEqual({'PROJECT_NAME': name},
                             self.mock_state.context_variables)

    @patch('sys.exit')
    def test_invalid_project_name(self, mock_exit):
        invalid_names = [
            'this_is_not-valid',
            'this_is not_valid',
            '*this_is_not_valid']
        i = CommandLineInterface()
        for name in invalid_names:
            with self.assertRaises(FacioException):
                i.validate_project_name(name)
            self.mocked_facio_exceptions_puts.assert_any_call(
                'Error: Project names can only contain numbers letters and '
                'underscores')


class TestConfigurationFile(BaseTestCase):
    """ Tests for facio.config.ConfigurationFile. """

    config_path = os.path.expanduser('~/.facio.cfg')

    def setUp(self):
        self._patch_clint([
            'facio.exceptions.puts',
            'facio.config.ConfigurationFile.out',
            'facio.config.ConfigurationFile.warning',
        ])

    def _patch_open(self, data):
        if six.PY3:
            func = 'builtins.open'
        else:
            func = '__builtin__.open'
        patcher = patch(func, return_value=StringIO(data))
        self.addCleanup(patcher.stop)
        return patcher

    @patch('facio.config.ConfigParser.ConfigParser.readfp')
    def test_warning_no_config_file(self, mock_readfp):
        mock_readfp.side_effect = IOError
        c = ConfigurationFile()
        c.read()
        self.mocked_facio_config_ConfigurationFile_warning.assert_any_call(
            "{0} Not found".format(self.config_path))

    @patch('sys.exit')
    def test_config_read_parse_error(self, exit_mock):
        config = dedent("""\
        [this_is

        not = formatted
        correctly
        """)
        patch_open = self._patch_open(config)
        patch_open.start()
        with self.assertRaises(FacioException):
            c = ConfigurationFile()
            c.read()
        self.mocked_facio_exceptions_puts.assert_any_call(
            "Error: Unable to parse {0}".format(self.config_path))
        self.assertTrue(exit_mock.called)

    def test_config_read_success(self):
        config = dedent("""\
        [template]
        template1 = /foo/bar/baz
        template2 = /baz/bar/foo
        """)
        patch_open = self._patch_open(config)
        patch_open.start()
        c = ConfigurationFile()
        c.read()
        self.mocked_facio_config_ConfigurationFile_out.assert_any_call(
            "Loaded {0}".format(self.config_path))


class TestSettings(BaseTestCase):

    def setUp(self):
        self._patch_clint([
            'facio.exceptions.puts',
            'facio.config.Settings.out',
            'facio.config.Settings.warning',
            'facio.config.Settings.error',
        ])
        # Mocks for ConfigFile and CommandLineInterface classes
        self.mock_interface()
        self.config = MagicMock()

    def mock_interface(self):
        self.interface = MagicMock()
        arguments = PropertyMock(return_value={
            '<project_name>': 'foo'
        })
        type(self.interface).arguments = arguments

    def test_attrs_set_on_init(self):
        s = Settings(self.interface, self.config)
        self.assertIsInstance(s.config, MagicMock)
        self.assertIsInstance(s.interface, MagicMock)

    @patch('sys.exit')
    def test_exception_raised_select_template_no_config(self, mock_exit):
        arguments = PropertyMock(return_value={'--select': True})
        type(self.interface).arguments = arguments
        self.config.items.side_effect = ConfigParser.NoSectionError('template')
        s = Settings(self.interface, self.config)
        with self.assertRaises(FacioException):
            with self.assertRaises(ConfigParser.NoSectionError):
                s.get_template_path()
        self.mocked_facio_exceptions_puts.assert_any_call(
            'Error: Missing [template] section in Facio configuration file.')
        self.assertTrue(mock_exit.called)

    @patch('sys.exit')
    def test_default_template_returned_none_defined(self, mock_exit):
        arguments = PropertyMock(return_value={'--select': False})
        type(self.interface).arguments = arguments
        s = Settings(self.interface, self.config)
        path = s.get_template_path()
        self.assertEqual(Settings.default_template_path, path)

    def test_path_returned_if_not_alias(self):
        arguments = PropertyMock(return_value={'--template': '/foo/bar/baz'})
        type(self.interface).arguments = arguments
        s = Settings(self.interface, self.config)
        path = s.get_template_path()
        self.assertEqual(path, '/foo/bar/baz')

    def test_path_returned_from_alias(self):
        arguments = PropertyMock(return_value={'--template': 'foobar'})
        type(self.interface).arguments = arguments
        self.config.items.return_value = [('foobar', '/foo/bar/baz')]
        s = Settings(self.interface, self.config)
        path = s.get_template_path()
        self.assertEqual(path, '/foo/bar/baz')

    @patch('facio.base.input')
    def test_template_selection_input_success(self, mock_input):
        arguments = PropertyMock(return_value={'--select': True})
        type(self.interface).arguments = arguments
        self.config.items.return_value = [
            ('foo', '/foo'),
            ('bar', '/bar'),
            ('baz', '/baz'),
        ]
        mock_input.return_value = 1
        s = Settings(self.interface, self.config)
        path = s.get_template_path()
        self.assertEqual(path, '/foo')

    @patch('sys.exit')
    @patch('facio.base.input')
    def test_template_selection_input_error(self, mock_input, mock_exit):
        arguments = PropertyMock(return_value={'--select': True})
        type(self.interface).arguments = arguments
        self.config.items.return_value = [
            ('foo', '/foo'),
        ]
        mock_input.return_value = 0
        s = Settings(self.interface, self.config)
        with self.assertRaises(FacioException):
            with self.assertRaises(ValueError):
                s.get_template_path()
        self.mocked_facio_exceptions_puts.assert_any_call(
            'Error: A template was not selected')
        self.assertTrue(mock_exit.called)

    def test_get_variables_from_cli(self):
        arguments = PropertyMock(return_value={'--vars': 'foo=bar'})
        type(self.interface).arguments = arguments
        s = Settings(self.interface, self.config)
        self.assertEqual(s.get_variables(), {'foo': 'bar'})

    def test_empty_copy_ignore_no_files_section(self):
        self.config.get.side_effect = ConfigParser.NoSectionError('files')
        s = Settings(self.interface, self.config)
        self.assertEqual(s.copy_ignore_globs(), [])

    def test_empty_copy_ignore_no_option(self):
        self.config.get.side_effect = ConfigParser.NoOptionError(
            'files', 'copy_ignore')
        s = Settings(self.interface, self.config)
        self.assertEqual(s.copy_ignore_globs(), [])

    def test_copy_ignore_returned_as_list(self):
        self.config.get.return_value = 'foo=bar,baz=foo'
        s = Settings(self.interface, self.config)
        self.assertEqual(s.copy_ignore_globs(), ['foo=bar', 'baz=foo'])

    def test_empty_render_ignore_no_section(self):
        self.config.get.side_effect = ConfigParser.NoSectionError('files')
        s = Settings(self.interface, self.config)
        self.assertEqual(s.render_ignore_globs(), [])

    def test_empty_render_ignore_no_option(self):
        self.config.get.side_effect = ConfigParser.NoOptionError(
            'files', 'render_ignore')
        s = Settings(self.interface, self.config)
        self.assertEqual(s.render_ignore_globs(), [])

    def test_render_ignore_returned_as_list(self):
        self.config.get.return_value = 'foo=bar,baz=foo'
        s = Settings(self.interface, self.config)
        self.assertEqual(s.render_ignore_globs(), ['foo=bar', 'baz=foo'])
# -*- coding:utf-8 -*-
import json
import os

from flask import Flask, request

from message import pr_msg, ci_failed_msg
from wechat_sdk import WeChat

app = Flask(__name__)


@app.route('/')
def hello_world():
    return 'Hello World!'


# The GitHub webhook route is only registered when its WeChat credentials exist.
if 'GITHUB_WECHAT_CORP_SECRET' in os.environ.keys():
    we_github = WeChat(
        url=os.environ.get('WECHAT_BASE_URL'),
        corp_id=os.environ.get('WECHAT_CORP_ID'),
        corp_secret=os.environ.get('GITHUB_WECHAT_CORP_SECRET'),
        agent_id=os.environ.get('GITHUB_WECHAT_AGENT_ID')
    )

    @app.route('/wechat', methods=['POST'])
    def github():
        payload = json.loads(request.data)
        repo = {
            'full_name': payload['repository']['full_name'],
            'owner': payload['repository']['owner']['login']
        }
        if 'pull_request' in payload:
            if payload['action'] not in ['closed', 'opened', 'reopened']:
                return 'ignore ' + payload['action']
            we_github.auto_send_text_card_message(
                pr_msg(repo, payload)
            )
            return 'yoyoyo'
        elif 'issue' in payload:
            # A bare `return` would make Flask raise; return a body instead.
            return 'ignore issue'
        else:
            return 'not support'


# Likewise, the Jenkins route is only registered when its credentials exist.
if 'CI_WECHAT_CORP_SECRET' in os.environ.keys():
    we_ci = WeChat(
        url=os.environ.get('WECHAT_BASE_URL'),
        corp_id=os.environ.get('WECHAT_CORP_ID'),
        corp_secret=os.environ.get('CI_WECHAT_CORP_SECRET'),
        agent_id=os.environ.get('CI_WECHAT_AGENT_ID')
    )

    @app.route('/jenkins', methods=['POST'])
    def jenkins():
        payload = request.json
        if 'FINALIZED' in payload['build']['phase']:
            we_ci.auto_send_text_card_message(
                ci_failed_msg(payload)
            )
            return 'yoyoyo'
        return 'ignore'


if __name__ == '__main__':
    app.run()
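A quick way to exercise the /wechat route without wiring up a real GitHub webhook is Flask's built-in test client. The sketch below posts a truncated, made-up pull-request event containing only the keys the handler actually reads; the module name `app` is a hypothetical filename for the file above, and it assumes the GITHUB_WECHAT_* environment variables were exported before import (so the route exists) and that the WeChat SDK call can succeed or is stubbed out.

# Minimal sketch: exercising /wechat with Flask's test client.
# `app` is a hypothetical module name for the webhook file above; the payload
# is a made-up, truncated GitHub pull-request event.
import json
from app import app

fake_pr_event = {
    'action': 'opened',
    'pull_request': {},  # only its presence is checked by the handler
    'repository': {
        'full_name': 'octocat/hello-world',
        'owner': {'login': 'octocat'},
    },
}

client = app.test_client()
resp = client.post('/wechat', data=json.dumps(fake_pr_event),
                   content_type='application/json')
print("%s %s" % (resp.status_code, resp.data))  # expect 200 and 'yoyoyo'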
Let's be difference makers in our community by volunteering in our local elementary schools! Help kids with their reading or math, or spend time in the lunch room as a lunch buddy. Background check and training required.
# Name: Cobham Aviator/Explorer/Sailor admin reset code generator
# File: admin_reset_code.py
# Author: Ján Trenčanský
# License: GNU GPL v3
# Created: 4.12.2015
# Last modified: 4.12.2015
# Shodan Dork:
# Description: generates predictable admin reset code for Cobham Aviator/Explorer/Sailor - CVE-2014-2943
# Based on work by Sinnet3000 and
# https://www.blackhat.com/docs/us-14/materials/us-14-Santamarta-SATCOM-Terminals-Hacking-By-Air-Sea-And-Land.pdf

import hashlib

import core.Misc
import core.io
from interface.messages import print_help, print_info


class Misc(core.Misc.RextMisc):
    """
    Name: Cobham Aviator/Explorer/Sailor admin reset code generator
    File: admin_reset_code.py
    Author: Ján Trenčanský
    License: GNU GPL v3
    Created: 4.12.2015
    Description: generates predictable admin reset code for Cobham Aviator/Explorer/Sailor - CVE-2014-2943
    Based on: Work by Sinnet3000 and
    https://www.blackhat.com/docs/us-14/materials/us-14-Santamarta-SATCOM-Terminals-Hacking-By-Air-Sea-And-Land.pdf

    Options:
        Name    Description
        serial  Serial number of the device
    """
    serial = "12345678"

    def __init__(self):
        core.Misc.RextMisc.__init__(self)

    def do_set(self, e):
        args = e.split(' ')
        if args[0] == "serial":
            self.serial = args[1]
            print_info("Serial number set to: " + self.serial)

    def do_run(self, e):
        # Reset code = MD5( serial bytes || 12 zero bytes || static firmware key )
        m = hashlib.md5()
        m.update(bytearray.fromhex(self.serial) + b'\x00' * 12 + "kdf04rasdfKKduzA".encode('utf-8'))
        code = m.hexdigest()
        print("Reset code: " + code)

    def do_serial(self, e):
        print_info(self.serial)

    def help_set(self):
        print_help("Set value of variable: \"set serial 12345678\"")

    def help_serial(self):
        print_help("Prints value of variable serial")

Misc()
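Because the derivation is a single MD5 call, it can be reproduced without the surrounding REXT framework. Here is a minimal standalone sketch using the module's placeholder serial (not a real device's):

# Standalone sketch of the CVE-2014-2943 reset-code derivation:
# MD5( serial_bytes || 12 zero bytes || fixed key from the firmware ).
# The serial is the placeholder value from the module above.
import hashlib

serial = "12345678"  # hex-encoded device serial (placeholder)

digest = hashlib.md5(
    bytearray.fromhex(serial) + b'\x00' * 12 + b'kdf04rasdfKKduzA'
).hexdigest()
print("Reset code: " + digest)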
The Mid-States MSDC offers your business an excellent opportunity to meet one-on-one with corporate buyers who are interested in working with qualified MBEs. We offer many formal and informal opportunities to meet with corporate buyers, learn about state and local opportunities, improve your professional alliances and enhance your business. In short, the Mid-States MSDC offers access to more business opportunities with Fortune 500 and local and regional corporations. In addition, we offer Subscription Services to MBEs who want to stay connected to additional networking, programs and training above and beyond what is provided through regular certification. The Mid-States MSDC is providing a guide for submitting new applications for certification to ensure quality service to all potential certified MBEs. Our goal is to process all certification applications within 45 to 60 business days of receiving a completed file. A completed file is one submitted with all information on the application completely filled in, along with all supporting documentation as it pertains to your business and business structure. If your file is received incomplete, we will notify you of any omissions and you will have an opportunity to return all requested materials. Once you have submitted the complete information, the 45-to-60-business-day processing period begins. Our Board of Directors meets every second Friday of each month to review certification applications. A site visit will be scheduled and performed prior to the board meeting. In order for your application to be reviewed during any given month, the Mid-States MSDC must have received a completed file by the application deadline date outlined below.
import socket
import struct
import binascii
import time

import requests

# MAC addresses of known Amazon Dash buttons
macs = {
    '74c246c3e349': 'dash_tide'
}


def post_data():
    data = {
        "date": time.strftime("%Y-%m-%d"),
        "tally": '1'
    }
    requests.post("https://sheetsu.com/apis/390c6259", data)


def record_tally():
    print 'triggering tally... '
    post_data()


# Raw socket receiving all ethernet frames (requires root; Linux only)
rawSocket = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.htons(0x0003))

while True:
    packet = rawSocket.recvfrom(2048)

    ethernet_header = packet[0][0:14]
    ethernet_detailed = struct.unpack("!6s6s2s", ethernet_header)

    arp_header = packet[0][14:42]
    arp_detailed = struct.unpack("2s2s1s1s2s6s4s6s4s", arp_header)

    # skip non-ARP packets
    ethertype = ethernet_detailed[2]
    if ethertype != '\x08\x06':
        continue

    source_mac = binascii.hexlify(arp_detailed[5])
    source_ip = socket.inet_ntoa(arp_detailed[6])
    dest_ip = socket.inet_ntoa(arp_detailed[8])

    if source_mac in macs:
        # print "ARP from " + macs[source_mac] + " with IP " + source_ip
        if macs[source_mac] == 'dash_tide':
            record_tally()
    else:
        print "Unknown MAC " + source_mac + " from IP " + source_ip
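For reference, the "2s2s1s1s2s6s4s6s4s" format string in the script maps onto the fixed 28-byte ARP packet layout. The sketch below (Python 2 style, matching the script above) builds a synthetic ARP request and names each unpacked field; all addresses in it are made up for illustration.

# Sketch: what the "2s2s1s1s2s6s4s6s4s" unpack corresponds to in an ARP packet.
# All addresses below are made up; Python 2 syntax to match the sniffer above.
import socket
import struct
import binascii

arp = struct.pack(
    "2s2s1s1s2s6s4s6s4s",
    '\x00\x01',                        # hardware type: Ethernet
    '\x08\x00',                        # protocol type: IPv4
    '\x06',                            # hardware address length
    '\x04',                            # protocol address length
    '\x00\x01',                        # opcode: request
    '\x74\xc2\x46\xc3\xe3\x49',        # sender MAC (field 5 in the script)
    socket.inet_aton('192.168.1.50'),  # sender IP  (field 6)
    '\x00' * 6,                        # target MAC, unknown in a request
    socket.inet_aton('192.168.1.1'),   # target IP  (field 8)
)

fields = struct.unpack("2s2s1s1s2s6s4s6s4s", arp)
print binascii.hexlify(fields[5])      # -> '74c246c3e349'
print socket.inet_ntoa(fields[6])      # -> '192.168.1.50'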
As many Pecan Grove residents are aware, the district has called for an election on May 4th, 2019, to obtain authorization for bonds and to raise the cap on the maintenance and operations tax rate. The May 4, 2019 election will include two propositions that registered voters of the district will be asked to vote on, Proposition A and Proposition B. Proposition A provides the funding for the District's plan for the rehabilitation and long-term improvement of our aging water, sewer and drainage systems. Proposition B provides the district an option, if needed in the future, to increase the Maintenance and Operations (M&O) tax rate beyond the current 25-cent cap instead of raising water rates to generate revenue that might be required for operations.

The Board of Directors will be hosting a Resident Awareness Open House on Tuesday, April 16, 2019 at the Pecan Grove Plantation Country Club. We will be on the second (2nd) floor, from 6 p.m. to 8 p.m.; residents are encouraged to come at their convenience any time during that window to speak with resident directors and the district consultants and review the exhibits, regardless of whether they are registered to vote. There will be exhibits outlining the engineering plans for the district, drainage solutions, bond and financial details, and more. In addition to your 5 resident board members, district consultants will be on hand to answer your questions. As this process has proceeded, many residents have asked a number of financial questions via the website. For those who may not be able to attend the open house, we have summarized the most frequently asked questions in this news post.

How much is the district asking to authorize in bonds with Proposition A? The bond proposal is set at $33,600,000. The Board has discussed all possible scenarios with its financial advisor; once authorized, the bonds would be sold in phases to allow for continued paydown of existing debt while accomplishing priority engineering tasks, starting with critical drainage improvements.

What is the impact to my bills? Under current goals and assumptions, there is no planned impact to your water bill, and the anticipated tax impact is an initial 2.5-cent (0.025) increase as a result of issuing approximately $5,450,000 principal amount of bonds in late 2019 to address critical drainage improvements. Assuming the best case with the information at hand, this would bring debt service to 39 cents (0.39) per $100, which is roughly where it was 4 years ago. Assuming an average home value of $244,400, a 2.5-cent increase per $100 assessed value results in an increase of $61.10 to the tax bill annually, based on current assumptions and goals. The all-resident PGMUD Board is dedicated to completing the projects in a timely fashion while managing existing bond debt efficiently and remaining mindful of the tax impact to all residents.

With Proposition B, is there a cap on increasing the M&O tax rate? Short answer: There is no current plan to raise the M&O tax rate if the M&O authorization passes. Long answer: Currently, the cap is set at $0.25/$100 of your assessed property value. If the election were to pass, the cap would be raised to $0.75, giving the district the option to raise the M&O tax rate, if needed, in the future.
However, at this time, the resident directors on the board do not intend or anticipate needing to raise the M&O tax; they are merely asking for the option to be able to raise it. The election is an authorization, not an actual increase. As residents, the directors are also impacted by tax rate increases and are committed to managing the rates and keeping them as low as possible while servicing the district's needs.

Is the M&O tax rate applied as a tax on the taxable value of our property? The M&O tax is part of your ad valorem (property) tax assessed by the district. Pecan Grove MUD assesses both a debt service tax and the M&O tax. The combined MUD tax rate right now is $0.615/$100 of your assessed property value. With the first bond issuance (if approved by voters) addressing critical drainage concerns, the anticipated debt service increase would be less than 3 cents per $100 ($0.025/$100). Keep in mind this is based on conditional research and estimates, but it is the anticipation that will be presented at the open house.

What is the current indebtedness of the district? The current principal amount of the District's outstanding debt is $51,515,000, and the associated debt service tax rate is 36.5 cents (0.365) per $100 assessed value. Thus, for the average home value of $244,400, that adds up to $892.06 per year. Over the last five years, the District has lowered the debt service tax rate through repayment and fiscal management from 41 cents (0.41) to the current rate of 36.5 cents. For more information on your tax rate and water rate, click here to view the EVO dashboard showing comparative tax and water rates.

What happens if the Propositions do not pass? As residents, the Board of Directors do not want to increase water rates or taxes beyond what is necessary, and only then to service the needs of the district. As you can see from the answers above, the lowest impact on residents is funding through the sale of bonds.

Again, we encourage everyone to come out to the Open House on April 16th, 2019, and ask questions of the resident directors and their consultants. If you can't make it, be sure to share this information with your neighbors through social media! All residents should have received a notice with their most recent water bill about the open house described above. If you have any further questions regarding the open house event or the upcoming election, please use the contact form on the website for "election information".
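For readers who want to check the arithmetic behind these figures, here is a minimal sketch; the home value and rates are taken directly from the post, and the function name is purely illustrative.

# Minimal sketch of the MUD tax arithmetic quoted in the post.
# Rates are in dollars per $100 of assessed value; figures come from the post.
def annual_mud_tax(assessed_value, rate_per_100):
    return round(assessed_value / 100.0 * rate_per_100, 2)

avg_home = 244400

print(annual_mud_tax(avg_home, 0.365))  # current debt service      -> 892.06
print(annual_mud_tax(avg_home, 0.025))  # proposed 2.5-cent increase -> 61.1
print(annual_mud_tax(avg_home, 0.390))  # anticipated debt service  -> 953.16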
#!/home/software/SACLA_tool/bin/python2.7
import os
from argparse import ArgumentParser

parser = ArgumentParser(description="Wrapper to run 01_process_runs.py that can be run as batch job")
parser.add_argument("-start", "--start-run", type=int, dest="start", required=True,
                    help="first run to process")
parser.add_argument("-stop", "--stop-run", type=int, dest="stop", default=0,
                    help="last run to process (default: only process 1 run)")
parser.add_argument("-exp", "--exp-year", type=int, dest="exp", default=2016,
                    help="experimental year to compress")
parser.add_argument("-multi", "--multi-run", action="store_true", dest="multi", required=False, default=False,
                    help="process multi-file run converted using DataConvert4")
parser.add_argument("-force", "--force-run", action="store_true", dest="force", required=False, default=False,
                    help="force run to process (overwrite existing folder)")
parser.add_argument("-tag", "--output-tag", type=str, dest="tag", default="run",
                    help="tag for output folder (default: run)")
parser.add_argument("-o", "--output-flag", type=str, dest="outputFlag",
                    help="where to process runs. 'W' refers to /work/perakis/ and 'UD' refers to '/UserData/fperakis'",
                    choices=['W', 'UD'], default='UD')
args = parser.parse_args()

log_dump = '/home/fperakis/qsub_dump/processed/'

if args.stop < args.start:
    args.stop = args.start
assert args.stop >= args.start

for run in xrange(args.start, args.stop + 1):
    RUN = str(run)  # run number as string

    # create a batch file as a string (header lines)
    # batchFile = '#PBS -u fperakis\n'
    batchFile = '#PBS -N %s_process\n' % RUN
    batchFile += '#PBS -d %s\n' % log_dump

    # final line is the command itself
    cmd = ['%s/01_process_runs.py -start %s -exp %d -tag %s -o %s' %
           (os.getcwd(), RUN, args.exp, args.tag, args.outputFlag)]
    if args.multi:
        cmd.append('-multi')
    if args.force:
        cmd.append('-force')
    batchFile = batchFile + ' '.join(cmd)

    # save a temp batch file and direct it to qsub
    outfile = open('tmpc.batch', 'w')
    outfile.write(batchFile)
    outfile.close()
    os.system('qsub < tmpc.batch')
    os.remove('tmpc.batch')

# check the status of the submitted job using:
#   'qstat'
# check output of the submitted job using:
#   'tail /home/fperakis/qsub_dump/processed/%s_process.o*'
# check error of the submitted job using:
#   'tail /home/fperakis/qsub_dump/processed/%s_process.e*'
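To make the generated PBS script concrete, here is a small sketch that mirrors the wrapper's string construction and prints what tmpc.batch would contain for one hypothetical run (run 100 with the default tag and output flag):

# Sketch: contents of tmpc.batch for a hypothetical run 100, default options,
# mirroring the string construction in the wrapper above.
import os

RUN, exp, tag, outputFlag = '100', 2016, 'run', 'UD'
log_dump = '/home/fperakis/qsub_dump/processed/'

batchFile = '#PBS -N %s_process\n' % RUN
batchFile += '#PBS -d %s\n' % log_dump
batchFile += '%s/01_process_runs.py -start %s -exp %d -tag %s -o %s' % (
    os.getcwd(), RUN, exp, tag, outputFlag)

print(batchFile)
# #PBS -N 100_process
# #PBS -d /home/fperakis/qsub_dump/processed/
# <cwd>/01_process_runs.py -start 100 -exp 2016 -tag run -o UD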
It's the fourth day of my blogversary celebration and I'm having such fun celebrating with each of you! I hope that you are finding tons of wonderful resources as you browse the daily prizes. I know I've already stocked up on some of these for the back to school season, which is quickly approaching! Speaking of which, today's giveaway prize pack has so many prizes that I couldn't fit them all in one picture. It is truly a back to school jackpot in that it includes FABULOUS resources that will start your year off on the right foot! There's an editable flip book, spelling activities for any list, an adorable calendar, interactive foldables that tackle classroom procedures, back to school writing prompts, a fun STEM activity, an IEP/504/RTI input & collaboration pack, and a winner's choice item from a store with tons of goodies! If you win this one, you'll be more than ready to get this wonderful year started! I LOVE Friday dress down days. I have TONS of super cute t-shirts with witty sayings on them and I'm always adding more to my collection. My go-to place for teacher t-shirts is easy to remember because it's called Teacher Shirts and it's by A+ images! It can't get any easier than that! I am constantly checking their site for new designs and they just added some pretty fabulous t-shirts! They have a wide range for many different positions and grade levels and lucky you, I've got a giveaway for one lucky person to win one of these fabulous shirts! The winner can choose any one shirt from their site, up to a $25.00 value. You'll be even more excited for dress down day with one of these adorable shirts! Here's just a few of the designs you will have to choose from and the link if you'd like to see more! I've got one more day of celebration coming your way tomorrow! Don't forget to check out the giveaways that are still open for entry in days #1-#3 as well!
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'globalt_dialog_base.ui'
#
# Created: Sat Jan 7 15:11:01 2017
#      by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!

from PyQt4 import QtCore, QtGui

try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)


class Ui_Dialog(object):
    def setupUi(self, Dialog):
        Dialog.setObjectName(_fromUtf8("Dialog"))
        Dialog.resize(561, 471)
        self.gridLayout_3 = QtGui.QGridLayout(Dialog)
        self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
        self.splitter = QtGui.QSplitter(Dialog)
        self.splitter.setOrientation(QtCore.Qt.Horizontal)
        self.splitter.setObjectName(_fromUtf8("splitter"))
        self.label = QtGui.QLabel(self.splitter)
        self.label.setObjectName(_fromUtf8("label"))
        self.comboBox = QtGui.QComboBox(self.splitter)
        self.comboBox.setMinimumSize(QtCore.QSize(251, 0))
        self.comboBox.setObjectName(_fromUtf8("comboBox"))
        self.gridLayout_3.addWidget(self.splitter, 0, 0, 1, 7)
        self.label_5 = QtGui.QLabel(Dialog)
        self.label_5.setObjectName(_fromUtf8("label_5"))
        self.gridLayout_3.addWidget(self.label_5, 1, 0, 1, 1)
        self.comboBox_5 = QtGui.QComboBox(Dialog)
        self.comboBox_5.setObjectName(_fromUtf8("comboBox_5"))
        self.gridLayout_3.addWidget(self.comboBox_5, 1, 1, 1, 3)
        self.lineEdit = QtGui.QLineEdit(Dialog)
        self.lineEdit.setInputMethodHints(QtCore.Qt.ImhNone)
        self.lineEdit.setInputMask(_fromUtf8(""))
        self.lineEdit.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
        self.gridLayout_3.addWidget(self.lineEdit, 1, 4, 1, 1)
        self.comboBox_6 = QtGui.QComboBox(Dialog)
        self.comboBox_6.setObjectName(_fromUtf8("comboBox_6"))
        self.gridLayout_3.addWidget(self.comboBox_6, 1, 5, 1, 1)
        spacerItem = QtGui.QSpacerItem(25, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_3.addItem(spacerItem, 1, 6, 1, 1)
        self.gridLayout = QtGui.QGridLayout()
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        self.label_2 = QtGui.QLabel(Dialog)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.gridLayout.addWidget(self.label_2, 0, 0, 1, 1)
        self.comboBox_2 = QtGui.QComboBox(Dialog)
        self.comboBox_2.setObjectName(_fromUtf8("comboBox_2"))
        self.gridLayout.addWidget(self.comboBox_2, 0, 1, 1, 1)
        self.gridLayout_3.addLayout(self.gridLayout, 2, 0, 1, 2)
        spacerItem1 = QtGui.QSpacerItem(64, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_3.addItem(spacerItem1, 2, 2, 1, 1)
        self.label_4 = QtGui.QLabel(Dialog)
        self.label_4.setObjectName(_fromUtf8("label_4"))
        self.gridLayout_3.addWidget(self.label_4, 2, 3, 1, 2)
        self.comboBox_4 = QtGui.QComboBox(Dialog)
        self.comboBox_4.setObjectName(_fromUtf8("comboBox_4"))
        self.gridLayout_3.addWidget(self.comboBox_4, 2, 5, 1, 1)
        self.gridLayout_2 = QtGui.QGridLayout()
        self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
        self.label_3 = QtGui.QLabel(Dialog)
        self.label_3.setObjectName(_fromUtf8("label_3"))
        self.gridLayout_2.addWidget(self.label_3, 0, 0, 1, 1)
        self.comboBox_3 = QtGui.QComboBox(Dialog)
        self.comboBox_3.setObjectName(_fromUtf8("comboBox_3"))
        self.gridLayout_2.addWidget(self.comboBox_3, 0, 1, 1, 1)
        self.gridLayout_3.addLayout(self.gridLayout_2, 3, 0, 1, 3)
        spacerItem2 = QtGui.QSpacerItem(254, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_3.addItem(spacerItem2, 3, 4, 1, 2)
        self.toolButton = QtGui.QToolButton(Dialog)
        self.toolButton.setIconSize(QtCore.QSize(30, 30))
        self.toolButton.setObjectName(_fromUtf8("toolButton"))
        self.gridLayout_3.addWidget(self.toolButton, 3, 6, 1, 1)
        self.plainTextEdit = QtGui.QPlainTextEdit(Dialog)
        self.plainTextEdit.setObjectName(_fromUtf8("plainTextEdit"))
        self.gridLayout_3.addWidget(self.plainTextEdit, 4, 0, 1, 7)
        self.buttonBox = QtGui.QDialogButtonBox(Dialog)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Close|QtGui.QDialogButtonBox.Save)
        self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
        self.gridLayout_3.addWidget(self.buttonBox, 5, 4, 1, 3)

        self.retranslateUi(Dialog)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), Dialog.accept)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), Dialog.reject)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        Dialog.setWindowTitle(_translate("Dialog", "Dialog", None))
        self.label.setText(_translate("Dialog", "Data field:", None))
        self.label_5.setText(_translate("Dialog", "Neighbouring method:", None))
        self.label_2.setText(_translate("Dialog", "Weighting scheme:", None))
        self.label_4.setText(_translate("Dialog", "Variance assumption:", None))
        self.label_3.setText(_translate("Dialog", "Alternative hypothesis:", None))
        self.toolButton.setText(_translate("Dialog", "...", None))
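Since this is generator output, it only defines the `Ui_Dialog` helper; displaying the dialog it describes takes a few extra lines. A minimal sketch, assuming the generated file is saved as globalt_dialog_base.py next to this script:

# Minimal sketch: instantiating the generated Ui_Dialog on a plain QDialog.
# Assumes the generated file above is importable as globalt_dialog_base.
import sys
from PyQt4 import QtGui
from globalt_dialog_base import Ui_Dialog

app = QtGui.QApplication(sys.argv)
dialog = QtGui.QDialog()
ui = Ui_Dialog()
ui.setupUi(dialog)   # builds the widgets and layouts onto the dialog
dialog.show()
sys.exit(app.exec_())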
So it's the beginning of the summer holidays, day two for us, and my children have already eaten everything in the cupboards. It's going to be an expensive summer. With that in mind, I thought I would put together a post on the ways in which we can all save some pennies so our eyes don't water when we look at our bank statements come September. Don't feel like you have to go out every single day; you can make days at home super fun without needing to spend a penny. Lyndsey, at Me Him The Dog and Baby, adds, "You can invite friends, have a small BBQ, craft and play in a paddling pool - make the most of what you have". Whether it's for days out, or whether you're looking for savings on a PS4 for the family, you can find many savings online. Excellent sites like Latestdeals.co.uk are great for this: people share the latest deals, voucher codes, coupons, freebies and competitions they find with each other. What makes Latest Deals different is that members get points and Amazon vouchers in return for their help. So, you can get rewards for your contributions on top of making great savings. It may be an obvious one, but it's honestly always something I forget to do, and whenever we end up going out for the day it always ends up costing more than I think! Jess at Tantrum to Smiles says, "even to the park for the day take your own food/drinks or a picnic. It's easy to get caught out having to buy expensive drinks when out and about, especially in the sun. I always make sure I take a cool bag or backpack with some drinks and food in it to avoid the expensive café/kiosk costs". If you know you're going to be tempted by the ice cream van then "leave your purse at home", says Gemma at Mummy Waisted. Most cinemas run a "kids club", which offers fantastic prices to see films. Jen at Just Average Jen has also found this incredible deal for great movie prices, which you can read more about here. I'm very fortunate that we live close by to many stunning beaches and beautiful rivers, which make for the perfect spots for a whole day of fun, all absolutely free. Children do not need entertaining with zoos, theme parks and the like (though they are great fun); they can find the simplest pleasures in nature. Not once have my two ever uttered the words "I'm bored" when we're by the water. "You can stick whatever fruit you would like into a blender, pour it into a reusable ice lolly mould and pop it into the freezer for a few hours! Kids love the novelty of making their own lolly, you are getting fruit into them without them even realising and saving money in the process! Win-win!" says Joanne at New Mum Fun. This is a definite win-win and a tip that we love! Having an overview of the summer is a great way of keeping track of days out and when you'll be spending money where. This way you can make sure you put aside the funds needed for a particular day out so you don't end up overspending. Beth at Life as Mum does this and has made a summer calendar planner. Check out local Facebook groups, local libraries, shopping centres and the like, as lots of them will have free or very cheap activities during the summer holidays! I always find that many of these activities aren't widely advertised, so use these local groups and ask around to see what's on. Very often you will find that if you book in advance you can make small savings on your bigger day out compared to "on the day" prices. This is where your planning will come in handy.
If you have any top money-saving tips this summer then please feel free to share - we all need to help each other out and save the pennies (and our sanity!).