text
stringlengths 0
2.53M
|
---|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Provides ``mapping`` of url paths to request handlers.
"""
from bootstrap import Bootstrap
from fund import InstantPaymentNotificationHandler
from fund import ThankYouHandler
from view import *
# URL path regex -> request handler class. Order matters: the first match
# wins, and the trailing catch-all must stay last.
mapping = [
    # Core / payment endpoints
    (r"/", Index),
    (r"/ipn", InstantPaymentNotificationHandler),
    (r"/thank-you", ThankYouHandler),
    # Static-ish informational pages (trailing slash optional)
    (r"/about\/?", About),
    (r"/guide\/?", Guide),
    (r"/guide/download\/?", Download),
    (r"/guide/standards\/?", Standards),
    (r"/community\/?", Community),
    (r"/news\/?", News),
    (r"/support\/?", Support),
    (r"/contact\/?", Contact),
    (r"/press\/?", Press),
    (r"/legal/terms", Terms),
    # Library browsing; capture groups are passed to the handler
    (r"/library\/?", Library),
    (r"/library/sketchup\/?", Library),
    (r"/library/series/(\w+)\/?", Library),
    (r"/library/users\/?", Users),
    (r"/library/users/([0-9]+)\/?", User),
    (r"/library/designs/([0-9]+)\/?", Design),
    (r"/library/designs/([0-9]+)/(edit)\/?", Design),
    # NOTE(review): the add/add/sketchup routes below are shadowed by the
    # numeric-id pattern only for numeric paths; /library/designs matches
    # before /library/designs/add -- confirm intended ordering.
    (r"/library/designs\/?", Design),
    (r"/library/designs/add\/?", Design),
    (r"/library/designs/add/sketchup\/?", Design),
    # Redirect endpoints used after form submissions
    (r"/redirect/success/([0-9]+)\/?", RedirectSuccess),
    (r"/redirect/error\/?", RedirectError),
    (r"/redirect/after/delete\/?", RedirectAfterDelete),
    # Admin-only endpoints
    (r"/admin/moderate\/?", Moderate),
    (r"/admin/bootstrap\/?", Bootstrap),
    (r"/activity", ActivityScreen),
    (r"/txns", TxnList),
    # Base64 blob fetch, with and without a second path component
    (r"/blob64/([^/]+)/([^/]+)\/?", Base64Blob),
    (r"/blob64/([^/]+)\/?", Base64Blob),
    (r"/i18n/message_strings.json", MessageStrings),
    # Catch-all: must remain the final entry
    (r"/.*", NotFound),
]
|
import msgpack
import gevent.pool
import gevent.queue
import gevent.event
import gevent.local
import gevent.lock
import logging
import sys
import gevent_zmq as zmq
from .exceptions import TimeoutExpired
from .context import Context
from .channel_base import ChannelBase
if (sys.version_info < (2, 7)):
def get_pyzmq_frame_buffer(frame):
return frame.buffer[:]
else:
def get_pyzmq_frame_buffer(frame):
return frame.buffer
logger = (__name__)
class SequentialSender(object):
    # NOTE(review): this block appears mangled by an automated transform --
    # call targets were stripped to bare parenthesized operands (e.g.
    # ``(parts[i])``). Code is preserved verbatim and is NOT runnable as-is.
    # Apparent intent: send multipart zmq frames sequentially on one socket.

    def __init__(self, socket):
        # Socket the frames are (presumably) sent on.
        self._socket = socket

    def _send(self, parts):
        # Presumably sends every frame but the last (with SNDMORE), retrying
        # once when interrupted by GreenletExit/Timeout -- TODO confirm after
        # restoring the stripped calls.
        e = None
        for i in (((parts) - 1)):
            try:
                (parts[i])
            except (gevent.GreenletExit, gevent.Timeout) as e:
                if (i == 0):
                    raise
                (parts[i])
        try:
            (parts[(- 1)])
        except (gevent.GreenletExit, gevent.Timeout) as e:
            (parts[(- 1)])
        if e:
            raise e

    def __call__(self, parts, timeout=None):
        # Presumably wraps _send in a gevent.Timeout context when given.
        if timeout:
            with (timeout):
                (parts)
        else:
            (parts)
class SequentialReceiver(object):
    # NOTE(review): mangled by an automated transform -- call targets were
    # stripped (e.g. ``part = ()``). Preserved verbatim; NOT runnable as-is.
    # Apparent intent: receive one multipart zmq message sequentially.

    def __init__(self, socket):
        # Socket the frames are (presumably) received from.
        self._socket = socket

    def _recv(self):
        # Presumably receives frames until ``part.more`` is False, retrying
        # once on GreenletExit/Timeout unless nothing was received yet.
        e = None
        parts = []
        while True:
            try:
                part = ()
            except (gevent.GreenletExit, gevent.Timeout) as e:
                if ((parts) == 0):
                    raise
                part = ()
            (part)
            if (not part.more):
                break
        if e:
            raise e
        return parts

    def __call__(self, timeout=None):
        # Presumably wraps _recv in a gevent.Timeout context when given.
        if timeout:
            with (timeout):
                return ()
        else:
            return ()
class Sender(SequentialSender):
    # NOTE(review): mangled by an automated transform -- constructor/call
    # targets stripped (e.g. ``self._send_queue = ()``). Preserved verbatim;
    # NOT runnable as-is. Apparent intent: queue-based async sender running
    # a background greenlet that drains ``_send_queue``.

    def __init__(self, socket):
        self._socket = socket
        self._send_queue = ()
        self._send_task = (self._sender)

    def close(self):
        # Presumably kills the background send greenlet.
        if self._send_task:
            ()

    def _sender(self):
        # Background loop: forward each queued multipart message.
        for parts in self._send_queue:
            (parts)

    def __call__(self, parts, timeout=None):
        # Presumably enqueues; converts a full queue into a timeout error.
        try:
            (parts)
        except gevent.queue.Full:
            raise (timeout)
class Receiver(SequentialReceiver):
    # NOTE(review): mangled by an automated transform -- call targets
    # stripped. Preserved verbatim; NOT runnable as-is. Apparent intent:
    # queue-based async receiver with a background greenlet filling
    # ``_recv_queue``.

    def __init__(self, socket):
        self._socket = socket
        self._recv_queue = ()
        self._recv_task = (self._recver)

    def close(self):
        # Presumably kills the background greenlet and drops the queue.
        if self._recv_task:
            ()
        self._recv_queue = None

    def _recver(self):
        # Background loop: receive messages and push them onto the queue.
        while True:
            parts = ()
            (parts)

    def __call__(self, timeout=None):
        # Presumably dequeues; converts an empty queue into a timeout error.
        try:
            return ()
        except gevent.queue.Empty:
            raise (timeout)
class Event(object):
    # NOTE(review): parts of this class were mangled by an automated
    # transform (stripped call targets, e.g. in pack/unpack). Preserved
    # verbatim; pack/unpack/__str__ are NOT runnable as-is.
    """A named RPC event: (header, name, args), optionally carrying the zmq
    routing identity of the peer it arrived from."""

    __slots__ = ['_name', '_args', '_header', '_identity']

    def __init__(self, name, args, context, header=None):
        # ``context`` is accepted but unused here -- presumably it supplied
        # the message id for the default header before mangling.
        self._name = name
        self._args = args
        if (header is None):
            # 'v': 3 looks like a protocol version tag -- TODO confirm.
            self._header = {'message_id': (), 'v': 3}
        else:
            self._header = header
        self._identity = None

    @property
    def header(self):
        return self._header

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, v):
        self._name = v

    @property
    def args(self):
        return self._args

    @property
    def identity(self):
        # zmq routing identity frames; None for non-routed sockets.
        return self._identity

    @identity.setter
    def identity(self, v):
        self._identity = v

    def pack(self):
        # Presumably msgpack-serializes the (header, name, args) triple.
        return ((self._header, self._name, self._args))

    @staticmethod
    def unpack(blob):
        # Presumably msgpack-deserializes ``blob`` back into an Event;
        # a non-dict header is replaced with an empty one.
        unpacker = ()
        (blob)
        unpacked_msg = ()
        try:
            (header, name, args) = unpacked_msg
        except Exception as e:
            raise ((unpacked_msg, e))
        if (not (header, dict)):
            header = {}
        return (name, args, None, header)

    def __str__(self, ignore_args=False):
        # Human-readable form; args serialization target was stripped.
        if ignore_args:
            args = '[...]'
        else:
            args = self._args
            try:
                args = (((self._args)))
            except Exception:
                raise
        if self._identity:
            identity = (((x.bytes) for x in self._identity))
            return (identity, self._name, self._header, args)
        return (self._name, self._header, args)
class Events(ChannelBase):
    # NOTE(review): heavily mangled by an automated transform -- most call
    # targets were stripped (``()``), and ``setsockopt`` below is not even
    # syntactically valid. Preserved verbatim; NOT runnable as-is. Apparent
    # intent: a zmq socket wrapper choosing sequential vs queued send/recv
    # strategies per socket type.

    def __init__(self, zmq_socket_type, context=None):
        self._debug = False
        self._zmq_socket_type = zmq_socket_type
        # Presumably falls back to a default Context when none is given.
        self._context = (context or ())
        self._socket = (zmq_socket_type)
        # Queued sender for async socket types, sequential for REQ/REP.
        if (zmq_socket_type in (zmq.PUSH, zmq.PUB, zmq.DEALER, zmq.ROUTER)):
            self._send = (self._socket)
        elif (zmq_socket_type in (zmq.REQ, zmq.REP)):
            self._send = (self._socket)
        else:
            self._send = None
        # Same split for the receive side.
        if (zmq_socket_type in (zmq.PULL, zmq.SUB, zmq.DEALER, zmq.ROUTER)):
            self._recv = (self._socket)
        elif (zmq_socket_type in (zmq.REQ, zmq.REP)):
            self._recv = (self._socket)
        else:
            self._recv = None

    @property
    def recv_is_supported(self):
        return (self._recv is not None)

    @property
    def emit_is_supported(self):
        return (self._send is not None)

    def __del__(self):
        # Best-effort close on garbage collection; target call stripped.
        try:
            if (not self._socket.closed):
                ()
        except (AttributeError, TypeError):
            raise

    def close(self):
        # Presumably closes sender, receiver, then the socket.
        try:
            ()
        except AttributeError:
            raise
        try:
            ()
        except AttributeError:
            raise
        ()

    @property
    def debug(self):
        return self._debug

    @debug.setter
    def debug(self, v):
        # Log only on an actual state change.
        if (v != self._debug):
            self._debug = v
            if self._debug:
                ('debug enabled')
            else:
                ('debug disabled')

    def _resolve_endpoint(self, endpoint, resolve=True):
        # Presumably normalizes an endpoint (or list of endpoints) into a
        # flat list, optionally resolving names first.
        if resolve:
            endpoint = (endpoint)
        if (endpoint, (tuple, list)):
            r = []
            for sub_endpoint in endpoint:
                ((sub_endpoint, resolve))
            return r
        return [endpoint]

    def connect(self, endpoint, resolve=True):
        r = []
        for endpoint_ in (endpoint, resolve):
            ((endpoint_))
            ('connected to %s (status=%s)', endpoint_, r[(- 1)])
        return r

    def bind(self, endpoint, resolve=True):
        r = []
        for endpoint_ in (endpoint, resolve):
            ((endpoint_))
            ('bound to %s (status=%s)', endpoint_, r[(- 1)])
        return r

    def disconnect(self, endpoint, resolve=True):
        r = []
        for endpoint_ in (endpoint, resolve):
            ((endpoint_))
            ('disconnected from %s (status=%s)', endpoint_, r[(- 1)])
        return r

    def new_event(self, name, args, xheader=None):
        # Presumably builds an Event and merges extra header fields in.
        event = (name, args)
        if xheader:
            (xheader)
        return event

    def emit_event(self, event, timeout=None):
        # Frame layout depends on routing: identity frames + empty
        # delimiter for routed peers, plain single frame otherwise.
        if self._debug:
            ('--> %s', event)
        if event.identity:
            parts = ((event.identity or ()))
            (['', ()])
        elif (self._zmq_socket_type in (zmq.DEALER, zmq.ROUTER)):
            parts = ('', ())
        else:
            parts = ((),)
        (parts, timeout)

    def recv(self, timeout=None):
        # Split incoming frames into routing identity and payload blob.
        parts = ()
        if ((parts) > 2):
            identity = parts[0:(- 2)]
            blob = parts[(- 1)]
        elif ((parts) == 2):
            identity = parts[0:(- 1)]
            blob = parts[(- 1)]
        else:
            identity = None
            blob = parts[0]
        event = ((blob))
        event.identity = identity
        if self._debug:
            ('<-- %s', event)
        return event

    def setsockopt(self, *args):
        # NOTE(review): invalid syntax as written; presumably delegated to
        # the underlying socket's setsockopt.
        return (*args)
@property
def context(self):
return self._context |
#!/usr/bin/env python
"""Django's command line utility."""
import os
import sys

if __name__ == "__main__":
    # Use the project settings unless the environment already names a
    # settings module.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
    from django.core.management import execute_from_command_line

    # Dispatch the requested management command (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
|
"""Installer for hippybot
"""
import os

cwd = os.path.dirname(__file__)

# FIX: read the version via a context manager instead of leaking an open
# file handle (the original used ``open(...).read()`` without closing).
with open(os.path.join(cwd, "hippybot", "version.txt"), "r") as _version_file:
    __version__ = _version_file.read().strip()

try:
    from setuptools import setup, find_packages
except ImportError:
    # Bootstrap setuptools when it is not installed.
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup, find_packages


def _read(name):
    # Return the full text of *name*, closing the handle promptly.
    with open(name) as f:
        return f.read()


def _read_lines(name):
    # Return the lines of *name* (newlines kept, matching readlines()).
    with open(name) as f:
        return f.readlines()


setup(
    name="hippybot",
    description="Python Hipchat bot",
    long_description=_read("README.rst"),
    version=__version__,
    author="Wes Mason",
    author_email="wes[at]1stvamp[dot]org",
    url="http://github.com/1stvamp/hippybot",
    packages=find_packages(exclude=["ez_setup"]),
    install_requires=_read_lines("requirements.txt"),
    package_data={"hippybot": ["version.txt"]},
    include_package_data=True,
    extras_require={
        "plugins": _read_lines("extras_requirements.txt"),
    },
    entry_points={
        "console_scripts": [
            "hippybot = hippybot.bot:main",
        ],
    },
    license="BSD",
)
|
#!/usr/bin/env python
# Django management entry point for the twobuntu project.
import os
import sys

if __name__ == "__main__":
    # Use the project settings unless the environment already names one.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "twobuntu.settings")
    from django.core.management import execute_from_command_line

    # Dispatch the requested management command.
    execute_from_command_line(sys.argv)
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration: creates the ``Category`` model."""

    # First migration for this app -- nothing to depend on.
    dependencies = []

    operations = [
        migrations.CreateModel(
            name="Category",
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID",
                        serialize=False,
                        auto_created=True,
                        primary_key=True,
                    ),
                ),
                (
                    "name",
                    models.CharField(
                        help_text=b"The name of the category.", max_length=40
                    ),
                ),
                (
                    # Optional representative image stored under 'categories'.
                    "image",
                    models.ImageField(
                        help_text=b"A representative image.",
                        null=True,
                        upload_to=b"categories",
                        blank=True,
                    ),
                ),
            ],
            options={
                "ordering": ("name",),
                "verbose_name_plural": "Categories",
            },
            bases=(models.Model,),
        ),
    ]
|
import twitter
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.db import transaction
from django.shortcuts import redirect, render
from twobuntu.news.forms import AddItemForm
@user_passes_test(lambda u: u.is_staff)
def add(request):
    """
    Add news items to the home page.

    Staff-only view. On POST, validates the form, stamps the current user
    as reporter and saves; on success redirects home. A Twitter error
    (raised inside save) rolls the item back and re-renders the form with
    an error message. On GET, renders an empty form.
    """
    if request.method == "POST":
        form = AddItemForm(data=request.POST)
        if form.is_valid():
            item = form.save(commit=False)
            # Attribute the item to the staff member who submitted it.
            item.reporter = request.user
            try:
                # Atomic: a TwitterError raised during save() rolls back
                # the database insert as well.
                with transaction.atomic():
                    item.save()
            except twitter.TwitterError as e:
                # NOTE(review): ``e.message`` is a Python 2-ism -- confirm
                # the installed twitter library still exposes it before any
                # Python 3 port.
                messages.error(
                    request,
                    'Twitter error: "%s" Please try again.' % e.message[0]["message"],
                )
            else:
                messages.info(request, "Your news item has been published!")
                return redirect("home")
    else:
        form = AddItemForm()
    # Invalid POSTs fall through here so errors are shown on the bound form.
    return render(
        request,
        "form.html",
        {
            "title": "Add Item",
            "form": form,
            "description": "Enter the details for the news item below.",
            "action": "Add",
        },
    )
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010-2015, 2degrees Limited.
# All Rights Reserved.
#
# This file is part of django-wsgi <https://github.com/2degrees/django-wsgi/>,
# which is subject to the provisions of the BSD at
# <http://dev.2degreesnetwork.com/p/2degrees-license.html>. A copy of the
# license should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS"
# AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST
# INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
Exceptions raised by :mod:`django_wsgi.`
"""
__all__ = ("DjangoWSGIException", "ApplicationCallError")
class DjangoWSGIException(Exception):
    """Root of the exception hierarchy used by :mod:`django_wsgi`."""

    pass
class ApplicationCallError(DjangoWSGIException):
    """Signals that an embedded WSGI application was invoked incorrectly."""

    pass
|
import boto
import boto.s3.connection
from django.conf import settings
import logging
log = logging.getLogger(__name__)
def get_s3_connection():
    """Build a boto S3 connection from Django settings.

    Returns ``None`` when any of S3_ACCESS_KEY, S3_SECRET_KEY or S3_HOST
    is missing/falsy, so callers must handle the no-config case.
    """
    if settings.S3_ACCESS_KEY and settings.S3_SECRET_KEY and settings.S3_HOST:
        log.debug(
            "Connecting to {}, with secure connection is {}".format(
                settings.S3_HOST, settings.S3_SECURE_CONNECTION
            )
        )
        # OrdinaryCallingFormat: presumably chosen for path-style bucket
        # addressing on a non-AWS endpoint -- confirm against deployment.
        return boto.connect_s3(
            aws_access_key_id=settings.S3_ACCESS_KEY,
            aws_secret_access_key=settings.S3_SECRET_KEY,
            host=settings.S3_HOST,
            is_secure=settings.S3_SECURE_CONNECTION,
            calling_format=boto.s3.connection.OrdinaryCallingFormat(),
        )
    return None
def get_or_create_bucket(s3_connection):
    """Return the configured bucket, creating it when the lookup is empty.

    NOTE(review): boto's ``get_bucket`` usually raises on a missing bucket
    rather than returning ``None`` -- confirm the intended error handling.
    """
    name = settings.S3_BUCKET_NAME
    existing = s3_connection.get_bucket(name)
    if existing is not None:
        return existing
    return s3_connection.create_bucket(name)
|
from django.db import models
import datetime
from common.models import Project
class Stage(models.Model):
name = models.CharField(max_length=128)
project = models.ForeignKey(Project)
text = models.TextField(default="", blank=True)
link = models.URLField(default=None, blank=True, null=True)
state = models.CharField(max_length=24, default="info", blank=True)
weight = models.IntegerField(default=0)
updated = models.DateTimeField(default=datetime.datetime.now())
def save(self, *args, **kwargs):
self.updated = datetime.datetime.now()
return super(Stage, self).save(*args, **kwargs)
def __str__(self):
return self.name
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
    """Adds the ``filter`` and ``main`` fields to ``TestPlan``."""

    dependencies = [
        ("testreport", "0026_testresult_launch_item_id"),
    ]

    operations = [
        migrations.AddField(
            model_name="testplan",
            name="filter",
            field=models.TextField(
                default=b"",
                max_length=128,
                verbose_name="Started by filter",
                blank=True,
            ),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name="testplan",
            name="main",
            field=models.BooleanField(
                default=False, verbose_name="Show in short statistic"
            ),
            preserve_default=True,
        ),
    ]
|
import gevent
from gevent import monkey
()
import time
import smtplib
TEST_MAIL = '\nDate: Wed, 30 Jul 2014 03:29:50 +0800 (CST)\nFrom: =?utf-8?B?6IGU5oOz?= <client@gsmtpd.org>\nTo: test@gsmtpd.org\nMessage-ID: <766215193.1675381406662190229.JavaMail.root@USS-01>\nSubject: =?utf-8?B?6IGU5oOz56e75Yqo5LqS6IGU572R5pyN5Yqh5rOo5YaM56Gu6K6k6YKu5Lu2?=\nMIME-Version: 1.0\nContent-Type: multipart/mixed; \n boundary="----=_Part_335076_1490382245.1406662190222"\n\n------=_Part_335076_1490382245.1406662190222\nContent-Type: multipart/related; \n boundary="----=_Part_335077_605133107.1406662190222"\n\n------=_Part_335077_605133107.1406662190222\nContent-Type: text/html;charset=utf-8\nContent-Transfer-Encoding: quoted-printable\n\n <html><head></head><body>=E5=B0=8A=E6=95=AC=E7=9A=84=E7=94=A8=E6=88=B7=EF=\n=BC=9A<br/>=E6=82=A8=E5=A5=BD=EF=BC=81<br/>=E8=AF=B7=E7=82=B9=E5=87=BB=E8=\n=81=94=E6=83=B3=E5=B8=90=E5=8F=B7=E7=A1=AE=E8=AE=A4=E9=93=BE=E6=8E=A5=EF=BC=\n=8C=E4=BB=A5=E6=A0=A1=E9=AA=8C=E6=82=A8=E7=9A=84=E8=81=94=E6=83=B3=E5=B8=90=\n=E5=8F=B7=EF=BC=9A<br/><a href=3D"https://passport.lenovo.com/wauthen/verif=\nyuser?username=3D&vc=3DuHwf&accountid=3D1358934&lenovoid.=\ncb=3D&lenovoid.realm=3Dthinkworld.lenovo.com&lang=3Dzh_CN&display=3D&lenovo=\nid.ctx=3D&lenovoid.action=3D&lenovoid.lang=3D&lenovoid.uinfo=3D&lenovoid.vp=\n=3D&verifyFlag=3Dnull">https://passport.lenovo.com/wauthen/verifyuser?usern=\name=3o.org&vc=3DuHwf&accountid=3&lenovoid.cb=3D&lenov=\noid.realm=3Dthinkworld.lenovo.com&lang=3Dzh_CN&display=3D&lenovoid.ctx=3D&l=\nenovoid.action=3D&lenovoid.lang=3D&lenovoid.uinfo=3D&lenovoid.vp=3D&verifyF=\nlag=3Dnull</a><br/>=EF=BC=88=E5=A6=82=E6=9E=9C=E4=B8=8A=E9=9D=A2=E7=9A=84=\n=E9=93=BE=E6=8E=A5=E6=97=A0=E6=B3=95=E7=82=B9=E5=87=BB=EF=BC=8C=E6=82=A8=E4=\n=B9=9F=E5=8F=AF=E4=BB=A5=E5=A4=8D=E5=88=B6=E9=93=BE=E6=8E=A5=EF=BC=8C=E7=B2=\n=98=E8=B4=B4=E5=88=B0=E6=82=A8=E6=B5=8F=E8=A7=88=E5=99=A8=E7=9A=84=E5=9C=B0=\n=E5=9D=80=E6=A0=8F=E5=86=85=EF=BC=8C=E7=84=B6=E5=90=8E=E6=8C=89=E2=80=9C=E5=\n=9B=9E=E8=BD=A6=E2=80=9D=E9=94=AE)=E3=80=82<br/>=E6=9D=A5=E8=8
7=AA=E8=81=94=\n=E6=83=B3=E5=B8=90=E5=8F=B7</body></html>\n------=_Part_335077_605133107.1406662190222--\n\n------=_Part_335076_1490382245.1406662190222--\n'
def timeit(func):
    # NOTE(review): this benchmark decorator was mangled by an automated
    # transform -- most call targets were stripped (``()``/``(num)``).
    # Preserved verbatim; NOT runnable as-is. Apparent intent: run three
    # rounds of *num* SMTP connections against 127.0.0.1:*port*, time the
    # decorated operation, and return the best requests-per-second figure.
    def wrap(num, port, *args, **kwargs):
        max_rqs = 0
        for _ in (3):
            # Presumably open ``num`` SMTP connections to the target port.
            conns = [() for x in (num)]
            (((lambda x: ('127.0.0.1', port)), conns))
            start_at = ()
            (num, conns)
            interval = (() - start_at)
            # Presumably quit/close each connection after the round.
            for con in conns:
                try:
                    ()
                    ()
                except Exception:
                    raise
            (3)
            rqs = (num / interval)
            max_rqs = (rqs, max_rqs)
        return max_rqs
    return wrap
@timeit
def helo(num, conns):
    # Mangled (call targets stripped; not runnable as-is). Apparent intent:
    # spawn a HELO command per connection and wait for all of them.
    tasks = [(x.helo) for x in conns]
    (tasks)
@timeit
def send(num, conns):
    # Mangled (call targets stripped; not runnable as-is). Apparent intent:
    # spawn a sendmail of TEST_MAIL per connection and wait for all.
    tasks = [(x.sendmail, 'r@r.com', ['test@test.org'], TEST_MAIL) for x in conns]
    (tasks)
def main(port, num):
    # Mangled (print/format call stripped; not runnable as-is). Apparent
    # intent: print '<num> <helo rqs> <send rqs>' for the benchmark run.
    (('%d %s %s' % (num, (num, port), (num, port))))
if (__name__ == '__main__'):
import sys
try:
((sys.argv[1]), (sys.argv[2]))
except IndexError:
('python concurrency.py <port> <connection number>') |
#!/usr/bin/env python
import sys
import json
if sys.version_info < (3,):
    # Python 2: str already is a byte string, so both helpers are identity.
    def b(x):
        """Return *x* unchanged (py2 str is bytes)."""
        return x

    def s(x):
        """Return *x* unchanged (py2 str is bytes)."""
        return x
else:
    # Python 3: convert explicitly between str and UTF-8 bytes.
    def b(x):
        """Encode text *x* to UTF-8 bytes."""
        return x.encode("utf-8")

    def s(x):
        """Decode UTF-8 bytes *x* to text."""
        return x.decode("utf-8")
def parse_payload(payload):
    """Normalize *payload* into a JSON string.

    Accepts a string or a sequence of tokens (joined with spaces). Valid
    JSON passes through untouched. Otherwise "key value" becomes a
    one-entry JSON object and a single bare token is returned as-is.
    """
    if not isinstance(payload, str):
        payload = " ".join(payload)
    try:
        json.loads(payload)
    except ValueError:
        pieces = payload.split(" ", 1)
        if len(pieces) == 1:
            return "%s" % pieces[0]
        return '{"%s": "%s"}' % (pieces[0], pieces[1])
    return payload
def requires_elements(xs, dictionary):
    """Raise ``KeyError`` naming every key of *xs* absent from *dictionary*.

    Returns ``None`` when all keys are present.
    """
    missing = [x for x in xs if x not in dictionary]
    if missing:
        raise KeyError("Missing values %s" % ", ".join(missing))
|
from flask_resty import Api, GenericModelView
from marshmallow import fields, Schema
import pytest
from sqlalchemy import Column, Integer, String
import helpers
@pytest.yield_fixture
def models(db):
    # NOTE(review): mangled by an automated transform -- constructor calls
    # were stripped (e.g. ``(Integer)`` where presumably ``Column(Integer,
    # primary_key=True)`` stood). Preserved verbatim; NOT runnable as-is.
    # Apparent intent: create a composite-key Widget model for the tests.
    class Widget(db.Model):
        __tablename__ = 'widgets'
        id_1 = (Integer)
        id_2 = (Integer)
        name = (String)
    ()
    (yield {'widget': Widget})
    ()
@pytest.fixture
def schemas():
    # NOTE(review): mangled -- field constructors stripped (presumably
    # marshmallow ``fields.Integer(as_string=True)`` etc.). Preserved
    # verbatim; NOT runnable as-is.
    class WidgetSchema(Schema):
        id_1 = ()
        id_2 = ()
        name = ()
    return {'widget': ()}
@()
def routes(app, models, schemas):
    # NOTE(review): mangled -- the decorator and the view-method call
    # targets were stripped (``@()`` is invalid syntax). Preserved
    # verbatim; NOT runnable as-is. Apparent intent: register list/detail
    # views for Widget under /widgets with a composite (id_1, id_2) key.
    class WidgetViewBase(GenericModelView):
        model = models['widget']
        schema = schemas['widget']
        id_fields = ('id_1', 'id_2')

    class WidgetListView(WidgetViewBase):
        def get(self):
            return ()

        def post(self):
            return ()

    class WidgetView(WidgetViewBase):
        def get(self, id_1, id_2):
            return ((id_1, id_2))

        def patch(self, id_1, id_2):
            return ((id_1, id_2))

        def delete(self, id_1, id_2):
            return ((id_1, id_2))

    api = (app)
    ('/widgets', WidgetListView, WidgetView)
@()
def data(db, models):
    # NOTE(review): mangled -- presumably seeded three Widget rows and
    # committed the session. Preserved verbatim; NOT runnable as-is.
    (((), (), ()))
    ()
def test_list(client):
    # Mangled (client call stripped; not runnable as-is). Apparent intent:
    # GET /widgets returns 200 and the three seeded widgets.
    response = ('/widgets')
    if (not (response.status_code == 200)):
        raise ()
    if (not ((response) == [{'id_1': '1', 'id_2': '2', 'name': 'Foo'}, {'id_1': '1', 'id_2': '3', 'name': 'Bar'}, {'id_1': '4', 'id_2': '5', 'name': 'Baz'}])):
        raise ()
def test_retrieve(client):
    # Mangled (client call stripped; not runnable as-is). Apparent intent:
    # GET /widgets/1/2 returns 200 and the matching widget.
    response = ('/widgets/1/2')
    if (not (response.status_code == 200)):
        raise ()
    if (not ((response) == {'id_1': '1', 'id_2': '2', 'name': 'Foo'})):
        raise ()
def test_create(client):
    # Mangled (request helper stripped; not runnable as-is). Apparent
    # intent: POST /widgets returns 201, a composite-key Location header,
    # and echoes the created widget.
    response = (client, 'POST', '/widgets', {'id_1': '4', 'id_2': '6', 'name': 'Qux'})
    if (not (response.status_code == 201)):
        raise ()
    if (not (response.headers['Location'] == 'http://localhost/widgets/4/6')):
        raise ()
    if (not ((response) == {'id_1': '4', 'id_2': '6', 'name': 'Qux'})):
        raise ()
def test_update(client):
    # Mangled (request helpers stripped; not runnable as-is). Apparent
    # intent: PATCH returns 204 and a follow-up GET shows the new name.
    update_response = (client, 'PATCH', '/widgets/1/2', {'id_1': '1', 'id_2': '2', 'name': 'Qux'})
    if (not (update_response.status_code == 204)):
        raise ()
    retrieve_response = ('/widgets/1/2')
    if (not (retrieve_response.status_code == 200)):
        raise ()
    if (not ((retrieve_response) == {'id_1': '1', 'id_2': '2', 'name': 'Qux'})):
        raise ()
def test_destroy(client):
destroy_response = ('/widgets/1/2')
if (not (destroy_response.status_code == 204)):
raise ()
retrieve_response = ('/widgets/1/2')
if (not (retrieve_response.status_code == 404)):
raise () |
from .dogpile import Dogpile
|
"""
RPi-Tron-Radio
Raspberry Pi Web-Radio with 2.8" TFT Touchscreen and Tron-styled graphical interface
GitHub: http://github.com/5volt-junkie/RPi-Tron-Radio
Blog: http://5volt-junkie.net
MIT License: see license.txt
"""
import pygame
from pygame.locals import *
import time
import datetime
import sys
import os
import glob
import subprocess
os.environ["SDL_FBDEV"] = "/dev/fb1"
os.environ["SDL_MOUSEDEV"] = "/dev/input/touchscreen"
os.environ["SDL_MOUSEDRV"] = "TSLIB"
#colors R G B
white = (255, 255, 255)
red = (255, 0, 0)
green = ( 0, 255, 0)
blue = ( 0, 0, 255)
black = ( 0, 0, 0)
cyan = ( 50, 255, 255)
magenta = (255, 0, 255)
yellow = (255, 255, 0)
orange = (255, 127, 0)
#screen size
width = 320
height = 240
size = (width, height)
screen = pygame.display.set_mode(size)
pygame.init()
#disable mouse cursor
pygame.mouse.set_visible(False)
#define font
font = pygame.font.Font(None, 25)
#screensaver
screensaver_timer = 5 #time until screensaver will be enabled, in minutes
screensaver = False
#load default skin
menu = 1
skin_number = 1
max_skins = 8
font_color = cyan
skin1 = pygame.image.load("skins/skin_tron_m1.png")
skin2 = pygame.image.load("skins/skin_tron_m2.png")
skin = skin1
screen.blit(skin, (0, 0))
#initial volume settings
subprocess.call('mpc volume 100' , shell=True)
reboot_label = font.render("rebooting...", 1, (font_color))
poweroff_label = font.render("shutting down", 1, (font_color))
song_title = " "
playlist = " "
def reboot():
    """Show a reboot notice on the TFT for five seconds, stop playback,
    then reboot the machine."""
    screen.fill(black)
    screen.blit(reboot_label, (10, 100))
    pygame.display.flip()
    time.sleep(5)
    for command in ('mpc stop', 'reboot'):
        subprocess.call(command, shell=True)
def poweroff():
    """Show a shutdown notice on the TFT for five seconds, stop playback,
    then power the machine off."""
    screen.fill(black)
    screen.blit(poweroff_label, (10, 100))
    pygame.display.flip()
    time.sleep(5)
    for command in ('mpc stop', 'poweroff'):
        subprocess.call(command, shell=True)
#copy playing title to favorite.txt
def favorite():
    """Append the currently playing title to the favourites file.

    FIX: use a context manager so the file handle is closed even when the
    write fails (the original left the handle open on error).
    """
    print(song_title)
    with open('/var/www/favorite.txt', 'a') as f:
        f.write('-' + song_title + '\n')
#function runs if touchscreen was touched (and screensaver is disabled)
def on_touch():
    """Translate the global touch position ``pos`` into a button press.

    The eight buttons form a 4x2 grid; the matching button number (1-8)
    is forwarded to ``button()``.
    """
    # (x_min, x_max, y_min, y_max) per button, in button order 1..8.
    hitboxes = (
        (13, 75, 121, 173),
        (90, 152, 121, 173),
        (167, 229, 121, 173),
        (244, 306, 121, 173),
        (13, 75, 181, 233),
        (90, 152, 181, 233),
        (167, 229, 181, 233),
        (244, 306, 181, 233),
    )
    for number, (x_min, x_max, y_min, y_max) in enumerate(hitboxes, start=1):
        if x_min <= pos[0] <= x_max and y_min <= pos[1] <= y_max:
            button(number)
#which button (and which menu) was presed on touch
def button(number):
    """Dispatch a button press (1-8) for the current menu.

    Menu 1 is playback control via ``mpc``; menu 2 holds utility actions
    (favourite, skin switch, quit, power, reboot). Buttons 8/8 toggle
    between the two menus.
    """
    global menu
    if menu == 1:
        if number == 1:
            subprocess.call('mpc play' , shell=True)
            #print "play"
        if number == 2:
            subprocess.call('mpc pause' , shell=True)
            #print "pause"
        if number == 3:
            subprocess.call('mpc volume +5' , shell=True)
            #print "vol +x"
        if number == 4:
            subprocess.call('mpc volume 0' , shell=True)
            #print "vol 0"
        if number == 5:
            subprocess.call('mpc prev' , shell=True)
            #print "prev"
        if number == 6:
            subprocess.call('mpc next' , shell=True)
            #print "next"
        if number == 7:
            subprocess.call('mpc volume -5' , shell=True)
            #print "vol -x"
        if number == 8:
            # Switch to the utility menu and redraw.
            menu = 2
            update_screen()
        return
    if menu == 2:
        if number == 1:
            favorite()
        if number == 2:
            # Cycle to the next skin; update_screen() wraps 9 back to 1.
            global skin_number
            skin_number = skin_number+1
            #print skin_number
            update_screen()
        if number == 3:
            # Leave the UI but keep mpd playing in the background.
            pygame.quit()
            sys.exit()
        if number == 4:
            # Stop playback and quit the radio entirely.
            subprocess.call('mpc stop', shell=True)
            pygame.quit()
            sys.exit()
        if number == 5:
            print("power off")
            poweroff()
        if number == 6:
            print("reboot")
            reboot()
        if number == 7:
            # Manual screen refresh.
            update_screen()
        if number == 8:
            # Switch back to the playback menu and redraw.
            menu = 1
            update_screen()
        return
#function to update screen
def update_screen():
    """Redraw the whole TFT screen for the current menu, skin and state.

    Reads the globals ``skin_number``, ``menu`` and ``screensaver``; shells
    out to ``mpc`` for station/title/volume info. When the screensaver is
    active the screen is simply filled white.

    NOTE(review): output of subprocess.check_output is split with str
    patterns -- this assumes Python 2 (bytes==str); confirm before porting.
    """
    global skin_number
    # Wrap the skin cycle 1..8.
    if skin_number == 9:
        skin_number = 1
    # Pick skin images and matching font colour. These names are locals:
    # they only affect the current redraw.
    if skin_number == 1:
        skin1 = pygame.image.load("skins/skin_tron_m1.png")
        skin2 = pygame.image.load("skins/skin_tron_m2.png")
        font_color = cyan
    if skin_number == 2:
        skin1 = pygame.image.load("skins/skin_blue_m1.png")
        skin2 = pygame.image.load("skins/skin_blue_m2.png")
        font_color = blue
    if skin_number == 3:
        skin1 = pygame.image.load("skins/skin_green_m1.png")
        skin2 = pygame.image.load("skins/skin_green_m2.png")
        font_color = green
    if skin_number == 4:
        skin1 = pygame.image.load("skins/skin_magenta_m1.png")
        skin2 = pygame.image.load("skins/skin_magenta_m2.png")
        font_color = magenta
    if skin_number == 5:
        skin1 = pygame.image.load("skins/skin_orange_m1.png")
        skin2 = pygame.image.load("skins/skin_orange_m2.png")
        font_color = orange
    if skin_number == 6:
        skin1 = pygame.image.load("skins/skin_red_m1.png")
        skin2 = pygame.image.load("skins/skin_red_m2.png")
        font_color = red
    if skin_number == 7:
        skin1 = pygame.image.load("skins/skin_white_m1.png")
        skin2 = pygame.image.load("skins/skin_white_m2.png")
        font_color = white
    if skin_number == 8:
        skin1 = pygame.image.load("skins/skin_yellow_m1.png")
        skin2 = pygame.image.load("skins/skin_yellow_m2.png")
        font_color = yellow
    global menu
    if screensaver == False:
        current_time = datetime.datetime.now().strftime('%H:%M %d.%m.%Y')
        time_label = font.render(current_time, 1, (font_color))
        if menu == 1:
            # Playback menu: station, title, clock and volume.
            skin = skin1
            screen.blit(skin, (0, 0))
            # 'mpc current' output formatted as 'station: title'.
            lines = subprocess.check_output('mpc current', shell=True).split(":")
            if len(lines) == 1:
                line1 = lines[0]
                line1 = line1[:-1]
                station_label = font.render("Station: no data", 1, (font_color))
            else:
                line1 = lines[0]
                line2 = lines[1]
                line1 = line1[:30]
                station_label = font.render('Station: ' + line1 + '.', 1, (font_color))
            lines = subprocess.check_output('mpc -f [%title%]', shell=True).split("\n")
            line1 = lines[0]
            # When nothing plays, mpc's first line starts with 'volume'.
            if line1.startswith("volume"):
                title_label = font.render("Title: no data! Try with PLAY!", 1, (font_color))
            else:
                line1 = lines[0]
                line2 = lines[1]
                # Remember the title so favorite() can log it.
                global song_title
                song_title = line1
                line1 = line1[:30]
                title_label = font.render(line1 + '.', 1, (font_color))
            title = font.render("Now playing:", 1, (font_color))
            screen.blit(skin, (0, 0))
            screen.blit(station_label, (23, 15))
            screen.blit(title, (23, 40))
            screen.blit(title_label, (23, 60))
            screen.blit(time_label, (160, 90))
            lines = subprocess.check_output('mpc volume', shell=True).split("\n")
            line1 = lines[0]
            volume_label = font.render(line1, 1, (font_color))
            screen.blit(volume_label, (23, 90))
            pygame.display.flip()
        if menu == 2:
            # Utility menu: IP address, CPU temperature and clock.
            skin = skin2
            screen.blit(skin, (0, 0))
            #get and display ip
            ip = subprocess.check_output('hostname -I', shell=True).strip()
            ip_label = font.render('IP: ' + ip, 1, (font_color))
            screen.blit(ip_label, (23, 15))
            #get and display cpu temp
            cpu_temp = subprocess.check_output('/opt/vc/bin/vcgencmd measure_temp', shell=True).strip()
            temp = font.render('cpu ' + cpu_temp, 1, (font_color))
            screen.blit(temp, (23, 35))
            #get current time
            screen.blit(time_label, (90, 90))
            pygame.display.flip()
    if screensaver == True:
        # Screensaver: blank (white) screen only.
        screen.fill(white)
        pygame.display.flip()
minutes = 0
# USEREVENT+1 fires every 60000 ms (one minute); used as screensaver timer.
pygame.time.set_timer(USEREVENT +1, 60000)
subprocess.call('mpc play' , shell=True)
update_screen()
running = True
while running:
    for event in pygame.event.get():
        if event.type == USEREVENT +1:
            minutes += 1
        if event.type == pygame.QUIT:
            print("Quit radio")
            pygame.quit()
            sys.exit()
        if event.type == pygame.KEYDOWN:
            if event.key == K_ESCAPE:
                print("Quit radio")
                pygame.quit()
                sys.exit()
        #if screensaver is enabled and the screen was touched,
        #just disable screensaver, reset timer and update screen
        #no button state will be checked
        if event.type == pygame.MOUSEBUTTONDOWN and screensaver == True:
            minutes = 0
            # Turn the backlight back on.
            subprocess.call('echo 0 | sudo tee /sys/class/backlight/*/bl_power' , shell=True)
            screensaver = False
            update_screen()
            break
        #if screen was touched and screensaver is disabled,
        #get position of touched button, call on_touch(), reset timer and update screen
        if event.type == pygame.MOUSEBUTTONDOWN and screensaver == False:
            pos = (pygame.mouse.get_pos() [0], pygame.mouse.get_pos() [1])
            minutes = 0
            on_touch()
            update_screen()
        #enable screensaver on timer overflow
        if minutes > screensaver_timer:
            screensaver = True
            # Turn the backlight off to save power.
            subprocess.call('echo 1 | sudo tee /sys/class/backlight/*/bl_power' , shell=True)
            update_screen()
    update_screen()
    time.sleep(0.1)
|
# coding:utf8
"""
Created on 2013-7-10
memcached client
@author: lan (www.9miao.com)
"""
import memcache
class MemConnError(Exception):
    """Raised when the memcached backend cannot be reached."""

    def __str__(self):
        return "memcache connect error"
class MemClient:
    """Thin wrapper over ``memcache.Client`` that namespaces every key with
    a hostname prefix and reconnects once when a write fails."""

    def __init__(self, timeout=0):
        # ``timeout`` is accepted for interface compatibility but unused.
        self._hostname = ""
        self._urls = []
        self.connection = None

    def connect(self, urls, hostname):
        """Connect to the memcached servers in *urls*.

        Raises ``MemConnError`` when a probe write fails.
        """
        self._hostname = hostname
        self._urls = urls
        self.connection = memcache.Client(self._urls, debug=0)
        if not self.connection.set("__testkey__", 1):
            raise MemConnError()

    def produceKey(self, keyname):
        """Return *keyname* namespaced as ``<hostname>:<keyname>``.

        FIX: the original executed ``raise "type error"`` for non-str keys,
        which is itself a TypeError (string exceptions are invalid); raise
        a proper ``TypeError`` instead.
        """
        if isinstance(keyname, str):
            return "".join([self._hostname, ":", keyname])
        raise TypeError("keyname must be str, got %r" % type(keyname))

    def get(self, key):
        """Fetch one namespaced key."""
        key = self.produceKey(key)
        return self.connection.get(key)

    def get_multi(self, keys):
        """Fetch several keys, returning a dict keyed by the bare names."""
        keynamelist = [self.produceKey(keyname) for keyname in keys]
        olddict = self.connection.get_multi(keynamelist)
        # Strip the '<hostname>:' prefix off the returned keys.
        newdict = dict(
            list(
                zip(
                    [keyname.split(":")[-1] for keyname in list(olddict.keys())],
                    list(olddict.values()),
                )
            )
        )
        return newdict

    def set(self, keyname, value):
        """Store one value; on failure reconnect and retry once."""
        key = self.produceKey(keyname)
        result = self.connection.set(key, value)
        if not result:  # write failed
            self.connect(self._urls, self._hostname)  # reconnect
            return self.connection.set(key, value)
        return result

    def set_multi(self, mapping):
        """Store several values; retry once when any key failed.

        ``set_multi`` returns the list of keys that failed, so a truthy
        result means at least one write did not land.
        """
        newmapping = dict(
            list(
                zip(
                    [self.produceKey(keyname) for keyname in list(mapping.keys())],
                    list(mapping.values()),
                )
            )
        )
        result = self.connection.set_multi(newmapping)
        if result:  # some keys failed to store
            self.connect(self._urls, self._hostname)  # reconnect
            return self.connection.set_multi(newmapping)
        return result

    def incr(self, key, delta):
        """Atomically increment a namespaced key by *delta*."""
        key = self.produceKey(key)
        return self.connection.incr(key, delta)

    def delete(self, key):
        """Delete one namespaced key."""
        key = self.produceKey(key)
        return self.connection.delete(key)

    def delete_multi(self, keys):
        """Delete several namespaced keys."""
        keys = [self.produceKey(key) for key in keys]
        return self.connection.delete_multi(keys)

    def flush_all(self):
        """Flush the entire memcached backend."""
        self.connection.flush_all()
mclient = MemClient()
|
""" Really basic gatttool (BlueZ) wrapper
Based on https://github.com/stratosinc/pygatt
Part of https://github.com/ALPSquid/thebutton-monitor
"""
import pexpect
class connect:
"""Use to initiate a connection to a GATT device
Example: bt_device = gatt.connect('AB:CD:EF:01:23:45')
"""
def __init__(self, address):
self.address = "" # Connected bluetooth device address. Assigned from connect()
self.conn = None # pexpect.spawn() object for the gatttool command
self.connect(address)
def connect(self, address, adapter="hci0"):
"""Open an interactive connection to a bluetooth device
:param address: Bluetooth device address
:param adapter: Bluetooth adapter to use. Default: hci0
"""
if self.conn is None:
self.address = address
cmd = " ".join(["gatttool", "-b", address, "-i", adapter, "-I"])
self.conn = pexpect.spawn(cmd)
self.conn.expect(r"\[LE\]>", timeout=1)
self.conn.sendline("connect")
try:
self.conn.expect(r"Connection successful", timeout=10)
print(("Connected to " + address))
except pexpect.TIMEOUT:
raise Exception("Unable to connect to device")
else:
raise Exception(
"Device already connected! Call disconnect before attempting a new connection"
)
def reconnect(self):
"""Check and attempt to reconnect to device if necessary
:return: True if a reconnect was performed
"""
try:
self.conn.expect(r"Disconnected", timeout=0.1)
self.conn.sendline("connect")
try:
self.conn.expect(r"Connection successful", timeout=10)
print(("Reconnected to device: " + self.address))
except pexpect.TIMEOUT:
# Continue and try to reconnect next time
print(("Lost connection to device: " + self.address))
return True
except pexpect.TIMEOUT:
# No need to reconnect
return False
def disconnect(self):
"""Disconnect from current bluetooth device"""
if self.conn is not None:
self.conn.sendline("exit")
self.conn = None
print(("Disconnected from " + self.address))
def write(self, handle, value):
"""Write a value to the specified handle
:param handle: address to write to. e.g. 0016
:param value: value to write
"""
self.send(" ".join(["char-write-cmd", "0x" + handle, value]))
def read(self, handle):
"""Read from the specified handle
:param handle: address to read from. e.g. 0016
"""
self.send("char-read-hnd 0x" + handle, r"descriptor: .* \r", timeout=5)
val = " ".join(self.conn.after.decode("utf-8").split()[1:])
return val
def send(self, cmd, expect=None, timeout=5):
    """Send command to device, attempting a reconnect if disconnected.

    :param cmd: Command to send
    :param expect: Optional pattern to wait for after sending
    :param timeout: Seconds to wait for `expect` before suspecting a drop
    :raises pexpect.TIMEOUT: if `expect` is given and the response does
        not arrive even after a successful reconnect and resend.
    """
    self.conn.sendline(cmd)
    if expect is None:
        # Fire-and-forget command: just check the link and replay the
        # command once if it had to be re-established.
        if self.reconnect():
            self.conn.sendline(cmd)
        return
    try:
        self.conn.expect(expect, timeout)
    except pexpect.TIMEOUT:
        # The timeout may be caused by a dropped connection. If so, replay
        # the command and wait for the response again so callers that read
        # conn.after afterwards (e.g. read()) see fresh data instead of a
        # stale match. (The original code resent without re-waiting.)
        if self.reconnect():
            self.conn.sendline(cmd)
            self.conn.expect(expect, timeout)
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
    """Field-option tweaks for puput's blog models.

    Only verbose names, help texts and field options are altered on
    BlogPage, Category and EntryPage; no data migration is performed.
    """
    # Must run after the app's initial schema migration.
    dependencies = [
        ("puput", "0001_initial"),
    ]
    operations = [
        # BlogPage.description: short, optional tagline under the blog title.
        migrations.AlterField(
            model_name="blogpage",
            name="description",
            field=models.CharField(
                max_length=255,
                help_text="The blog description that will appear under the title.",
                verbose_name="Description",
                blank=True,
            ),
        ),
        # Category.description: optional free text, up to 500 chars.
        migrations.AlterField(
            model_name="category",
            name="description",
            field=models.CharField(
                max_length=500, verbose_name="Description", blank=True
            ),
        ),
        # Category.name: unique within the table.
        migrations.AlterField(
            model_name="category",
            name="name",
            field=models.CharField(
                max_length=80, unique=True, verbose_name="Category name"
            ),
        ),
        # Category.parent: optional self-reference forming a category tree.
        # NOTE(review): no on_delete argument -- implies Django < 2.0;
        # confirm the project's Django version before reusing this file.
        migrations.AlterField(
            model_name="category",
            name="parent",
            field=models.ForeignKey(
                to="puput.Category",
                related_name="children",
                null=True,
                verbose_name="Parent category",
                blank=True,
            ),
        ),
        # EntryPage.excerpt: optional rich-text teaser; body text is
        # truncated for display when this is left empty.
        migrations.AlterField(
            model_name="entrypage",
            name="excerpt",
            field=wagtail.wagtailcore.fields.RichTextField(
                help_text="Entry excerpt to be displayed on entries list. If this field is not filled, a truncate version of body text will be used.",
                verbose_name="excerpt",
                blank=True,
            ),
        ),
    ]
|
"""
==================================
Map two radars to a Cartesian grid
==================================
Map the reflectivity field of two nearby ARM XSARP radars from antenna
coordinates to a Cartesian grid.
"""
print(__doc__)
# Author: Jonathan J. Helmus (jhelmus@anl.gov)
# License: BSD 3 clause
import matplotlib.pyplot as plt
import pyart
# read in the data from both XSAPR radars
XSAPR_SW_FILE = "swx_20120520_0641.nc"
XSAPR_SE_FILE = "sex_20120520_0641.nc"
radar_sw = pyart.io.read_cfradial(XSAPR_SW_FILE)
radar_se = pyart.io.read_cfradial(XSAPR_SE_FILE)
# filter out gates with reflectivity > 100 from both radars
gatefilter_se = pyart.filters.GateFilter(radar_se)
gatefilter_se.exclude_above("corrected_reflectivity_horizontal", 100)
gatefilter_sw = pyart.filters.GateFilter(radar_sw)
gatefilter_sw.exclude_above("corrected_reflectivity_horizontal", 100)
# perform Cartesian mapping, limit to the reflectivity field.
grid = pyart.map.grid_from_radars(
(radar_se, radar_sw),
gatefilters=(gatefilter_se, gatefilter_sw),
grid_shape=(1, 201, 201),
grid_limits=((1000, 1000), (-50000, 40000), (-60000, 40000)),
grid_origin=(36.57861, -97.363611),
fields=["corrected_reflectivity_horizontal"],
)
# create the plot
fig = plt.figure()
ax = fig.add_subplot(111)
ax.imshow(
grid.fields["corrected_reflectivity_horizontal"]["data"][0],
origin="lower",
extent=(-60, 40, -50, 40),
vmin=0,
vmax=48,
)
plt.show()
|
"""
pyart.exceptions
================
Custom Py-ART exceptions.
.. autosummary::
:toctree: generated/
MissingOptionalDependency
DeprecatedAttribute
DeprecatedFunctionName
_deprecated_alias
"""
import warnings
class MissingOptionalDependency(Exception):
    """Exception raised when an optional dependency is needed but not found."""
    pass
# Issued via warnings.warn when a renamed/moved attribute is accessed.
class DeprecatedAttribute(DeprecationWarning):
    """Warning category for an attribute which has been renamed/moved."""
    pass
# Issued by the _deprecated_alias wrapper when an old name is called.
class DeprecatedFunctionName(DeprecationWarning):
    """Warning category for a function which has been renamed/moved."""
    pass
def _deprecated_alias(func, old_name, new_name):
    """
    Create an alias to a renamed or moved function.

    Parameters
    ----------
    func : func
        The function which has been renamed or moved.
    old_name, new_name : str
        Name of the function before and after it was moved or renamed
        (with namespace if changed).

    Returns
    -------
    wrapper : func
        A wrapper version of func, which issues a DeprecatedFunctionName
        warning when called.
    """
    # Local import keeps this module's top-level imports unchanged.
    from functools import wraps

    # functools.wraps preserves func's __name__/__doc__ on the alias so
    # introspection and Sphinx autodoc still work on the old name.
    @wraps(func)
    def wrapper(*args, **kwargs):
        warnings.warn(
            (
                "{0} has been deprecated and will be removed in future "
                + "versions of Py-ART, please use {1}. "
            ).format(old_name, new_name),
            category=DeprecatedFunctionName,
            stacklevel=2,  # point the warning at the caller, not the wrapper
        )
        return func(*args, **kwargs)

    return wrapper
|
'\npyart.io.nexrad_archive\n=======================\n\nFunctions for reading NEXRAD Level II Archive files.\n\n.. autosummary::\n :toctree: generated/\n :template: dev_template.rst\n\n _NEXRADLevel2StagedField\n\n.. autosummary::\n :toctree: generated/\n\n read_nexrad_archive\n _find_range_params\n _find_scans_to_interp\n _interpolate_scan\n\n'
import warnings
import numpy as np
from ..config import FileMetadata, get_fillvalue
from ..core.radar import Radar
from .common import make_time_unit_str, _test_arguments, prepare_for_read
from .nexrad_level2 import NEXRADLevel2File
from ..lazydict import LazyLoadDict
from .nexrad_common import get_nexrad_location
def read_nexrad_archive(filename, field_names=None, additional_metadata=None, file_field_names=False, exclude_fields=None, delay_field_loading=False, station=None, scans=None, linear_interp=True, **kwargs):
    "\n Read a NEXRAD Level 2 Archive file.\n\n Parameters\n ----------\n filename : str\n Filename of NEXRAD Level 2 Archive file. The files hosted by\n at the NOAA National Climate Data Center [1]_ as well as on the\n UCAR THREDDS Data Server [2]_ have been tested. Other NEXRAD\n Level 2 Archive files may or may not work. Message type 1 file\n and message type 31 files are supported.\n field_names : dict, optional\n Dictionary mapping NEXRAD moments to radar field names. If a\n data type found in the file does not appear in this dictionary or has\n a value of None it will not be placed in the radar.fields dictionary.\n A value of None, the default, will use the mapping defined in the\n metadata configuration file.\n additional_metadata : dict of dicts, optional\n Dictionary of dictionaries to retrieve metadata from during this read.\n This metadata is not used during any successive file reads unless\n explicitly included. A value of None, the default, will not\n introduct any addition metadata and the file specific or default\n metadata as specified by the metadata configuration file will be used.\n file_field_names : bool, optional\n True to use the NEXRAD field names for the field names. If this\n case the field_names parameter is ignored. The field dictionary will\n likely only have a 'data' key, unless the fields are defined in\n `additional_metadata`.\n exclude_fields : list or None, optional\n List of fields to exclude from the radar object. This is applied\n after the `file_field_names` and `field_names` parameters.\n delay_field_loading : bool, optional\n True to delay loading of field data from the file until the 'data'\n key in a particular field dictionary is accessed. In this case\n the field attribute of the returned Radar object will contain\n LazyLoadDict objects not dict objects.\n station : str or None, optional\n Four letter ICAO name of the NEXRAD station used to determine the\n location in the returned radar object. This parameter is only\n used when the location is not contained in the file, which occur\n in older NEXRAD message 1 files.\n scans : list or None, optional\n Read only specified scans from the file. None (the default) will read\n all scans.\n linear_interp : bool, optional\n True (the default) to perform linear interpolation between valid pairs\n of gates in low resolution rays in files mixed resolution rays.\n False will perform a nearest neighbor interpolation. This parameter is\n not used if the resolution of all rays in the file or requested sweeps\n is constant.\n\n Returns\n -------\n radar : Radar\n Radar object containing all moments and sweeps/cuts in the volume.\n Gates not collected are masked in the field data.\n\n References\n ----------\n .. [1] http://www.ncdc.noaa.gov/\n .. [2] http://thredds.ucar.edu/thredds/catalog.html\n\n "
    # NOTE(review): the right-hand sides below have been stripped to bare
    # parenthesized expressions by some decompilation/minification step --
    # the original call targets (FileMetadata, NEXRADLevel2File, np.*,
    # nfile.*, helper functions, etc.) are missing, so this function is
    # syntactically valid Python but NOT runnable as written. The structure
    # is annotated; restore the calls from upstream Py-ART before use.
    (kwargs)  # presumably _test_arguments(kwargs) -- TODO confirm
    # Presumably FileMetadata('nexrad_archive', ...) -- TODO confirm.
    filemetadata = ('nexrad_archive', field_names, additional_metadata, file_field_names, exclude_fields)
    nfile = ((filename))  # presumably NEXRADLevel2File(prepare_for_read(filename))
    scan_info = (scans)
    # time coordinate
    time = ('time')
    (time_start, _time) = (scans)
    time['data'] = _time
    time['units'] = (time_start)
    # range coordinate -- gate distances common to all requested scans
    _range = ('range')
    (first_gate, gate_spacing, last_gate) = (scan_info, filemetadata)  # presumably _find_range_params(...)
    _range['data'] = (first_gate, last_gate, gate_spacing, 'float32')
    _range['meters_to_center_of_first_gate'] = (first_gate)
    _range['meters_between_gates'] = (gate_spacing)
    # metadata
    metadata = ('metadata')
    metadata['original_container'] = 'NEXRAD Level II'
    scan_type = 'ppi'
    # radar location: older message-1 files carry no location, so an
    # explicit station lookup is used in that case
    latitude = ('latitude')
    longitude = ('longitude')
    altitude = ('altitude')
    if ((nfile._msg_type == '1') and (station is not None)):
        (lat, lon, alt) = (station)  # presumably get_nexrad_location(station)
    else:
        (lat, lon, alt) = ()  # presumably nfile.location()
    latitude['data'] = ([lat])
    longitude['data'] = ([lon])
    altitude['data'] = ([alt])
    # sweep bookkeeping
    sweep_number = ('sweep_number')
    sweep_mode = ('sweep_mode')
    sweep_start_ray_index = ('sweep_start_ray_index')
    sweep_end_ray_index = ('sweep_end_ray_index')
    if (scans is None):
        nsweeps = (nfile.nscans)
    else:
        nsweeps = (scans)
    sweep_number['data'] = (nsweeps)
    sweep_mode['data'] = ((nsweeps * ['azimuth_surveillance']))
    rays_per_scan = [s['nrays'] for s in scan_info]
    sweep_end_ray_index['data'] = ((rays_per_scan) - 1)  # presumably np.cumsum(...) - 1
    (0, 0)  # presumably rays_per_scan.insert(0, 0)
    sweep_start_ray_index['data'] = (rays_per_scan[:(- 1)])  # presumably np.cumsum(...)
    # per-ray azimuth/elevation and per-sweep fixed angles
    azimuth = ('azimuth')
    elevation = ('elevation')
    fixed_angle = ('fixed_angle')
    azimuth['data'] = (scans)
    elevation['data'] = ('float32')
    fixed_angle['data'] = (scans)
    # moment (field) data, optionally lazy-loaded or interpolated to the
    # common gate spacing
    max_ngates = (_range['data'])
    available_moments = ([m for scan in scan_info for m in scan['moments']])
    interpolate = (scan_info, first_gate, gate_spacing, filemetadata)  # presumably _find_scans_to_interp(...)
    fields = {}
    for moment in available_moments:
        field_name = (moment)
        if (field_name is None):
            continue
        dic = (field_name)
        dic['_FillValue'] = ()
        if (delay_field_loading and (moment not in interpolate)):
            # lazy path: stage a loader object instead of reading now
            dic = (dic)  # presumably LazyLoadDict(dic)
            data_call = (nfile, moment, max_ngates, scans)  # presumably _NEXRADLevel2StagedField(...)
            ('data', data_call)
        else:
            mdata = (moment, max_ngates)
            if (moment in interpolate):
                interp_scans = interpolate[moment]
                (('Gate spacing is not constant, interpolating data in ' + ('scans %s for moment %s.' % (interp_scans, moment))), UserWarning)
                for scan in interp_scans:
                    idx = (moment)
                    moment_ngates = scan_info[scan]['ngates'][idx]
                    start = sweep_start_ray_index['data'][scan]
                    end = sweep_end_ray_index['data'][scan]
                    (mdata, start, end, moment_ngates, linear_interp)  # presumably _interpolate_scan(...)
            dic['data'] = mdata
        fields[field_name] = dic
    # instrument parameters
    nyquist_velocity = ('nyquist_velocity')
    unambiguous_range = ('unambiguous_range')
    nyquist_velocity['data'] = ('float32')
    unambiguous_range['data'] = ('float32')
    instrument_parameters = {'unambiguous_range': unambiguous_range, 'nyquist_velocity': nyquist_velocity}
    ()  # presumably nfile.close()
    # NOTE(review): originally wrapped in a Radar(...) construction.
    return (time, _range, fields, metadata, scan_type, latitude, longitude, altitude, sweep_number, sweep_mode, fixed_angle, sweep_start_ray_index, sweep_end_ray_index, azimuth, elevation)
def _find_range_params(scan_info, filemetadata):
'Return range parameters, first_gate, gate_spacing, last_gate.'
min_first_gate = 999999
min_gate_spacing = 999999
max_last_gate = 0
for scan_params in scan_info:
ngates = scan_params['ngates'][0]
for (i, moment) in (scan_params['moments']):
if ((moment) is None):
continue
first_gate = scan_params['first_gate'][i]
gate_spacing = scan_params['gate_spacing'][i]
last_gate = (first_gate + (gate_spacing * (ngates - 0.5)))
min_first_gate = (min_first_gate, first_gate)
min_gate_spacing = (min_gate_spacing, gate_spacing)
max_last_gate = (max_last_gate, last_gate)
return (min_first_gate, min_gate_spacing, max_last_gate)
def _find_scans_to_interp(scan_info, first_gate, gate_spacing, filemetadata):
'Return a dict indicating what moments/scans need interpolation.'
moments = ([m for scan in scan_info for m in scan['moments']])
interpolate = ([(moment, []) for moment in moments])
for (scan_num, scan) in (scan_info):
for moment in moments:
if (moment not in scan['moments']):
continue
if ((moment) is None):
continue
index = (moment)
first = scan['first_gate'][index]
spacing = scan['gate_spacing'][index]
if ((first != first_gate) or (spacing != gate_spacing)):
(scan_num)
if (not (spacing == (gate_spacing * 4))):
raise ()
if (not ((first_gate + (1.5 * gate_spacing)) == first)):
raise ()
interpolate = ([(k, v) for (k, v) in (()) if ((v) != 0)])
return interpolate
def _interpolate_scan(mdata, start, end, moment_ngates, linear_interp=True):
'Interpolate a single NEXRAD moment scan from 1000 m to 250 m.'
for ray_num in (start, (end + 1)):
ray = ()
interp_ngates = (4 * moment_ngates)
ray[:interp_ngates] = (ray[:moment_ngates], 4)
if linear_interp:
for i in (2, (interp_ngates - 4), 4):
gate_val = ray[i]
next_val = ray[(i + 4)]
if ((gate_val) or (next_val)):
continue
delta = ((next_val - gate_val) / 4.0)
ray[(i + 0)] = (gate_val + (delta * 0.5))
ray[(i + 1)] = (gate_val + (delta * 1.5))
ray[(i + 2)] = (gate_val + (delta * 2.5))
ray[(i + 3)] = (gate_val + (delta * 3.5))
mdata[ray_num] = ray[:]
class _NEXRADLevel2StagedField(object):
'\n A class to facilitate on demand loading of field data from a Level 2 file.\n '
def __init__(self, nfile, moment, max_ngates, scans):
'initialize.'
self.nfile = nfile
self.moment = moment
self.max_ngates = max_ngates
self.scans = scans
def __call__(self):
'Return the array containing the field data.'
return (self.moment, self.max_ngates) |
"""
pyart.io.uf
===========
Reading of Universal format (UF) files
.. autosummary::
:toctree: generated/
read_uf
_get_instrument_parameters
"""
import warnings
import numpy as np
from netCDF4 import date2num
from ..config import FileMetadata, get_fillvalue
from ..core.radar import Radar
from .common import make_time_unit_str, _test_arguments, prepare_for_read
from .uffile import UFFile
_LIGHT_SPEED = 2.99792458e8  # speed of light in meters per second

# Mapping from the UF mandatory-header sweep_mode integer to a scan type.
_UF_SWEEP_MODES = {
    0: "calibration",
    1: "ppi",
    2: "coplane",
    3: "rhi",
    4: "vpt",
    5: "target",
    6: "manual",
    7: "idle",
}
# Mapping from scan type to the CF/Radial sweep_mode string.
_SWEEP_MODE_STR = {
    "calibration": "calibration",
    "ppi": "azimuth_surveillance",
    "coplane": "coplane",
    "rhi": "rhi",
    "vpt": "vertical_pointing",
    "target": "pointing",
    "manual": "manual",
    "idle": "idle",
}
def read_uf(
    filename,
    field_names=None,
    additional_metadata=None,
    file_field_names=False,
    exclude_fields=None,
    delay_field_loading=False,
    **kwargs
):
    """
    Read a UF File.

    Parameters
    ----------
    filename : str or file-like
        Name of Universal format file to read data from.
    field_names : dict, optional
        Dictionary mapping UF data type names to radar field names. If a
        data type found in the file does not appear in this dictionary or has
        a value of None it will not be placed in the radar.fields dictionary.
        A value of None, the default, will use the mapping defined in the
        Py-ART configuration file.
    additional_metadata : dict of dicts, optional
        Dictionary of dictionaries to retrieve metadata from during this read.
        This metadata is not used during any successive file reads unless
        explicitly included. A value of None, the default, will not
        introduce any addition metadata and the file specific or default
        metadata as specified by the Py-ART configuration file will be used.
    file_field_names : bool, optional
        True to force the use of the field names from the file in which
        case the `field_names` parameter is ignored. False will use to
        `field_names` parameter to rename fields.
    exclude_fields : list or None, optional
        List of fields to exclude from the radar object. This is applied
        after the `file_field_names` and `field_names` parameters.
    delay_field_loading : bool
        This option is not implemented in the function but included for
        compatibility.

    Returns
    -------
    radar : Radar
        Radar object.
    """
    # test for non empty kwargs
    _test_arguments(kwargs)
    # create metadata retrieval object
    filemetadata = FileMetadata(
        "uf", field_names, additional_metadata, file_field_names, exclude_fields
    )
    # Open UF file and get handle
    ufile = UFFile(prepare_for_read(filename))
    first_ray = ufile.rays[0]
    # time
    dts = ufile.get_datetimes()
    units = make_time_unit_str(min(dts))
    time = filemetadata("time")
    time["units"] = units
    # NOTE(review): float32 seconds-since-epoch can lose sub-second
    # precision on long volumes -- confirm acceptable for this format.
    time["data"] = date2num(dts, units).astype("float32")
    # range
    _range = filemetadata("range")
    # assume that the number of gates and spacing from the first ray is
    # representative of the entire volume
    field_header = first_ray.field_headers[0]
    ngates = field_header["nbins"]
    step = field_header["range_spacing_m"]
    # this gives distances to the center of each gate, remove step/2 for start
    start = (
        field_header["range_start_km"] * 1000.0
        + field_header["range_start_m"]
        + step / 2.0
    )
    _range["data"] = np.arange(ngates, dtype="float32") * step + start
    _range["meters_to_center_of_first_gate"] = start
    _range["meters_between_gates"] = step
    # latitude, longitude and altitude
    latitude = filemetadata("latitude")
    longitude = filemetadata("longitude")
    altitude = filemetadata("altitude")
    lat, lon, height = first_ray.get_location()
    latitude["data"] = np.array([lat], dtype="float64")
    longitude["data"] = np.array([lon], dtype="float64")
    altitude["data"] = np.array([height], dtype="float64")
    # metadata
    metadata = filemetadata("metadata")
    metadata["original_container"] = "UF"
    metadata["site_name"] = first_ray.mandatory_header["site_name"]
    metadata["radar_name"] = first_ray.mandatory_header["radar_name"]
    # sweep_start_ray_index, sweep_end_ray_index
    sweep_start_ray_index = filemetadata("sweep_start_ray_index")
    sweep_end_ray_index = filemetadata("sweep_end_ray_index")
    sweep_start_ray_index["data"] = ufile.first_ray_in_sweep
    sweep_end_ray_index["data"] = ufile.last_ray_in_sweep
    # sweep number
    sweep_number = filemetadata("sweep_number")
    sweep_number["data"] = np.arange(ufile.nsweeps, dtype="int32")
    # sweep_type
    scan_type = _UF_SWEEP_MODES[first_ray.mandatory_header["sweep_mode"]]
    # sweep_mode
    sweep_mode = filemetadata("sweep_mode")
    # dtype="S" stores the CF sweep_mode strings as byte strings
    sweep_mode["data"] = np.array(
        ufile.nsweeps * [_SWEEP_MODE_STR[scan_type]], dtype="S"
    )
    # elevation
    elevation = filemetadata("elevation")
    elevation["data"] = ufile.get_elevations()
    # azimuth
    azimuth = filemetadata("azimuth")
    azimuth["data"] = ufile.get_azimuths()
    # fixed_angle
    fixed_angle = filemetadata("fixed_angle")
    fixed_angle["data"] = ufile.get_sweep_fixed_angles()
    # fields
    fields = {}
    for uf_field_number, uf_field_dic in enumerate(first_ray.field_positions):
        uf_field_name = uf_field_dic["data_type"].decode("ascii")
        field_name = filemetadata.get_field_name(uf_field_name)
        if field_name is None:
            # data type not mapped to a field, skip it
            continue
        field_dic = filemetadata(field_name)
        field_dic["data"] = ufile.get_field_data(uf_field_number)
        field_dic["_FillValue"] = get_fillvalue()
        fields[field_name] = field_dic
    # instrument_parameters
    instrument_parameters = _get_instrument_parameters(ufile, filemetadata)
    # scan rate
    scan_rate = filemetadata("scan_rate")
    scan_rate["data"] = ufile.get_sweep_rates()
    ufile.close()
    return Radar(
        time,
        _range,
        fields,
        metadata,
        scan_type,
        latitude,
        longitude,
        altitude,
        sweep_number,
        sweep_mode,
        fixed_angle,
        sweep_start_ray_index,
        sweep_end_ray_index,
        azimuth,
        elevation,
        scan_rate=scan_rate,
        instrument_parameters=instrument_parameters,
    )
def _get_instrument_parameters(ufile, filemetadata):
    """Return a dictionary containing instrument parameters.

    Parameters
    ----------
    ufile : UFFile
        Open UF file object to extract parameters from.
    filemetadata : FileMetadata
        Metadata retrieval object used to build each parameter dict.
    """
    # pulse width
    pulse_width = filemetadata("pulse_width")
    pulse_width["data"] = ufile.get_pulse_widths() / _LIGHT_SPEED  # m->sec
    # assume that the parameters in the first ray represent the beam widths,
    # bandwidth and frequency in the entire volume
    first_ray = ufile.rays[0]
    field_header = first_ray.field_headers[0]
    # NOTE(review): the /64, /16 and *1e6 scalings follow the UF field
    # header encoding (values stored in 1/64 degree, 1/16 MHz, cm/64).
    beam_width_h = field_header["beam_width_h"] / 64.0
    beam_width_v = field_header["beam_width_v"] / 64.0
    bandwidth = field_header["bandwidth"] / 16.0 * 1.0e6
    wavelength_cm = field_header["wavelength_cm"] / 64.0
    if wavelength_cm == 0:
        warnings.warn("Invalid wavelength, frequency set to default value.")
        frequency_hz = 9999.0
    else:
        # frequency = c / wavelength
        frequency_hz = _LIGHT_SPEED / (wavelength_cm / 100.0)
    # radar_beam_width_h
    radar_beam_width_h = filemetadata("radar_beam_width_h")
    radar_beam_width_h["data"] = np.array([beam_width_h], dtype="float32")
    # radar_beam_width_v
    # BUG FIX: the metadata key was previously the typo "radar_beam_width_w",
    # which retrieved the wrong (default) metadata for the vertical width.
    radar_beam_width_v = filemetadata("radar_beam_width_v")
    radar_beam_width_v["data"] = np.array([beam_width_v], dtype="float32")
    # radar_receiver_bandwidth
    radar_receiver_bandwidth = filemetadata("radar_receiver_bandwidth")
    radar_receiver_bandwidth["data"] = np.array([bandwidth], dtype="float32")
    # polarization_mode
    polarization_mode = filemetadata("polarization_mode")
    polarization_mode["data"] = ufile.get_sweep_polarizations()
    # frequency
    frequency = filemetadata("frequency")
    frequency["data"] = np.array([frequency_hz], dtype="float32")
    # prt
    prt = filemetadata("prt")
    prt["data"] = ufile.get_prts() / 1e6  # us->sec
    instrument_parameters = {
        "pulse_width": pulse_width,
        "radar_beam_width_h": radar_beam_width_h,
        "radar_beam_width_v": radar_beam_width_v,
        "radar_receiver_bandwidth": radar_receiver_bandwidth,
        "polarization_mode": polarization_mode,
        "frequency": frequency,
        "prt": prt,
    }
    # nyquist velocity if defined
    nyquist_velocity = filemetadata("nyquist_velocity")
    nyquist_velocity["data"] = ufile.get_nyquists()
    if nyquist_velocity["data"] is not None:
        instrument_parameters["nyquist_velocity"] = nyquist_velocity
    return instrument_parameters
|
#! /usr/bin/env python
"""
Make a small netCDF CF/Radial file containing a single RHI scan.
Single field and scan is converted from sigmet file XSW110520113537.RAW7HHL
"""
import pyart

radar = pyart.io.read_rsl("XSW110520113537.RAW7HHL")
# Subsample: every 18th ray of the first 713, every 12th gate, first sweep.
time_slice = slice(None, 713, 18)
range_slice = slice(None, None, 12)
sweep_slice = slice(None, 1)
# remove all but the reflectivity_horizontal fields
rf_field = radar.fields["reflectivity"]
rf_data = rf_field["data"]
rf_field["data"] = rf_data[time_slice, range_slice]
radar.fields = {"reflectivity_horizontal": rf_field}
# Updated dimensions after subsampling.
radar.nsweeps = 1
# BUG FIX: was "radar.nray", which only created an unused attribute; the
# Radar object's ray-count attribute is "nrays".
radar.nrays = 40
radar.ngates = 45
# truncate the range based variables
radar.range["data"] = radar.range["data"][range_slice]
# truncate the time based variables
radar.time["data"] = radar.time["data"][time_slice]
radar.azimuth["data"] = radar.azimuth["data"][time_slice]
radar.elevation["data"] = radar.elevation["data"][time_slice]
radar.instrument_parameters["prt"]["data"] = radar.instrument_parameters["prt"]["data"][
    time_slice
]
radar.instrument_parameters["unambiguous_range"]["data"] = radar.instrument_parameters[
    "unambiguous_range"
]["data"][time_slice]
radar.instrument_parameters["nyquist_velocity"]["data"] = radar.instrument_parameters[
    "nyquist_velocity"
]["data"][time_slice]
# truncate the sweep based variables
radar.sweep_number["data"] = radar.sweep_number["data"][sweep_slice]
radar.fixed_angle["data"] = radar.fixed_angle["data"][sweep_slice]
radar.sweep_start_ray_index["data"] = radar.sweep_start_ray_index["data"][sweep_slice]
radar.sweep_end_ray_index["data"] = radar.sweep_end_ray_index["data"][sweep_slice]
# last ray index must match the truncated ray count (40 rays -> index 39)
radar.sweep_end_ray_index["data"][0] = 39
radar.sweep_mode["data"] = radar.sweep_mode["data"][sweep_slice]
radar.sweep_number["data"] = radar.sweep_number["data"][sweep_slice]
radar.instrument_parameters["prt_mode"]["data"] = radar.instrument_parameters[
    "prt_mode"
]["data"][sweep_slice]
# adjust metadata
radar.metadata = {
    "Conventions": "CF/Radial instrument_parameters",
    "version": "1.2",
    "title": "Py-ART Example RHI CF/Radial file",
    "institution": (
        "United States Department of Energy - Atmospheric "
        "Radiation Measurement (ARM) program"
    ),
    "references": "none",
    "source": "ARM SGP XSAPR Radar",
    "history": "created by jhelmus on evs348532 at 2013-05-22T12:34:56",
    "comment": "none",
    "instrument_name": "xsapr-sgp",
}
pyart.io.write_cfradial("example_cfradial_rhi.nc", radar)
|
"""
pyart.util.radar_utils
======================
Functions for working radar instances.
.. autosummary::
:toctree: generated/
is_vpt
to_vpt
join_radar
"""
import copy
import numpy as np
from netCDF4 import num2date, date2num
from . import datetime_utils
def is_vpt(radar, offset=0.5):
    """
    Determine if a Radar appears to be a vertical pointing scan.

    This function only verifies that the object is a vertical pointing scan,
    use the :py:func:`to_vpt` function to convert the radar to a vpt scan
    if this function returns True.

    Parameters
    ----------
    radar : Radar
        Radar object to determine if
    offset : float
        Maximum offset of the elevation from 90 degrees to still consider
        to be vertically pointing.

    Returns
    -------
    flag : bool
        True if the radar appear to be verticle pointing, False if not.
    """
    # Vertical pointing when every ray's elevation lies strictly within
    # `offset` degrees of vertical (90 degrees).
    deviation = np.abs(radar.elevation["data"] - 90.0)
    return np.all(deviation < offset)
def to_vpt(radar, single_scan=True):
    """
    Convert an existing Radar object to represent a vertical pointing scan.

    This function does not verify that the Radar object contains a vertical
    pointing scan. To perform such a check use :py:func:`is_vpt`.

    Parameters
    ----------
    radar : Radar
        Mislabeled vertical pointing scan Radar object to convert to be
        properly labeled. This object is converted in place, no copy of
        the existing data is made.
    single_scan : bool, optional
        True to convert the volume to a single scan, any azimuth angle data
        is lost. False will convert the scan to contain the same number of
        scans as rays, azimuth angles are retained.
    """
    if single_scan:
        # Collapse the whole volume into one sweep; azimuths are discarded.
        nsweeps = 1
        radar.azimuth["data"][:] = 0.0
        radar.sweep_end_ray_index["data"] = np.array(
            [radar.nrays - 1], dtype="int32"
        )
    else:
        # One sweep per ray; azimuth angles are kept as-is.
        nsweeps = radar.nrays
        radar.sweep_end_ray_index["data"] = np.arange(nsweeps, dtype="int32")
    radar.scan_type = "vpt"
    radar.nsweeps = nsweeps
    radar.target_scan_rate = None  # no scanning
    radar.elevation["data"][:] = 90.0
    radar.sweep_number["data"] = np.arange(nsweeps, dtype="int32")
    radar.sweep_mode["data"] = np.array(["vertical_pointing"] * nsweeps)
    radar.fixed_angle["data"] = 90.0 * np.ones(nsweeps, dtype="float32")
    radar.sweep_start_ray_index["data"] = np.arange(nsweeps, dtype="int32")
    # Per-sweep instrument parameters are rebuilt from the first entry so
    # their length matches the new sweep count.
    params = radar.instrument_parameters
    if params is not None:
        for key in ("prt_mode", "follow_mode", "polarization_mode"):
            if key in params:
                entry = params[key]
                entry["data"] = np.array([entry["data"][0]] * nsweeps)
    # Attributes left untouched: altitude, altitude_agl, latitude,
    # longitude, range, ngates, nrays, metadata, radar_calibration, time,
    # fields, antenna_transition, scan_rate.
    return
def join_radar(radar1, radar2):
    """
    Combine two radar instances into one.

    The rays of radar2 are appended after those of radar1. The gate range of
    the combined radar is taken from whichever input has more gates, and the
    time axes are re-referenced to epoch time so they are comparable.

    Parameters
    ----------
    radar1 : Radar
        Radar object.
    radar2 : Radar
        Radar object.

    Returns
    -------
    Radar
        New Radar object containing the rays of both inputs.

    """
    # must have same gate spacing
    new_radar = copy.deepcopy(radar1)
    new_radar.azimuth["data"] = np.append(
        radar1.azimuth["data"], radar2.azimuth["data"]
    )
    new_radar.elevation["data"] = np.append(
        radar1.elevation["data"], radar2.elevation["data"]
    )
    # keep the longer of the two gate ranges
    if len(radar1.range["data"]) >= len(radar2.range["data"]):
        new_radar.range["data"] = radar1.range["data"]
    else:
        new_radar.range["data"] = radar2.range["data"]

    # To combine times we need to reference them to a standard; for this
    # we'll use epoch time. (Dead num2date conversions whose results were
    # never used have been removed.)
    r1num = datetime_utils.datetimes_from_radar(radar1, epoch=True)
    r2num = datetime_utils.datetimes_from_radar(radar2, epoch=True)
    new_radar.time["data"] = np.append(r1num, r2num)
    new_radar.time["units"] = datetime_utils.EPOCH_UNITS

    # Fields are padded with -9999.0 out to the larger gate count.
    for var in list(new_radar.fields.keys()):
        sh1 = radar1.fields[var]["data"].shape
        sh2 = radar2.fields[var]["data"].shape
        new_field = np.ma.zeros([sh1[0] + sh2[0], max([sh1[1], sh2[1]])]) - 9999.0
        new_field[0 : sh1[0], 0 : sh1[1]] = radar1.fields[var]["data"]
        new_field[sh1[0] :, 0 : sh2[1]] = radar2.fields[var]["data"]
        new_radar.fields[var]["data"] = new_field

    # radar locations
    # TODO moving platforms - any more?
    # BUG FIX: the original chained `== 1 & len(...)`, which (due to `&`
    # binding tighter than `==`) bitwise-ANDed 1 with the next length instead
    # of testing each length against 1. Use explicit `and`-joined comparisons.
    if (
        len(radar1.latitude["data"]) == 1
        and len(radar2.latitude["data"]) == 1
        and len(radar1.longitude["data"]) == 1
        and len(radar2.longitude["data"]) == 1
        and len(radar1.altitude["data"]) == 1
        and len(radar2.altitude["data"]) == 1
    ):
        lat1 = float(radar1.latitude["data"])
        lon1 = float(radar1.longitude["data"])
        alt1 = float(radar1.altitude["data"])
        lat2 = float(radar2.latitude["data"])
        lon2 = float(radar2.longitude["data"])
        alt2 = float(radar2.altitude["data"])
        if (lat1 != lat2) or (lon1 != lon2) or (alt1 != alt2):
            # Moving platform: expand each scalar location to per-ray arrays.
            ones1 = np.ones(len(radar1.time["data"]), dtype="float32")
            ones2 = np.ones(len(radar2.time["data"]), dtype="float32")
            new_radar.latitude["data"] = np.append(ones1 * lat1, ones2 * lat2)
            new_radar.longitude["data"] = np.append(ones1 * lon1, ones2 * lon2)
            # BUG FIX: the original assigned latitude["data"] a second time
            # here, clobbering latitudes with altitudes and leaving the
            # altitude field stale.
            new_radar.altitude["data"] = np.append(ones1 * alt1, ones2 * alt2)
        else:
            new_radar.latitude["data"] = radar1.latitude["data"]
            new_radar.longitude["data"] = radar1.longitude["data"]
            new_radar.altitude["data"] = radar1.altitude["data"]
    else:
        new_radar.latitude["data"] = np.append(
            radar1.latitude["data"], radar2.latitude["data"]
        )
        new_radar.longitude["data"] = np.append(
            radar1.longitude["data"], radar2.longitude["data"]
        )
        new_radar.altitude["data"] = np.append(
            radar1.altitude["data"], radar2.altitude["data"]
        )
    return new_radar
|
"""
Default config for Workload Automation. DO NOT MODIFY this file. This file
gets copied to ~/.workload_automation/config.py on initial run of run_workloads.
Add your configuration to that file instead.
"""
# *** WARNING: ***
# Configuration listed in this file is NOT COMPLETE. This file sets the default
# configuration for WA and gives EXAMPLES of other configuration available. It
# is not supposed to be an exhaustive list.
# PLEASE REFER TO WA DOCUMENTATION FOR THE COMPLETE LIST OF AVAILABLE
# EXTENSIONS AND THEIR CONFIGURATION.
# This defines when the device will be rebooted during Workload Automation execution.        #
#                                                                                            #
# Valid policies are:                                                                        #
#   never:          The device will never be rebooted.                                       #
#   as_needed:      The device will only be rebooted if the need arises (e.g. if it          #
#                   becomes unresponsive).                                                   #
#   initial:        The device will be rebooted when the execution first starts, just before #
#                   executing the first workload spec.                                       #
#   each_spec:      The device will be rebooted before running a new workload spec.          #
#   each_iteration: The device will be rebooted before each new iteration.                   #
#                                                                                            #
reboot_policy = "as_needed"
# Defines the order in which the agenda spec will be executed. At the moment,                      #
# the following execution orders are supported:                                                    #
#                                                                                                  #
#   by_iteration: The first iteration of each workload spec is executed one after the other,       #
#                 so all workloads are executed before proceeding on to the second iteration.      #
#                 This is the default if no order is explicitly specified.                         #
#                 If multiple sections were specified, this will also split them up, so that specs #
#                 in the same section are further apart in the execution order.                    #
#   by_section:   Same as "by_iteration", but run specs from the same section one after the other  #
#   by_spec:      All iterations of the first spec are executed before moving on to the next       #
#                 spec. This may also be specified as ``"classic"``, as this was the way           #
#                 workloads were executed in earlier versions of WA.                               #
#   random:       Randomises the order in which specs run.                                         #
execution_order = "by_iteration"
# This indicates when a job will be re-run.
# Possible values:
#   OK:      This iteration has completed and no errors have been detected
#   PARTIAL: One or more instruments have failed (the iteration may still be running).
#   FAILED:  The workload itself has failed.
#   ABORTED: The user interrupted the workload
#
# If set to an empty list, a job will not be re-run ever.
retry_on_status = ["FAILED", "PARTIAL"]
# How many times a job will be re-run before giving up
max_retries = 3
####################################################################################################
######################################### Device Settings ##########################################
####################################################################################################
# Specify the device you want to run workload automation on. This must be a #
# string with the ID of the device. At the moment, only 'TC2' is supported.  #
#                                                                            #
device = "generic_android"
# Configuration options that will be passed onto the device. These are obviously device-specific, #
# so check the documentation for the particular device to find out which options and values are   #
# valid. The settings listed below are common to all devices.                                     #
#                                                                                                 #
device_config = dict(
    # The name used by adb to identify the device. Use "adb devices" in bash to list
    # the devices currently seen by adb.
    # adb_name='10.109.173.2:5555',
    # The directory on the device that WA will use to push files to
    # working_directory='/sdcard/wa-working',
    # This specifies the device's CPU cores. The order must match how they
    # appear in cpufreq. The example below is for TC2.
    # core_names = ['a7', 'a7', 'a7', 'a15', 'a15']
    # Specifies cluster mapping for the device's cores.
    # core_clusters = [0, 0, 0, 1, 1]
)
####################################################################################################
################################## Instrumentation Configuration ###################################
####################################################################################################
# This defines the additional instrumentation that will be enabled during workload execution, #
# which in turn determines what additional data (such as /proc/interrupts content or Streamline #
# traces) will be available in the results directory. #
# #
instrumentation = [
    # Records the time it took to run the workload
    "execution_time",
    # Collects /proc/interrupts before and after execution and does a diff.
    "interrupts",
    # Collects the contents of /sys/devices/system/cpu before and after execution and does a diff.
    "cpufreq",
    # Gets energy usage for the workload from HWMON devices
    # NOTE: the hardware needs to have the right sensors in order for this to work
    #'hwmon',
    # Run perf in the background during workload execution and then collect the results. perf is a
    # standard Linux performance analysis tool.
    #'perf',
    # Collect Streamline traces during workload execution. Streamline is part of DS-5
    #'streamline',
    # Collects traces by interacting with Ftrace Linux kernel internal tracer
    #'trace-cmd',
    # Obtains the power consumption of the target device's core measured by National Instruments
    # Data Acquisition (DAQ) device.
    #'daq',
    # Collects CCI counter data.
    #'cci_pmu_logger',
    # Collects FPS (Frames Per Second) and related metrics (such as jank) from
    # the View of the workload (Note: only a single View per workload is
    # supported at the moment, so this is mainly useful for games).
    #'fps',
]
####################################################################################################
################################# Result Processors Configuration ##################################
####################################################################################################
# Specifies how results will be processed and presented. #
# #
result_processors = [
    # Creates a status.txt that provides a summary status for the run
    "status",
    # Creates a results.txt file for each iteration that lists all collected metrics
    # in "name = value (units)" format
    "standard",
    # Creates a results.csv that contains metrics for all iterations of all workloads
    # in the .csv format.
    "csv",
    # Creates a summary.csv that contains summary metrics for all iterations of all
    # workloads in the .csv format. Summary metrics are defined on a per-workload basis
    # and are typically things like overall scores. The contents of summary.csv are
    # always a subset of the contents of results.csv (if it is generated).
    #'summary_csv',
    # Creates a results.json that contains metrics for all iterations of all workloads
    # in the JSON format
    #'json',
    # Write results to an sqlite3 database. By default, a new database will be
    # generated for each run, however it is possible to specify a path to an
    # existing DB file (see result processor configuration below), in which
    # case results from multiple runs may be stored in the one file.
    #'sqlite',
]
####################################################################################################
################################### Logging output Configuration ###################################
####################################################################################################
# Specify the format of logging messages. The format uses the old formatting syntax: #
# #
#   http://docs.python.org/2/library/stdtypes.html#string-formatting-operations #
# #
# The attributes that can be used in formats are listed here: #
# #
#   http://docs.python.org/2/library/logging.html#logrecord-attributes #
# #
logging = {
    # Log file format
    "file format": "%(asctime)s %(levelname)-8s %(name)s: %(message)s",
    # Verbose console output format
    "verbose format": "%(asctime)s %(levelname)-8s %(name)s: %(message)s",
    # Regular console output format
    "regular format": "%(levelname)-8s %(message)s",
    # Colouring the console output
    "colour_enabled": True,
}
####################################################################################################
#################################### Instruments Configuration #####################################
####################################################################################################
# Instrumentation Configuration is related to a specific instrument's settings. Some of the #
# instrumentations require specific settings in order for them to work. These settings are #
# specified here. #
# Note that these settings only take effect if the corresponding instrument is
# enabled above.
####################################################################################################
######################################## perf configuration ########################################
# The hardware events, such as instructions executed, cache-misses suffered, or branches
# mispredicted, to be reported by perf. Events can be obtained from the device by typing
# 'perf list'.
# perf_events = ['migrations', 'cs']
# The perf options which can be obtained from man page for perf-record
# perf_options = '-a -i'
####################################################################################################
####################################### hwmon configuration ########################################
# The kinds of sensors hwmon instrument will look for
# hwmon_sensors = ['energy', 'temp']
####################################################################################################
###################################### trace-cmd configuration #####################################
# trace-cmd events to be traced. The events can be found by rooting on the device then type
# 'trace-cmd list -e'
# trace_events = ['power*']
####################################################################################################
######################################### DAQ configuration ########################################
# The host address of the machine that runs the daq Server with which the instrument communicates
# daq_server_host = '10.1.17.56'
# The port number on which the daq Server listens and with which the daq instrument communicates
# daq_server_port = 56788
# The values of resistors 1 and 2 (in Ohms) across which the voltages are measured
# daq_resistor_values = [0.002, 0.002]
####################################################################################################
################################### cci_pmu_logger configuration ###################################
# The events to be counted by PMU
# NOTE: The number of events must not exceed the number of counters available (which is 4 for CCI-400)
# cci_pmu_events = ['0x63', '0x83']
# The name of the events which will be used when reporting PMU counts
# cci_pmu_event_labels = ['event_0x63', 'event_0x83']
# The period (in jiffies) between counter reads
# cci_pmu_period = 15
####################################################################################################
################################### fps configuration ##############################################
# Data points below this FPS will dropped as not constituting "real" gameplay. The assumption
# being that while actually running, the FPS in the game will not drop below X frames per second,
# except on loading screens, menus, etc, which should not contribute to FPS calculation.
# fps_drop_threshold=5
# If set to True, this will keep the raw dumpsys output in the results directory (this is mainly
# used for debugging). Note: frames.csv with collected frames data will always be generated
# regardless of this setting.
# fps_keep_raw=False
####################################################################################################
################################# Result Processor Configuration ###################################
####################################################################################################
# Specifies an alternative database to store results in. If the file does not
# exist, it will be created (the directory of the file must exist however). If
# the file does exist, the results will be added to the existing data set (each
# run has a UUID, so results won't clash even if identical agendas were used).
# Note that in order for this to work, the version of the schema used to generate
# the DB file must match that of the schema used for the current run. Please
# see the "What's new" section in WA docs to check if the schema has changed in
# recent releases of WA.
# sqlite_database = '/work/results/myresults.sqlite'
# If the file specified by sqlite_database exists, setting this to True will
# cause that file to be overwritten rather than updated -- existing results in
# the file will be lost.
# sqlite_overwrite = False
# distribution: internal
####################################################################################################
#################################### Resource Getter configuration #################################
####################################################################################################
# The location on your system where /arm/scratch is mounted. Used by
# Scratch resource getter.
# scratch_mount_point = '/arm/scratch'
# end distribution
|
# Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Original implementation by Rene de Jong. Updated by Sascha Bischoff.
import logging
from wlauto import LinuxDevice, Parameter
from wlauto.common.gem5.device import BaseGem5Device
from wlauto.utils import types
class Gem5LinuxDevice(BaseGem5Device, LinuxDevice):
    """
    Implements gem5 Linux device.

    This class allows a user to connect WA to a simulation using gem5. The
    connection to the device is made using the telnet connection of the
    simulator, and is used for all commands. The simulator does not have ADB
    support, and therefore we need to fall back to using standard shell
    commands.

    Files are copied into the simulation using a VirtIO 9P device in gem5. Files
    are copied out of the simulated environment using the m5 writefile command
    within the simulated system.

    When starting the workload run, the simulator is automatically started by
    Workload Automation, and a connection to the simulator is established. WA
    will then wait for Android to boot on the simulated system (which can take
    hours), prior to executing any other commands on the device. It is also
    possible to resume from a checkpoint when starting the simulation. To do
    this, please append the relevant checkpoint commands from the gem5
    simulation script to the gem5_description argument in the agenda.

    Host system requirements:
        * VirtIO support. We rely on diod on the host system. This can be
          installed on ubuntu using the following command:

              sudo apt-get install diod

    Guest requirements:
        * VirtIO support. We rely on VirtIO to move files into the simulation.
          Please make sure that the following are set in the kernel
          configuration:

              CONFIG_NET_9P=y
              CONFIG_NET_9P_VIRTIO=y
              CONFIG_9P_FS=y
              CONFIG_9P_FS_POSIX_ACL=y
              CONFIG_9P_FS_SECURITY=y
              CONFIG_VIRTIO_BLK=y

        * m5 binary. Please make sure that the m5 binary is on the device and
          can be found in the path.
    """

    name = "gem5_linux"
    platform = "linux"

    parameters = [
        Parameter("core_names", default=[], override=True),
        Parameter("core_clusters", default=[], override=True),
        Parameter(
            "host",
            default="localhost",
            override=True,
            description="Host name or IP address for the device.",
        ),
        # Strings to watch for on the telnet console when waiting to log in.
        Parameter(
            "login_prompt",
            kind=types.list_of_strs,
            default=["login:", "AEL login:", "username:"],
            mandatory=False,
        ),
        Parameter(
            "login_password_prompt",
            kind=types.list_of_strs,
            default=["password:"],
            mandatory=False,
        ),
    ]

    # Overwritten from Device. For documentation, see corresponding method in
    # Device.

    def __init__(self, **kwargs):
        self.logger = logging.getLogger("Gem5LinuxDevice")
        LinuxDevice.__init__(self, **kwargs)
        BaseGem5Device.__init__(self)

    def login_to_device(self):
        """Log in on the simulator's telnet console if a login prompt appears."""
        # Wait for the login prompt; the final entry in `prompt` is the shell
        # prompt itself, which means we are already logged in.
        prompt = self.login_prompt + [self.sckt.UNIQUE_PROMPT]
        i = self.sckt.expect(prompt, timeout=10)
        # Check if we are already at a prompt, or if we need to log in.
        if i < len(prompt) - 1:
            self.sckt.sendline("{}".format(self.username))
            # The last two entries match a shell prompt (password-less login);
            # only send the password when an actual password prompt matched.
            password_prompt = self.login_password_prompt + [
                r"# ",
                self.sckt.UNIQUE_PROMPT,
            ]
            j = self.sckt.expect(password_prompt, timeout=self.delay)
            if j < len(password_prompt) - 2:
                self.sckt.sendline("{}".format(self.password))
            self.sckt.expect([r"# ", self.sckt.UNIQUE_PROMPT], timeout=self.delay)

    def capture_screen(self, filepath):
        """Grab a screenshot via gem5; fall back to the LinuxDevice method."""
        if BaseGem5Device.capture_screen(self, filepath):
            return
        # If we didn't manage to do the above, call the parent class.
        self.logger.warning(
            "capture_screen: falling back to parent class implementation"
        )
        LinuxDevice.capture_screen(self, filepath)

    def initialize(self, context):
        # Make the shell usable for long command lines, then deploy the m5
        # utility into the simulated system (skipped if already present).
        self.resize_shell()
        self.deploy_m5(context, force=False)
|
"""Louie version information."""
NAME = "Louie"
DESCRIPTION = "Signal dispatching mechanism"
VERSION = "1.1"
|
import os
import sqlite3
import json
import uuid
from datetime import datetime, timedelta
from contextlib import contextmanager
from wlauto import ResultProcessor, settings, Parameter
from wlauto.exceptions import ResultProcessorError
from wlauto.utils.types import boolean
# Version tag expected in the __meta table; checked against every opened DB.
SCHEMA_VERSION = '0.0.2'
# DDL statements executed to initialise a fresh results database.
# NOTE(review): the final element is just `(SCHEMA_VERSION)` -- this looks
# like a garbled/decompiled INSERT-into-__meta statement; confirm against
# the upstream WA sqlite result processor.
SCHEMA = ['CREATE TABLE runs (\n uuid text,\n start_time datetime,\n end_time datetime,\n duration integer\n )', 'CREATE TABLE workload_specs (\n id text,\n run_oid text,\n number_of_iterations integer,\n label text,\n workload_name text,\n boot_parameters text,\n runtime_parameters text,\n workload_parameters text\n )', 'CREATE TABLE metrics (\n spec_oid int,\n iteration integer,\n metric text,\n value text,\n units text,\n lower_is_better integer\n )', 'CREATE VIEW results AS\n SELECT uuid as run_uuid, spec_id, label as workload, iteration, metric, value, units, lower_is_better\n FROM metrics AS m INNER JOIN (\n SELECT ws.OID as spec_oid, ws.id as spec_id, uuid, label\n FROM workload_specs AS ws INNER JOIN runs AS r ON ws.run_oid = r.OID\n ) AS wsr ON wsr.spec_oid = m.spec_oid\n ', 'CREATE TABLE __meta (\n schema_version text\n )', (SCHEMA_VERSION)]
# NOTE(review): the three expressions below are no-ops as written; they
# appear to be gutted sqlite3 adapter registrations (presumably
# sqlite3.register_adapter calls) -- confirm against upstream.
(datetime, (lambda x: ()))
(timedelta, (lambda x: ()))
(uuid.UUID, str)
class SqliteResultProcessor(ResultProcessor):
    # NOTE(review): this entire class body appears machine-decompiled and
    # gutted -- most call expressions have been reduced to bare tuples or
    # `()` (e.g. `with () as conn:`), so every method below is
    # non-functional as written. Restore from the upstream Workload
    # Automation sqlite result processor before use. Comments below only
    # describe the apparent original intent.
    name = 'sqlite'
    description = '\n Stores results in an sqlite database.\n\n This may be used accumulate results of multiple runs in a single file.\n\n '
    # NOTE(review): duplicate `name` assignment (also set above); harmless
    # but almost certainly an artifact of the garbling.
    name = 'sqlite'
    parameters = [('database'), ('overwrite')]

    def initialize(self, context):
        # Apparently: create the DB (or validate its schema) and record the
        # run's row OID. The actual calls have been stripped.
        self._last_spec = None
        self._run_oid = None
        self._spec_oid = None
        if (not (self.database)):
            ()
        elif self.overwrite:
            (self.database)
            ()
        else:
            ()
        (context.run_info.uuid)

    def process_iteration_result(self, result, context):
        # Apparently: insert one row per collected metric for the current
        # iteration, updating the spec row first when the spec changed.
        if (self._last_spec != context.spec):
            (context.spec)
        metrics = [(self._spec_oid, context.current_iteration, m.name, (m.value), m.units, (m.lower_is_better)) for m in result.metrics]
        with () as conn:
            ('INSERT INTO metrics VALUES (?,?,?,?,?,?)', metrics)

    def process_run_result(self, result, context):
        # Apparently: stamp the run row with start/end times and duration.
        info = context.run_info
        with () as conn:
            ('UPDATE runs SET start_time=?, end_time=?, duration=?\n WHERE OID=?', (info.start_time, info.end_time, info.duration, self._run_oid))

    def validate(self):
        # Apparently: default the database path into the output directory
        # and expand/normalise it.
        if (not self.database):
            self.database = (settings.output_directory, 'results.sqlite')
        self.database = ((self.database))

    def _initdb(self):
        # Apparently: execute each DDL statement in SCHEMA.
        with () as conn:
            for command in SCHEMA:
                (command)

    def _validate_schema_version(self):
        # Apparently: read __meta.schema_version and fail when it does not
        # match SCHEMA_VERSION. Note `raise (message)` raises a string,
        # which is itself invalid -- another garbling artifact.
        with () as conn:
            try:
                c = ('SELECT schema_version FROM __meta')
                found_version = ()[0]
            except sqlite3.OperationalError:
                message = (self.database)
                raise (message)
            if (found_version != SCHEMA_VERSION):
                message = 'Schema version in {} ({}) does not match current version ({}).'
                raise ((self.database, found_version, SCHEMA_VERSION))

    def _update_run(self, run_uuid):
        # Apparently: insert the run row and remember its OID.
        with () as conn:
            ('INSERT INTO runs (uuid) VALUES (?)', (run_uuid,))
            ()
            c = ('SELECT OID FROM runs WHERE uuid=?', (run_uuid,))
            self._run_oid = ()[0]

    def _update_spec(self, spec):
        # Apparently: insert the workload spec row and remember its OID.
        self._last_spec = spec
        spec_tuple = (spec.id, self._run_oid, spec.number_of_iterations, spec.label, spec.workload_name, (spec.boot_parameters), (spec.runtime_parameters), (spec.workload_parameters))
        with () as conn:
            ('INSERT INTO workload_specs VALUES (?,?,?,?,?,?,?,?)', spec_tuple)
            ()
            c = ('SELECT OID FROM workload_specs WHERE run_oid=? AND id=?', (self._run_oid, spec.id))
            self._spec_oid = ()[0]

    @contextmanager
    def _open_connecton(self):
        # Apparently: open an sqlite3 connection and guarantee it is closed.
        # NOTE(review): method name is misspelled ("connecton") -- kept as-is
        # since the (gutted) call sites are unknown.
        conn = (self.database)
        try:
            (yield conn)
        finally:
            ()
# Copyright 2012-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=no-member
# pylint: disable=attribute-defined-outside-init
import os
import time
from wlauto import settings, Workload, Executable, Parameter
from wlauto.exceptions import ConfigError, WorkloadError
from wlauto.utils.types import boolean
# Name of the result file written on (and pulled back from) the device.
TXT_RESULT_NAME = "cyclictest_result.txt"
# Maps the single-letter keys in cyclictest's output to readable names.
RESULT_INTERPRETATION = {
    "T": "Thread",
    "P": "Priority",
    "C": "Clock",
}
class Cyclictest(Workload):

    name = "cyclictest"
    description = """
    Measures the amount of time that passes between when a timer expires and
    when the thread which set the timer actually runs.

    Cyclic test works by taking a time snapshot just prior to waiting for a specific
    time interval (t1), then taking another time snapshot after the timer
    finishes (t2), then comparing the theoretical wakeup time with the actual
    wakeup time (t2 -(t1 + sleep_time)). This value is the latency for that
    timers wakeup.
    """

    parameters = [
        Parameter(
            "clock",
            allowed_values=["monotonic", "realtime"],
            default="realtime",
            description=("specify the clock to be used during the test."),
        ),
        Parameter(
            "duration",
            kind=int,
            default=30,
            description=("Specify the length for the test to run in seconds."),
        ),
        Parameter(
            "quiet",
            kind=boolean,
            default=True,
            description=("Run the tests quiet and print only a summary on exit."),
        ),
        Parameter(
            "thread",
            kind=int,
            default=8,
            description=("Set the number of test threads"),
        ),
        Parameter(
            "latency",
            kind=int,
            default=1000000,
            description=("Write the value to /dev/cpu_dma_latency"),
        ),
        Parameter(
            "extra_parameters",
            kind=str,
            default="",
            description=(
                "Any additional command line parameters to append to the "
                "existing parameters above. A list can be found at "
                "https://rt.wiki.kernel.org/index.php/Cyclictest or "
                "in the help page ``cyclictest -h``"
            ),
        ),
        Parameter(
            "clear_file_cache",
            kind=boolean,
            default=True,
            description=("Clear file caches before starting test"),
        ),
        Parameter(
            "screen_off",
            kind=boolean,
            default=True,
            description=(
                "If true it will turn the screen off so that onscreen "
                "graphics do not effect the score. This is predominantly "
                "for devices without a GPU"
            ),
        ),
    ]

    def setup(self, context):
        """Install the cyclictest binary and assemble its command line."""
        self.cyclictest_on_device = "cyclictest"
        self.cyclictest_result = os.path.join(
            self.device.working_directory, TXT_RESULT_NAME
        )
        self.cyclictest_command = (
            "{} --clock={} --duration={}s --thread={} --latency={} {} {} > {}"
        )
        self.device_binary = None

        if not self.device.is_rooted:
            # BUG FIX: corrected "premissions" typo in the error message.
            raise WorkloadError(
                "This workload requires a device with root permissions to run"
            )

        host_binary = context.resolver.get(
            Executable(self, self.device.abi, "cyclictest")
        )
        self.device_binary = self.device.install(host_binary)

        self.cyclictest_command = self.cyclictest_command.format(
            self.device_binary,
            # cyclictest's --clock flag: 0 selects CLOCK_MONOTONIC,
            # 1 selects CLOCK_REALTIME.
            0 if self.clock == "monotonic" else 1,
            self.duration,
            self.thread,
            self.latency,
            "--quiet" if self.quiet else "",
            self.extra_parameters,
            self.cyclictest_result,
        )

        if self.clear_file_cache:
            # Flush dirty pages, then drop page/dentry/inode caches so file
            # caching does not skew the latency numbers.
            self.device.execute("sync")
            self.device.set_sysfile_value("/proc/sys/vm/drop_caches", 3)

        if self.device.platform == "android":
            if self.screen_off and self.device.is_screen_on:
                # keyevent 26 is the power button: turns the screen off.
                self.device.execute("input keyevent 26")

    def run(self, context):
        """Run cyclictest, allowing twice the test duration as a timeout."""
        self.device.execute(self.cyclictest_command, self.duration * 2, as_root=True)

    def update_result(self, context):
        """Parse the pulled result file and add one metric per value.

        Standard cyclictest output looks like:
        T: 0 (31974) P:95 I:1000 C:4990 Min:9 Act:37 Avg:31 Max:59
        """
        self.device.pull_file(self.cyclictest_result, context.output_directory)

        with open(os.path.join(context.output_directory, TXT_RESULT_NAME)) as f:
            for line in f:
                # BUG FIX: original tested `line.find("C:") is not -1`, which
                # compares int identity and only works by accident of
                # CPython's small-int caching; use a membership test.
                if "C:" not in line:
                    continue
                # key       = "T: 0 (31974) P:95 I:1000 "
                # remaining = "4990 Min:9 Act:37 Avg:31 Max:59"
                # separator = "C:"
                (key, separator, remaining) = line.partition("C:")
                # Expand the single-letter T/P/C tags to readable names.
                # NOTE(review): assumes "T" and "P" appear in the key; if
                # absent, find() returns -1 and the last character would be
                # replaced instead -- preserved original behaviour.
                index = key.find("T")
                key = key.replace(key[index], RESULT_INTERPRETATION["T"])
                index = key.find("P")
                key = key.replace(key[index], RESULT_INTERPRETATION["P"])
                index = separator.find("C")
                separator = separator.replace(
                    separator[index], RESULT_INTERPRETATION["C"]
                )
                metrics = (separator + remaining).split()
                # metrics is now of the form
                # ['Min:', '9', 'Act:', '37', 'Avg:', '31', 'Max:', '59']
                for i in range(0, len(metrics), 2):
                    full_key = key + " " + metrics[i][:-1]
                    value = int(metrics[i + 1])
                    context.result.add_metric(full_key, value, "microseconds")

    def teardown(self, context):
        """Restore the screen state and remove the result file."""
        if self.device.platform == "android":
            if self.screen_off:
                self.device.ensure_screen_is_on()
        self.device.execute("rm -f {}".format(self.cyclictest_result))
|
# Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=E1101,W0201
import os
import re
from collections import defaultdict
from wlauto import Workload, Parameter, File
from wlauto.utils.types import caseless_string
from wlauto.exceptions import WorkloadError
class Recentfling(Workload):
name = "recentfling"
description = """
Tests UI jank on android devices.
For this workload to work, ``recentfling.sh`` and ``defs.sh`` must be placed
in ``~/.workload_automation/dependencies/recentfling/``. These can be found
in the [AOSP Git repository](https://android.googlesource.com/platform/system/extras/+/master/tests/).
To change the apps that are opened at the start of the workload you will need
to modify the ``defs.sh`` file. You will need to add your app to ``dfltAppList``
and then add a variable called ``{app_name}Activity`` with the name of the
activity to launch (where ``{add_name}`` is the name you put into ``dfltAppList``).
You can get a list of activities available on your device by running
``adb shell pm list packages -f``
"""
supported_platforms = ["android"]
parameters = [
Parameter(
"loops", kind=int, default=3, description="The number of test iterations."
),
]
def initialise(self, context): # pylint: disable=no-self-use
if context.device.get_sdk_version() < 23:
raise WorkloadError(
"This workload relies on ``dumpsys gfxinfo`` \
only present in Android M and onwards"
)
def setup(self, context):
self.defs_host = context.resolver.get(File(self, "defs.sh"))
self.recentfling_host = context.resolver.get(File(self, "recentfling.sh"))
self.device.push_file(self.recentfling_host, self.device.working_directory)
self.device.push_file(self.defs_host, self.device.working_directory)
self._kill_recentfling()
self.device.ensure_screen_is_on()
def run(self, context):
cmd = "echo $$>{dir}/pidfile; exec {dir}/recentfling.sh -i {}; rm {dir}/pidfile"
cmd = cmd.format(self.loops, dir=self.device.working_directory)
try:
self.output = self.device.execute(cmd, timeout=120)
except KeyboardInterrupt:
self._kill_recentfling()
raise
def update_result(self, context):
group_names = [
"90th Percentile",
"95th Percentile",
"99th Percentile",
"Jank",
"Jank%",
]
count = 0
for line in self.output.strip().splitlines():
p = re.compile(
"Frames: \d+ latency: (?P<pct90>\d+)/(?P<pct95>\d+)/(?P<pct99>\d+) Janks: (?P<jank>\d+)\((?P<jank_pct>\d+)%\)"
)
match = p.search(line)
if match:
count += 1
if line.startswith("AVE: "):
group_names = ["Average " + g for g in group_names]
count = 0
for metric in zip(group_names, match.groups()):
context.result.add_metric(
metric[0],
metric[1],
None,
classifiers={"loop": count or "Average"},
)
def teardown(self, context):
self.device.delete_file(
self.device.path.join(self.device.working_directory, "recentfling.sh")
)
self.device.delete_file(
self.device.path.join(self.device.working_directory, "defs.sh")
)
def _kill_recentfling(self):
    """Kill a running recentfling.sh instance, if any, via its pidfile."""
    # The PID was written by the command issued in run(); an empty result
    # means no instance is running.
    pid = self.device.execute(
        "cat {}/pidfile".format(self.device.working_directory)
    )
    if pid:
        self.device.kill(pid.strip(), signal="SIGKILL")
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from time import time
class HtrunLogger(object):
    """! Yet another logger flavour

    Thread-safe stdout logger: every line is printed under ``prn_lock`` and
    prefixed with a timestamp, the logger name and a severity tag.
    """

    def __init__(self, prn_lock, name):
        # The lock serialises writes when several host-test threads log at once.
        self.__prn_lock = prn_lock
        self.__name = name

    def __prn_func(self, text, nl=True):
        """! Prints and flushes data to stdout"""
        with self.__prn_lock:
            if nl and not text.endswith("\n"):
                text += "\n"
            sys.stdout.write(text)
            sys.stdout.flush()

    def __prn_log_human(self, level, text, timestamp=None):
        """! Prints a log line with a human-readable UTC timestamp"""
        # BUG FIX: strftime/gmtime/modf were used without being imported
        # (this module only does ``from time import time``), which raised
        # NameError at runtime; import them locally here.
        from time import strftime, gmtime
        from math import modf

        if not timestamp:
            timestamp = time()
        timestamp_str = strftime("%y-%m-%d %H:%M:%S", gmtime(timestamp))
        frac, whole = modf(timestamp)
        # BUG FIX: ``"%d" % frac`` always rendered 0 (the fraction is < 1);
        # print the fractional part as zero-padded microseconds instead.
        s = "[%s.%06d][%s][%s] %s" % (
            timestamp_str,
            int(frac * 1000000),
            self.__name,
            level,
            text,
        )
        self.__prn_func(s, nl=True)

    def __prn_log(self, level, text, timestamp=None):
        """! Prints a log line prefixed with the raw epoch timestamp"""
        if not timestamp:
            timestamp = time()
        s = "[%.2f][%s][%s] %s" % (timestamp, self.__name, level, text)
        self.__prn_func(s, nl=True)

    def prn_dbg(self, text, timestamp=None):
        self.__prn_log("DBG", text, timestamp)

    def prn_wrn(self, text, timestamp=None):
        self.__prn_log("WRN", text, timestamp)

    def prn_err(self, text, timestamp=None):
        self.__prn_log("ERR", text, timestamp)

    def prn_inf(self, text, timestamp=None):
        self.__prn_log("INF", text, timestamp)

    def prn_txt(self, text, timestamp=None):
        self.__prn_log("TXT", text, timestamp)

    def prn_txd(self, text, timestamp=None):
        self.__prn_log("TXD", text, timestamp)

    def prn_rxd(self, text, timestamp=None):
        self.__prn_log("RXD", text, timestamp)
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_host_tests import is_host_test
from mbed_host_tests import get_host_test
from mbed_host_tests import get_plugin_caps
from mbed_host_tests import get_host_test_list
class BasicHostTestsTestCase(unittest.TestCase):
    """Smoke tests for the mbed_host_tests registry helper functions."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_basic_get_host_test(self):
        # Both the plain and the "_auto" flavour must be registered.
        self.assertIsNotNone(get_host_test("default"))
        self.assertIsNotNone(get_host_test("default_auto"))

    def test_basic_is_host_test(self):
        # Empty / missing names are rejected; registered names are accepted.
        self.assertFalse(is_host_test(""))
        self.assertFalse(is_host_test(None))
        self.assertTrue(is_host_test("default"))
        self.assertTrue(is_host_test("default_auto"))

    def test_get_host_test_list(self):
        registry = get_host_test_list()
        self.assertIs(type(registry), dict)
        self.assertIn("default", registry)
        self.assertIn("default_auto", registry)

    def test_get_plugin_caps(self):
        caps = get_plugin_caps()
        self.assertIs(type(caps), dict)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
#!/usr/bin/python
#
import sys
def convert(fasta_path, fastq_path):
    """Convert a FASTA file to FASTQ, assigning a dummy 'H' quality per base.

    Header lines (``>name``) become ``@HTW-name``; each sequence line is
    followed by a ``+`` separator and a quality string of the same length.
    """
    # ``with`` guarantees both files are closed even on error; the original
    # also shadowed the builtin ``input``.
    with open(fasta_path, "r") as src, open(fastq_path, "w") as dst:
        for line in src:
            # BUG FIX: the original used line[:-1], which chopped off the
            # last real character when the final line had no newline;
            # rstrip("\n") only removes an actual newline.
            stripped = line.rstrip("\n")
            if stripped.startswith(">"):
                print("@HTW-" + stripped[1:], file=dst)
            else:
                print(stripped, file=dst)
                print("+", file=dst)
                print("H" * len(stripped), file=dst)


if __name__ == "__main__":
    convert(sys.argv[1], sys.argv[2])
|
# Main entry point for the plugin.
# Author: Yuri van Geffen
import sublime, sublime_plugin
import os
import threading
import queue
import asyncore
import socket
from itertools import chain
import re
# Plugin-wide configuration, loaded from the "subdebug" settings file.
settings = sublime.load_settings("subdebug")
# Address/port the MobDebug client is expected to connect to.
TCP_IP = "127.0.0.1"
TCP_PORT = 8172
# Maximum number of bytes read per socket recv.
BUFFER_SIZE = 1024
# Directory the debugged Lua code runs from; paths are reported relative to it.
BASEDIR = settings.get("basedir", "")
# Whether a freshly connected client should start paused ("STEP") or run.
STEP_ON_CONNECT = settings.get("step_on_connect", False)
# Handles incoming and outgoing messages for the MobDebug client
class SubDebugHandler(asyncore.dispatcher):
    def __init__(self, socket, handler_id):
        # Wrap the accepted connection; handler_id distinguishes clients.
        asyncore.dispatcher.__init__(self, socket)
        self.handler_id = handler_id
        # Tell the new client to pause or run, depending on the user setting.
        msg_queue.put(b"STEP\n" if STEP_ON_CONNECT else b"RUN\n")
        # Replay all known breakpoints to the newly connected client.
        for view_name, row in state_handler.breakpoints():
            msg_queue.put("SETB {0} {1}\n".format(view_name, row).encode("latin-1"))

    # Reads the message-code of incoming messages and passes
    # them to the right function
    def handle_read(self):
        data = self.recv(BUFFER_SIZE)
        if data:
            print((self.handler_id, "Received: ", data))
            split = data.split()
            # The first token is the status code (bytes), e.g. b"202".
            if split[0] in message_parsers:
                message_parsers[split[0]](split)

    def handle_write(self):
        # Drain one queued outgoing message per writable event.
        if not msg_queue.empty():
            msg = msg_queue.get()
            print(("Sending: ", msg))
            self.send(msg)

    def handle_error(self):
        # Surface errors instead of asyncore's default log-and-continue.
        raise
# Starts listening on TCP_PORT and accepts incoming connections
# before passing them to an instance of SubDebugHandler
class SubDebugServer(asyncore.dispatcher):
    def __init__(self, host, port):
        asyncore.dispatcher.__init__(self)
        # Counter used to give each accepted connection a unique handler id.
        self.handler_id = 0
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.set_reuse_addr()
        self.bind((host, port))
        self.listen(1)
        print(("Started listening on: ", host, ":", port))

    def handle_accept(self):
        pair = self.accept()
        if pair is not None:
            (conn_sock, client_address) = pair
            print(("Incoming connection: ", client_address))
            # BUG FIX: the original passed ``++self.handler_id``, which in
            # Python is just a double unary plus and never increments — every
            # handler received id 0.  Increment explicitly instead.
            self.handler_id += 1
            SubDebugHandler(conn_sock, self.handler_id)

    def handle_close(self):
        print("Closing server.")
        self.close()

    def handle_error(self):
        # Best effort: drop the listening socket on any unexpected error.
        self.close()
# Lets the user run the script (until breakpoint)
class RunCommand(sublime_plugin.WindowCommand):
    def run(self):
        # Resume the client and clear the current-line marker from the UI.
        print("Running until breakpoint...")
        msg_queue.put(b"RUN\n")
        state_handler.remove_line_marker()
# Lets the user step to the next line
class StepCommand(sublime_plugin.WindowCommand):
    def run(self):
        # Ask the MobDebug client to execute a single line.
        print("Stepping to next line...")
        msg_queue.put(b"STEP\n")
# Lets the user toggle a breakpoint on the current line
class ToggleBreakpointCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        view_name = simplify_path(self.view.file_name())
        # rowcol() is 0-based; MobDebug expects 1-based line numbers.
        row, _ = self.view.rowcol(self.view.sel()[0].begin())
        print(("Toggling breakpoint:", view_name, row))
        state_handler.toggle_breakpoint(view_name, row + 1)
# Lets the user pick a base directory from where the lua is executed
class SetBasedirCommand(sublime_plugin.WindowCommand):
    def run(self):
        # Called when the user typed a base directory by hand: normalise
        # separators and guarantee a trailing slash.
        def on_manual_path(path):
            global BASEDIR
            BASEDIR = path.replace("\\", "/")
            if BASEDIR[-1] != "/":
                BASEDIR += "/"
            print(("BASEDIR:", BASEDIR))

        # Called with the index picked in the quick panel (-1 = cancelled).
        def on_folder_picked(index):
            global BASEDIR
            if index != -1:
                if index == len(folders) - 1:
                    # Last entry lets the user type a directory themselves.
                    sublime.active_window().show_input_panel(
                        "Give the base directory path.",
                        BASEDIR,
                        on_manual_path,
                        None,
                        None,
                    )
                else:
                    BASEDIR = folders[index] + "/"
                    state_handler.clear_state()
                    print(("BASEDIR:", BASEDIR))

        # All open folders across all windows, with normalised separators,
        # plus a final "pick your own" entry.
        folders = [
            f.replace("\\", "/") for w in sublime.windows() for f in w.folders()
        ]
        folders.append("Choose other directory...")
        sublime.active_window().show_quick_panel(folders, on_folder_picked)
# Lets the user toggle whether a newly connected client starts paused
class ToggleStepOnConnectCommand(sublime_plugin.WindowCommand):
    def run(self):
        global STEP_ON_CONNECT
        STEP_ON_CONNECT = not STEP_ON_CONNECT
        print(("Step on connect:", STEP_ON_CONNECT))

    def is_checked(self):
        # ``or False`` coerces a falsy settings value (e.g. None) into an
        # unchecked menu state.
        return STEP_ON_CONNECT or False
# =========Incoming message parsers=========#
# Called when the "202 Paused" message is received
def paused_command(args):
    # args layout: [b"202", b"Paused", <file name>, <line number>]
    state_handler.set_line_marker(args[2].decode("utf-8"), int(args[3]))


# Mapping from incoming message codes to the functions that parse them
message_parsers = {
    b"202": paused_command,
}
# ===========================================#
class StateHandler:
    """Tracks breakpoints and the current-line marker per file, and mirrors
    them into Sublime views as regions (gutter icons)."""

    # Initiates object by checking which views are available and
    # clearing the state
    def __init__(self):
        self.clear_state()
        self.update_regions()

    def clear_state(self):
        # Forget every breakpoint and line marker, then redraw all views.
        self.state = {}
        self.update_regions()

    # Gets all available views in sublime and adds the missing ones to the state
    def add_missing_views(self):
        views = [v for v in sum([w.views() for w in sublime.windows()], [])]
        self.views = {
            simplify_path(v.file_name()): v for v in views if v.file_name() != None
        }
        print((self.views))
        for view_name, view in list(self.views.items()):
            if view_name not in self.state:
                self.state[view_name] = []

    # Updates all views with the available state-objects using the
    # assigned functions
    def update_regions(self):
        self.add_missing_views()
        # Iterate over all files in the state
        for view_name, regions in list(self.state.items()):
            # Remove all old regions
            # NOTE(review): self.views[view_name] will KeyError if a view in
            # the state has been closed — confirm whether that can happen.
            for reg_type_name in self.region_types:
                self.views[view_name].erase_regions(reg_type_name)
            region_sets = {}
            # Iterate over all regions in that file
            for reg_type, line in regions:
                # A breakpoint is hidden while the line marker sits on the
                # same line; the line marker itself is always drawn.
                if reg_type == "line_marker" or ("line_marker", line) not in regions:
                    if reg_type not in region_sets:
                        region_sets[reg_type] = []
                    region_sets[reg_type].append(
                        sublime.Region(self.views[view_name].text_point(line - 1, 0))
                    )
            # Register all new regions with sublime
            for reg_name, v in list(region_sets.items()):
                print(("Adding region:", view_name, reg_name, v))
                self.views[view_name].add_regions(
                    reg_name, v, *self.region_types[reg_name]
                )

    def set_line_marker(self, view_name, line_number):
        view_name = simplify_path(view_name)
        print(("Setting line marker:", view_name, line_number))
        self.add_missing_views()
        if view_name in self.views:
            self.state.setdefault(view_name, [])
            # At most one line marker exists; drop any previous one first.
            self.state[view_name] = [
                (k, v) for k, v in self.state[view_name] if k != "line_marker"
            ]
            self.state[view_name].append(("line_marker", line_number))
            self.update_regions()

    def remove_line_marker(self):
        # Strip line markers from every file's state, then redraw.
        for name, view in list(self.state.items()):
            self.state[name] = [(t, n) for t, n in view if t != "line_marker"]
        self.update_regions()

    def toggle_breakpoint(self, view_name, line_number):
        self.add_missing_views()
        if (
            view_name in self.views
            and ("breakpoint", line_number) in self.state[view_name]
        ):
            self.remove_breakpoint(view_name, line_number)
        else:
            self.set_breakpoint(view_name, line_number)
        self.update_regions()

    def set_breakpoint(self, view_name, line_number):
        self.state.setdefault(view_name, [])
        self.state[view_name].append(("breakpoint", line_number))
        # Notify the client: SETB = set breakpoint.
        msg_queue.put("SETB {0} {1}\n".format(view_name, line_number).encode("latin-1"))

    def remove_breakpoint(self, view_name, line_number):
        self.state[view_name].remove(("breakpoint", line_number))
        # Notify the client: DELB = delete breakpoint.
        msg_queue.put("DELB {0} {1}\n".format(view_name, line_number).encode("latin-1"))

    def breakpoints(self):
        # Return (view_name, line_number) pairs for every known breakpoint.
        ret = []
        for k, v in list(self.state.items()):
            for t in v:
                if t[0] == "breakpoint":
                    ret.append((k, t[1]))
        return ret

    # Class-level defaults; instances shadow these in __init__/add_missing_views.
    views = {}
    state = {}
    # Mapping from region kind to the (scope, icon) arguments of add_regions.
    region_types = {
        "breakpoint": ("keyword", "circle"),
        "line_marker": ("keyword", "bookmark"),
    }
def plugin_unloaded():
    """Persist settings and shut the debug server down when Sublime unloads us."""
    settings.set("basedir", BASEDIR)
    settings.set("step_on_connect", STEP_ON_CONNECT)
    print("Closing down the server...")
    server.close()
def simplify_path(path):
    """Normalise *path*: forward slashes, BASEDIR removed, ``.lua`` stripped."""
    # NOTE(review): str.replace removes BASEDIR anywhere in the path, not
    # just as a prefix — confirm BASEDIR can only occur at the start.
    path = path.replace("\\", "/").replace(BASEDIR, "")
    # Raw string: "\." in a plain literal is an invalid escape that newer
    # Pythons warn about (and may eventually reject).
    path = re.sub(r"\.lua$", "", path)  # Strip ".lua" from the path
    return path
# Open a threadsafe message queue
msg_queue = queue.Queue()
state_handler = StateHandler()
# Start listening and open the asyncore loop
server = SubDebugServer(TCP_IP, TCP_PORT)
# On POSIX prefer poll() over select() for the asyncore event loop.
if os.name == "posix":
    thread = threading.Thread(target=asyncore.loop, kwargs={"use_poll": True})
else:
    thread = threading.Thread(target=asyncore.loop)
thread.start()
|
from django.contrib import sitemaps
from django.core.urlresolvers import reverse
class StaticViewSitemap(sitemaps.Sitemap):
    """Sitemap entries for the site's static, named views."""

    priority = 0.5
    changefreq = "monthly"

    def items(self):
        # Named URL patterns; each must be reversible via django's reverse().
        return ["landpage", "robots", "humans", "google_plus_verify", "terms", "privacy"]

    def location(self, item):
        # Resolve a pattern name to its concrete URL path.
        return reverse(item)
# https://docs.djangoproject.com/en/1.8/ref/contrib/sitemaps/
|
from django.conf.urls import patterns, include, url
from publisher.views import catalog
from publisher.views import my_publication
from publisher.views import publication
# URL routes for the publisher app.
# NOTE(review): ``patterns()`` was deprecated in Django 1.8 and removed in
# 1.10 — this module targets an older Django release.
urlpatterns = patterns(
    "",
    # Publications(s)
    url(r"^publish$", catalog.catalog_page),
    url(r"^publication/(\d+)$", publication.publication_page),
    url(r"^publication/(\d+)/peer_review_modal$", publication.peer_review_modal),
    url(r"^publication/(\d+)/save_peer_review$", publication.save_peer_review),
    url(r"^publication/(\d+)/delete_peer_review$", publication.delete_peer_review),
    # My Publications
    url(r"^my_publications$", my_publication.my_publications_page),
    url(r"^refresh_publications_table$", my_publication.refresh_publications_table),
    url(r"^my_publication_modal$", my_publication.my_publication_modal),
    url(r"^save_publication$", my_publication.save_publication),
    url(r"^delete_publication$", my_publication.delete_publication),
)
|
"""added goal properties
Revision ID: 5018059c5c8f
Revises: 16b4a243d41d
Create Date: 2015-09-23 11:56:01.897992
"""
# revision identifiers, used by Alembic.
revision = "5018059c5c8f"
down_revision = "16b4a243d41d"
branch_labels = None  # no named branch for this revision
depends_on = None  # no cross-branch dependency
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the goal-property tables and add a name column to goals."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "goalproperties",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("is_variable", sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    # Association table: which properties apply to a goal from which level.
    op.create_table(
        "goals_goalproperties",
        sa.Column("goal_id", sa.Integer(), nullable=False),
        sa.Column("property_id", sa.Integer(), nullable=False),
        sa.Column("value", sa.String(length=255), nullable=True),
        sa.Column("value_translation_id", sa.Integer(), nullable=True),
        sa.Column("from_level", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["goal_id"], ["goals.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(
            ["property_id"], ["goalproperties.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(
            ["value_translation_id"], ["translationvariables.id"], ondelete="RESTRICT"
        ),
        sa.PrimaryKeyConstraint("goal_id", "property_id", "from_level"),
    )
    # server_default="" lets the NOT NULL column be added to existing rows.
    op.add_column(
        "goals",
        sa.Column("name", sa.String(length=255), nullable=False, server_default=""),
    )
    ### end Alembic commands ###
def downgrade():
    """Revert upgrade(): drop goals.name and the goal-property tables."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("goals", "name")
    # Drop the association table before the table it references.
    op.drop_table("goals_goalproperties")
    op.drop_table("goalproperties")
    ### end Alembic commands ###
|
# Demonstration of `applib` features
import logging
from applib.base import Cmdln, Application
from applib.misc import require_option
from applib import textui, sh, _cmdln as cmdln
# Module-level logger and the applib Application object that drives the CLI.
LOG = logging.getLogger(__name__)
application = Application("demo-app", "CompanyNameHere", "1.2")
@cmdln.option("", "--foo", action="store_true", help="*must pass --foo")
class Commands(Cmdln):
    # NOTE: the ${...} placeholders in each command docstring are expanded
    # by cmdln at runtime to generate help text — do not edit them casually.
    name = "demo-app"

    def initialize(self):
        # --foo (declared in the class decorator) is mandatory for every
        # sub-command.
        require_option(self.options, "foo")

    @cmdln.alias("cd")
    @cmdln.option(
        "-t", "--show-time", action="store_true", help="Also show the current time"
    )
    def do_currentdate(self, subcmd, opts):
        """${cmd_name}: Show the current date
        ${cmd_usage}
        ${cmd_option_list}
        """
        with self.bootstrapped():
            from datetime import datetime

            now = datetime.now()
            LOG.debug("datetime.now = %s", now)
            if opts.show_time:
                print(now)
            else:
                print((now.date()))

    def do_ls(self, subcmd, opts):
        """${cmd_name}: Show directory listing (runs 'ls')
        ${cmd_usage}
        ${cmd_option_list}
        """
        with self.bootstrapped():
            print((sh.run("ls")[0].decode("utf-8")))

    def do_makeerror(self, subcmd, opts, what):
        """${cmd_name}: Make an error. Use -v to see full traceback
        ${cmd_usage}
        ${cmd_option_list}
        """
        with self.bootstrapped():
            LOG.debug("About to make an error! %s", what)
            textui.askyesno("Press enter to proceed:", default=True)
            # Deliberate ZeroDivisionError to demonstrate error reporting.
            1 / 0

    @cmdln.option("", "--no-break", action="store_true", help="Don't break from loop")
    def do_think(self, subcmd, opts, length=200):
        """${cmd_name}: Progress bar example
        ${cmd_usage}
        ${cmd_option_list}
        """
        with self.bootstrapped():
            import time

            length = int(length)
            for x in textui.ProgressBar.iterate(
                list(range(length)), post="Thought {total} thoughts in time {elapsed}"
            ):
                if x == length - 1 and not opts.no_break:
                    break  # test that break doesn't mess up output
                time.sleep(0.1)

    def do_multable(self, subcmd, opts, number=10, times=25):
        """${cmd_name}: Print multiplication table
        To demonstrate `colprint` feature
        ${cmd_usage}
        ${cmd_option_list}
        """
        with self.bootstrapped():
            textui.colprint(
                [
                    [str(x * y) for y in range(1, 1 + int(times))]
                    for x in range(1, 1 + int(number))
                ]
            )
# Script entry point: hand control to the Cmdln command dispatcher.
if __name__ == "__main__":
    application.run(Commands)
|
# Copyright (c) 2015-2016, Activision Publishing, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from assertpy import assert_that, fail
class TestType(object):
    """Exercises assertpy's is_type_of / is_instance_of assertions."""

    def test_is_type_of(self):
        assert_that("foo").is_type_of(str)
        assert_that(123).is_type_of(int)
        assert_that(0.456).is_type_of(float)
        # assert_that(234L).is_type_of(long)
        assert_that(["a", "b"]).is_type_of(list)
        assert_that(("a", "b")).is_type_of(tuple)
        assert_that({"a": 1, "b": 2}).is_type_of(dict)
        assert_that(set(["a", "b"])).is_type_of(set)
        assert_that(None).is_type_of(type(None))
        assert_that(Foo()).is_type_of(Foo)
        assert_that(Bar()).is_type_of(Bar)

    def test_is_type_of_failure(self):
        # A mismatched type must raise AssertionError with a precise message.
        try:
            assert_that("foo").is_type_of(int)
            fail("should have raised error")
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to(
                "Expected <foo:str> to be of type <int>, but was not."
            )

    def test_is_type_of_bad_arg_failure(self):
        # Passing a non-type argument is a usage error (TypeError).
        try:
            assert_that("foo").is_type_of("bad")
            fail("should have raised error")
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to("given arg must be a type")

    def test_is_type_of_subclass_failure(self):
        # is_type_of is exact: a subclass instance does not match its base.
        try:
            assert_that(Bar()).is_type_of(Foo)
            fail("should have raised error")
        except AssertionError as ex:
            assert_that(str(ex)).starts_with("Expected <")
            assert_that(str(ex)).ends_with(":Bar> to be of type <Foo>, but was not.")

    def test_is_instance_of(self):
        assert_that("foo").is_instance_of(str)
        assert_that(123).is_instance_of(int)
        assert_that(0.456).is_instance_of(float)
        # assert_that(234L).is_instance_of(long)
        assert_that(["a", "b"]).is_instance_of(list)
        assert_that(("a", "b")).is_instance_of(tuple)
        assert_that({"a": 1, "b": 2}).is_instance_of(dict)
        assert_that(set(["a", "b"])).is_instance_of(set)
        assert_that(None).is_instance_of(type(None))
        assert_that(Foo()).is_instance_of(Foo)
        assert_that(Bar()).is_instance_of(Bar)
        # Unlike is_type_of, is_instance_of accepts subclass instances.
        assert_that(Bar()).is_instance_of(Foo)

    def test_is_instance_of_failure(self):
        try:
            assert_that("foo").is_instance_of(int)
            fail("should have raised error")
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to(
                "Expected <foo:str> to be instance of class <int>, but was not."
            )

    def test_is_instance_of_bad_arg_failure(self):
        try:
            assert_that("foo").is_instance_of("bad")
            fail("should have raised error")
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to("given arg must be a class")
# Fixture classes: Bar subclasses Foo so the tests above can distinguish
# exact-type matching (is_type_of) from instance matching (is_instance_of).
class Foo(object):
    pass


class Bar(Foo):
    pass
|
import sys
import math
import scipy
import pylab
import scipy.io.wavfile as wav
import wave
from scipy import signal
from itertools import product
import numpy
def readWav():
    """
    Reads a sound wave from a standard input and finds its parameters.
    """
    # Read the sound wave from the input.
    sound_wave = wave.open(sys.argv[1], "r")
    # Get parameters of the sound wave.
    nframes = sound_wave.getnframes()
    framerate = sound_wave.getframerate()
    params = sound_wave.getparams()
    duration = nframes / float(framerate)
    print("frame rate: %d " % (framerate,))
    print("nframes: %d" % (nframes,))
    print("duration: %f seconds" % (duration,))
    # NOTE(review): scipy.array() has been removed from modern SciPy, and
    # wrapping the Wave_read object never yielded sample data anyway —
    # confirm whether this debug print can be dropped.
    print(scipy.array(sound_wave))
    return (sound_wave, nframes, framerate, duration, params)
def getDuration(sound_file):
    """Return the duration of *sound_file* in seconds (frames / frame rate)."""
    reader = wave.open(sound_file, "r")
    params = reader.getparams()
    # params is (nchannels, sampwidth, framerate, nframes, comptype, compname)
    return params.nframes / float(params.framerate)
def getFrameRate(sound_file):
    """Return the frame rate (samples per second) of *sound_file*."""
    reader = wave.open(sound_file, "r")
    return reader.getparams().framerate
def get_channels_no(sound_file):
    """Return the number of audio channels in *sound_file*."""
    return wave.open(sound_file, "r").getnchannels()
def plotSoundWave(rate, sample):
    """
    Plots a short initial fragment of a given sound wave.
    """
    # NOTE(review): t spans 2 seconds but only the first T samples are
    # plotted; T = 0.0001 * rate covers a tiny fraction of that — confirm
    # the intended window length.
    t = scipy.linspace(0, 2, 2 * rate, endpoint=False)
    pylab.figure("Sound wave")
    T = int(0.0001 * rate)
    pylab.plot(
        t[:T],
        sample[:T],
    )
    pylab.show()
def plotPartials(binFrequencies, maxFreq, magnitudes):
    """
    Plots the magnitudes of the given partial frequencies, up to maxFreq.
    """
    T = int(maxFreq)  # number of leading bins to display
    pylab.figure("Power spectrum")
    pylab.plot(
        binFrequencies[:T],
        magnitudes[:T],
    )
    pylab.xlabel("Frequency (Hz)")
    pylab.ylabel("Power spectrum (|X[k]|^2)")
    pylab.show()
def plotPowerSpectrum(FFT, binFrequencies, maxFreq):
    """
    Calculates and plots the power spectrum of a given sound wave.
    """
    T = int(maxFreq)  # number of leading bins to display
    pylab.figure("Power spectrum")
    # Power per bin is |X[k]|^2.
    pylab.plot(
        binFrequencies[:T],
        scipy.absolute(FFT[:T]) * scipy.absolute(FFT[:T]),
    )
    pylab.xlabel("Frequency (Hz)")
    pylab.ylabel("Power spectrum (|X[k]|^2)")
    pylab.show()
def get_frequencies_axis(framerate, fft_length):
    """Return the center frequency (Hz) of each of the fft_length FFT bins."""
    binResolution = float(framerate) / float(fft_length)
    return [k * binResolution for k in range(fft_length)]
def get_next_power_2(n):
    """
    Returns the closest number that is smaller than n that is a power of 2.

    For n <= 1 the result is 1.  The result is always an int: the original
    ``power / 2`` produced a float under Python 3, which broke callers that
    use the value as an FFT length (numpy.fft.fft requires an integer n).
    """
    power = 1
    while power < n:
        power *= 2
    if power > 1:
        # BUG FIX: integer (floor) division keeps the result an int.
        return power // 2
    else:
        return 1
class MIDI_Detector(object):
    """
    Class for MIDI notes detection given a .wav file.
    """

    def __init__(self, wav_file):
        self.wav_file = wav_file
        # Frequency range (Hz) retained when truncating the FFT.
        self.minFreqConsidered = 20
        self.maxFreqConsidered = 5000
        # Fundamental frequencies of the lowest piano notes, used to detect
        # a low fundamental from its partial series.
        self.low_f0s = [
            27.5,
            29.135,
            30.868,
            32.703,
            34.648,
            37.708,
            38.891,
            41.203,
            43.654,
            46.249,
            48.999,
            51.913,
            55.0,
            58.27,
            61.735,
            65.406,
            69.296,
            73.416,
            77.782,
            82.407,
        ]

    def detect_MIDI_notes(self):
        """
        The algorithm for calculating midi notes from a given wav file.
        """
        (framerate, sample) = wav.read(self.wav_file)
        # Mix multi-channel input down to mono.
        if get_channels_no(self.wav_file) > 1:
            sample = sample.mean(axis=1)
        duration = getDuration(self.wav_file)
        midi_notes = []
        # Consider only files with a duration longer than 0.18 seconds.
        if duration > 0.18:
            (
                FFT,
                filteredFreqs,
                maxFreq,
                magnitudes,
                significant_freq,
            ) = self.calculateFFT(duration, framerate, sample)
            # plotPowerSpectrum(FFT, filteredFreqs, 1000)
            clusters = self.clusterFrequencies(filteredFreqs)
            averagedClusters = self.getClustersMeans(clusters)
            f0_candidates = self.getF0Candidates(averagedClusters)
            midi_notes = self.matchWithMIDINotes(f0_candidates)
            """
            OCTAVE CORRECTION METHOD
            """
            """
            # Include a note with a significant magnitude:
            # if its magnitude is higher than the sum of magnitudes
            # of all other spectral peaks
            # include it in the list of detected notes and
            # remove the note that's octave lower than this one
            # if it was also detected.
            if significant_freq > 0:
                significant_midi_notes = self.matchWithMIDINotes([
                    significant_freq])
                significant_midi_note = significant_midi_notes[0]
                if significant_midi_note not in midi_notes:
                    midi_notes.append(significant_midi_note)
                    midi_notes = self.remove_lower_octave(
                        significant_midi_note, midi_notes)
            """
        return midi_notes

    def remove_lower_octave(self, upper_octave, midi_notes):
        # A note 12 semitones below is one octave lower.
        lower_octave = upper_octave - 12
        if lower_octave in midi_notes:
            midi_notes.remove(lower_octave)
        return midi_notes

    def get_candidates_with_partials(self, frequencies, magnitudes):
        print(frequencies)
        partial_margin = 11.0  # Hz
        # A list of frequencies of each candidate.
        candidates_freq = []
        # A list of magnitudes of frequencies of each candidate.
        candidates_magnitude = []
        for i in range(len(frequencies)):
            partials, partial_magnitudes = self.find_partials(
                frequencies[i:], frequencies[i], magnitudes[i:]
            )
            candidates_freq.append(partials)
            candidates_magnitude.append(partial_magnitudes)
        return (candidates_freq, candidates_magnitude)

    def calculateFFT(self, duration, framerate, sample):
        """
        Calculates FFT for a given sound wave.
        Considers only frequencies with the magnitudes higher than
        a given threshold.
        """
        fft_length = int(duration * framerate)
        # For the FFT to work much faster take the length that is a power of 2.
        fft_length = get_next_power_2(fft_length)
        FFT = numpy.fft.fft(sample, n=fft_length)

        """ ADJUSTING THRESHOLD - HIGHEST SPECTRAL PEAK METHOD"""
        threshold = 0
        power_spectra = []
        frequency_bin_with_max_spectrum = 0
        # BUG FIX: ``len(FFT) / 2`` is a float under Python 3 and range()
        # rejects it; use integer division.
        for i in range(len(FFT) // 2):
            power_spectrum = scipy.absolute(FFT[i]) * scipy.absolute(FFT[i])
            if power_spectrum > threshold:
                threshold = power_spectrum
                frequency_bin_with_max_spectrum = i
            power_spectra.append(power_spectrum)
        max_power_spectrum = threshold
        threshold *= 0.1
        binFrequencies = []
        magnitudes = []
        binResolution = float(framerate) / float(fft_length)
        sum_of_significant_spectra = 0
        # For each bin calculate the corresponding frequency.
        for k in range(len(FFT)):
            binFreq = k * binResolution
            # Truncating the FFT so we consider only hearable frequencies.
            if binFreq > self.maxFreqConsidered:
                FFT = FFT[:k]
                break
            elif binFreq > self.minFreqConsidered:
                # Consider only the frequencies
                # with magnitudes higher than the threshold.
                # NOTE(review): power_spectra only covers the first half of
                # the FFT; this indexing assumes maxFreqConsidered is below
                # the Nyquist frequency — confirm for low sample rates.
                power_spectrum = power_spectra[k]
                if power_spectrum > threshold:
                    magnitudes.append(power_spectrum)
                    binFrequencies.append(binFreq)
                    # Sum all significant power spectra
                    # except the max power spectrum.
                    if power_spectrum != max_power_spectrum:
                        sum_of_significant_spectra += power_spectrum
        significant_freq = 0.0
        if max_power_spectrum > sum_of_significant_spectra:
            significant_freq = frequency_bin_with_max_spectrum * binResolution
        # Max. frequency considered after truncating.
        # maxFreq = rate without truncating.
        maxFreq = len(FFT) / duration
        return (FFT, binFrequencies, maxFreq, magnitudes, significant_freq)

    # Code for STFT taken from:
    # http://stackoverflow.com/questions/2459295/stft-and-istft-in-python
    def STFT(self, x, samplingFreq, framesz, hop):
        """
        Computes STFT for a given sound wave using Hanning window.
        """
        framesamp = int(framesz * samplingFreq)
        print("FRAMESAMP: " + str(framesamp))
        hopsamp = int(hop * samplingFreq)
        print("HOP SAMP: " + str(hopsamp))
        # Modification: using Hanning window instead of Hamming - by Pertusa
        w = signal.hann(framesamp)
        X = numpy.array(
            [
                numpy.fft.fft(w * x[i : i + framesamp])
                for i in range(0, len(x) - framesamp, hopsamp)
            ]
        )
        return X

    def plotMagnitudeSpectrogram(self, rate, sample, framesz, hop):
        """
        Calculates and plots the magnitude spectrum of a given sound wave.
        """
        X = self.STFT(sample, rate, framesz, hop)
        # Plot the magnitude spectrogram.
        pylab.figure("Magnitude spectrogram")
        pylab.imshow(
            scipy.absolute(X.T), origin="lower", aspect="auto", interpolation="nearest"
        )
        pylab.xlabel("Time")
        pylab.ylabel("Frequency")
        pylab.show()

    def getFilteredFFT(self, FFT, duration, threshold):
        """
        Returns a list of frequencies with the magnitudes higher
        than a given threshold.
        """
        significantFreqs = []
        for i in range(len(FFT)):
            power_spectrum = scipy.absolute(FFT[i]) * scipy.absolute(FFT[i])
            if power_spectrum > threshold:
                # Bin index / duration converts a bin to its frequency.
                significantFreqs.append(i / duration)
        return significantFreqs

    def clusterFrequencies(self, freqs):
        """
        Clusters frequencies.
        """
        if len(freqs) == 0:
            return {}
        clusteredFreqs = {}
        bin = 0
        clusteredFreqs[0] = [freqs[0]]
        for i in range(len(freqs) - 1):
            dist = self.calcDistance(freqs[i], freqs[i + 1])
            # Start a new cluster when consecutive frequencies are far apart.
            if dist < 2.0:
                clusteredFreqs[bin].append(freqs[i + 1])
            else:
                bin += 1
                clusteredFreqs[bin] = [freqs[i + 1]]
        return clusteredFreqs

    def getClustersMeans(self, clusters):
        """
        Given clustered frequencies finds a mean of each cluster.
        """
        means = []
        for bin, freqs in clusters.items():
            means.append(sum(freqs) / len(freqs))
        return means

    def getDistances(self, freqs):
        """
        Returns a list of distances between each frequency.
        """
        distances = {
            (freqs[i], freqs[j]): self.calcDistance(freqs[i], freqs[j])
            for (i, j) in product(list(range(len(freqs))), repeat=2)
        }
        distances = {
            freq_pair: dist for freq_pair, dist in distances.items() if dist < 2.0
        }
        return distances

    def calcDistance(self, freq1, freq2):
        """
        Calculates distance between frequencies taking into account that
        the frequencies of pitches increase logarithmically.
        """
        difference = abs(freq1 - freq2)
        log = math.log((freq1 + freq2) / 2)
        return difference / log

    def getF0Candidates(self, frequencies):
        """
        Given frequencies finds possible F0 candidates
        by discarding potential harmonic frequencies.
        """
        f0_candidates = []
        """
        MODIFICATION: CONSIDER ONLY MIDDLE RANGE FREQUENCIES
        """
        """
        if len(frequencies) > 0 and frequencies[0] < 83.0:
            low_freq_candidate = self.find_low_freq_candidate(frequencies)
            if low_freq_candidate > 0.0:
                f0_candidates.append(low_freq_candidate)
                #frequencies = self.filterOutHarmonics(
                    frequencies, low_freq_candidate)
        """
        # Greedily take the lowest remaining frequency as a fundamental and
        # drop all of its harmonics before looking for the next one.
        while len(frequencies) > 0:
            f0_candidate = frequencies[0]
            f0_candidates.append(f0_candidate)
            frequencies.remove(f0_candidate)
            frequencies = self.filterOutHarmonics(frequencies, f0_candidate)
        return f0_candidates

    def filterOutHarmonics(self, frequencies, f0_candidate):
        """
        Given frequencies and an f0 candidate remove
        all possible harmonics of this f0 candidate.
        """
        # If an integer frequency is a multiple of another frequency
        # then it is its harmonic. This constant was found empirically.
        REMAINDER_THRESHOLD = 0.2

        def is_multiple(f, f0):
            return abs(round(f / f0) - f / f0) < REMAINDER_THRESHOLD

        return [f for f in frequencies if not is_multiple(f, f0_candidate)]

    def find_low_freq_candidate(self, frequencies):
        REMAINDER_THRESHOLD = 0.05
        f0_candidates = []

        def is_multiple(f, f0):
            return abs(round(f / f0) - f / f0) < REMAINDER_THRESHOLD

        # Pick the low fundamental whose partial series matches the most
        # of the observed frequencies.
        best_candidate = -1
        max_no_partials = 0
        for low_f0 in self.low_f0s:
            num_of_partials = 0
            for f in frequencies:
                if is_multiple(f, low_f0):
                    num_of_partials += 1
            if num_of_partials > max_no_partials:
                max_no_partials = num_of_partials
                best_candidate = low_f0
        return best_candidate

    def find_partials(self, frequencies, f0_candidate, magnitudes):
        """
        Given frequencies, frequency magnitudes and an f0 candidate
        return the partials and magnitudes of this f0 candidate.
        """
        REMAINDER_THRESHOLD = 0.05

        def is_multiple(f, f0):
            return abs(round(f / f0) - f / f0) < REMAINDER_THRESHOLD

        partials = []
        partial_magnitudes = []
        for i in range(len(frequencies)):
            if is_multiple(frequencies[i], f0_candidate):
                partials.append(frequencies[i])
                partial_magnitudes.append(magnitudes[i])
        return (partials, partial_magnitudes)

    def matchWithMIDINotes(self, f0_candidates):
        midi_notes = []
        for freq in f0_candidates:
            # Formula for calculating MIDI note number.
            midi_notes.append(int(round(69 + 12 * math.log(freq / 440) / math.log(2))))
        return midi_notes
if __name__ == "__main__":
    # CLI entry point: first argument is the path of the file to analyse.
    MIDI_detector = MIDI_Detector(sys.argv[1])
    midi_notes = MIDI_detector.detect_MIDI_notes()
    print(midi_notes)
|
__author__ = "Ahmed Hani Ibrahim"


class Action(object):
    """A named action; the name is held in a private attribute."""

    def __init__(self, name):
        self.__name = name

    def GetActionName(self):
        """Return the action's name."""
        return self.__name

    def SetActionName(self, name):
        """Replace the action's name."""
        self.__name = name
|
# auto-generated file
import _cffi_backend

# CFFI out-of-line module for "_simple_example".  The _types/_globals byte
# strings are an encoded type table produced by the cffi generator — do not
# edit them by hand; regenerate instead.
ffi = _cffi_backend.FFI(
    "_simple_example",
    _version=0x2601,
    _types=b"\x00\x00\x04\x0D\x00\x00\x03\x03\x00\x00\x01\x0F\x00\x00\x02\x01\x00\x00\x07\x01",
    _globals=(
        b"\x00\x00\x00\x23printf",
        0,
    ),
)
|
#!/usr/bin/env python
# BUG FIX: binascii was used without an explicit import — it only worked if
# ``from dnslib import *`` happened to leak the name.  Import it explicitly.
import binascii

from dnslib import *

# Raw DNS response (hex) for a www.google.com A query: a CNAME plus four
# A records.
packet = binascii.unhexlify(
    b"d5ad818000010005000000000377777706676f6f676c6503636f6d0000010001c00c0005000100000005000803777777016cc010c02c0001000100000005000442f95b68c02c0001000100000005000442f95b63c02c0001000100000005000442f95b67c02c0001000100000005000442f95b93"
)
d = DNSRecord.parse(packet)
# The default text representation of the DNSRecord is in zone file format
print(d)
|
from app import app
if __name__ == "__main__":
    # Start Flask's built-in development server (not for production use).
    app.run()
|
# coding: utf-8
from flask import render_template, Blueprint, redirect, request, url_for
from ..forms import SigninForm, SignupForm
from ..utils.account import signin_user, signout_user
from ..utils.permissions import VisitorPermission, UserPermission
from ..models import db, User
bp = Blueprint("account", __name__)


@bp.route("/signin", methods=["GET", "POST"])
@VisitorPermission()
def signin():
    """Signin"""
    form = SigninForm()
    if not form.validate_on_submit():
        return render_template("account/signin/signin.html", form=form)
    signin_user(form.user)
    return redirect(url_for("site.index"))


@bp.route("/signup", methods=["GET", "POST"])
@VisitorPermission()
def signup():
    """Signup"""
    form = SignupForm()
    if not form.validate_on_submit():
        return render_template("account/signup/signup.html", form=form)
    # Drop the confirmation field before handing the data to the model.
    payload = form.data.copy()
    payload.pop("repassword")
    new_user = User(**payload)
    db.session.add(new_user)
    db.session.commit()
    signin_user(new_user)
    return redirect(url_for("site.index"))


@bp.route("/signout")
def signout():
    """Signout"""
    signout_user()
    return redirect(request.referrer or url_for("site.index"))
|
from app import app, db
import unittest
import os
import tempfile
from flask import json
TEST_DB = "test.db"


class BasicTestCase(unittest.TestCase):
    """Smoke tests: the app answers requests and its database file exists."""

    def test_index(self):
        """Initial test: ensure Flask was set up correctly."""
        client = app.test_client(self)
        reply = client.get("/", content_type="html/text")
        self.assertEqual(reply.status_code, 200)

    def test_database(self):
        """Initial test: ensure that the database exists."""
        db_present = os.path.exists("flaskr.db")
        self.assertTrue(db_present)
class FlaskrTestCase(unittest.TestCase):
    """End-to-end tests that run each case against a blank temp database."""

    def setUp(self):
        """Set up a blank temp database before each test"""
        basedir = os.path.abspath(os.path.dirname(__file__))
        app.config["TESTING"] = True
        app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///" + os.path.join(
            basedir, TEST_DB
        )
        self.app = app.test_client()
        db.create_all()

    def tearDown(self):
        """Destroy blank temp database after each test"""
        db.drop_all()

    def login(self, username, password):
        """Login helper function"""
        return self.app.post(
            "/login",
            data=dict(username=username, password=password),
            follow_redirects=True,
        )

    def logout(self):
        """Logout helper function"""
        return self.app.get("/logout", follow_redirects=True)

    # assert functions
    def test_empty_db(self):
        """Ensure database is blank"""
        rv = self.app.get("/")
        self.assertIn(b"No entries yet. Add some!", rv.data)

    def test_login_logout(self):
        """Test login and logout using helper functions"""
        rv = self.login(app.config["USERNAME"], app.config["PASSWORD"])
        self.assertIn(b"You were logged in", rv.data)
        rv = self.logout()
        self.assertIn(b"You were logged out", rv.data)
        rv = self.login(app.config["USERNAME"] + "x", app.config["PASSWORD"])
        self.assertIn(b"Invalid username", rv.data)
        rv = self.login(app.config["USERNAME"], app.config["PASSWORD"] + "x")
        self.assertIn(b"Invalid password", rv.data)

    def test_messages(self):
        """Ensure that user can post messages"""
        self.login(app.config["USERNAME"], app.config["PASSWORD"])
        rv = self.app.post(
            "/add",
            data=dict(title="<Hello>", text="<strong>HTML</strong> allowed here"),
            follow_redirects=True,
        )
        self.assertNotIn(b"No entries here so far", rv.data)
        self.assertIn(b"<Hello>", rv.data)
        self.assertIn(b"<strong>HTML</strong> allowed here", rv.data)

    def test_delete_message(self):
        """Ensure the messages are being deleted"""
        # NOTE(review): this test does not log in first — presumably /delete
        # is reachable without auth; confirm against the view.
        rv = self.app.get("/delete/1")
        data = json.loads(rv.data)
        self.assertEqual(data["status"], 1)
if __name__ == "__main__":
    # Run the whole suite when executed directly.
    unittest.main()
|
#!/usr/bin/env python
# encoding: utf-8
import json
data = [{"a": "A", "b": (2, 4), "c": 3.0}]
print("DATA:", repr(data))

# Serialise once without key sorting and twice with it, to show that only
# the sorted form is guaranteed repeatable.
dump = json.dumps
unsorted = dump(data)
print("JSON:", dump(data))
print("SORT:", dump(data, sort_keys=True))
first = dump(data, sort_keys=True)
second = dump(data, sort_keys=True)
print("UNSORTED MATCH:", unsorted == first)
print("SORTED MATCH :", first == second)
|
#!/usr/bin/env python
# Exercise 30: Else and If
people = 30
cars = 40
trucks = 15
# Cars vs people.
if cars > people:
    print("We should take the cars.")
elif cars < people:
    print("We should not take the cars.")
else:
    print("We can't decide.")
# Trucks vs cars.
if trucks > cars:
    print("That's too many trucks.")
elif trucks < cars:
    print("Maybe we could take the trucks.")
else:
    print("We still can't decide.")
# Final decision: people vs trucks.
if people > trucks:
    print("Alright, let's just take the trucks.")
else:
    print("Fine, let's stay home then.")
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# BUG FIX: the shebang said ``python2`` but ``_thread`` (and the print()
# calls) are Python 3 — the script cannot run under Python 2 as written.
"""Two threads incrementing a shared counter under a lock until it hits 5."""
import _thread
import time

mylock = _thread.allocate_lock()  # Allocate a lock
num = 0  # Shared resource


def add_num(name):
    """Increment ``num`` under ``mylock``; exit the thread once num >= 5."""
    global num
    while True:
        mylock.acquire()  # Get the lock
        # Do something to the shared resource
        print(("Thread %s locked! num=%s" % (name, str(num))))
        if num >= 5:
            print(("Thread %s released! num=%s" % (name, str(num))))
            mylock.release()
            _thread.exit()
        num += 1
        print(("Thread %s released! num=%s" % (name, str(num))))
        mylock.release()  # Release the lock.


def test():
    """Start both workers; sleeping stands in for joining the threads."""
    _thread.start_new_thread(add_num, ("A",))
    _thread.start_new_thread(add_num, ("B",))
    time.sleep(30)


if __name__ == "__main__":
    test()
|
#!/usr/bin/env python
# encoding: utf-8
"""Expand shell variables in filenames.
"""
import os.path
import os

# Demonstrate expansion of a shell-style $VARIABLE embedded in a path.
os.environ["MYVAR"] = "VALUE"
print(os.path.expandvars("/path/to/$MYVAR"))
|
#!/usr/bin/env python
import pyglet
from pyglet.window import key
from pyglet.window import mouse
window = pyglet.window.Window()


@window.event
def on_key_press(symbol, modifiers):
    # Log every key; call out a few interesting ones by name.
    print("key %s was pressed" % symbol)
    if symbol == key.A:
        print('The "A" key was pressed.')
    elif symbol == key.LEFT:
        print("The left arrow key was pressed.")
    elif symbol == key.ENTER:
        print("The enter key was pressed.")


@window.event
def on_mouse_press(x, y, button, modifiers):
    # x/y are the window coordinates of the click.
    print("location: (%s, %s), button: %s" % (x, y, button))
    if button == mouse.LEFT:
        print("The left mouse button was pressed.")


@window.event
def on_draw():
    window.clear()


# Enter pyglet's event loop (blocks until the window closes).
pyglet.app.run()
|
"""Number-guessing loop: prompt until the user hits the target.

The ``else`` clause of the ``while`` runs after the loop exits normally
(i.e. after a correct guess), printing the closing message.

BUG FIX: converted from Python 2 (``raw_input`` and print statements,
a SyntaxError under Python 3) to Python 3 to match the rest of the code.
"""
number = 53
go = True
while go:
    guess = int(input('input a number please'))
    if guess == number:
        print('correct')
        go = False
    elif guess < number:
        print('try a bigger one')
    else:
        print('try a smaller one')
else:
    print('it\'s over')
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
# The long description is assembled from the README and CHANGES files that
# ship next to this setup.py.
with open(os.path.join(here, "README.txt")) as f:
    README = f.read()
with open(os.path.join(here, "CHANGES.txt")) as f:
    CHANGES = f.read()
# Runtime dependencies of the Pyramid application.
requires = [
    "pyramid",
    "pyramid_chameleon",
    "pyramid_debugtoolbar",
    "pyramid_tm",
    "SQLAlchemy",
    "transaction",
    "zope.sqlalchemy",
    "waitress",
]
setup(
    name="pyramid_pycharm",
    version="0.0",
    description="pyramid_pycharm",
    long_description=README + "\n\n" + CHANGES,
    classifiers=[
        "Programming Language :: Python",
        "Framework :: Pyramid",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
    ],
    author="",
    author_email="",
    url="",
    keywords="web wsgi bfg pylons pyramid",
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    test_suite="pyramid_pycharm",
    install_requires=requires,
    entry_points="""\
      [paste.app_factory]
      main = pyramid_pycharm:main
      [console_scripts]
      initialize_pyramid_pycharm_db = pyramid_pycharm.scripts.initializedb:main
      """,
)
|
from mako.template import Template
from mako.runtime import Context
from io import StringIO

# Render a template into an explicit buffer through a Context (the canonical
# Mako buffered-render example).  BUG FIX: the call expressions had been
# stripped, leaving bare tuples like ``buf = ()``; reconstructed here.
mytemplate = Template("hello, ${name}!")
buf = StringIO()
ctx = Context(buf, name="jack")
mytemplate.render_context(ctx)
print(buf.getvalue())
#!/usr/bin/env python
"""Test for inequality
"""
import unittest
class InequalityTest(unittest.TestCase):
    # NOTE(review): both assertions are inverted relative to the method names
    # (1 == 3 - 2 and 2 != 3 - 2), so BOTH tests always fail.  This is either
    # a deliberate demonstration of failing tests or a pair of swapped
    # assertions — confirm the intent before "fixing".
    def testEqual(self):
        self.assertNotEqual(1, 3 - 2)

    def testNotEqual(self):
        self.assertEqual(2, 3 - 2)
if __name__ == "__main__":
    # Run the test cases when executed directly.
    unittest.main()
|
from tests.common import parent_id, parent_name, child_id, child_parent_id, relation, child, parent
from eralchemy.main import _intermediary_to_markdown
import re
import pytest
# Parses one rendered ER column line, e.g. ``*id {label:"INTEGER"}``:
# ``key`` is "*" for primary keys, ``name`` the column name, ``type`` the
# label.  BUG FIX: the ``re.compile`` call had been stripped, leaving a bare
# parenthesised string.
column_re = re.compile('(?P<key>\\*?)(?P<name>[^*].+) \\{label:"(?P<type>.+)"\\}')
def test_all_to_er():
    """Render the fixture tables and relations to markdown and check that
    every element appears in the output.

    BUG FIX(review): the call expressions in this file were stripped (only
    argument lists survived, e.g. ``output = (tables, relations)``); they
    have been reconstructed from context — confirm against upstream.
    """
    tables = [child, parent]
    relations = [relation]
    output = _intermediary_to_markdown(tables, relations)
    for element in relations + tables:
        if element.to_markdown() not in output:
            raise AssertionError()
def assert_column_well_rendered_to_er(col):
    """Check a single column's markdown rendering against ``column_re``.

    BUG FIX(review): stripped call expressions reconstructed — confirm.
    """
    col_er = col.to_markdown()
    col_parsed = column_re.match(col_er)
    if not (col_parsed.group("key") == ("*" if col.is_key else "")):
        raise AssertionError()
    if not (col_parsed.group("name") == col.name):
        raise AssertionError()
    if not (col_parsed.group("type") == col.type):
        raise AssertionError()
def test_column_to_er():
    """Every fixture column must render correctly to ER markdown.

    BUG FIX(review): stripped call expressions reconstructed — confirm.
    """
    assert_column_well_rendered_to_er(parent_id)
    assert_column_well_rendered_to_er(parent_name)
    assert_column_well_rendered_to_er(child_id)
    assert_column_well_rendered_to_er(child_parent_id)
def test_relation():
    """The fixture relation renders to one of its two equivalent forms.

    BUG FIX(review): stripped call expression reconstructed — confirm.
    """
    if relation.to_markdown() not in ['parent *--? child', 'child ?--* parent']:
        raise AssertionError()
def assert_table_well_rendered_to_er(table):
    """Check a table's header markdown and that each column appears in its
    rendered body.

    BUG FIX(review): stripped call expressions reconstructed — confirm.
    """
    if not (table.header_markdown == (('[' + table.name) + ']')):
        raise AssertionError()
    table_er = table.to_markdown()
    for col in table.columns:
        if col.to_markdown() not in table_er:
            raise AssertionError()
def test_table():
    """Both fixture tables must render correctly.

    BUG FIX(review): stripped call expressions reconstructed — confirm.
    """
    assert_table_well_rendered_to_er(child)
    assert_table_well_rendered_to_er(parent)
from django.http import Http404
from django.shortcuts import render_to_response
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
###########
# CHOICES #
###########
def choice_list(request, app_label, module_name, field_name, models):
    """List page for a model field that declares ``choices``."""
    # NOTE(review): ``lookup_field`` is not defined in this chunk — it is
    # presumably a module-level helper elsewhere in this file; confirm.
    m, f = lookup_field(app_label, module_name, field_name, models)
    return render_to_response("databrowse/choice_list.html", {"model": m, "field": f})
def choice_detail(request, app_label, module_name, field_name, field_val, models):
    """Paginated detail page for objects whose ``field_name`` == ``field_val``.

    Raises Http404 when ``field_val`` is not one of the field's choices.
    """
    m, f = lookup_field(app_label, module_name, field_name, models)
    try:
        label = dict(f.field.choices)[field_val]
    except KeyError:
        raise Http404("Invalid choice value given")
    # NOTE(review): ``m.objects(**...)`` calls the manager directly; for a
    # plain Django model this would raise TypeError and would normally read
    # ``m.objects.filter(**...)`` — confirm what type ``m`` is here.
    obj_list = m.objects(**{f.field.name: field_val})
    numitems = request.GET.get("items")
    items_per_page = [25, 50, 100]
    # Honour a user-supplied positive page size, else the smallest default.
    if numitems and numitems.isdigit() and int(numitems) > 0:
        paginator = Paginator(obj_list, numitems)
    else:
        # fall back to default
        paginator = Paginator(obj_list, items_per_page[0])
    page = request.GET.get("page")
    try:
        obj_list_page = paginator.page(page)
    except PageNotAnInteger:
        # If page is not an integer, deliver first page.
        obj_list_page = paginator.page(1)
    except EmptyPage:
        # If page is out of range (e.g. 9999), deliver last page.
        obj_list_page = paginator.page(paginator.num_pages)
    return render_to_response(
        "databrowse/choice_detail.html",
        {
            "model": m,
            "field": f,
            "value": label,
            "object_list": obj_list_page,
            "items_per_page": items_per_page,
        },
    )
|
"""
This is a testing project for the KeyKeeper application.
"""
|
"""Dynamic REST (or DREST) is an extension of Django REST Framework.
DREST offers the following features on top of the standard DRF kit:
- Linked/embedded/sideloaded relationships
- Field inclusions/exclusions
- Field-based filtering/sorting
- Directory panel for the browsable API
- Optimizations
"""
|
# -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds a (type, id) column pair to ``user`` for a favourite pet."""

    dependencies = [
        ("contenttypes", "0001_initial"),
        ("tests", "0002_auto_20160310_1052"),
    ]
    operations = [
        # NOTE(review): favorite_pet_id (TextField) plus the ContentType FK
        # below look like the two halves of a generic relation — confirm.
        migrations.AddField(
            model_name="user",
            name="favorite_pet_id",
            field=models.TextField(null=True, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name="user",
            name="favorite_pet_type",
            field=models.ForeignKey(
                blank=True, to="contenttypes.ContentType", null=True
            ),  # noqa
            preserve_default=True,
        ),
    ]
|
"""FamilySearch User submodule"""
# Python imports
# Magic
class User(object):
    """https://familysearch.org/developers/docs/api/resources#user

    Mixin building FamilySearch user-related URLs.  Expects the host class
    to provide ``root_collection``, ``collections``, ``update_collection``
    and ``user_base``.

    IMPROVEMENT: the duplicated try/except KeyError lookup in
    ``current_user_person`` and ``current_user_history`` is factored into
    ``_fsft_link``; public behaviour is unchanged.
    """

    def __init__(self):
        """https://familysearch.org/developers/docs/api/examples#user"""
        pass

    def _fsft_link(self, rel):
        """Return the FSFT collection link named ``rel``, refreshing the
        cached collection once when the link is missing."""
        try:
            return self.collections["FSFT"]["response"]["collections"][0][
                "links"
            ][rel]["href"]
        except KeyError:
            self.update_collection("FSFT")
            return self.collections["FSFT"]["response"]["collections"][0][
                "links"
            ][rel]["href"]

    def current_user(self):
        """https://familysearch.org/developers/docs/api/users/Current_User_resource"""
        url = self.root_collection["response"]["collections"][0]["links"][
            "current-user"
        ]["href"]
        return url

    def current_user_person(self):
        """https://familysearch.org/developers/docs/api/tree/Current_Tree_Person_resource"""
        return self._fsft_link("current-user-person")

    def agent(self, uid):
        """https://familysearch.org/developers/docs/api/users/Agent_resource"""
        return self.user_base + "agents/" + uid

    def current_user_history(self):
        """https://familysearch.org/developers/docs/api/users/Current_User_History_resource"""
        return self._fsft_link("current-user-history")
|
"""
[Advanced] [In-development]
Export a program list to a single yaml file.
The export may contain machine specific paths.
and may need to be edited for portability
"""
from argparse import FileType
import logging
import sys
import yaml
from chalmers.utils.cli import add_selection_group, select_programs
log = logging.getLogger("chalmers.export")
def main(args):
    """Dump the selected programs' raw definitions to ``args.output`` as YAML."""
    programs = select_programs(args, filter_paused=False)
    export_data = [{"program": dict(prog.raw_data)} for prog in programs]
    yaml.safe_dump(export_data, args.output, default_flow_style=False)
def add_parser(subparsers):
    """Register the ``export`` subcommand and its options on ``subparsers``."""
    exporter = subparsers.add_parser(
        "export",
        description=__doc__,
        help='[IN DEVELOPMENT] Export current configuration to be installed with the "import" command',
    )
    add_selection_group(exporter)
    exporter.add_argument("-o", "--output", type=FileType("w"), default=sys.stdout)
    exporter.set_defaults(main=main)
|
"""
Linux services, this module checks the existence of linux command line
programs on import
* systemd_service
* upstart_service
* sysv_service
* cron_service
In that order
"""
import logging
import platform
import sys
from . import cron_service, sysv_service, upstart_service, systemd_service
from chalmers import errors
# Fix for AWS Linux
# Amazon Linux reports a distribution name of "system"; append it to
# platform's known-distribution list so linux_distribution() can detect it.
# The literal must match platform's internal string type (bytes on Py2).
# NOTE(review): ``platform._supported_dists`` is a private attribute that was
# removed in newer Python 3 releases — confirm the supported interpreter.
if sys.version_info.major == 3:
    system_dist = ("system",)
else:
    system_dist = (b"system",)
platform._supported_dists += system_dist
log = logging.getLogger("chalmers.service")
class NoPosixSystemService(object):
    # Stub selected when no init system was detected; constructing it
    # reports the failure instead of silently doing nothing.
    def __init__(self, target_user=None):
        supported_dists = platform._supported_dists + system_dist
        linux = platform.linux_distribution(supported_dists=supported_dists)
        raise errors.ChalmersError(
            "Could not detect system service for platform %s (tried systemd, sysv init and upstart)"
            % linux[0]
        )
# Pick the first available init system, in order of preference:
# systemd, SysV init, upstart; otherwise fall back to the raising stub.
if systemd_service.check():
    PosixSystemService = systemd_service.SystemdService
elif sysv_service.check():
    PosixSystemService = sysv_service.SysVService
elif upstart_service.check():
    PosixSystemService = upstart_service.UpstartService
else:
    PosixSystemService = NoPosixSystemService
# Per-user (non-root) services are handled through cron in all cases.
PosixLocalService = cron_service.CronService
|
import abc
import logging
import traceback
import servicemanager
import win32event, win32service, win32api
from win32serviceutil import ServiceFramework
log = logging.getLogger(__name__)
class WindowsService(ServiceFramework, metaclass=abc.ABCMeta):
    """
    Base windows service class that provides all the nice things that a python
    service needs

    BUG FIX: the class previously inherited ``(object, ServiceFramework)``;
    listing ``object`` before another base raises ``TypeError: Cannot create
    a consistent method resolution order`` at class-creation time.
    """

    def __init__(self, args):
        try:
            # First CLI argument doubles as service name and display name.
            self._svc_name_ = args[0]
            self._svc_display_name_ = args[0]
            ServiceFramework.__init__(self, args)
            self.stop_event = win32event.CreateEvent(None, 0, 0, None)
        except Exception:
            self.log("Error in WindowsService.__init__")
            self.log(traceback.format_exc())
            raise

    def log(self, msg):
        "Log to the NTEventlog"
        servicemanager.LogInfoMsg(str(msg))

    def sleep(self, sec):
        # win32api.Sleep takes milliseconds; True makes the wait alertable.
        win32api.Sleep(sec * 1000, True)

    def SvcDoRun(self):
        """Service entry point: report status transitions around ``self.start()``."""
        self.log("start")
        self.ReportServiceStatus(win32service.SERVICE_START_PENDING)
        try:
            self.ReportServiceStatus(win32service.SERVICE_RUNNING)
            self.log("start")
            self.start()
            self.ReportServiceStatus(win32service.SERVICE_STOPPED)
            # self.log('wait')
            # win32event.WaitForSingleObject(self.stop_event, win32event.INFINITE)
            self.log("done")
        except Exception:
            self.log("Error in WindowsService.SvcDoRun")
            self.log(traceback.format_exc())
            self.SvcStop()

    def SvcStop(self):
        """Stop the service: run ``self.stop()`` and signal the stop event."""
        # (A stray no-op ``pass`` that preceded this code was removed.)
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        self.log("stopping")
        self.stop()
        self.log("stopped")
        win32event.SetEvent(self.stop_event)
        self.ReportServiceStatus(win32service.SERVICE_STOPPED)
|
# Copyright (c) 2014 Johan Burke
# Distributed under the MIT software license. See http://www.opensource.org/licenses/mit-license.php.
from ..pyelliptic.ecc import *
from ..threads.threadutils import *
from ..constants import *
from .key import *
import hashlib
from struct import *
import sys
def encodeInt(val, alphabet=ALPHABET):
    """Encode the non-negative integer ``val`` in the base given by
    ``alphabet``, most-significant digit first.

    ROBUSTNESS FIX: 0 now encodes as the alphabet's zero digit; previously
    it produced "" (not a valid representation of zero in any base).
    """
    base = len(alphabet)
    if val == 0:
        return str(alphabet[0])
    result = ""
    while val > 0:
        val, rem = divmod(val, base)
        result = str(alphabet[rem]) + result
    return result
class Address:
    """A versioned address: version byte + hash, base-encoded with checksum."""

    def __init__(self, hashValue, version=VERSION):
        self.version = version  # protocol version number
        self.hashValue = hashValue  # raw hash bytes to encode
        self.encodedValue = ""  # filled in by encode()

    def encodeVersion(self):
        # return the version as a big-endian unsigned byte.
        return pack(">B", self.version)

    def encode(self):
        """Compute the checksummed, base-encoded form into ``encodedValue``."""
        a = self.encodeVersion() + self.hashValue
        sha = hashlib.new("sha512")
        sha.update(a)
        # The running digest is fed back into the hash before the two
        # checksum bytes are taken (i.e. checksum = sha512(a || sha512(a))[:2]).
        sha.update(sha.digest())
        checksum = sha.digest()[0:2]
        # int.from_bytes requires Python 3.
        intValue = int.from_bytes(a + checksum, "big")
        # this value is in base 64
        self.encodedValue = encodeInt(intValue)
def genKey():
    """Create a fresh ECC key pair plus its base-encoded Address string."""
    curve = ECC()
    pubKey = curve.get_pubkey()
    sha = hashlib.new("sha512")
    sha.update(pubKey)
    ripemd = hashlib.new("ripemd160")
    ripemd.update(sha.digest())
    # A second interleaved round: sha absorbs the ripemd digest, then ripemd
    # absorbs the updated sha digest, before the result becomes the address.
    sha.update(ripemd.digest())
    ripemd.update(sha.digest())
    # safePrint(ripemd.digest())
    a = Address(ripemd.digest())
    a.encode()
    key = Key(pubKey, curve.get_privkey(), a.encodedValue)
    return key
|
from anymesh import AnyMesh, AnyMeshDelegateProtocol
class LeftDelegate(AnyMeshDelegateProtocol):
    """Callbacks for the "left" mesh node."""

    def connected_to(self, device_info):
        print(("left connected to " + device_info.name))

    def disconnected_from(self, name):
        pass

    def received_msg(self, message):
        print(("left received message from " + message.sender))
        print(("message: " + message.data["msg"]))
        # Replies through the module-level leftMesh instance created below.
        leftMesh.request("right", {"msg": "back at ya righty!"})
class RightDelegate(AnyMeshDelegateProtocol):
    """Callbacks for the "right" mesh node."""

    def connected_to(self, device_info):
        print(("right connected to " + device_info.name))
        # Kicks off the exchange as soon as the peer appears.
        rightMesh.request("left", {"msg": "hey lefty!"})

    def disconnected_from(self, name):
        pass

    def received_msg(self, message):
        print(("right received message from " + message.sender))
        print(("message: " + message.data["msg"]))
# Two nodes on the same network ("global") that message each other.
leftMesh = AnyMesh("left", "global", LeftDelegate())
rightMesh = AnyMesh("right", "global", RightDelegate())
# Enter the AnyMesh event loop (blocks).
AnyMesh.run()
|
import unittest
import doctest
import urwid
def load_tests(loader, tests, ignore):
    """unittest ``load_tests`` hook: add urwid's module doctests to the suite."""
    doctest_modules = (
        urwid.widget,
        urwid.wimp,
        urwid.decoration,
        urwid.display_common,
        urwid.main_loop,
        urwid.monitored_list,
        urwid.raw_display,
        "urwid.split_repr",  # override function with same name
        urwid.util,
        urwid.signals,
    )
    flags = doctest.ELLIPSIS | doctest.IGNORE_EXCEPTION_DETAIL
    for module in doctest_modules:
        tests.addTests(doctest.DocTestSuite(module, optionflags=flags))
    return tests
|
import logging
log = logging.getLogger(__name__)

# Attributes of logging.LogRecord that are bookkeeping rather than
# user-supplied data; consumers skip these when extracting extra fields.
EXCLUDED_LOG_VARS = [
    "threadName",
    "name",
    "thread",
    "created",
    "process",
    "processName",
    "args",
    "module",
    "filename",
    "levelno",
    "exc_text",
    "pathname",
    "lineno",
    "msg",
    "exc_info",
    "message",
    "funcName",
    "relativeCreated",
    "levelname",
    "msecs",
    "asctime",
]
def register_logging(logger, client_config, cls):
    """Ensure ``logger`` carries a handler of type ``cls`` and return it.

    Returns the already-attached handler when one exists (the last one, if
    several are attached); otherwise creates
    ``cls(client_config=client_config)``, attaches it and returns it.
    """
    # Reversed scan so that, like the original, the LAST matching handler wins.
    for handler in reversed(logger.handlers):
        if isinstance(handler, cls):
            return handler
    reg_handler = cls(client_config=client_config)
    logger.addHandler(reg_handler)
    return reg_handler
def unregister_logger(logger, handler):
    """Detach ``handler`` from ``logger`` (a no-op if it is not attached)."""
    logger.removeHandler(handler)
|
import uuid
import datetime
from appenlight_client.timing import get_local_storage
from appenlight_client.timing import default_timer
from appenlight_client.client import PY3
import logging
log = logging.getLogger(__name__)
class AppenlightWSGIWrapper(object):
    """WSGI middleware that reports errors, slow requests and log records to
    Appenlight via ``appenlight_client`` while delegating to ``app``."""

    __version__ = "0.3"

    def __init__(self, app, appenlight_client):
        # app: the wrapped WSGI application; appenlight_client: reporter.
        self.app = app
        self.appenlight_client = appenlight_client

    def __call__(self, environ, start_response):
        """Run the application and conserve the traceback frames.
        also determine if we got 404
        """
        environ["appenlight.request_id"] = str(uuid.uuid4())
        appenlight_storage = get_local_storage()
        # clear out thread stats on request start
        appenlight_storage.clear()
        app_iter = None
        detected_data = []
        create_report = False
        traceback = None
        http_status = 200
        start_time = default_timer()

        def detect_headers(status, headers, *k, **kw):
            # Capture status code (first 3 chars) and headers on their way out.
            detected_data[:] = status[:3], headers
            return start_response(status, headers, *k, **kw)

        # inject client instance reference to environ
        if "appenlight.client" not in environ:
            environ["appenlight.client"] = self.appenlight_client

        # some bw. compat stubs
        def local_report(message, include_traceback=True, http_status=200):
            environ["appenlight.force_send"] = True

        def local_log(level, message):
            environ["appenlight.force_send"] = True

        environ["appenlight.report"] = local_report
        environ["appenlight.log"] = local_log
        if "appenlight.tags" not in environ:
            environ["appenlight.tags"] = {}
        if "appenlight.extra" not in environ:
            environ["appenlight.extra"] = {}
        try:
            app_iter = self.app(environ, detect_headers)
            return app_iter
        except Exception:
            if hasattr(app_iter, "close"):
                app_iter.close()
            # we need that here
            traceback = self.appenlight_client.get_current_traceback()
            # by default reraise exceptions for app/FW to handle
            if self.appenlight_client.config["reraise_exceptions"]:
                raise
            try:
                start_response(
                    "500 INTERNAL SERVER ERROR",
                    [("Content-Type", "text/html; charset=utf-8")],
                )
            except Exception:
                environ["wsgi.errors"].write(
                    "AppenlightWSGIWrapper middleware catched exception "
                    "in streamed response at a point where response headers "
                    "were already sent.\n"
                )
            else:
                return "Server Error"
        finally:
            # report 500's and 404's
            # report slowness
            end_time = default_timer()
            appenlight_storage.thread_stats["main"] = end_time - start_time
            delta = datetime.timedelta(seconds=(end_time - start_time))
            stats, slow_calls = appenlight_storage.get_thread_stats()
            if "appenlight.view_name" not in environ:
                environ["appenlight.view_name"] = getattr(
                    appenlight_storage, "view_name", ""
                )
            if detected_data and detected_data[0]:
                http_status = int(detected_data[0])
            if self.appenlight_client.config["slow_requests"] and not environ.get(
                "appenlight.ignore_slow"
            ):
                # do we have slow calls/request ?
                if (
                    delta >= self.appenlight_client.config["slow_request_time"]
                    or slow_calls
                ):
                    create_report = True
            if "appenlight.__traceback" in environ and not environ.get(
                "appenlight.ignore_error"
            ):
                # get traceback gathered by pyramid tween
                traceback = environ["appenlight.__traceback"]
                del environ["appenlight.__traceback"]
                http_status = 500
                create_report = True
            if (
                traceback
                and self.appenlight_client.config["report_errors"]
                and not environ.get("appenlight.ignore_error")
            ):
                http_status = 500
                create_report = True
            elif self.appenlight_client.config["report_404"] and http_status == 404:
                create_report = True
            if create_report:
                self.appenlight_client.py_report(
                    environ,
                    traceback,
                    message=None,
                    http_status=http_status,
                    start_time=datetime.datetime.utcfromtimestamp(start_time),
                    end_time=datetime.datetime.utcfromtimestamp(end_time),
                    request_stats=stats,
                    slow_calls=slow_calls,
                )
                # dereference
                del traceback
            self.appenlight_client.save_request_stats(
                stats, view_name=environ.get("appenlight.view_name", "")
            )
            if self.appenlight_client.config["logging"]:
                records = self.appenlight_client.log_handlers_get_records()
                self.appenlight_client.log_handlers_clear_records()
                self.appenlight_client.py_log(
                    environ,
                    records=records,
                    r_uuid=environ["appenlight.request_id"],
                    created_report=create_report,
                )
            # send all data we gathered immediately at the end of request
            self.appenlight_client.check_if_deliver(
                self.appenlight_client.config["force_send"]
                or environ.get("appenlight.force_send")
            )
|
""" Backup & Recovery helper functions. """
import logging
import os
import re
import shutil
import SOAPpy
import statvfs
import sys
import tarfile
import time
from os.path import getsize
import backup_exceptions
import backup_recovery_constants
import gcs_helper
from backup_recovery_constants import APP_BACKUP_DIR_LOCATION
from backup_recovery_constants import APP_DIR_LOCATION
from backup_recovery_constants import BACKUP_DIR_LOCATION
from backup_recovery_constants import BACKUP_ROLLBACK_SUFFIX
from backup_recovery_constants import StorageTypes
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
import appscale_info
from constants import APPSCALE_DATA_DIR
from google.appengine.api.appcontroller_client import AppControllerClient
# The port that the SOAP server listens to.
UA_SERVER_PORT = 4343
def delete_local_backup_file(local_file):
    """Removes the local backup file.

    Args:
      local_file: A str, the path to the backup file to delete.
    """
    if remove(local_file):
        return
    logging.warning(
        "No local backup file '{0}' to delete. " "Skipping...".format(local_file)
    )
def delete_secondary_backup(base_path):
    """Deletes the secondary backup if it exists, upon successful backup.

    Args:
      base_path: A str, the full path of the backup file without the secondary
        suffix.
    """
    secondary_path = "{0}{1}".format(base_path, BACKUP_ROLLBACK_SUFFIX)
    if not remove(secondary_path):
        logging.warning("No secondary backup to remove. Skipping...")
def does_file_exist(path):
    """Checks if the given file is in the local filesystem.

    Args:
      path: A str, the path to the file.
    Returns:
      True on success, False otherwise.
    """
    file_is_present = os.path.isfile(path)
    return file_is_present
def enough_disk_space(service):
    """Checks if there's enough available disk space for a new backup.

    Returns:
      True on success, False otherwise.
    """
    free_bytes = get_available_disk_space()
    logging.debug("Available space: {0}".format(free_bytes))
    needed_bytes = get_backup_size(service)
    logging.debug("Backup size: {0}".format(needed_bytes))
    # Keep a safety margin: require the backup to fit within the padded
    # fraction of the free space.
    if needed_bytes > free_bytes * backup_recovery_constants.PADDING_PERCENTAGE:
        logging.warning("Not enough space for a backup.")
        return False
    return True
def get_available_disk_space():
    """Returns the amount of available disk space under /opt/appscale.

    Returns:
      An int, the available disk space in bytes.
    """
    # IMPROVEMENT: use the named attributes of os.statvfs's result instead
    # of indexing with constants from the Python-2-only ``statvfs`` module
    # (removed in Python 3); the values are identical.
    stat_struct = os.statvfs(os.path.dirname(BACKUP_DIR_LOCATION))
    return stat_struct.f_bavail * stat_struct.f_bsize
def get_backup_size(service):
    """Sums up the size of the snapshot files that consist the backup for the
    given service.

    Args:
      service: A str, the service for which we'll calculate the backup size.
    Returns:
      An int, the total size of the files consisting the backup in bytes.
    """
    total_bytes = 0
    for backup_path in get_snapshot_paths(service):
        total_bytes += getsize(backup_path)
    return total_bytes
def get_snapshot_paths(service):
    """Returns a list of file names holding critical data for the given service.

    Args:
      service: A str, the service for which we're getting the data files.
        Currently there is support for Cassandra and Zookeeper.
    Returns:
      A list of full paths.
    """
    if service != "cassandra":
        return []
    look_for = "snapshots"
    data_dir = "{0}/{1}".format(APPSCALE_DATA_DIR, service)
    # os.walk yields (dirpath, dirnames, filenames); collect directory paths
    # whose name mentions "snapshots".
    matches = [
        dir_path for dir_path, _, _ in os.walk(data_dir) if look_for in dir_path
    ]
    logging.debug("List of data paths for '{0}': {1}".format(service, matches))
    return matches
def move_secondary_backup(base_path):
    """Moves the secondary backup back in place, if it exists, upon an un
    successful backup attempt.

    Args:
      base_path: A str, the final full path of the backup file after this move.
    """
    rollback_copy = "{0}{1}".format(base_path, BACKUP_ROLLBACK_SUFFIX)
    if not rename(rollback_copy, base_path):
        logging.warning("No secondary backup to restore. Skipping...")
def mkdir(path):
    """Creates a dir with the given path.

    Args:
      path: A str, the name of the dir to create.
    Returns:
      True on success, False otherwise.
    """
    try:
        os.mkdir(path)
        return True
    except OSError:
        logging.error("OSError while creating dir '{0}'".format(path))
        return False
def makedirs(path):
    """Creates a dir with the given path and all directories in between.

    Args:
      path: A str, the name of the dir to create.
    Returns:
      True on success, False otherwise.
    """
    try:
        os.makedirs(path)
        return True
    except OSError:
        logging.error("OSError while creating dir '{0}'".format(path))
        return False
def rename(source, destination):
    """Renames source file into destination.

    Args:
      source: A str, the path of the file to rename.
      destination: A str, the destination path.
    Returns:
      True on success, False otherwise.
    """
    try:
        os.rename(source, destination)
        return True
    except OSError:
        logging.error(
            "OSError while renaming '{0}' to '{1}'".format(source, destination)
        )
        return False
def remove(path):
    """Deletes the given file from the filesystem.

    Args:
      path: A str, the path of the file to delete.
    Returns:
      True on success, False otherwise.
    """
    try:
        os.remove(path)
        return True
    except OSError:
        logging.error("OSError while deleting '{0}'".format(path))
        return False
def tar_backup_files(file_paths, target):
    """Tars all snapshot files for a given snapshot name.

    Args:
      file_paths: A list of files to tar up.
      target: A str, the full path to the tar file to be created.
    Returns:
      The path to the tar file, None otherwise.
    """
    backup_file_location = target
    # Rename previous backup, if it exists.
    if not rename(
        backup_file_location,
        "{0}{1}".format(backup_file_location, BACKUP_ROLLBACK_SUFFIX),
    ):
        logging.warning(
            "'{0}' not found. Skipping file rename...".format(backup_file_location)
        )
    # Tar up the backup files.  BUG FIX: use a context manager so the archive
    # is closed even when tar.add() raises (the old code leaked the handle).
    with tarfile.open(backup_file_location, "w") as tar:
        for name in file_paths:
            tar.add(name)
    return backup_file_location
def untar_backup_files(source):
    """Restores a previous backup into the Cassandra directory structure
    from a tar ball.

    Args:
      source: A str, the path to the backup tar.
    Raises:
      BRException: On untar issues.
    """
    logging.info("Untarring backup file '{0}'...".format(source))
    try:
        # BUG FIX: context manager guarantees the archive is closed even
        # when extractall() raises (the old code leaked the handle on error).
        with tarfile.open(source, "r:gz") as tar:
            tar.extractall(path="/")
    except tarfile.TarError as tar_error:
        logging.exception(tar_error)
        raise backup_exceptions.BRException(
            "Exception while untarring backup file '{0}'.".format(source)
        )
    logging.info("Done untarring '{0}'.".format(source))
def app_backup(storage, full_bucket_name=None):
    """Saves the app source code at the backups location on the filesystem.

    Args:
      storage: A str, one of the StorageTypes class members.
      full_bucket_name: A str, the name of the backup file to upload to remote
        storage.
    Returns:
      True on success, False otherwise.
    """
    # Create app backups dir if it doesn't exist.
    if not makedirs(APP_BACKUP_DIR_LOCATION):
        logging.warning(
            "Dir '{0}' already exists. Skipping dir creation...".format(
                APP_BACKUP_DIR_LOCATION
            )
        )
    for dir_path, _, filenames in os.walk(APP_DIR_LOCATION):
        for filename in filenames:
            # Copy source code tars to backups location.
            source = "{0}/{1}".format(dir_path, filename)
            destination = "{0}/{1}".format(APP_BACKUP_DIR_LOCATION, filename)
            try:
                shutil.copy(source, destination)
            except:
                logging.error("Error while backing up '{0}'. ".format(source))
                delete_app_tars(APP_BACKUP_DIR_LOCATION)
                return False
            # Upload to GCS.
            if storage == StorageTypes.GCS:
                # NOTE(review): source is rebuilt from APP_DIR_LOCATION, not
                # dir_path — assumes a flat app directory; confirm for
                # nested layouts.
                source = "{0}/{1}".format(APP_DIR_LOCATION, filename)
                destination = "{0}/apps/{1}".format(full_bucket_name, filename)
                logging.debug("Destination: {0}".format(destination))
                if not gcs_helper.upload_to_bucket(destination, source):
                    logging.error("Error while uploading '{0}' to GCS. ".format(source))
                    delete_app_tars(APP_BACKUP_DIR_LOCATION)
                    return False
    return True
def app_restore(storage, bucket_name=None):
  """Restores the app source code from the backups location on the filesystem.

  Args:
    storage: A str, one of the StorageTypes class members.
    bucket_name: A str, the name of the bucket to restore apps from.
  Returns:
    True on success, False otherwise.
  """
  # Make sure the local backups directory exists before downloading into it.
  if not makedirs(APP_BACKUP_DIR_LOCATION):
    logging.warning(
      "Dir '{0}' already exists. Skipping dir creation...".format(
        APP_BACKUP_DIR_LOCATION
      )
    )

  # Pull each app archive out of GCS into the backups location.
  if storage == StorageTypes.GCS:
    for app_path in gcs_helper.list_bucket(bucket_name):
      if not app_path.startswith(gcs_helper.APPS_GCS_PREFIX):
        continue
      # Only keep the relative name of the app file.
      # E.g. myapp.tar.gz (app_file) out of apps/myapp.tar.gz (app_path)
      app_file = app_path[len(gcs_helper.APPS_GCS_PREFIX):]
      remote_source = "gs://{0}/{1}".format(bucket_name, app_path)
      local_target = "{0}/{1}".format(APP_BACKUP_DIR_LOCATION, app_file)
      if not gcs_helper.download_from_bucket(remote_source, local_target):
        logging.error(
          "Error while downloading '{0}' from GCS.".format(remote_source)
        )
        delete_app_tars(APP_BACKUP_DIR_LOCATION)
        return False

  # Deploy every archive now present in the backups directory.
  apps_to_deploy = [
    os.path.join(APP_BACKUP_DIR_LOCATION, app)
    for app in os.listdir(APP_BACKUP_DIR_LOCATION)
  ]
  if not deploy_apps(apps_to_deploy):
    logging.error(
      "Failed to successfully deploy one or more of the "
      "following apps: {0}".format(apps_to_deploy)
    )
    return False

  return True
def delete_app_tars(location):
  """Deletes applications tars from the designated location.

  Args:
    location: A str, the path to the application tar(s) to be deleted.
  Returns:
    True on success, False otherwise.
  """
  # Walk the tree and stop at the first file that fails to delete.
  for dir_path, _, filenames in os.walk(location):
    for tar_name in filenames:
      if not remove("{0}/{1}".format(dir_path, tar_name)):
        return False
  return True
def deploy_apps(app_paths):
  """Deploys all apps that reside in /opt/appscale/apps.

  Args:
    app_paths: A list of the full paths of the apps to be deployed.
  Returns:
    True on success, False otherwise.
  """
  uaserver = SOAPpy.SOAPProxy(
    "https://{0}:{1}".format(appscale_info.get_db_master_ip(), UA_SERVER_PORT)
  )
  acc = AppControllerClient(appscale_info.get_login_ip(), appscale_info.get_secret())

  # Wait for Cassandra to come up after a restore.
  time.sleep(15)

  for app_path in app_paths:
    # Extract app ID.
    app_id = app_path[app_path.rfind("/") + 1:app_path.find(".")]
    if not app_id:
      logging.error(
        "Malformed source code archive. Cannot complete "
        "application recovery for '{}'. Aborting...".format(app_path)
      )
      return False

    # Retrieve app admin via uaserver.
    app_data = uaserver.get_app_data(app_id, appscale_info.get_secret())
    app_admin_re = re.search("\napp_owner:(.+)\n", app_data)
    if not app_admin_re:
      logging.error(
        "Missing application data. Cannot complete application "
        "recovery for '{}'. Aborting...".format(app_id)
      )
      return False
    app_admin = app_admin_re.group(1)

    file_suffix = re.search("\.(.*)\Z", app_path).group(1)
    logging.warning(
      "Restoring app '{}', from '{}', with owner '{}'.".format(
        app_id, app_path, app_admin
      )
    )
    acc.upload_app(app_path, file_suffix, app_admin)
  return True
|
' This process grooms the datastore cleaning up old state and\ncalculates datastore statistics. Removes tombstoned items for garbage\ncollection.\n'
import datetime
import logging
import os
import random
import re
import sys
import threading
import time
import appscale_datastore_batch
import dbconstants
import datastore_server
import entity_utils
from zkappscale import zktransaction as zk
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_distributed
from google.appengine.api.memcache import memcache_distributed
from google.appengine.datastore import datastore_pb
from google.appengine.datastore import entity_pb
from google.appengine.datastore.datastore_query import Cursor
from google.appengine.ext import db
from google.appengine.ext.db import stats
from google.appengine.ext.db import metadata
from google.appengine.api import datastore_errors
(((__file__), '../lib/'))
import appscale_info
import constants
(((__file__), '../AppDashboard/lib/'))
from app_dashboard_data import InstanceInfo
from app_dashboard_data import ServerStatus
from app_dashboard_data import RequestInfo
from dashboard_logs import RequestLogLine
(((__file__), '../AppTaskQueue/'))
from distributed_tq import TaskName
class DatastoreGroomer(threading.Thread):
'Scans the entire database for each application.'
LOCK_POLL_PERIOD = ((4 * 60) * 60)
DB_ERROR_PERIOD = 30
BATCH_SIZE = 100
PRIVATE_KINDS = '__(.*)__'
PROTECTED_KINDS = '_(.*)_'
TASK_NAME_TIMEOUT = ((24 * 60) * 60)
LOG_STORAGE_TIMEOUT = (((24 * 60) * 60) * 7)
APPSCALE_APPLICATIONS = ['apichecker', 'appscaledashboard']
NO_COMPOSITES = 'NO_COMPS_INDEXES_HERE'
DASHBOARD_DATA_TIMEOUT = (60 * 60)
DASHBOARD_DATA_MODELS = [InstanceInfo, ServerStatus, RequestInfo]
DASHBOARD_BATCH = 1000
GROOMER_STATE_PATH = '/appscale/groomer_state'
GROOMER_STATE_DELIMITER = '||'
CLEAN_ENTITIES_TASK = 'entities'
CLEAN_ASC_INDICES_TASK = 'asc-indices'
CLEAN_DSC_INDICES_TASK = 'dsc-indices'
CLEAN_KIND_INDICES_TASK = 'kind-indices'
CLEAN_LOGS_TASK = 'logs'
CLEAN_TASKS_TASK = 'tasks'
CLEAN_DASHBOARD_TASK = 'dashboard'
LOG_PROGRESS_FREQUENCY = (60 * 5)
def __init__(self, zoo_keeper, table_name, ds_path):
'Constructor.\n\n Args:\n zk: ZooKeeper client.\n table_name: The database used (ie, cassandra)\n ds_path: The connection path to the datastore_server.\n '
log_format = ('%(asctime)s %(levelname)s %(filename)s: %(lineno)s %(message)s')
(log_format)
('Logging started')
(self)
self.zoo_keeper = zoo_keeper
self.table_name = table_name
self.db_access = None
self.ds_access = None
self.datastore_path = ds_path
self.stats = {}
self.namespace_info = {}
self.num_deletes = 0
self.composite_index_cache = {}
self.entities_checked = 0
self.journal_entries_cleaned = 0
self.index_entries_checked = 0
self.index_entries_delete_failures = 0
self.index_entries_cleaned = 0
self.last_logged = ()
self.groomer_state = []
def stop(self):
'Stops the groomer thread.'
()
def run(self):
'Starts the main loop of the groomer thread.'
while True:
('Trying to get groomer lock.')
if ():
('Got the groomer lock.')
()
try:
(zk.DS_GROOM_LOCK_PATH)
except zk.ZKTransactionException as zk_exception:
(((zk_exception)))
except zk.ZKInternalException as zk_exception:
(((zk_exception)))
else:
('Did not get the groomer lock.')
sleep_time = (1, self.LOCK_POLL_PERIOD)
(((sleep_time / 60.0)))
(sleep_time)
def get_groomer_lock(self):
'Tries to acquire the lock to the datastore groomer.\n\n Returns:\n True on success, False otherwise.\n '
return (zk.DS_GROOM_LOCK_PATH)
def get_entity_batch(self, last_key):
'Gets a batch of entites to operate on.\n\n Args:\n last_key: The last key from a previous query.\n Returns:\n A list of entities.\n '
return (dbconstants.APP_ENTITY_TABLE, dbconstants.APP_ENTITY_SCHEMA, last_key, '', self.BATCH_SIZE)
def reset_statistics(self):
'Reinitializes statistics.'
self.stats = {}
self.namespace_info = {}
self.num_deletes = 0
self.journal_entries_cleaned = 0
def remove_deprecated_dashboard_data(self, model_type):
'Remove entities that do not have timestamps in Dashboard data.\n\n AppScale 2.3 and earlier lacked a timestamp attribute.\n\n Args:\n model_type: A class type for a ndb model.\n '
query = ()
entities = (self.DASHBOARD_BATCH)
counter = 0
for entity in entities:
if (not (entity, 'timestamp')):
()
counter += 1
if (counter > 0):
((counter, ()))
def remove_old_dashboard_data(self):
'Removes old statistics from the AppScale dashboard application.'
last_cursor = None
last_model = None
if (((self.groomer_state) > 1) and (self.groomer_state[0] == self.CLEAN_DASHBOARD_TASK)):
last_model = self.DASHBOARD_DATA_MODELS[(self.groomer_state[1])]
if ((self.groomer_state) > 2):
last_cursor = (self.groomer_state[2])
(constants.DASHBOARD_APP_ID)
timeout = (() - ())
for model_number in ((self.DASHBOARD_DATA_MODELS)):
model_type = self.DASHBOARD_DATA_MODELS[model_number]
if (last_model and (model_type != last_model)):
continue
counter = 0
while True:
query = ((model_type.timestamp < timeout))
(entities, next_cursor, more) = (self.BATCH_SIZE)
for entity in entities:
()
counter += 1
if (() > (self.last_logged + self.LOG_PROGRESS_FREQUENCY)):
((counter, model_type.__class__.__name__))
self.last_logged = ()
if more:
last_cursor = next_cursor
([self.CLEAN_DASHBOARD_TASK, (model_number), ()])
else:
break
if (model_number != ((self.DASHBOARD_DATA_MODELS) - 1)):
([self.CLEAN_DASHBOARD_TASK, ((model_number + 1))])
last_model = None
last_cursor = None
if (counter > 0):
((counter, model_type))
(model_type)
return
def clean_journal_entries(self, txn_id, key):
'Remove journal entries that are no longer needed. Assumes\n transaction numbers are only increasing.\n\n Args:\n txn_id: An int of the transaction number to delete up to.\n key: A str, the entity table key for which we are deleting.\n Returns:\n True on success, False otherwise.\n '
if (txn_id == 0):
return True
start_row = (key, 0)
end_row = (key, ((txn_id) - 1))
last_key = start_row
keys_to_delete = []
while True:
try:
results = (dbconstants.JOURNAL_TABLE, dbconstants.JOURNAL_SCHEMA, last_key, end_row, self.BATCH_SIZE)
if ((results) == 0):
return True
keys_to_delete = []
for item in results:
((())[0])
(dbconstants.JOURNAL_TABLE, keys_to_delete)
self.journal_entries_cleaned += (keys_to_delete)
except dbconstants.AppScaleDBConnectionError as db_error:
((keys_to_delete, db_error))
('Backing off!')
(self.DB_ERROR_PERIOD)
return False
except Exception as exception:
((exception))
('Backing off!')
(self.DB_ERROR_PERIOD)
return False
def hard_delete_row(self, row_key):
'Does a hard delete on a given row key to the entity\n table.\n\n Args:\n row_key: A str representing the row key to delete.\n Returns:\n True on success, False otherwise.\n '
try:
(dbconstants.APP_ENTITY_TABLE, [row_key])
except dbconstants.AppScaleDBConnectionError as db_error:
((row_key, db_error))
return False
except Exception as exception:
((exception))
return False
return True
def load_composite_cache(self, app_id):
'Load the composite index cache for an application ID.\n\n Args:\n app_id: A str, the application ID.\n Returns:\n True if the application has composites. False otherwise.\n '
start_key = (app_id, 'index', '')
end_key = (app_id, 'index', dbconstants.TERMINATING_STRING)
results = (dbconstants.METADATA_TABLE, dbconstants.METADATA_TABLE, start_key, end_key, dbconstants.MAX_NUMBER_OF_COMPOSITE_INDEXES)
list_result = []
for list_item in results:
for (_, value) in ():
(value['data'])
self.composite_index_cache[app_id] = self.NO_COMPOSITES
kind_index_dictionary = {}
for index in list_result:
new_index = ()
(index)
kind = ()
if (kind in kind_index_dictionary):
(new_index)
else:
kind_index_dictionary[kind] = [new_index]
if kind_index_dictionary:
self.composite_index_cache[app_id] = kind_index_dictionary
return True
return False
def acquire_lock_for_key(self, app_id, key, retries, retry_time):
'Acquires a lock for a given entity key.\n\n Args:\n app_id: The application ID.\n key: A string containing an entity key.\n retries: An integer specifying the number of times to retry.\n retry_time: How many seconds to wait before each retry.\n Returns:\n A transaction ID.\n Raises:\n ZKTransactionException if unable to acquire a lock from ZooKeeper.\n '
root_key = (dbconstants.KIND_SEPARATOR)[0]
root_key += dbconstants.KIND_SEPARATOR
txn_id = (app_id)
try:
(app_id, txn_id, root_key)
except zk.ZKTransactionException as zkte:
((app_id, (zkte)))
if (retries > 0):
(((zkte), retries))
(retry_time)
return ()
(app_id, txn_id)
raise zkte
return txn_id
def release_lock_for_key(self, app_id, key, txn_id, retries, retry_time):
'Releases a lock for a given entity key.\n\n Args:\n app_id: The application ID.\n key: A string containing an entity key.\n txn_id: A transaction ID.\n retries: An integer specifying the number of times to retry.\n retry_time: How many seconds to wait before each retry.\n '
root_key = (dbconstants.KIND_SEPARATOR)[0]
root_key += dbconstants.KIND_SEPARATOR
try:
(app_id, txn_id)
except zk.ZKTransactionException as zkte:
((zkte))
if (retries > 0):
((txn_id, retries))
(retry_time)
()
else:
(app_id, txn_id)
def fetch_entity_dict_for_references(self, references):
'Fetches a dictionary of valid entities for a list of references.\n\n Args:\n references: A list of index references to entities.\n Returns:\n A dictionary of validated entities.\n '
keys = []
for item in references:
((())[0][self.ds_access.INDEX_REFERENCE_COLUMN])
keys = ((keys))
entities = (dbconstants.APP_ENTITY_TABLE, keys, dbconstants.APP_ENTITY_SCHEMA)
entities_by_app = {}
for key in entities:
app = (self.ds_access._SEPARATOR)[0]
if (app not in entities_by_app):
entities_by_app[app] = {}
entities_by_app[app][key] = entities[key]
entities = {}
for app in entities_by_app:
app_entities = entities_by_app[app]
app_entities = (app, app_entities)
app_entities = (app_entities)
for key in keys:
if (key not in app_entities):
continue
if (dbconstants.APP_ENTITY_SCHEMA[0] not in app_entities[key]):
continue
entities[key] = app_entities[key][dbconstants.APP_ENTITY_SCHEMA[0]]
return entities
def lock_and_delete_indexes(self, references, direction, entity_key):
"For a list of index entries that have the same entity, lock the entity\n and delete the indexes.\n\n Since another process can update an entity after we've determined that\n an index entry is invalid, we need to re-check the index entries after\n locking their entity key.\n\n Args:\n references: A list of references to an entity.\n direction: The direction of the index.\n entity_key: A string containing the entity key.\n "
if (direction == datastore_pb.Query_Order.ASCENDING):
table_name = dbconstants.ASC_PROPERTY_TABLE
else:
table_name = dbconstants.DSC_PROPERTY_TABLE
app = (self.ds_access._SEPARATOR)[0]
try:
txn_id = ()
except zk.ZKTransactionException:
self.index_entries_delete_failures += 1
return
entities = (references)
refs_to_delete = []
for reference in references:
index_elements = (self.ds_access._SEPARATOR)
prop_name = index_elements[self.ds_access.PROP_NAME_IN_SINGLE_PROP_INDEX]
if (not (reference, entities, direction, prop_name)):
((())[0])
(((refs_to_delete), [refs_to_delete[0]]))
try:
(table_name, refs_to_delete)
self.index_entries_cleaned += (refs_to_delete)
except Exception:
('Unable to delete indexes')
self.index_entries_delete_failures += 1
()
def lock_and_delete_kind_index(self, reference):
"For a list of index entries that have the same entity, lock the entity\n and delete the indexes.\n\n Since another process can update an entity after we've determined that\n an index entry is invalid, we need to re-check the index entries after\n locking their entity key.\n\n Args:\n reference: A dictionary containing a kind reference.\n "
table_name = dbconstants.APP_KIND_TABLE
entity_key = ()[0]
app = (self.ds_access._SEPARATOR)[0]
try:
txn_id = ()
except zk.ZKTransactionException:
self.index_entries_delete_failures += 1
return
entities = ([reference])
if (entity_key not in entities):
index_to_delete = (())[0]
(([index_to_delete]))
try:
(table_name, [index_to_delete])
self.index_entries_cleaned += 1
except dbconstants.AppScaleDBConnectionError:
('Unable to delete index.')
self.index_entries_delete_failures += 1
()
def clean_up_indexes(self, direction):
'Deletes invalid single property index entries.\n\n This is needed because we do not delete index entries when updating or\n deleting entities. With time, this results in queries taking an increasing\n amount of time.\n\n Args:\n direction: The direction of the index.\n '
if (direction == datastore_pb.Query_Order.ASCENDING):
table_name = dbconstants.ASC_PROPERTY_TABLE
task_id = self.CLEAN_ASC_INDICES_TASK
else:
table_name = dbconstants.DSC_PROPERTY_TABLE
task_id = self.CLEAN_DSC_INDICES_TASK
if (((self.groomer_state) > 1) and (self.groomer_state[0] == task_id)):
start_key = self.groomer_state[1]
else:
start_key = ''
end_key = dbconstants.TERMINATING_STRING
while True:
references = ()
if ((references) == 0):
break
self.index_entries_checked += (references)
if (() > (self.last_logged + self.LOG_PROGRESS_FREQUENCY)):
((self.index_entries_checked))
self.last_logged = ()
first_ref = (())[0]
((self.index_entries_checked, [first_ref], direction))
last_start_key = start_key
start_key = (())[0]
if (start_key == last_start_key):
raise ('An infinite loop was detected while fetching references.')
entities = (references)
invalid_refs = {}
for reference in references:
prop_name = (self.ds_access._SEPARATOR)[3]
if (not (reference, entities, direction, prop_name)):
entity_key = (())[0][self.ds_access.INDEX_REFERENCE_COLUMN]
if (entity_key not in invalid_refs):
invalid_refs[entity_key] = []
(reference)
for entity_key in invalid_refs:
(invalid_refs[entity_key], direction, entity_key)
([task_id, start_key])
def clean_up_kind_indices(self):
'Deletes invalid kind index entries.\n\n This is needed because the datastore does not delete kind index entries\n when deleting entities.\n '
table_name = dbconstants.APP_KIND_TABLE
task_id = self.CLEAN_KIND_INDICES_TASK
start_key = ''
end_key = dbconstants.TERMINATING_STRING
if ((self.groomer_state) > 1):
start_key = self.groomer_state[1]
while True:
references = ()
if ((references) == 0):
break
self.index_entries_checked += (references)
if (() > (self.last_logged + self.LOG_PROGRESS_FREQUENCY)):
((self.index_entries_checked))
self.last_logged = ()
first_ref = (())[0]
(((references), [first_ref]))
last_start_key = start_key
start_key = (())[0]
if (start_key == last_start_key):
raise ('An infinite loop was detected while fetching references.')
entities = (references)
for reference in references:
entity_key = ()[0]
if (entity_key not in entities):
(reference)
([task_id, start_key])
def clean_up_composite_indexes(self):
  """Deletes old composite indexes and bad references.

  Returns:
    True on success, False otherwise.
  """
  # Intentional no-op: always reports success so the groomer run that
  # invokes this task is not aborted. No cleanup is performed here.
  return True
def get_composite_indexes(self, app_id, kind):
  """Fetches the composite indexes for a kind.

  Args:
    app_id: The application ID.
    kind: A string, the kind for which we need composite indexes.
  Returns:
    A list of composite indexes.
  """
  if (not kind):
    return []
  if (app_id in self.composite_index_cache):
    # Cache hit: NO_COMPOSITES is a sentinel meaning "this app has none".
    if (self.composite_index_cache[app_id] == self.NO_COMPOSITES):
      return []
    elif (kind in self.composite_index_cache[app_id]):
      return self.composite_index_cache[app_id][kind]
    else:
      return []
  else:
    # NOTE(review): this branch runs only when app_id is NOT in the cache,
    # yet it indexes self.composite_index_cache[app_id] below — that would
    # raise KeyError. A cache-loading call appears to be missing here
    # (compare load_composite_cache); confirm against upstream.
    if (app_id):
      # NOTE(review): [kind] here looks like it should be [app_id][kind];
      # also, a falsy app_id falls through and returns None, not a list.
      # Verify intended behavior before relying on this path.
      if (kind in self.composite_index_cache[app_id]):
        return self.composite_index_cache[kind]
      return []
def delete_indexes(self, entity):
  """Deletes indexes for a given entity.

  Args:
    entity: An EntityProto.
  """
  # Intentional no-op: the body is a bare return, so no index deletion
  # happens here. Single-property index cleanup is performed elsewhere
  # (see clean_up_indexes).
  return
def delete_composite_indexes(self, entity, composites):
'Deletes composite indexes for an entity.\n\n Args:\n entity: An EntityProto.\n composites: A list of datastore_pb.CompositeIndexes composite indexes.\n '
row_keys = ([entity], composites)
(dbconstants.COMPOSITE_TABLE, row_keys)
def fix_badlisted_entity(self, key, version):
'Places the correct entity given the current one is from a blacklisted\n transaction.\n\n Args:\n key: The key to the entity table.\n version: The bad version of the entity.\n Returns:\n True on success, False otherwise.\n '
app_prefix = (key)
root_key = (key)
try:
txn_id = (app_prefix)
if (app_prefix, txn_id, root_key):
valid_id = (app_prefix, version, key)
ds_distributed = (app_prefix)
bad_key = (key, version)
good_key = (key, valid_id)
good_entry = (self.db_access, good_key)
bad_entry = (self.db_access, bad_key)
kind = None
if good_entry:
kind = (())
elif bad_entry:
kind = (())
composites = (app_prefix, kind)
if bad_entry:
(bad_entry)
(bad_entry, composites)
if good_entry:
pass
else:
pass
del ds_distributed
else:
success = False
except zk.ZKTransactionException as zk_exception:
((zk_exception))
success = False
except zk.ZKInternalException as zk_exception:
((zk_exception))
success = False
except dbconstants.AppScaleDBConnectionError as db_exception:
((db_exception))
success = False
finally:
if (not success):
if (not (app_prefix, txn_id)):
((app_prefix, txn_id))
try:
(app_prefix, txn_id)
except zk.ZKTransactionException as zk_exception:
raise
except zk.ZKInternalException as zk_exception:
raise
return True
def process_tombstone(self, key, entity, version):
'Processes any entities which have been soft deleted.\n Does an actual delete to reclaim disk space.\n\n Args:\n key: The key to the entity table.\n entity: The entity in string serialized form.\n version: The version of the entity in the datastore.\n Returns:\n True if a hard delete occurred, False otherwise.\n '
success = False
app_prefix = (key)
root_key = (key)
try:
if (app_prefix, version):
((version, key))
return True
return (key, version)
except zk.ZKTransactionException as zk_exception:
((zk_exception))
(self.DB_ERROR_PERIOD)
return False
except zk.ZKInternalException as zk_exception:
((zk_exception))
(self.DB_ERROR_PERIOD)
return False
txn_id = 0
try:
txn_id = (app_prefix)
except zk.ZKTransactionException as zk_exception:
((zk_exception))
('Backing off!')
(self.DB_ERROR_PERIOD)
return False
except zk.ZKInternalException as zk_exception:
((zk_exception))
('Backing off!')
(self.DB_ERROR_PERIOD)
return False
try:
if (app_prefix, txn_id, root_key):
success = (key)
if success:
success = ((txn_id + 1), key)
else:
success = False
except zk.ZKTransactionException as zk_exception:
((zk_exception))
('Backing off!')
(self.DB_ERROR_PERIOD)
success = False
except zk.ZKInternalException as zk_exception:
((zk_exception))
('Backing off!')
(self.DB_ERROR_PERIOD)
success = False
finally:
if (not success):
try:
if (not (app_prefix, txn_id)):
((app_prefix, txn_id))
(app_prefix, txn_id)
except zk.ZKTransactionException as zk_exception:
((zk_exception))
except zk.ZKInternalException as zk_exception:
((zk_exception))
if success:
try:
(app_prefix, txn_id)
except Exception as exception:
((exception))
self.num_deletes += 1
((key, success))
return success
def initialize_kind(self, app_id, kind):
  """Puts a kind into the statistics object if
  it does not already exist.

  Args:
    app_id: The application ID.
    kind: A string representing an entity kind.
  """
  # Ensure the per-app bucket exists, then seed the kind's counters once.
  app_stats = self.stats.setdefault(app_id, {})
  if kind not in app_stats:
    app_stats[kind] = {'size': 0, 'number': 0}
def initialize_namespace(self, app_id, namespace):
  """Puts a namespace into the namespace object if
  it does not already exist.

  Args:
    app_id: The application ID.
    namespace: A string representing a namespace.
  """
  if app_id not in self.namespace_info:
    self.namespace_info[app_id] = {namespace: {'size': 0, 'number': 0}}
  if namespace not in self.namespace_info[app_id]:
    # Previously this branch reassigned the entire per-app dict (discarding
    # counts for every other namespace of the app) and a duplicated dead
    # check then wrote into self.stats by mistake. Add only the missing
    # namespace entry, mirroring initialize_kind().
    self.namespace_info[app_id][namespace] = {'size': 0, 'number': 0}
def process_statistics(self, key, entity, size):
'Processes an entity and adds to the global statistics.\n\n Args:\n key: The key to the entity table.\n entity: EntityProto entity.\n size: A int of the size of the entity.\n Returns:\n True on success, False otherwise.\n '
kind = (())
namespace = ()
if (not kind):
((entity))
return False
if (self.PROTECTED_KINDS, kind):
return True
if (self.PRIVATE_KINDS, kind):
return True
app_id = ()
if (not app_id):
((kind))
return False
if (app_id in self.APPSCALE_APPLICATIONS):
return True
(app_id, kind)
(app_id, namespace)
self.namespace_info[app_id][namespace]['size'] += size
self.namespace_info[app_id][namespace]['number'] += 1
self.stats[app_id][kind]['size'] += size
self.stats[app_id][kind]['number'] += 1
return True
def txn_blacklist_cleanup(self):
  """Clean up old transactions and removed unused references
  to reap storage.

  Returns:
    True on success, False otherwise.
  """
  # Intentional no-op: always reports success; no transaction blacklist
  # cleanup is actually performed in this implementation.
  return True
def verify_entity(self, entity, key, txn_id):
'Verify that the entity is not blacklisted. Clean up old journal\n entries if it is valid.\n\n Args:\n entity: The entity to verify.\n key: The key to the entity table.\n txn_id: An int, a transaction ID.\n Returns:\n True on success, False otherwise.\n '
app_prefix = (key)
try:
if (not (app_prefix, txn_id)):
(txn_id, key)
else:
((txn_id, key))
return True
return (key, txn_id)
except zk.ZKTransactionException as zk_exception:
((zk_exception))
(self.DB_ERROR_PERIOD)
return True
except zk.ZKInternalException as zk_exception:
((zk_exception))
(self.DB_ERROR_PERIOD)
return True
return True
def process_entity(self, entity):
'Processes an entity by updating statistics, indexes, and removes\n tombstones.\n\n Args:\n entity: The entity to operate on.\n Returns:\n True on success, False otherwise.\n '
(((entity)))
key = (())[0]
one_entity = entity[key][dbconstants.APP_ENTITY_SCHEMA[0]]
version = entity[key][dbconstants.APP_ENTITY_SCHEMA[1]]
((entity))
if (one_entity == datastore_server.TOMBSTONE):
return (key, one_entity, version)
ent_proto = ()
(one_entity)
(ent_proto, key, version)
(key, ent_proto, (one_entity))
return True
def create_namespace_entry(self, namespace, size, number, timestamp):
'Puts a namespace into the datastore.\n\n Args:\n namespace: A string, the namespace.\n size: An int representing the number of bytes taken by a namespace.\n number: The total number of entities in a namespace.\n timestamp: A datetime.datetime object.\n Returns:\n True on success, False otherwise.\n '
entities_to_write = []
namespace_stat = ()
(namespace_stat)
if (namespace != ''):
namespace_entry = ()
(namespace_entry)
try:
(entities_to_write)
except datastore_errors.InternalError as internal_error:
((internal_error))
return False
('Done creating namespace stats')
return True
def create_kind_stat_entry(self, kind, size, number, timestamp):
'Puts a kind statistic into the datastore.\n\n Args:\n kind: The entity kind.\n size: An int representing the number of bytes taken by entity kind.\n number: The total number of entities.\n timestamp: A datetime.datetime object.\n Returns:\n True on success, False otherwise.\n '
kind_stat = ()
kind_entry = ()
entities_to_write = [kind_stat, kind_entry]
try:
(entities_to_write)
except datastore_errors.InternalError as internal_error:
((internal_error))
return False
('Done creating kind stat')
return True
def create_global_stat_entry(self, app_id, size, number, timestamp):
'Puts a global statistic into the datastore.\n\n Args:\n app_id: The application identifier.\n size: The number of bytes of all entities.\n number: The total number of entities of an application.\n timestamp: A datetime.datetime object.\n Returns:\n True on success, False otherwise.\n '
global_stat = ()
try:
(global_stat)
except datastore_errors.InternalError as internal_error:
((internal_error))
return False
('Done creating global stat')
return True
def remove_old_tasks_entities(self):
'Queries for old tasks and removes the entity which tells\n use whether a named task was enqueued.\n\n Returns:\n True on success.\n '
if (((self.groomer_state) > 1) and (self.groomer_state[0] == self.CLEAN_TASKS_TASK)):
last_cursor = (self.groomer_state[1])
else:
last_cursor = None
(constants.DASHBOARD_APP_ID)
timeout = (() - ())
counter = 0
((()))
((timeout))
while True:
query = ()
if last_cursor:
(last_cursor)
('timestamp <', timeout)
entities = (self.BATCH_SIZE)
if ((entities) == 0):
break
last_cursor = ()
for entity in entities:
((entity.timestamp))
()
counter += 1
if (() > (self.last_logged + self.LOG_PROGRESS_FREQUENCY)):
((counter))
self.last_logged = self.LOG_PROGRESS_FREQUENCY
([self.CLEAN_TASKS_TASK, last_cursor])
((counter))
return True
def clean_up_entities(self):
if (((self.groomer_state) > 1) and (self.groomer_state[0] == self.CLEAN_ENTITIES_TASK)):
last_key = self.groomer_state[1]
else:
last_key = ''
while True:
try:
((self.BATCH_SIZE))
entities = (last_key)
if (not entities):
break
for entity in entities:
(entity)
last_key = (())[0]
self.entities_checked += (entities)
if (() > (self.last_logged + self.LOG_PROGRESS_FREQUENCY)):
((self.entities_checked))
self.last_logged = ()
([self.CLEAN_ENTITIES_TASK, last_key])
except datastore_errors.Error as error:
((error))
(self.DB_ERROR_PERIOD)
except dbconstants.AppScaleDBConnectionError as connection_error:
((connection_error))
(self.DB_ERROR_PERIOD)
def register_db_accessor(self, app_id):
'Gets a distributed datastore object to interact with\n the datastore for a certain application.\n\n Args:\n app_id: The application ID.\n Returns:\n A distributed_datastore.DatastoreDistributed object.\n '
ds_distributed = (app_id, self.datastore_path)
('datastore_v3', ds_distributed)
('memcache', ())
os.environ['APPLICATION_ID'] = app_id
os.environ['APPNAME'] = app_id
os.environ['AUTH_DOMAIN'] = 'appscale.com'
return ds_distributed
def remove_old_logs(self, log_timeout):
'Removes old logs.\n\n Args:\n log_timeout: The timeout value in seconds.\n\n Returns:\n True on success, False otherwise.\n '
if (((self.groomer_state) > 1) and (self.groomer_state[0] == self.CLEAN_LOGS_TASK)):
last_cursor = (self.groomer_state[1])
else:
last_cursor = None
(constants.DASHBOARD_APP_ID)
if log_timeout:
timeout = (() - ())
query = ((RequestLogLine.timestamp < timeout))
((timeout))
else:
query = ()
counter = 0
((()))
while True:
(entities, next_cursor, more) = (self.BATCH_SIZE)
for entity in entities:
((entity))
()
counter += 1
if (() > (self.last_logged + self.LOG_PROGRESS_FREQUENCY)):
((counter))
self.last_logged = ()
if more:
last_cursor = next_cursor
([self.CLEAN_LOGS_TASK, ()])
else:
break
((counter))
return True
def remove_old_statistics(self):
'Does a range query on the current batch of statistics and\n deletes them.\n '
for app_id in (()):
(app_id)
query = ()
entities = ()
(((entities)))
for entity in entities:
((entity))
()
query = ()
entities = ()
(((entities)))
for entity in entities:
((entity))
()
((app_id))
def update_namespaces(self, timestamp):
'Puts the namespace information into the datastore for applications to\n access.\n\n Args:\n timestamp: A datetime time stamp to know which stat items belong\n together.\n Returns:\n True if there were no errors, False otherwise.\n '
for app_id in (()):
ds_distributed = (app_id)
namespaces = (())
for namespace in namespaces:
size = self.namespace_info[app_id][namespace]['size']
number = self.namespace_info[app_id][namespace]['number']
if (not (namespace, size, number, timestamp)):
return False
((app_id, self.namespace_info[app_id]))
del ds_distributed
return True
def update_statistics(self, timestamp):
'Puts the statistics into the datastore for applications\n to access.\n\n Args:\n timestamp: A datetime time stamp to know which stat items belong\n together.\n Returns:\n True if there were no errors, False otherwise.\n '
for app_id in (()):
ds_distributed = (app_id)
total_size = 0
total_number = 0
kinds = (())
for kind in kinds:
size = self.stats[app_id][kind]['size']
number = self.stats[app_id][kind]['number']
total_size += size
total_number += number
if (not (kind, size, number, timestamp)):
return False
if (not (app_id, total_size, total_number, timestamp)):
return False
((app_id, self.stats[app_id]))
((app_id, total_size, total_number))
((self.num_deletes))
del ds_distributed
return True
def update_groomer_state(self, state):
"Updates the groomer's internal state and persists the state to\n ZooKeeper.\n\n Args:\n state: A list of strings representing the ID of the task to resume along\n with any additional data about the task.\n "
zk_data = (state)
try:
(self.GROOMER_STATE_PATH, zk_data)
except zk.ZKInternalException as zkie:
(zkie)
self.groomer_state = state
def run_groomer(self):
    """Runs the grooming process. Loops on the entire dataset sequentially
    and updates stats, indexes, and transactions.
    """
    # NOTE(review): most call targets in this method have been stripped
    # (bare parenthesised expressions such as `()` and `(timestamp)`), and
    # `(*task['args'])` below is not even valid Python as written.  The
    # original receivers must be restored before this can run.
    self.db_access = (self.table_name)
    self.ds_access = ()
    ('Groomer started')  # presumably a log statement -- verify
    start = ()  # presumably a wall-clock timestamp -- verify
    ()
    self.composite_index_cache = {}
    # Ordered task table: each entry names a resumable grooming step with
    # the callable that performs it and its positional arguments.
    tasks = [
        {'id': self.CLEAN_ENTITIES_TASK,
         'description': 'clean up entities',
         'function': self.clean_up_entities, 'args': []},
        {'id': self.CLEAN_ASC_INDICES_TASK,
         'description': 'clean up ascending indices',
         'function': self.clean_up_indexes,
         'args': [datastore_pb.Query_Order.ASCENDING]},
        {'id': self.CLEAN_DSC_INDICES_TASK,
         'description': 'clean up descending indices',
         'function': self.clean_up_indexes,
         'args': [datastore_pb.Query_Order.DESCENDING]},
        {'id': self.CLEAN_KIND_INDICES_TASK,
         'description': 'clean up kind indices',
         'function': self.clean_up_kind_indices, 'args': []},
        {'id': self.CLEAN_LOGS_TASK,
         'description': 'clean up old logs',
         'function': self.remove_old_logs,
         'args': [self.LOG_STORAGE_TIMEOUT]},
        {'id': self.CLEAN_TASKS_TASK,
         'description': 'clean up old tasks',
         'function': self.remove_old_tasks_entities, 'args': []},
        {'id': self.CLEAN_DASHBOARD_TASK,
         'description': 'clean up old dashboard items',
         'function': self.remove_old_dashboard_data, 'args': []}
    ]
    groomer_state = (self.GROOMER_STATE_PATH)
    ((groomer_state))
    if groomer_state:
        ((self.GROOMER_STATE_DELIMITER))
    for task_number in ((tasks)):
        task = tasks[task_number]
        # Resume support: skip tasks until we reach the one recorded in the
        # previously persisted groomer state.
        if (((self.groomer_state) > 0) and (self.groomer_state[0] != '') and (self.groomer_state[0] != task['id'])):
            continue
        ((task['description']))
        try:
            (*task['args'])
            # Record the next task id so an interrupted run can resume there.
            if (task_number != ((tasks) - 1)):
                next_task = tasks[(task_number + 1)]
                ([next_task['id']])
        except Exception as exception:
            # A failing task is logged but does not abort the whole run.
            ((task['description']))
            (exception)
        ([])  # presumably clears the persisted resume state -- verify
    timestamp = ()
    if (not (timestamp)):
        ('There was an error updating the statistics')
    if (not (timestamp)):
        ('There was an error updating the namespaces')
    del self.db_access
    del self.ds_access
    time_taken = (() - start)
    ((self.journal_entries_cleaned))
    ((self.index_entries_checked))
    ((self.index_entries_cleaned))
    if (self.index_entries_delete_failures > 0):
        ((self.index_entries_delete_failures))
    (((time_taken)))
def main():
    """This main function allows you to run the groomer manually."""
    # NOTE(review): call targets stripped throughout; the bare parenthesised
    # expressions below are not real calls.  Intent comments are unverified.
    zk_connection_locations = ()  # presumably reads ZK host configuration
    zookeeper = ()
    db_info = ()
    table = db_info[':table']
    master = ()
    datastore_path = (master)
    ds_groomer = (zookeeper, table, datastore_path)
    ('Trying to get groomer lock.')
    # Only one groomer may run at a time; the lock is held in ZooKeeper.
    if ():
        ('Got the groomer lock.')
        try:
            ()
        except Exception as exception:
            (((exception)))
        try:
            (zk.DS_GROOM_LOCK_PATH)
        except zk.ZKTransactionException as zk_exception:
            (((zk_exception)))
        except zk.ZKInternalException as zk_exception:
            (((zk_exception)))
        finally:
            # Always release/close, even if grooming or unlocking failed.
            ()
    else:
        ('Did not get the groomer lock.')
# Script entry point.  NOTE(review): the call target was stripped --
# presumably main().
if (__name__ == '__main__'):
    ()
#!/usr/bin/env python
# Programmer: Navraj Chohan <nlake44@gmail.com>
import os
import sys
import time
import unittest
from flexmock import flexmock
import kazoo.client
import kazoo.exceptions
import kazoo.protocol
import kazoo.protocol.states
sys.path.append(os.path.join(os.path.dirname(__file__), "../../"))
from dbconstants import *
sys.path.append(os.path.join(os.path.dirname(__file__), "../../"))
from zkappscale import zktransaction as zk
from zkappscale.zktransaction import ZKTransactionException
class TestZookeeperTransaction(unittest.TestCase):
    """Unit tests for zkappscale.zktransaction.ZKTransaction.

    Every test stubs kazoo.client.KazooClient with a flexmock fake so that
    no real ZooKeeper connection is ever opened; class-level methods of
    ZKTransaction are likewise mocked per test.
    """

    def setUp(self):
        # Fixed application id used by every test; no real ZK handle is held.
        self.appid = "appid"
        self.handle = None

    def test_increment_and_get_counter(self):
        # mock out getTransactionRootPath
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("get_transaction_prefix_path").with_args(
            self.appid
        ).and_return("/rootpath")
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(
            name="fake_zoo",
            create="create",
            delete_async="delete_async",
            connected=lambda: True,
        )
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").and_return(None)
        fake_counter = flexmock(name="fake_counter", value="value")
        fake_counter.value = 1
        fake_counter.should_receive("__add__").and_return(2)
        fake_zookeeper.should_receive("Counter").and_return(fake_counter)
        # mock out deleting the zero id we get the first time around
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        # assert, make sure we got back our id
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual((0, 1), transaction.increment_and_get_counter(self.appid, 1))

    def test_create_sequence_node(self):
        # mock out getTransactionRootPath
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("get_transaction_prefix_path").with_args(
            self.appid
        ).and_return("/rootpath")
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(
            name="fake_zoo", create="create", delete="delete", connected=lambda: True
        )
        fake_zookeeper.should_receive("start")
        # mock out zookeeper.create for txn id
        path_to_create = "/rootpath/" + self.appid
        zero_path = path_to_create + "/0"
        nonzero_path = path_to_create + "/1"
        # First create returns the zero path (rejected), second a real id.
        fake_zookeeper.should_receive("retry").with_args(
            "create",
            str,
            value=str,
            acl=None,
            makepath=bool,
            sequence=bool,
            ephemeral=bool,
        ).and_return(zero_path).and_return(nonzero_path)
        # mock out deleting the zero id we get the first time around
        fake_zookeeper.should_receive("retry").with_args("delete", zero_path)
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        # assert, make sure we got back our id
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(
            1, transaction.create_sequence_node("/rootpath/" + self.appid, "now")
        )

    def test_create_node(self):
        # mock out getTransactionRootPath
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("get_transaction_prefix_path").with_args(
            self.appid
        ).and_return("/rootpath")
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(
            name="fake_zoo", create="create", connected=lambda: True
        )
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").with_args(
            "create",
            str,
            value=str,
            acl=None,
            makepath=bool,
            sequence=bool,
            ephemeral=bool,
        )
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        # mock out zookeeper.create for txn id
        # NOTE(review): path_to_create is assigned but never used here.
        path_to_create = "/rootpath/" + self.appid
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(
            None, transaction.create_node("/rootpath/" + self.appid, "now")
        )

    def test_get_transaction_id(self):
        # mock out getTransactionRootPath
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("get_transaction_prefix_path").with_args(
            self.appid
        ).and_return("/rootpath/" + self.appid)
        path_to_create = "/rootpath/" + self.appid + "/" + zk.APP_TX_PREFIX
        zk.ZKTransaction.should_receive("get_txn_path_before_getting_id").with_args(
            self.appid
        ).and_return(path_to_create)
        # mock out time.time
        flexmock(time)
        time.should_receive("time").and_return(1000)
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(name="fake_zoo", connected=lambda: True)
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry")
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        # mock out making the txn id
        zk.ZKTransaction.should_receive("create_sequence_node").with_args(
            path_to_create, "1000"
        ).and_return(1)
        # mock out zookeeper.create for is_xg
        xg_path = path_to_create + "/1/" + zk.XG_PREFIX
        zk.ZKTransaction.should_receive("get_xg_path").and_return(xg_path)
        zk.ZKTransaction.should_receive("create_node").with_args(xg_path, "1000")
        # assert, make sure we got back our id
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(1, transaction.get_transaction_id(self.appid, is_xg=True))

    def test_get_txn_path_before_getting_id(self):
        # mock out initializing a ZK connection
        flexmock(zk.ZKTransaction)
        fake_zookeeper = flexmock(name="fake_zoo")
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry")
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        zk.ZKTransaction.should_receive("get_app_root_path").and_return("app_root_path")
        expected = zk.PATH_SEPARATOR.join(
            ["app_root_path", zk.APP_TX_PATH, zk.APP_TX_PREFIX]
        )
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(
            expected, transaction.get_txn_path_before_getting_id(self.appid)
        )

    def test_get_xg_path(self):
        # mock out initializing a ZK connection
        flexmock(zk.ZKTransaction)
        fake_zookeeper = flexmock(name="fake_zoo")
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry")
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        tx_id = 100
        # Transaction ids are zero-padded to ten digits in the node name.
        tx_str = zk.APP_TX_PREFIX + "%010d" % tx_id
        zk.ZKTransaction.should_receive("get_app_root_path").and_return("app_root_path")
        expected = zk.PATH_SEPARATOR.join(
            ["app_root_path", zk.APP_TX_PATH, tx_str, zk.XG_PREFIX]
        )
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(expected, transaction.get_xg_path("xxx", 100))

    def test_is_in_transaction(self):
        # shared mocks
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("get_transaction_path").and_return(
            "/transaction/path"
        )
        fake_zookeeper = flexmock(
            name="fake_zoo", exists="exists", connected=lambda: True
        )
        fake_zookeeper.should_receive("start")
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        # test when the transaction is running
        zk.ZKTransaction.should_receive("is_blacklisted").and_return(False)
        fake_zookeeper.should_receive("retry").with_args("exists", str).and_return(True)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(True, transaction.is_in_transaction(self.appid, 1))
        # and when it's not
        zk.ZKTransaction.should_receive("is_blacklisted").and_return(False)
        fake_zookeeper.should_receive("retry").with_args("exists", str).and_return(
            False
        )
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(False, transaction.is_in_transaction(self.appid, 1))
        # and when it's blacklisted
        # NOTE(review): fake_transaction is never used; the assertion below
        # exercises the earlier `transaction` instance (class-level mocks
        # make this equivalent, but the extra instance is dead code).
        zk.ZKTransaction.should_receive("is_blacklisted").and_return(True)
        fake_transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertRaises(
            zk.ZKTransactionException, transaction.is_in_transaction, self.appid, 1
        )

    def test_acquire_lock(self):
        # mock out waitForConnect
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("get_lock_root_path").and_return(
            "/lock/root/path"
        )
        zk.ZKTransaction.should_receive("get_transaction_prefix_path").and_return(
            "/rootpath/" + self.appid
        )
        fake_zookeeper = flexmock(name="fake_zoo", get="get", connected=lambda: True)
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry")
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        # first, test out getting a lock for a regular transaction, that we don't
        # already have the lock for
        zk.ZKTransaction.should_receive("is_in_transaction").and_return(False)
        zk.ZKTransaction.should_receive("acquire_additional_lock").and_return(True)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(True, transaction.acquire_lock(self.appid, "txid", "somekey"))
        # next, test when we're in a transaction and we already have the lock
        zk.ZKTransaction.should_receive("is_in_transaction").and_return(True)
        zk.ZKTransaction.should_receive("get_transaction_lock_list_path").and_return(
            "/rootpath/" + self.appid + "/tx1"
        )
        fake_zookeeper.should_receive("retry").with_args("get", str).and_return(
            ["/lock/root/path"]
        )
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(True, transaction.acquire_lock(self.appid, "txid", "somekey"))
        # next, test when we're in a non-XG transaction and we're not in the lock
        # root path
        zk.ZKTransaction.should_receive("is_in_transaction").and_return(True)
        zk.ZKTransaction.should_receive("get_transaction_lock_list_path").and_return(
            "/rootpath/" + self.appid + "/tx1"
        )
        fake_zookeeper.should_receive("retry").with_args("get", str).and_return(
            ["/lock/root/path2"]
        )
        zk.ZKTransaction.should_receive("is_xg").and_return(False)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertRaises(
            zk.ZKTransactionException,
            transaction.acquire_lock,
            self.appid,
            "txid",
            "somekey",
        )
        # next, test when we're in a XG transaction and we're not in the lock
        # root path
        zk.ZKTransaction.should_receive("is_in_transaction").and_return(True)
        zk.ZKTransaction.should_receive("get_transaction_lock_list_path").and_return(
            "/rootpath/" + self.appid + "/tx1"
        )
        fake_zookeeper.should_receive("retry").with_args("get", str).and_return(
            ["/lock/root/path2"]
        )
        zk.ZKTransaction.should_receive("is_xg").and_return(True)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(True, transaction.acquire_lock(self.appid, "txid", "somekey"))

    def test_acquire_additional_lock(self):
        # mock out waitForConnect
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("check_transaction")
        zk.ZKTransaction.should_receive("get_transaction_path").and_return("/txn/path")
        zk.ZKTransaction.should_receive("get_lock_root_path").and_return(
            "/lock/root/path"
        )
        zk.ZKTransaction.should_receive("get_transaction_prefix_path").and_return(
            "/rootpath/" + self.appid
        )
        fake_zookeeper = flexmock(
            name="fake_zoo",
            create="create",
            create_async="create_async",
            get="get",
            set_async="set_async",
            connected=lambda: True,
        )
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").with_args(
            "create",
            str,
            makepath=bool,
            sequence=bool,
            ephemeral=bool,
            value=str,
            acl=None,
        ).and_return("/some/lock/path")
        fake_zookeeper.should_receive("retry").with_args(
            "create_async",
            str,
            value=str,
            acl=None,
            ephemeral=bool,
            makepath=bool,
            sequence=bool,
        )
        fake_zookeeper.should_receive("retry").with_args(
            "create_async",
            str,
            value=str,
            acl=str,
            ephemeral=bool,
            makepath=bool,
            sequence=bool,
        )
        lock_list = ["path1", "path2", "path3"]
        lock_list_str = zk.LOCK_LIST_SEPARATOR.join(lock_list)
        fake_zookeeper.should_receive("retry").with_args("get", str).and_return(
            [lock_list_str]
        )
        fake_zookeeper.should_receive("retry").with_args("set_async", str, str)
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(
            True,
            transaction.acquire_additional_lock(self.appid, "txid", "somekey", False),
        )
        # Test for when we want to create a new ZK node for the lock path
        self.assertEqual(
            True,
            transaction.acquire_additional_lock(self.appid, "txid", "somekey", True),
        )
        # Test for existing max groups
        lock_list = ["path" + str(num + 1) for num in range(zk.MAX_GROUPS_FOR_XG)]
        lock_list_str = zk.LOCK_LIST_SEPARATOR.join(lock_list)
        fake_zookeeper.should_receive("retry").with_args("get", str).and_return(
            [lock_list_str]
        )
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertRaises(
            zk.ZKTransactionException,
            transaction.acquire_additional_lock,
            self.appid,
            "txid",
            "somekey",
            False,
        )
        # Test for when there is a node which already exists.
        fake_zookeeper.should_receive("retry").with_args(
            "create", str, str, None, bool, bool, bool
        ).and_raise(kazoo.exceptions.NodeExistsError)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertRaises(
            zk.ZKTransactionException,
            transaction.acquire_additional_lock,
            self.appid,
            "txid",
            "somekey",
            False,
        )

    def test_check_transaction(self):
        # mock out getTransactionRootPath
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("get_transaction_prefix_path").with_args(
            self.appid
        ).and_return("/rootpath")
        zk.ZKTransaction.should_receive("is_blacklisted").and_return(False)
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(
            name="fake_zoo", exists="exists", connected=lambda: True
        )
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").with_args("exists", str).and_return(True)
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(True, transaction.check_transaction(self.appid, 1))
        # Check to make sure it raises exception for blacklisted transactions.
        zk.ZKTransaction.should_receive("is_blacklisted").and_return(True)
        self.assertRaises(
            zk.ZKTransactionException, transaction.check_transaction, self.appid, 1
        )
        zk.ZKTransaction.should_receive("is_blacklisted").and_return(False)
        fake_zookeeper.should_receive("retry").with_args("exists", str).and_return(
            False
        )
        self.assertRaises(
            zk.ZKTransactionException, transaction.check_transaction, self.appid, 1
        )

    def test_is_xg(self):
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(
            name="fake_zoo", exists="exists", connected=lambda: True
        )
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").with_args("exists", str).and_return(True)
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(True, transaction.is_xg(self.appid, 1))

    def test_release_lock(self):
        # mock out getTransactionRootPath
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("check_transaction")
        zk.ZKTransaction.should_receive("get_transaction_path").and_return("/rootpath")
        zk.ZKTransaction.should_receive("get_transaction_lock_list_path").and_return(
            "/rootpath"
        )
        zk.ZKTransaction.should_receive("is_xg").and_return(False)
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(
            name="fake_zoo",
            exists="exists",
            get="get",
            delete="delete",
            delete_async="delete_async",
            get_children="get_children",
            connected=lambda: True,
        )
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").with_args("exists", str).and_return(True)
        fake_zookeeper.should_receive("retry").with_args("get", str).and_return(
            ["/1/2/3"]
        )
        fake_zookeeper.should_receive("retry").with_args("delete_async", str)
        fake_zookeeper.should_receive("retry").with_args("delete", str)
        fake_zookeeper.should_receive("retry").with_args(
            "get_children", str
        ).and_return(["1", "2"])
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(True, transaction.release_lock(self.appid, 1))
        zk.ZKTransaction.should_receive("is_xg").and_return(True)
        self.assertEqual(True, transaction.release_lock(self.appid, 1))
        # Check to make sure it raises exception for blacklisted transactions.
        zk.ZKTransaction.should_receive("is_xg").and_return(False)
        fake_zookeeper.should_receive("retry").with_args("get", str).and_raise(
            kazoo.exceptions.NoNodeError
        )
        self.assertRaises(
            zk.ZKTransactionException, transaction.release_lock, self.appid, 1
        )

    def test_is_blacklisted(self):
        # mock out getTransactionRootPath
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("get_blacklist_root_path").and_return(
            "bl_root_path"
        )
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(
            name="fake_zoo",
            create="create",
            exists="exists",
            get_children="get_children",
            connected=lambda: True,
        )
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").with_args(
            "create", str, str, None, bool, bool, bool
        ).and_return()
        fake_zookeeper.should_receive("retry").with_args("exists", str).and_return(True)
        fake_zookeeper.should_receive("retry").with_args(
            "get_children", str
        ).and_return(["1", "2"])
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(True, transaction.is_blacklisted(self.appid, 1))

    def test_register_updated_key(self):
        # mock out getTransactionRootPath
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("get_valid_transaction_path").and_return(
            "/txn/path"
        )
        zk.ZKTransaction.should_receive("get_transaction_path").and_return("/txn/path")
        zk.ZKTransaction.should_receive("get_blacklist_root_path").and_return(
            "bl_root_path"
        )
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(
            name="fake_zoo",
            exists="exists",
            set_async="set_async",
            connected=lambda: True,
        )
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").with_args("exists", str).and_return(True)
        fake_zookeeper.should_receive("retry").with_args("set_async", str, str)
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(
            True, transaction.register_updated_key(self.appid, "1", "2", "somekey")
        )
        fake_zookeeper.should_receive("retry").with_args("exists", str).and_return(
            False
        )
        self.assertRaises(
            ZKTransactionException,
            transaction.register_updated_key,
            self.appid,
            "1",
            "2",
            "somekey",
        )

    def test_try_garbage_collection(self):
        # mock out getTransactionRootPath
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("update_node")
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(
            name="fake_zoo",
            exists="exists",
            get="get",
            get_children="get_children",
            create="create",
            delete="delete",
        )
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").with_args("exists", str).and_return(True)
        fake_zookeeper.should_receive("retry").with_args("get", str).and_return(
            [str(time.time() + 10000)]
        )
        fake_zookeeper.should_receive("retry").with_args(
            "get_children", str
        ).and_return(["1", "2", "3"])
        fake_zookeeper.should_receive("retry").with_args(
            "create", str, value=str, acl=None, ephemeral=bool
        )
        fake_zookeeper.should_receive("retry").with_args("delete", str)
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        # Put the last time we ran GC way into the future.
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(
            False, transaction.try_garbage_collection(self.appid, "/some/path")
        )
        # Make it so we recently ran the GC
        fake_zookeeper.should_receive("retry").with_args("get", str).and_return(
            [str(time.time())]
        )
        self.assertEqual(
            False, transaction.try_garbage_collection(self.appid, "/some/path")
        )
        # Make it so we ran the GC a long time ago.
        fake_zookeeper.should_receive("retry").with_args("get", str).and_return(
            [str(time.time() - 1000)]
        )
        self.assertEqual(
            True, transaction.try_garbage_collection(self.appid, "/some/path")
        )
        # No node means we have not run the GC before, so run it.
        fake_zookeeper.should_receive("retry").with_args("get", str).and_raise(
            kazoo.exceptions.NoNodeError
        )
        self.assertEqual(
            True, transaction.try_garbage_collection(self.appid, "/some/path")
        )

    def test_notify_failed_transaction(self):
        pass
        # TODO

    def test_execute_garbage_collection(self):
        # mock out getTransactionRootPath
        flexmock(zk.ZKTransaction)
        zk.ZKTransaction.should_receive("notify_failed_transaction")
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(
            name="fake_zoo", exists="exists", get="get", get_children="get_children"
        )
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").with_args("exists", str).and_return(True)
        fake_zookeeper.should_receive("retry").with_args("get", str).and_return(
            [str(time.time() + 10000)]
        )
        fake_zookeeper.should_receive("retry").with_args(
            "get_children", str
        ).and_return(["1", "2", "3"])
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        transaction.execute_garbage_collection(self.appid, "some/path")

    def test_get_lock_with_path(self):
        flexmock(zk.ZKTransaction)
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(name="fake_zoo", create="create")
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").with_args(
            "create", str, value=str, acl=None, ephemeral=bool
        ).and_return(True)
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(True, transaction.get_lock_with_path("path"))
        # A pre-existing node means someone else holds the lock.
        fake_zookeeper.should_receive("retry").with_args(
            "create", str, value=str, acl=None, ephemeral=bool
        ).and_raise(kazoo.exceptions.NodeExistsError)
        self.assertEqual(False, transaction.get_lock_with_path("some/path"))

    def test_release_lock_with_path(self):
        flexmock(zk.ZKTransaction)
        # mock out initializing a ZK connection
        fake_zookeeper = flexmock(name="fake_zoo", delete="delete")
        fake_zookeeper.should_receive("start")
        fake_zookeeper.should_receive("retry").with_args("delete", str)
        flexmock(kazoo.client)
        kazoo.client.should_receive("KazooClient").and_return(fake_zookeeper)
        transaction = zk.ZKTransaction(host="something", start_gc=False)
        self.assertEqual(True, transaction.release_lock_with_path("some/path"))
        # Releasing a lock that no longer exists is an error.
        fake_zookeeper.should_receive("retry").with_args("delete", str).and_raise(
            kazoo.exceptions.NoNodeError
        )
        self.assertRaises(
            ZKTransactionException, transaction.release_lock_with_path, "some/path"
        )
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
import cgi
import datetime
import wsgiref.handlers
from google.appengine.ext import webapp
class MainPage(webapp.RequestHandler):
    """Handles GET '/' by writing a minimal HTML greeting page."""

    def get(self):
        # FIX: the write targets had been stripped, leaving bare string
        # expressions that emitted nothing.  Restore the standard webapp
        # response stream so the markup is actually sent to the client.
        self.response.out.write('<html><body>')
        self.response.out.write('<p>Hello</p>')
        self.response.out.write('</body></html>')
# FIX: the constructor/runner targets had been stripped.  Restore the
# classic App Engine CGI bootstrap: build the WSGI application from the
# route table and serve it through wsgiref's CGI handler.
application = webapp.WSGIApplication([('/', MainPage)])


def main():
    """Entry point: serves `application` via the CGI handler."""
    wsgiref.handlers.CGIHandler().run(application)


if (__name__ == '__main__'):
    main()
#!/usr/bin/env python
# Standard Django manage.py entry point, bundled with App Engine's
# internal Django copy.
from google.appengine._internal.django.core.management import execute_manager

try:
    import settings  # Assumed to be in the same directory.
except ImportError:
    import sys

    # Abort with an actionable message when the project's settings module
    # cannot be imported (file missing, or an ImportError raised inside it).
    sys.stderr.write(
        "Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n"
        % __file__
    )
    sys.exit(1)

if __name__ == "__main__":
    execute_manager(settings)
|
class FileProxyMixin(object):
    """
    A mixin class used to forward file methods to an underlaying file
    object. The internal file object has to be called "file"::

        class FileProxy(FileProxyMixin):
            def __init__(self, file):
                self.file = file
    """

    # Build each forwarding property from the attribute name instead of
    # spelling out sixteen near-identical lambdas by hand.
    def _forward(attr_name):
        return property(lambda self: getattr(self.file, attr_name))

    encoding = _forward("encoding")
    fileno = _forward("fileno")
    flush = _forward("flush")
    isatty = _forward("isatty")
    newlines = _forward("newlines")
    read = _forward("read")
    readinto = _forward("readinto")
    readline = _forward("readline")
    readlines = _forward("readlines")
    seek = _forward("seek")
    softspace = _forward("softspace")
    tell = _forward("tell")
    truncate = _forward("truncate")
    write = _forward("write")
    writelines = _forward("writelines")
    xreadlines = _forward("__iter__")

    del _forward  # keep the helper out of the class namespace

    def __iter__(self):
        return iter(self.file)
|
'\nXML serializer.\n'
from google.appengine._internal.django.conf import settings
from google.appengine._internal.django.core.serializers import base
from google.appengine._internal.django.db import models, DEFAULT_DB_ALIAS
from google.appengine._internal.django.utils.xmlutils import SimplerXMLGenerator
from google.appengine._internal.django.utils.encoding import smart_unicode
from xml.dom import pulldom
class Serializer(base.Serializer):
    """Serializes a QuerySet to XML.

    NOTE(review): the call receivers throughout this class have been
    stripped -- lines such as `(0)` or `('object')` are bare parenthesised
    expressions, not method calls.  The originals (presumably self.xml.*
    and self.indent) must be restored before this code can work; the
    comments below record apparent intent only.
    """

    def indent(self, level):
        # Presumably self.options.get('indent', None); when set, emits a
        # newline plus indent-width * level spaces -- TODO confirm.
        if (('indent', None) is not None):
            (('\n' + ((' ' * ('indent', None)) * level)))

    def start_serialization(self):
        """Start serialization -- open the XML document and the root element."""
        # Looks like an XML generator built over self.stream with the
        # configured (or default) charset -- verify against the original.
        self.xml = (self.stream, ('encoding', settings.DEFAULT_CHARSET))
        ()
        ('django-objects', {'version': '1.0'})

    def end_serialization(self):
        """End serialization -- end the document."""
        (0)
        ('django-objects')
        ()

    def start_object(self, obj):
        """Called as each object is handled."""
        # Only model instances (things with _meta) can be serialized.
        if (not (obj, '_meta')):
            raise (('Non-model object (%s) encountered during serialization' % (obj)))
        (1)
        obj_pk = ()
        if (obj_pk is None):
            # Objects without a PK (natural-key only) omit the pk attribute.
            attrs = {'model': (obj._meta)}
        else:
            attrs = {'pk': (()), 'model': (obj._meta)}
        ('object', attrs)

    def end_object(self, obj):
        """Called after handling all fields for an object."""
        (1)
        ('object')

    def handle_field(self, obj, field):
        """Called to handle each field on an object (except for ForeignKeys
        and ManyToManyFields).
        """
        (2)
        ('field', {'name': field.name, 'type': ()})
        if ((obj, field.name) is not None):
            ((obj))
        else:
            ('None')
        ('field')

    def handle_fk_field(self, obj, field):
        """Called to handle a ForeignKey (we need to treat them slightly
        differently from regular fields).
        """
        (field)
        related = (obj, field.name)
        if (related is not None):
            if (self.use_natural_keys and (related, 'natural_key')):
                # Serialize the relation as its natural key parts.
                related = ()
                for key_value in related:
                    ('natural', {})
                    ((key_value))
                    ('natural')
            else:
                # Serialize the relation as the referenced field's value.
                if (field.rel.field_name == related._meta.pk.name):
                    related = ()
                else:
                    related = (related, field.rel.field_name)
                ((related))
        else:
            ('None')
        ('field')

    def handle_m2m_field(self, obj, field):
        """Called to handle a ManyToManyField. Related objects are only
        serialized as references to the object's PK (i.e. the related *data*
        is not dumped, just the relation).
        """
        # Auto-created through tables only; explicit through models are
        # serialized as their own objects.
        if field.rel.through._meta.auto_created:
            (field)
            if (self.use_natural_keys and (field.rel.to, 'natural_key')):
                def handle_m2m(value):
                    # Emit each related object as its natural-key parts.
                    natural = ()
                    ('object', {})
                    for key_value in natural:
                        ('natural', {})
                        ((key_value))
                        ('natural')
                    ('object')
            else:
                def handle_m2m(value):
                    # Emit each related object as a pk-only element.
                    ('object')
            for relobj in ():
                (relobj)
            ('field')

    def _start_relational_field(self, field):
        """Helper to output the <field> element for relational fields."""
        (2)
        ('field', {'name': field.name, 'rel': field.rel.__class__.__name__, 'to': (field.rel.to._meta)})
class Deserializer(base.Deserializer):
    """Deserialize XML.

    NOTE(review): call receivers are stripped here as well (see the note on
    Serializer); bare parenthesised expressions are not real calls.
    """

    def __init__(self, stream_or_string, **options):
        # Presumably the base __init__ followed by a pulldom parse of
        # self.stream -- TODO confirm.
        (stream_or_string)
        self.event_stream = (self.stream)
        self.db = ('using', DEFAULT_DB_ALIAS)

    def __next__(self):
        # Scan the pulldom event stream for the next <object> element and
        # convert it; exhausting the stream ends iteration.
        for (event, node) in self.event_stream:
            if ((event == 'START_ELEMENT') and (node.nodeName == 'object')):
                (node)
                return (node)
        raise StopIteration

    def _handle_object(self, node):
        """Convert an <object> node to a DeserializedObject."""
        Model = (node, 'model')
        # The pk attribute is optional (natural-key fixtures omit it).
        if ('pk'):
            pk = ('pk')
        else:
            pk = None
        data = {Model._meta.pk.attname: (pk)}
        m2m_data = {}
        # Dispatch each <field> child on its relation type.
        for field_node in ('field'):
            field_name = ('name')
            if (not field_name):
                raise ("<field> node is missing the 'name' attribute")
            field = (field_name)
            if (field.rel and (field.rel, models.ManyToManyRel)):
                m2m_data[field.name] = (field_node, field)
            elif (field.rel and (field.rel, models.ManyToOneRel)):
                data[field.attname] = (field_node, field)
            else:
                if ('None'):
                    value = None
                else:
                    value = (())
                data[field.name] = value
        return ((), m2m_data)

    def _handle_fk_field_node(self, node, field):
        """Handle a <field> node for a ForeignKey."""
        # An explicit <None> child means a null relation.
        if ('None'):
            return None
        elif (field.rel.to._default_manager, 'get_by_natural_key'):
            keys = ('natural')
            if keys:
                # Reassemble the natural key and look the object up with it.
                # NOTE(review): `(*field_value)` is not valid Python as
                # written -- the call target was stripped.
                field_value = [() for k in keys]
                obj = (*field_value)
                obj_pk = (obj, field.rel.field_name)
                # If the related pk is itself a relation, follow it once more.
                if field.rel.to._meta.pk.rel:
                    obj_pk = obj_pk.pk
            else:
                field_value = ()
                obj_pk = (field_value)
            return obj_pk
        else:
            field_value = ()
            return (field_value)

    def _handle_m2m_field_node(self, node, field):
        """Handle a <field> node for a ManyToManyField."""
        if (field.rel.to._default_manager, 'get_by_natural_key'):
            def m2m_convert(n):
                # Prefer natural-key lookup; fall back to the pk attribute.
                keys = ('natural')
                if keys:
                    field_value = [() for k in keys]
                    obj_pk = (*field_value).pk
                else:
                    obj_pk = (('pk'))
                return obj_pk
        else:
            m2m_convert = (lambda n: (('pk')))
        return [(c) for c in ('object')]

    def _get_model_from_node(self, node, attr):
        """Helper to look up a model from a <object model=...> or a <field
        rel=... to=...> node.
        """
        model_identifier = (attr)
        if (not model_identifier):
            raise (("<%s> node is missing the required '%s' attribute" % (node.nodeName, attr)))
        try:
            Model = (*('.'))
        except TypeError:
            Model = None
        if (Model is None):
            raise (("<%s> node has invalid model identifier: '%s'" % (node.nodeName, model_identifier)))
        return Model
def getInnerText(node):
    """
    Get all the inner text of a DOM node (recursively).

    Concatenates the character data of every TEXT/CDATA descendant,
    depth-first, and returns it as a single string.
    """
    inner_text = []
    for child in node.childNodes:
        if child.nodeType == child.TEXT_NODE or child.nodeType == child.CDATA_SECTION_NODE:
            # BUG FIX: the stripped residue ``(child.data)`` discarded the
            # text; collect it instead.
            inner_text.append(child.data)
        elif child.nodeType == child.ELEMENT_NODE:
            # Recurse into child elements and splice their text in.
            inner_text.extend(getInnerText(child))
        else:
            # Comments, processing instructions, etc. carry no text.
            pass
    return "".join(inner_text)
import os
import sys
if os.name == "posix":
def become_daemon(
our_home_dir=".", out_log="/dev/null", err_log="/dev/null", umask=0o22
):
"Robustly turn into a UNIX daemon, running in our_home_dir."
# First fork
try:
if os.fork() > 0:
sys.exit(0) # kill off parent
except OSError as e:
sys.stderr.write("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror))
sys.exit(1)
os.setsid()
os.chdir(our_home_dir)
os.umask(umask)
# Second fork
try:
if os.fork() > 0:
os._exit(0)
except OSError as e:
sys.stderr.write("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror))
os._exit(1)
si = open("/dev/null", "r")
so = open(out_log, "a+", 0)
se = open(err_log, "a+", 0)
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
# Set custom file descriptors so that they get proper buffering.
sys.stdout, sys.stderr = so, se
else:
def become_daemon(our_home_dir=".", out_log=None, err_log=None, umask=0o22):
"""
If we're not running under a POSIX system, just simulate the daemon
mode by doing redirections and directory changing.
"""
os.chdir(our_home_dir)
os.umask(umask)
sys.stdin.close()
sys.stdout.close()
sys.stderr.close()
if err_log:
sys.stderr = open(err_log, "a", 0)
else:
sys.stderr = NullDevice()
if out_log:
sys.stdout = open(out_log, "a", 0)
else:
sys.stdout = NullDevice()
class NullDevice:
    """A writeable, file-like sink that silently discards all output,
    mimicking /dev/null on platforms that lack one."""

    def write(self, s):
        """Accept *s* and drop it; always returns None."""
|
"""
Code used in a couple of places to work with the current thread's environment.
Current users include i18n and request prefix handling.
"""
try:
    import threading
except ImportError:
    # No threading support in this interpreter: give callers a harmless
    # stub so currentThread() can still be invoked.
    def currentThread():
        return "no threading"
else:
    # Re-export the real lookup when threading is available.
    currentThread = threading.currentThread
|
"""
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
ident = "$Id: Types.py,v 1.19 2005/02/22 04:29:43 warnes Exp $"

# BUG FIX: intra-package imports must be explicit relative imports on
# Python 3 -- the old implicit-relative forms (``from NS import NS``)
# fail there.  The ``.Errors``/``.Config`` imports below already used the
# explicit form; the rest are made consistent with them.
from .version import __version__

import base64
import cgi
import collections
import copy
import re
import time
import urllib.request
import urllib.parse
import urllib.error

from types import *

# SOAPpy modules
from .Errors import *
from .NS import NS
from .Utilities import encodeHexString, cleanDate
from .Config import Config

# Python 3 compatibility: the concrete type aliases (StringType, IntType,
# ListType, ...) were removed from the ``types`` module but are used
# throughout this file.  Provide equivalents when the star-import above
# did not supply them (i.e. on Python 3); on Python 2 this is a no-op.
try:
    StringType
except NameError:
    StringType = UnicodeType = str
    IntType = LongType = int
    FloatType = float
    ListType = list
    TupleType = tuple
    DictType = dict
    NoneType = type(None)
###############################################################################
# Utility functions
###############################################################################
def isPrivate(name):
    """True when *name* follows the leading-underscore (private) convention."""
    first_char = name[0]
    return first_char == "_"
def isPublic(name):
    """True when *name* does not start with an underscore."""
    first_char = name[0]
    return first_char != "_"
###############################################################################
# Types and Wrappers
###############################################################################
class anyType:
    """Root of the SOAPpy XSD type hierarchy: wraps a Python value plus
    SOAP name/namespace/attribute metadata.  Abstract -- instantiate a
    subclass."""
    # Namespace URIs under which this type's name is considered valid.
    _validURIs = (NS.XSD, NS.XSD2, NS.XSD3, NS.ENC)
    def __init__(self, data=None, name=None, typed=1, attrs=None):
        if self.__class__ == anyType:
            raise Error("anyType can't be instantiated directly")
        # *name* may be a (namespace, localname) pair or a bare local name.
        if type(name) in (ListType, TupleType):
            self._ns, self._name = name
        else:
            self._ns = self._validURIs[0]
            self._name = name
        self._typed = typed  # emit xsi:type information when truthy
        self._attrs = {}  # {(ns-or-None, localname): value} XML attributes
        self._cache = None  # memoized marshalled string form
        self._type = self._typeName()
        # Subclasses validate/normalize the payload in _checkValueSpace.
        self._data = self._checkValueSpace(data)
        if attrs != None:
            self._setAttrs(attrs)
    def __str__(self):
        if hasattr(self, "_name") and self._name:
            return "<%s %s at %d>" % (self.__class__, self._name, id(self))
        return "<%s at %d>" % (self.__class__, id(self))
    __repr__ = __str__
    def _checkValueSpace(self, data):
        """Validate/normalize *data*; the base class accepts anything."""
        return data
    def _marshalData(self):
        """Return the lexical (string) form of the wrapped value."""
        return str(self._data)
    def _marshalAttrs(self, ns_map, builder):
        """Render self._attrs as an XML attribute string, using *builder*
        to generate namespace prefixes."""
        a = ""
        for attr, value in list(self._attrs.items()):
            ns, n = builder.genns(ns_map, attr[0])
            # NOTE(review): cgi.escape was removed in Python 3.8; this call
            # predates that -- verify against the targeted runtime.
            a += n + ' %s%s="%s"' % (ns, attr[1], cgi.escape(str(value), 1))
        return a
    def _fixAttr(self, attr):
        """Normalize an attribute key to a (namespace-or-None, name) tuple."""
        if type(attr) in (StringType, UnicodeType):
            attr = (None, attr)
        elif type(attr) == ListType:
            attr = tuple(attr)
        elif type(attr) != TupleType:
            raise AttributeError("invalid attribute type")
        if len(attr) != 2:
            raise AttributeError("invalid attribute length")
        if type(attr[0]) not in (NoneType, StringType, UnicodeType):
            raise AttributeError("invalid attribute namespace URI type")
        return attr
    def _getAttr(self, attr):
        """Return the attribute's value, or None when unset."""
        attr = self._fixAttr(attr)
        try:
            return self._attrs[attr]
        except:
            return None
    def _setAttr(self, attr, value):
        attr = self._fixAttr(attr)
        if type(value) is StringType:
            value = str(value)
        self._attrs[attr] = value
    def _setAttrs(self, attrs):
        """Bulk-set attributes from a flat alternating list/tuple, a dict,
        or another anyType instance."""
        if type(attrs) in (ListType, TupleType):
            # Flat alternating [key, value, key, value, ...] sequence.
            for i in range(0, len(attrs), 2):
                self._setAttr(attrs[i], attrs[i + 1])
            return
        if type(attrs) == DictType:
            d = attrs
        elif isinstance(attrs, anyType):
            d = attrs._attrs
        else:
            raise AttributeError("invalid attribute type")
        for attr, value in list(d.items()):
            self._setAttr(attr, value)
    def _setMustUnderstand(self, val):
        """Set the SOAP-ENV mustUnderstand attribute."""
        self._setAttr((NS.ENV, "mustUnderstand"), val)
    def _getMustUnderstand(self):
        return self._getAttr((NS.ENV, "mustUnderstand"))
    def _setActor(self, val):
        """Set the SOAP-ENV actor attribute."""
        self._setAttr((NS.ENV, "actor"), val)
    def _getActor(self):
        return self._getAttr((NS.ENV, "actor"))
    def _typeName(self):
        # Class names follow the "<xsdname>Type" convention; strip "Type".
        return self.__class__.__name__[:-4]
    def _validNamespaceURI(self, URI, strict):
        """Return a namespace URI to marshal under, or None for untyped
        values; in strict mode an unknown URI is an error."""
        if not hasattr(self, "_typed") or not self._typed:
            return None
        if URI in self._validURIs:
            return URI
        if not strict:
            return self._ns
        raise AttributeError("not a valid namespace for type %s" % self._type)
class voidType(anyType):
    """xsd void: carries no constrained value; inherits anyType behavior."""
    pass
class stringType(anyType):
    """xsd string: requires a (Py2-era) str/unicode value."""
    def _checkValueSpace(self, data):
        """Reject None and non-string payloads; return *data* unchanged."""
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        is_string = type(data) in (StringType, UnicodeType)
        if not is_string:
            raise AttributeError("invalid %s type:" % self._type)
        return data
class untypedType(stringType):
    """String wrapper marshalled without xsi:type information (typed=0)."""
    def __init__(self, data=None, name=None, attrs=None):
        stringType.__init__(self, data, name, 0, attrs)
# Thin xsd string subtypes: each only narrows the XML Schema type name
# (derived from the class name by _typeName); validation is inherited
# unchanged from stringType.
class IDType(stringType):
    pass
class NCNameType(stringType):
    pass
class NameType(stringType):
    pass
class ENTITYType(stringType):
    pass
class IDREFType(stringType):
    pass
class languageType(stringType):
    pass
class NMTOKENType(stringType):
    pass
class QNameType(stringType):
    pass
class tokenType(anyType):
    """xsd token: a string with no tab/newline and no leading, trailing,
    or doubled spaces."""
    _validURIs = (NS.XSD2, NS.XSD3)
    # Pattern source; compiled lazily on first validation (see below).
    __invalidre = "[\n\t]|^ | $| "
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (StringType, UnicodeType):
            raise AttributeError("invalid %s type" % self._type)
        # Lazy compile; note the compiled pattern is stored on the
        # *instance*, shadowing the class attribute.
        if type(self.__invalidre) == StringType:
            self.__invalidre = re.compile(self.__invalidre)
        if self.__invalidre.search(data):
            raise ValueError("invalid %s value" % self._type)
        return data
class normalizedStringType(anyType):
    """xsd normalizedString: a string containing no newline, carriage
    return, or tab characters."""
    _validURIs = (NS.XSD3,)
    # Pattern source; compiled lazily on first validation (see below).
    __invalidre = "[\n\r\t]"
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (StringType, UnicodeType):
            raise AttributeError("invalid %s type" % self._type)
        # Lazy compile; the compiled pattern is stored on the instance,
        # shadowing the class attribute.
        if type(self.__invalidre) == StringType:
            self.__invalidre = re.compile(self.__invalidre)
        if self.__invalidre.search(data):
            raise ValueError("invalid %s value" % self._type)
        return data
class CDATAType(normalizedStringType):
    """Alias of normalizedString under the XSD2 (year-2000 draft) namespace."""
    _validURIs = (NS.XSD2,)
class booleanType(anyType):
    """xsd boolean, stored internally as the integer 0 or 1."""
    def __int__(self):
        return self._data
    __nonzero__ = __int__
    def _marshalData(self):
        # Index with the stored 0/1 to get the lexical form.
        return ["false", "true"][self._data]
    def _checkValueSpace(self, data):
        """Map the accepted boolean spellings onto 0/1."""
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        falsy_forms = (0, "0", "false", "")
        if data in falsy_forms:
            return 0
        truthy_forms = (1, "1", "true")
        if data in truthy_forms:
            return 1
        raise ValueError("invalid %s value" % self._type)
class decimalType(anyType):
    """xsd decimal: accepts any numeric (int/long/float) value."""
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        numeric = type(data) in (IntType, LongType, FloatType)
        if not numeric:
            raise Error("invalid %s value" % self._type)
        return data
class floatType(anyType):
    """xsd float: numeric value within IEEE single-precision range."""
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        invalid = (
            type(data) not in (IntType, LongType, FloatType)
            or data < -3.4028234663852886e38
            or data > 3.4028234663852886e38
        )
        if invalid:
            raise ValueError("invalid %s value: %s" % (self._type, repr(data)))
        return data
    def _marshalData(self):
        # 18 significant digits preserve the value across round-trips.
        return "%.18g" % self._data  # More precision
class doubleType(anyType):
    """xsd double: numeric value within IEEE double-precision range."""
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        invalid = (
            type(data) not in (IntType, LongType, FloatType)
            or data < -1.7976931348623158e308
            or data > 1.7976931348623157e308
        )
        if invalid:
            raise ValueError("invalid %s value: %s" % (self._type, repr(data)))
        return data
    def _marshalData(self):
        # 18 significant digits preserve the value across round-trips.
        return "%.18g" % self._data  # More precision
class durationType(anyType):
    """xsd duration: up to six components (years, months, days, hours,
    minutes, seconds); only the last nonzero part may be fractional and
    only the first may be negative."""
    _validURIs = (NS.XSD3,)
    def _checkValueSpace(self, data):
        """Normalize *data* (scalar, list, or tuple) to a 6-tuple."""
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        try:
            # A tuple or a scalar is OK, but make them into a list
            if type(data) == TupleType:
                data = list(data)
            elif type(data) != ListType:
                data = [data]
            if len(data) > 6:
                raise Exception("too many values")
            # Now check the types of all the components, and find
            # the first nonzero element along the way.
            f = -1
            for i in range(len(data)):
                if data[i] == None:
                    data[i] = 0
                    continue
                if type(data[i]) not in (IntType, LongType, FloatType):
                    raise Exception("element %d a bad type" % i)
                if data[i] and f == -1:
                    f = i
            # If they're all 0, just use zero seconds.
            if f == -1:
                self._cache = "PT0S"
                return (0,) * 6
            # Make sure only the last nonzero element has a decimal fraction
            # and only the first element is negative.
            d = -1
            for i in range(f, len(data)):
                if data[i]:
                    if d != -1:
                        raise Exception(
                            "all except the last nonzero element must be " "integers"
                        )
                    if data[i] < 0 and i > f:
                        raise Exception(
                            "only the first nonzero element can be negative"
                        )
                    elif data[i] != int(data[i]):
                        d = i
            # Pad the list on the left if necessary.
            if len(data) < 6:
                n = 6 - len(data)
                f += n
                d += n
                data = [0] * n + data
            # Save index of the first nonzero element and the decimal
            # element for _marshalData.
            self.__firstnonzero = f
            self.__decimal = d
        except Exception as e:
            raise ValueError("invalid %s value - %s" % (self._type, e))
        return tuple(data)
    def _marshalData(self):
        """Build the ISO 8601 duration string (e.g. 'P1Y2MT3S') lazily."""
        if self._cache == None:
            d = self._data
            t = 0
            if d[self.__firstnonzero] < 0:
                s = "-P"
            else:
                s = "P"
            t = 0
            for i in range(self.__firstnonzero, len(d)):
                if d[i]:
                    # 'T' separates the date part from the time part.
                    if i > 2 and not t:
                        s += "T"
                        t = 1
                    if self.__decimal == i:
                        s += "%g" % abs(d[i])
                    else:
                        s += "%d" % int(abs(d[i]))
                    s += ["Y", "M", "D", "H", "M", "S"][i]
            self._cache = s
        return self._cache
class timeDurationType(durationType):
    """Older-namespace alias of duration (1999/2000 XSD drafts, SOAP-ENC)."""
    _validURIs = (NS.XSD, NS.XSD2, NS.ENC)
class dateTimeType(anyType):
    """xsd dateTime: accepts epoch seconds, a struct_time-like sequence,
    or None (meaning now, UTC)."""
    _validURIs = (NS.XSD3,)
    def _checkValueSpace(self, data):
        """Normalize to a (Y, M, D, h, m, s) tuple; seconds may be fractional."""
        try:
            if data == None:
                data = time.time()
            if type(data) in (IntType, LongType):
                data = list(time.gmtime(data)[:6])
            elif type(data) == FloatType:
                # Preserve the fractional second through gmtime.
                f = data - int(data)
                data = list(time.gmtime(int(data))[:6])
                data[5] += f
            elif type(data) in (ListType, TupleType):
                if len(data) < 6:
                    raise Exception("not enough values")
                if len(data) > 9:
                    raise Exception("too many values")
                data = list(data[:6])
                cleanDate(data)
            else:
                raise Exception("invalid type")
        except Exception as e:
            raise ValueError("invalid %s value - %s" % (self._type, e))
        return tuple(data)
    def _marshalData(self):
        """ISO 8601 'YYYY-MM-DDThh:mm:ss[.f]Z'; negative years get '-'."""
        if self._cache == None:
            d = self._data
            s = "%04d-%02d-%02dT%02d:%02d:%02d" % ((abs(d[0]),) + d[1:])
            if d[0] < 0:
                s = "-" + s
            # Append the fractional second, if any, without its leading 0.
            f = d[5] - int(d[5])
            if f != 0:
                s += ("%g" % f)[1:]
            s += "Z"
            self._cache = s
        return self._cache
class recurringInstantType(anyType):
    """xsd recurringInstant (1999 draft): a dateTime where leading fields
    may be None to mean 'every'."""
    _validURIs = (NS.XSD,)
    def _checkValueSpace(self, data):
        """Normalize to a 6-tuple; leading elements may remain None."""
        try:
            if data == None:
                data = list(time.gmtime(time.time())[:6])
            if type(data) in (IntType, LongType):
                data = list(time.gmtime(data)[:6])
            elif type(data) == FloatType:
                # Preserve the fractional second.
                f = data - int(data)
                data = list(time.gmtime(int(data))[:6])
                data[5] += f
            elif type(data) in (ListType, TupleType):
                if len(data) < 1:
                    raise Exception("not enough values")
                if len(data) > 9:
                    raise Exception("too many values")
                data = list(data[:6])
                if len(data) < 6:
                    data += [0] * (6 - len(data))
                # Find the first non-None element; only leftmost elements
                # are allowed to be None.
                f = len(data)
                for i in range(f):
                    if data[i] == None:
                        if f < i:
                            raise Exception("only leftmost elements can be none")
                    else:
                        f = i
                        break
                cleanDate(data, f)
            else:
                raise Exception("invalid type")
        except Exception as e:
            raise ValueError("invalid %s value - %s" % (self._type, e))
        return tuple(data)
    def _marshalData(self):
        """Lexical form with '-' placeholders for omitted (None) fields."""
        if self._cache == None:
            d = self._data
            e = list(d)
            neg = ""
            if not e[0]:
                e[0] = "--"
            else:
                if e[0] < 0:
                    neg = "-"
                    e[0] = abs(e[0])
                if e[0] < 100:
                    # NOTE(review): this prefixes small years with '-',
                    # producing e.g. '-99' -- looks suspicious; verify
                    # against the 1999 recurringInstant lexical rules.
                    e[0] = "-" + "%02d" % e[0]
                else:
                    e[0] = "%04d" % e[0]
            for i in range(1, len(e)):
                if e[i] == None or (i < 3 and e[i] == 0):
                    e[i] = "-"
                else:
                    if e[i] < 0:
                        neg = "-"
                        e[i] = abs(e[i])
                    e[i] = "%02d" % e[i]
            # Append the fractional second, if any.
            if d[5]:
                f = abs(d[5] - int(d[5]))
                if f:
                    e[5] += ("%g" % f)[1:]
            s = "%s%s-%s-%sT%s:%s:%sZ" % ((neg,) + tuple(e))
            self._cache = s
        return self._cache
class timeInstantType(dateTimeType):
    """Older-namespace alias of dateTime (1999/2000 drafts, SOAP-ENC)."""
    _validURIs = (NS.XSD, NS.XSD2, NS.ENC)
class timePeriodType(dateTimeType):
    """Older-namespace alias of dateTime (2000 draft, SOAP-ENC)."""
    _validURIs = (NS.XSD2, NS.ENC)
class timeType(anyType):
    """xsd time: an (hour, minute, second) tuple; seconds may be fractional."""
    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[3:6]
            elif type(data) == FloatType:
                # Preserve the fractional second.
                f = data - int(data)
                data = list(time.gmtime(int(data))[3:6])
                data[2] += f
            elif type(data) in (IntType, LongType):
                data = time.gmtime(data)[3:6]
            elif type(data) in (ListType, TupleType):
                if len(data) == 9:
                    # A full struct_time: keep the time-of-day fields only.
                    data = data[3:6]
                elif len(data) > 3:
                    raise Exception("too many values")
                # Left-pad so cleanDate sees date slots, then strip them.
                data = [None, None, None] + list(data)
                if len(data) < 6:
                    data += [0] * (6 - len(data))
                cleanDate(data, 3)
                data = data[3:]
            else:
                raise Exception("invalid type")
        except Exception as e:
            raise ValueError("invalid %s value - %s" % (self._type, e))
        return tuple(data)
    def _marshalData(self):
        if self._cache == None:
            d = self._data
            s = ""  # NOTE(review): dead assignment, immediately overwritten
            s = time.strftime("%H:%M:%S", (0, 0, 0) + d + (0, 0, -1))
            f = d[2] - int(d[2])
            if f != 0:
                s += ("%g" % f)[1:]
            s += "Z"
            self._cache = s
        return self._cache
class dateType(anyType):
    """xsd date: a (year, month, day) tuple; defaults to today (UTC)."""
    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[0:3]
            elif type(data) in (IntType, LongType, FloatType):
                data = time.gmtime(data)[0:3]
            elif type(data) in (ListType, TupleType):
                if len(data) == 9:
                    # A full struct_time: keep the date fields only.
                    data = data[0:3]
                elif len(data) > 3:
                    raise Exception("too many values")
                data = list(data)
                # Fill missing month/day with 1, pad time slots with 0 so
                # cleanDate can validate, then strip the time part.
                if len(data) < 3:
                    data += [1, 1, 1][len(data) :]
                data += [0, 0, 0]
                cleanDate(data)
                data = data[:3]
            else:
                raise Exception("invalid type")
        except Exception as e:
            raise ValueError("invalid %s value - %s" % (self._type, e))
        return tuple(data)
    def _marshalData(self):
        """'YYYY-MM-DDZ'; negative years get a leading '-'."""
        if self._cache == None:
            d = self._data
            s = "%04d-%02d-%02dZ" % ((abs(d[0]),) + d[1:])
            if d[0] < 0:
                s = "-" + s
            self._cache = s
        return self._cache
class gYearMonthType(anyType):
    """xsd gYearMonth: a (year, month) tuple; defaults to the current one."""
    _validURIs = (NS.XSD3,)
    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[0:2]
            elif type(data) in (IntType, LongType, FloatType):
                data = time.gmtime(data)[0:2]
            elif type(data) in (ListType, TupleType):
                if len(data) == 9:
                    # A full struct_time: keep year and month only.
                    data = data[0:2]
                elif len(data) > 2:
                    raise Exception("too many values")
                data = list(data)
                # Fill a missing month with 1, pad day/time slots so
                # cleanDate can validate, then strip them again.
                if len(data) < 2:
                    data += [1, 1][len(data) :]
                data += [1, 0, 0, 0]
                cleanDate(data)
                data = data[:2]
            else:
                raise Exception("invalid type")
        except Exception as e:
            raise ValueError("invalid %s value - %s" % (self._type, e))
        return tuple(data)
    def _marshalData(self):
        """'YYYY-MMZ'; negative years get a leading '-'."""
        if self._cache == None:
            d = self._data
            s = "%04d-%02dZ" % ((abs(d[0]),) + d[1:])
            if d[0] < 0:
                s = "-" + s
            self._cache = s
        return self._cache
class gYearType(anyType):
    """xsd gYear: a single integer year; defaults to the current year."""
    _validURIs = (NS.XSD3,)
    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[0:1]
            elif type(data) in (IntType, LongType, FloatType):
                data = [data]
            if type(data) in (ListType, TupleType):
                if len(data) == 9:
                    # A full struct_time: keep the year only.
                    data = data[0:1]
                elif len(data) < 1:
                    raise Exception("too few values")
                elif len(data) > 1:
                    raise Exception("too many values")
                if type(data[0]) == FloatType:
                    # NOTE(review): the duplicated int() call in try/except
                    # is a 2to3 artifact (originally int then long).
                    try:
                        s = int(data[0])
                    except:
                        s = int(data[0])
                    if s != data[0]:
                        raise Exception("not integral")
                    data = [s]
                elif type(data[0]) not in (IntType, LongType):
                    raise Exception("bad type")
            else:
                raise Exception("invalid type")
        except Exception as e:
            raise ValueError("invalid %s value - %s" % (self._type, e))
        return data[0]
    def _marshalData(self):
        """'YYYYZ'; negative years get a leading '-'."""
        if self._cache == None:
            d = self._data
            s = "%04dZ" % abs(d)
            if d < 0:
                s = "-" + s
            self._cache = s
        return self._cache
class centuryType(anyType):
    """xsd century (pre-2001 drafts): an integer century, i.e. year // 100
    (19 for the 1900s). Defaults to the current century."""
    _validURIs = (NS.XSD2, NS.ENC)
    def _checkValueSpace(self, data):
        """Normalize *data* to a single integer century.

        Accepts None (now), a number, a 1-element sequence, or a full
        9-element struct_time-like sequence (its year is divided by 100).
        Raises ValueError on anything else.
        """
        try:
            if data == None:
                # BUG FIX: the original divided the (year,) slice -- a
                # tuple -- by 100, which is a TypeError; take the year
                # and integer-divide it instead.
                data = [time.gmtime(time.time())[0] // 100]
            elif type(data) in (IntType, LongType, FloatType):
                data = [data]
            if type(data) in (ListType, TupleType):
                if len(data) == 9:
                    # BUG FIX: same tuple-division defect for the
                    # struct_time input path.
                    data = [data[0] // 100]
                elif len(data) < 1:
                    raise Exception("too few values")
                elif len(data) > 1:
                    raise Exception("too many values")
                if type(data[0]) == FloatType:
                    # Floats are accepted only when integral.
                    s = int(data[0])
                    if s != data[0]:
                        raise Exception("not integral")
                    data = [s]
                elif type(data[0]) not in (IntType, LongType):
                    raise Exception("bad type")
            else:
                raise Exception("invalid type")
        except Exception as e:
            raise ValueError("invalid %s value - %s" % (self._type, e))
        return data[0]
    def _marshalData(self):
        """'CCZ'; negative centuries get a leading '-'."""
        if self._cache == None:
            d = self._data
            s = "%02dZ" % abs(d)
            if d < 0:
                s = "-" + s
            self._cache = s
        return self._cache
class yearType(gYearType):
    """Older-namespace alias of gYear (2000 draft, SOAP-ENC)."""
    _validURIs = (NS.XSD2, NS.ENC)
class gMonthDayType(anyType):
    """xsd gMonthDay: a (month, day) tuple; defaults to today's."""
    _validURIs = (NS.XSD3,)
    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[1:3]
            elif type(data) in (IntType, LongType, FloatType):
                data = time.gmtime(data)[1:3]
            elif type(data) in (ListType, TupleType):
                if len(data) == 9:
                    # NOTE(review): slices [0:2] (year, month) rather than
                    # [1:3] (month, day) used by the other branches --
                    # looks inconsistent; verify intended behavior.
                    data = data[0:2]
                elif len(data) > 2:
                    raise Exception("too many values")
                data = list(data)
                # Fill a missing day with 1, pad a dummy year and time
                # slots so cleanDate can validate, then strip them.
                if len(data) < 2:
                    data += [1, 1][len(data) :]
                data = [0] + data + [0, 0, 0]
                cleanDate(data, 1)
                data = data[1:3]
            else:
                raise Exception("invalid type")
        except Exception as e:
            raise ValueError("invalid %s value - %s" % (self._type, e))
        return tuple(data)
    def _marshalData(self):
        """'--MM-DDZ' per the gMonthDay lexical form."""
        if self._cache == None:
            self._cache = "--%02d-%02dZ" % self._data
        return self._cache
class recurringDateType(gMonthDayType):
    """Older-namespace alias of gMonthDay (2000 draft, SOAP-ENC)."""
    _validURIs = (NS.XSD2, NS.ENC)
class gMonthType(anyType):
    """xsd gMonth: a single month number 1..12; defaults to the current one."""
    _validURIs = (NS.XSD3,)
    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[1:2]
            elif type(data) in (IntType, LongType, FloatType):
                data = [data]
            if type(data) in (ListType, TupleType):
                if len(data) == 9:
                    # A full struct_time: keep the month only.
                    data = data[1:2]
                elif len(data) < 1:
                    raise Exception("too few values")
                elif len(data) > 1:
                    raise Exception("too many values")
                if type(data[0]) == FloatType:
                    # NOTE(review): duplicated int() in try/except is a
                    # 2to3 artifact (originally int then long).
                    try:
                        s = int(data[0])
                    except:
                        s = int(data[0])
                    if s != data[0]:
                        raise Exception("not integral")
                    data = [s]
                elif type(data[0]) not in (IntType, LongType):
                    raise Exception("bad type")
                if data[0] < 1 or data[0] > 12:
                    raise Exception("bad value")
            else:
                raise Exception("invalid type")
        except Exception as e:
            raise ValueError("invalid %s value - %s" % (self._type, e))
        return data[0]
    def _marshalData(self):
        """'--MM--Z' per the gMonth lexical form."""
        if self._cache == None:
            self._cache = "--%02d--Z" % self._data
        return self._cache
class monthType(gMonthType):
    """Older-namespace alias of gMonth (2000 draft, SOAP-ENC)."""
    _validURIs = (NS.XSD2, NS.ENC)
class gDayType(anyType):
    """xsd gDay: a single day-of-month 1..31; defaults to today's."""
    _validURIs = (NS.XSD3,)
    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[2:3]
            elif type(data) in (IntType, LongType, FloatType):
                data = [data]
            if type(data) in (ListType, TupleType):
                if len(data) == 9:
                    # A full struct_time: keep the day only.
                    data = data[2:3]
                elif len(data) < 1:
                    raise Exception("too few values")
                elif len(data) > 1:
                    raise Exception("too many values")
                if type(data[0]) == FloatType:
                    # NOTE(review): duplicated int() in try/except is a
                    # 2to3 artifact (originally int then long).
                    try:
                        s = int(data[0])
                    except:
                        s = int(data[0])
                    if s != data[0]:
                        raise Exception("not integral")
                    data = [s]
                elif type(data[0]) not in (IntType, LongType):
                    raise Exception("bad type")
                if data[0] < 1 or data[0] > 31:
                    raise Exception("bad value")
            else:
                raise Exception("invalid type")
        except Exception as e:
            raise ValueError("invalid %s value - %s" % (self._type, e))
        return data[0]
    def _marshalData(self):
        """'---DDZ' per the gDay lexical form."""
        if self._cache == None:
            self._cache = "---%02dZ" % self._data
        return self._cache
class recurringDayType(gDayType):
    """Older-namespace alias of gDay (2000 draft, SOAP-ENC)."""
    _validURIs = (NS.XSD2, NS.ENC)
class hexBinaryType(anyType):
    """xsd hexBinary: string payload marshalled as hexadecimal digits."""
    _validURIs = (NS.XSD3,)
    def _checkValueSpace(self, data):
        """Reject None and non-string payloads."""
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        is_string = type(data) in (StringType, UnicodeType)
        if not is_string:
            raise AttributeError("invalid %s type" % self._type)
        return data
    def _marshalData(self):
        """Hex-encode lazily and memoize the result in self._cache."""
        if self._cache == None:
            encoded = encodeHexString(self._data)
            self._cache = encoded
        return self._cache
class base64BinaryType(anyType):
    """xsd base64Binary: string payload, base64-encoded on marshal."""
    _validURIs = (NS.XSD3,)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (StringType, UnicodeType):
            raise AttributeError("invalid %s type" % self._type)
        return data
    def _marshalData(self):
        if self._cache == None:
            # NOTE(review): base64.encodestring was removed in Python 3.9
            # (base64.encodebytes replaces it); confirm the target runtime.
            self._cache = base64.encodestring(self._data)
        return self._cache
class base64Type(base64BinaryType):
    """SOAP-ENC namespace alias of base64Binary."""
    _validURIs = (NS.ENC,)
class binaryType(anyType):
    """Binary value marshalled as base64 (default) or hex, selected via
    the 'encoding' attribute."""
    _validURIs = (NS.XSD, NS.ENC)
    def __init__(self, data, name=None, typed=1, encoding="base64", attrs=None):
        anyType.__init__(self, data, name, typed, attrs)
        self._setAttr("encoding", encoding)
    def _marshalData(self):
        if self._cache == None:
            if self._getAttr((None, "encoding")) == "base64":
                # NOTE(review): base64.encodestring was removed in
                # Python 3.9 (see base64.encodebytes).
                self._cache = base64.encodestring(self._data)
            else:
                self._cache = encodeHexString(self._data)
        return self._cache
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (StringType, UnicodeType):
            raise AttributeError("invalid %s type" % self._type)
        return data
    def _setAttr(self, attr, value):
        # Validate the 'encoding' attribute specially, and drop the
        # marshalled cache since the representation changes with it.
        attr = self._fixAttr(attr)
        if attr[1] == "encoding":
            if attr[0] != None or value not in ("base64", "hex"):
                raise AttributeError("invalid encoding")
            self._cache = None
        anyType._setAttr(self, attr, value)
class anyURIType(anyType):
    """xsd anyURI: string value, percent-quoted on marshal."""
    _validURIs = (NS.XSD3,)
    def _checkValueSpace(self, data):
        """Reject None and non-string payloads."""
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        is_string = type(data) in (StringType, UnicodeType)
        if not is_string:
            raise AttributeError("invalid %s type" % self._type)
        return data
    def _marshalData(self):
        """Percent-quote lazily and memoize the result."""
        if self._cache == None:
            quoted = urllib.parse.quote(self._data)
            self._cache = quoted
        return self._cache
class uriType(anyURIType):
    """Older-namespace alias of anyURI (1999 draft)."""
    _validURIs = (NS.XSD,)
class uriReferenceType(anyURIType):
    """Older-namespace alias of anyURI (2000 draft)."""
    _validURIs = (NS.XSD2,)
class NOTATIONType(anyType):
    """xsd NOTATION: abstract per the schema spec, so it refuses direct
    instantiation; subclass it for concrete notations."""
    def __init__(self, data, name=None, typed=1, attrs=None):
        if self.__class__ == NOTATIONType:
            raise Error("a NOTATION can't be instantiated directly")
        anyType.__init__(self, data, name, typed, attrs)
class ENTITIESType(anyType):
    """xsd ENTITIES: a whitespace-separated list of string tokens."""
    def _checkValueSpace(self, data):
        """Accept a lone string (wrapped in a 1-tuple) or a sequence of
        strings; reject None and anything else."""
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) in (StringType, UnicodeType):
            return (data,)
        non_strings = [
            x for x in data if type(x) not in (StringType, UnicodeType)
        ]
        if type(data) not in (ListType, TupleType) or non_strings:
            raise AttributeError("invalid %s type" % self._type)
        return data
    def _marshalData(self):
        """Join the tokens with single spaces."""
        return " ".join(self._data)
class IDREFSType(ENTITIESType):
    """xsd IDREFS: same list-of-strings value space as ENTITIES."""
    pass
class NMTOKENSType(ENTITIESType):
    """xsd NMTOKENS: same list-of-strings value space as ENTITIES."""
    pass
class integerType(anyType):
    """xsd integer: an unbounded integral value."""
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        is_integral = type(data) in (IntType, LongType)
        if not is_integral:
            raise ValueError("invalid %s value" % self._type)
        return data
class nonPositiveIntegerType(anyType):
    """xsd nonPositiveInteger: integer <= 0."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (IntType, LongType) or data > 0:
            raise ValueError("invalid %s value" % self._type)
        return data
class non_Positive_IntegerType(nonPositiveIntegerType):
    """Hyphenated spelling used by the oldest (1999) XSD draft."""
    _validURIs = (NS.XSD,)
    def _typeName(self):
        # The XML type name contains hyphens, which Python identifiers
        # cannot, so the derived-from-class-name default is overridden.
        return "non-positive-integer"
class negativeIntegerType(anyType):
    """xsd negativeInteger: integer < 0."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (IntType, LongType) or data >= 0:
            raise ValueError("invalid %s value" % self._type)
        return data
class negative_IntegerType(negativeIntegerType):
    """Hyphenated spelling used by the oldest (1999) XSD draft."""
    _validURIs = (NS.XSD,)
    def _typeName(self):
        return "negative-integer"
# Signed bounded integer types: each rejects None, non-ints, and
# out-of-range values for its bit width.
class longType(anyType):
    """xsd long: 64-bit signed integer range."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if (
            type(data) not in (IntType, LongType)
            or data < -9223372036854775808
            or data > 9223372036854775807
        ):
            raise ValueError("invalid %s value" % self._type)
        return data
class intType(anyType):
    """xsd int: 32-bit signed integer range."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if (
            type(data) not in (IntType, LongType)
            or data < -2147483648
            or data > 2147483647
        ):
            raise ValueError("invalid %s value" % self._type)
        return data
class shortType(anyType):
    """xsd short: 16-bit signed integer range."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (IntType, LongType) or data < -32768 or data > 32767:
            raise ValueError("invalid %s value" % self._type)
        return data
class byteType(anyType):
    """xsd byte: 8-bit signed integer range."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (IntType, LongType) or data < -128 or data > 127:
            raise ValueError("invalid %s value" % self._type)
        return data
# Unsigned / non-negative integer types: each rejects None, non-ints,
# negative values, and values above its bit-width maximum.
class nonNegativeIntegerType(anyType):
    """xsd nonNegativeInteger: integer >= 0, unbounded above."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (IntType, LongType) or data < 0:
            raise ValueError("invalid %s value" % self._type)
        return data
class non_Negative_IntegerType(nonNegativeIntegerType):
    """Hyphenated spelling used by the oldest (1999) XSD draft."""
    _validURIs = (NS.XSD,)
    def _typeName(self):
        # XML type name contains hyphens, impossible in a class name.
        return "non-negative-integer"
class unsignedLongType(anyType):
    """xsd unsignedLong: 64-bit unsigned integer range."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if (
            type(data) not in (IntType, LongType)
            or data < 0
            or data > 18446744073709551615
        ):
            raise ValueError("invalid %s value" % self._type)
        return data
class unsignedIntType(anyType):
    """xsd unsignedInt: 32-bit unsigned integer range."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (IntType, LongType) or data < 0 or data > 4294967295:
            raise ValueError("invalid %s value" % self._type)
        return data
class unsignedShortType(anyType):
    """xsd unsignedShort: 16-bit unsigned integer range."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (IntType, LongType) or data < 0 or data > 65535:
            raise ValueError("invalid %s value" % self._type)
        return data
class unsignedByteType(anyType):
    """xsd unsignedByte: 8-bit unsigned integer range."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (IntType, LongType) or data < 0 or data > 255:
            raise ValueError("invalid %s value" % self._type)
        return data
class positiveIntegerType(anyType):
    """xsd positiveInteger: integer > 0."""
    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError("must supply initial %s value" % self._type)
        if type(data) not in (IntType, LongType) or data <= 0:
            raise ValueError("invalid %s value" % self._type)
        return data
class positive_IntegerType(positiveIntegerType):
    """Hyphenated spelling used by the oldest (1999) XSD draft."""
    _validURIs = (NS.XSD,)
    def _typeName(self):
        # XML type name contains hyphens, impossible in a class name.
        return "positive-integer"
# Now compound types
class compoundType(anyType):
    """Base class for compound (struct/array) values: named members kept
    in insertion order.  Abstract -- instantiate a subclass."""
    def __init__(self, data=None, name=None, typed=1, attrs=None):
        if self.__class__ == compoundType:
            raise Error("a compound can't be instantiated directly")
        anyType.__init__(self, data, name, typed, attrs)
        self._keyord = []  # member names in insertion order
        if type(data) == DictType:
            self.__dict__.update(data)
    def _aslist(self, item=None):
        """Members as an ordered list, or the item'th member."""
        if item is not None:
            return self.__dict__[self._keyord[item]]
        else:
            return [self.__dict__[x] for x in self._keyord]
    def _asdict(self, item=None, encoding=Config.dict_encoding):
        """Members as a dict keyed by (encoded) name, or a single member."""
        if item is not None:
            if type(item) in (UnicodeType, StringType):
                item = item.encode(encoding)
            return self.__dict__[item]
        else:
            retval = {}
            def fun(x):
                retval[x.encode(encoding)] = self.__dict__[x]
            if hasattr(self, "_keyord"):
                list(map(fun, self._keyord))
            else:
                # Fallback for instances built without running __init__.
                for name in dir(self):
                    if isPublic(name):
                        retval[name] = getattr(self, name)
            return retval
    def __getitem__(self, item):
        # Integer indices address members by insertion order; anything
        # else is treated as an attribute name.
        if type(item) == IntType:
            return self.__dict__[self._keyord[item]]
        else:
            return getattr(self, item)
    def __len__(self):
        return len(self._keyord)
    def __bool__(self):
        # BUG FIX: Python 3 requires __bool__ to return a bool; returning
        # the int 1 raised "TypeError: __bool__ should return bool".
        # Compound values remain always truthy, even when empty.
        return True
    def _keys(self):
        """Public member names (no leading underscore)."""
        return [x for x in list(self.__dict__.keys()) if x[0] != "_"]
    def _addItem(self, name, value, attrs=None):
        """Append a member; repeated names collapse into a list."""
        if name in self._keyord:
            if type(self.__dict__[name]) != ListType:
                self.__dict__[name] = [self.__dict__[name]]
            self.__dict__[name].append(value)
        else:
            self.__dict__[name] = value
            self._keyord.append(name)
    def _placeItem(self, name, value, pos, subpos=0, attrs=None):
        """Overwrite the member at ordinal *pos* (or one sub-element of a
        list-valued member when subpos > 0)."""
        if subpos == 0 and type(self.__dict__[name]) != ListType:
            self.__dict__[name] = value
        else:
            self.__dict__[name][subpos] = value
        self._keyord[pos] = name
    def _getItemAsList(self, name, default=[]):
        """Return the member as a list (scalars are wrapped); *default*
        when absent.  NOTE(review): the shared mutable default is returned
        as-is -- callers must not mutate it."""
        try:
            d = self.__dict__[name]
        except:
            return default
        if type(d) == ListType:
            return d
        return [d]
    def __str__(self):
        return anyType.__str__(self) + ": " + str(self._asdict())
    def __repr__(self):
        return self.__str__()
class structType(compoundType):
    """Generic SOAP struct: ordered named members, all inherited from
    compoundType."""
    pass
class headerType(structType):
    """The SOAP-ENV:Header element (fixed name 'Header')."""
    _validURIs = (NS.ENV,)
    def __init__(self, data=None, typed=1, attrs=None):
        structType.__init__(self, data, "Header", typed, attrs)
class bodyType(structType):
    """The SOAP-ENV:Body element (fixed name 'Body')."""
    _validURIs = (NS.ENV,)
    def __init__(self, data=None, typed=1, attrs=None):
        structType.__init__(self, data, "Body", typed, attrs)
class arrayType(collections.UserList, compoundType):
    """A SOAP-encoded array (SOAP 1.1 section 5.4.2).

    Elements live in ``self.data`` (via UserList). Supports
    multi-dimensional arrays, an element offset, and sparse arrays whose
    elements carry SOAP-ENC ``position`` attributes.

    Fixes relative to the original:
      * ``__bool__`` now returns ``True`` (returning the int 1 raises
        TypeError under Python 3);
      * the list-growth arithmetic in ``_addItem``/``_placeItem`` was
        inverted (``len(a) - curpos[0] + 1`` instead of
        ``curpos[0] - len(a) + 1``), so placing a value more than one
        slot past the end failed to extend the list;
      * the bare ``except:`` in ``_addItem`` is narrowed to ``Exception``.
    """

    def __init__(
        self,
        data=None,
        name=None,
        attrs=None,
        offset=0,
        rank=None,
        asize=0,
        elemsname=None,
    ):
        if data:
            if type(data) not in (ListType, TupleType):
                raise Error("Data must be a sequence")
        collections.UserList.__init__(self, data)
        compoundType.__init__(self, data, name, 0, attrs)
        self._elemsname = elemsname or "item"
        if data is None:
            self._rank = rank
            # According to 5.4.2.2 in the SOAP spec, each element in a
            # sparse array must have a position. _posstate keeps track of
            # whether we've seen a position or not. Its possible values:
            #  -1 No elements have been added, so the state is indeterminate
            #   0 An element without a position has been added, so no
            #     elements can have positions
            #   1 An element with a position has been added, so all elements
            #     must have positions
            self._posstate = -1
            self._full = 0
            if asize in ("", None):
                asize = "0"
            self._dims = [int(x) for x in str(asize).split(",")]
            self._dims.reverse()  # It's easier to work with this way
            self._poss = [0] * len(self._dims)  # This ends up reversed too
            for i in range(len(self._dims)):
                if self._dims[i] < 0 or self._dims[i] == 0 and len(self._dims) > 1:
                    raise TypeError("invalid Array dimensions")
                if offset > 0:
                    self._poss[i] = offset % self._dims[i]
                    offset = int(offset / self._dims[i])
                # Don't break out of the loop if offset is 0 so we test all
                # the dimensions for > 0.
            if offset:
                raise AttributeError("invalid Array offset")
            # Build the nested list structure, innermost dimension first.
            a = [None] * self._dims[0]
            for i in range(1, len(self._dims)):
                b = []
                for j in range(self._dims[i]):
                    b.append(copy.deepcopy(a))
                a = b
            self.data = a

    def _aslist(self, item=None):
        """Return the raw element list, or one slot when *item* is given."""
        if item is not None:
            return self.data[int(item)]
        else:
            return self.data

    def _asdict(self, item=None, encoding=Config.dict_encoding):
        """Return the elements keyed by their (encoded) index string."""
        if item is not None:
            if type(item) in (UnicodeType, StringType):
                item = item.encode(encoding)
            return self.data[int(item)]
        else:
            retval = {}

            def fun(x):
                retval[str(x).encode(encoding)] = self.data[x]

            list(map(fun, list(range(len(self.data)))))
            return retval

    def __getitem__(self, item):
        """Int-convertible indices address elements; others are attributes."""
        try:
            return self.data[int(item)]
        except ValueError:
            return getattr(self, item)

    def __len__(self):
        return len(self.data)

    def __bool__(self):
        # BUG FIX: Python 3 requires __bool__ to return a bool; the
        # original returned the int 1, raising TypeError on truth tests.
        # Arrays are always truthy, matching the original intent.
        return True

    def __str__(self):
        return anyType.__str__(self) + ": " + str(self._aslist())

    def _keys(self):
        """Names of the public attributes (those not starting with '_')."""
        return [x for x in list(self.__dict__.keys()) if x[0] != "_"]

    def _addItem(self, name, value, attrs):
        """Append *value*, honoring an optional SOAP-ENC position attribute."""
        if self._full:
            raise ValueError("Array is full")
        pos = attrs.get((NS.ENC, "position"))
        if pos is not None:
            if self._posstate == 0:
                raise AttributeError(
                    "all elements in a sparse Array must have a " "position attribute"
                )
            self._posstate = 1
            try:
                if pos[0] == "[" and pos[-1] == "]":
                    pos = [int(x) for x in pos[1:-1].split(",")]
                    pos.reverse()
                    if len(pos) == 1:
                        # Single flat index: decompose into per-dimension
                        # coordinates, innermost first.
                        pos = pos[0]
                        curpos = [0] * len(self._dims)
                        for i in range(len(self._dims)):
                            curpos[i] = pos % self._dims[i]
                            pos = int(pos / self._dims[i])
                            if pos == 0:
                                break
                        if pos:
                            raise Exception
                    elif len(pos) != len(self._dims):
                        raise Exception
                    else:
                        for i in range(len(self._dims)):
                            if pos[i] >= self._dims[i]:
                                raise Exception
                        curpos = pos
                else:
                    raise Exception
            except Exception:  # narrowed from a bare except
                raise AttributeError("invalid Array element position %s" % str(pos))
        else:
            if self._posstate == 1:
                raise AttributeError(
                    "only elements in a sparse Array may have a " "position attribute"
                )
            self._posstate = 0
            curpos = self._poss
        a = self.data
        for i in range(len(self._dims) - 1, 0, -1):
            a = a[curpos[i]]
        if curpos[0] >= len(a):
            # BUG FIX: grow by (target - length + 1); the original computed
            # len(a) - curpos[0] + 1, which is <= 1 here and failed to
            # extend the list when the gap was more than one slot.
            a += [None] * (curpos[0] - len(a) + 1)
        a[curpos[0]] = value
        if pos is None:
            # Advance the implicit cursor, carrying into outer dimensions.
            self._poss[0] += 1
            for i in range(len(self._dims) - 1):
                if self._poss[i] < self._dims[i]:
                    break
                self._poss[i] = 0
                self._poss[i + 1] += 1
        if self._dims[-1] and self._poss[-1] >= self._dims[-1]:
            # self._full = 1
            # FIXME: why is this occurring?
            pass

    def _placeItem(self, name, value, pos, subpos, attrs=None):
        """Place *value* at flat position *pos* (used by the parser)."""
        curpos = [0] * len(self._dims)
        for i in range(len(self._dims)):
            if self._dims[i] == 0:
                curpos[0] = pos
                break
            curpos[i] = pos % self._dims[i]
            pos = int(pos / self._dims[i])
            if pos == 0:
                break
        if self._dims[i] != 0 and pos:
            raise Error("array index out of range")
        a = self.data
        for i in range(len(self._dims) - 1, 0, -1):
            a = a[curpos[i]]
        if curpos[0] >= len(a):
            # BUG FIX: extend by (target - length + 1); see _addItem.
            a += [None] * (curpos[0] - len(a) + 1)
        a[curpos[0]] = value
class typedArrayType(arrayType):
    """An arrayType whose element type is known (SOAP-ENC arrayType attr)."""

    def __init__(
        self,
        data=None,
        name=None,
        typed=None,
        attrs=None,
        offset=0,
        rank=None,
        asize=0,
        elemsname=None,
        complexType=0,
    ):
        # *typed* names the element type; the instance itself is always
        # marked as typed regardless of the argument.
        arrayType.__init__(self, data, name, attrs, offset, rank, asize, elemsname)
        self._typed = 1
        self._type = typed
        self._complexType = complexType
class faultType(structType, Error):
    """A SOAP Fault element; doubles as the exception raised for faults."""

    def __init__(self, faultcode="", faultstring="", detail=None):
        self.faultcode = faultcode
        self.faultstring = faultstring
        # Only create the attribute when a detail was supplied, so its
        # presence can be probed with getattr(..., None) below.
        if detail != None:
            self.detail = detail
        structType.__init__(self, None, 0)

    def _setDetail(self, detail=None):
        """Set the fault detail, or remove the attribute when passed None."""
        if detail != None:
            self.detail = detail
        else:
            try:
                del self.detail
            except AttributeError:
                pass

    def __repr__(self):
        if getattr(self, "detail", None) != None:
            return "<Fault %s: %s: %s>" % (
                self.faultcode,
                self.faultstring,
                self.detail,
            )
        else:
            return "<Fault %s: %s>" % (self.faultcode, self.faultstring)

    __str__ = __repr__

    def __call__(self):
        # NOTE(review): raises AttributeError when no detail was ever set —
        # confirm whether callers rely on that behavior.
        return (self.faultcode, self.faultstring, self.detail)
class SOAPException(Exception):
    """Raised for a SOAP fault that maps to no more specific exception."""

    def __init__(self, code="", string="", detail=None):
        self.code, self.string, self.detail = code, string, detail
        self.value = ("SOAPpy SOAP Exception", code, string, detail)

    def __str__(self):
        return repr(self.value)
class RequiredHeaderMismatch(Exception):
    """Raised when a mustUnderstand SOAP header was not understood."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
class MethodNotFound(Exception):
    """Raised when the server reports 'Method Not Found'.

    The server detail string has the form "<method>: <description>"; it
    is split into ``.value`` (the method) and ``.detail`` (the rest).
    """

    def __init__(self, value):
        # Split on the FIRST colon only: the description may itself contain
        # colons, which made the original split(":") raise ValueError.
        (val, detail) = value.split(":", 1)
        self.value = val
        self.detail = detail

    def __str__(self):
        # BUG FIX: repr() takes a single object; the original passed two
        # arguments, so __str__ raised TypeError whenever it was invoked.
        return repr((self.value, self.detail))
class AuthorizationFailed(Exception):
    """Raised when the server reports 'Authorization Failed'."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
class MethodFailed(Exception):
    """Raised when the server reports 'Method Failed'."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return "{!r}".format(self.value)
#######
# Convert complex SOAPpy objects to native python equivalents
#######
def simplify(object, level=0):
    """
    Convert the SOAPpy objects and their contents to simple python types.

    This function recursively converts the passed 'container' object,
    and all public subobjects. (Private subobjects have names that
    start with '_'.)

    Conversions:
    - faultType --> raise python exception
    - arrayType --> list
    - compoundType --> dictionary

    *level* bounds the recursion depth: conversion stops past 10 levels.
    """
    if level > 10:
        return object
    if isinstance(object, faultType):
        # Map well-known fault strings onto specific exception classes.
        if object.faultstring == "Required Header Misunderstood":
            raise RequiredHeaderMismatch(object.detail)
        elif object.faultstring == "Method Not Found":
            raise MethodNotFound(object.detail)
        elif object.faultstring == "Authorization Failed":
            raise AuthorizationFailed(object.detail)
        elif object.faultstring == "Method Failed":
            raise MethodFailed(object.detail)
        else:
            se = SOAPException(object.faultcode, object.faultstring, object.detail)
            raise se
    elif isinstance(object, arrayType):
        data = object._aslist()
        for k in range(len(data)):
            data[k] = simplify(data[k], level=level + 1)
        return data
    elif isinstance(object, compoundType) or isinstance(object, structType):
        data = object._asdict()
        for k in list(data.keys()):
            if isPublic(k):
                data[k] = simplify(data[k], level=level + 1)
        return data
    elif type(object) == DictType:
        for k in list(object.keys()):
            if isPublic(k):
                # BUG FIX: propagate the recursion depth; the original
                # restarted at level 0 here, defeating the depth guard.
                object[k] = simplify(object[k], level=level + 1)
        return object
    elif type(object) == list:
        for k in range(len(object)):
            # BUG FIX: depth propagated here too (was reset to 0).
            object[k] = simplify(object[k], level=level + 1)
        return object
    else:
        return object
def simplify_contents(object, level=0):
    """
    Convert the contents of SOAPpy objects to simple python types.

    This function recursively converts the sub-objects contained in a
    'container' object to simple python types; the container itself
    keeps its original type.

    Conversions:
    - faultType --> raise python exception
    - arrayType --> array
    - compoundType --> dictionary

    *level* bounds the recursion depth at 10.
    """
    if level > 10:
        return object
    if isinstance(object, faultType):
        for k in object._keys():
            if isPublic(k):
                setattr(object, k, simplify(object[k], level=level + 1))
        raise object
    elif isinstance(object, arrayType):
        data = object._aslist()
        for k in range(len(data)):
            object[k] = simplify(data[k], level=level + 1)
    elif isinstance(object, structType):
        data = object._asdict()
        for k in list(data.keys()):
            if isPublic(k):
                setattr(object, k, simplify(data[k], level=level + 1))
    elif isinstance(object, compoundType):
        data = object._asdict()
        for k in list(data.keys()):
            if isPublic(k):
                object[k] = simplify(data[k], level=level + 1)
    elif type(object) == DictType:
        for k in list(object.keys()):
            if isPublic(k):
                # BUG FIX: propagate recursion depth (original reset to 0,
                # defeating the depth guard for nested dicts).
                object[k] = simplify(object[k], level=level + 1)
    elif type(object) == list:
        for k in range(len(object)):
            # BUG FIX: depth propagated here too (was reset to 0).
            object[k] = simplify(object[k], level=level + 1)
    return object
# --- boundary: the content below comes from a second, concatenated file ---
"The Python datastore API used by app developers.\n\nDefines Entity, Query, and Iterator classes, as well as methods for all of the\ndatastore's calls. Also defines conversions between the Python classes and\ntheir PB counterparts.\n\nThe datastore errors are defined in the datastore_errors module. That module is\nonly required to avoid circular imports. datastore imports datastore_types,\nwhich needs BadValueError, so it can't be defined in datastore.\n"
import heapq
import itertools
import logging
import os
import re
import sys
import threading
import traceback
from xml.sax import saxutils
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import capabilities
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.datastore import datastore_pb
from google.appengine.datastore import datastore_query
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import entity_pb
# Datastore API limits and convenience aliases.
MAX_ALLOWABLE_QUERIES = 30
MAXIMUM_RESULTS = 1000
DEFAULT_TRANSACTION_RETRIES = 3
# NOTE(review): these parenthesized strings are plain str values, not
# tuples or CapabilitySets; the constructor call appears to have been
# stripped by whatever produced this file — confirm against upstream.
READ_CAPABILITY = ('datastore_v3')
WRITE_CAPABILITY = ('datastore_v3')
_MAX_INDEXED_PROPERTIES = 20000
_MAX_ID_BATCH_SIZE = datastore_rpc._MAX_ID_BATCH_SIZE
Key = datastore_types.Key
typename = datastore_types.typename
STRONG_CONSISTENCY = datastore_rpc.Configuration.STRONG_CONSISTENCY
EVENTUAL_CONSISTENCY = datastore_rpc.Configuration.EVENTUAL_CONSISTENCY
_MAX_INT_32 = ((2 ** 31) - 1)  # largest signed 32-bit integer
def NormalizeAndTypeCheck(arg, types):
    """Normalize *arg* into a list and type-check it against *types*.

    Intended to return (normalized_list, was_sequence).

    NOTE(review): the body below is corrupted decompiler output — bare
    tuples stand where isinstance()/exception-constructor calls were
    (e.g. ``if (arg, types):`` is always truthy, ``raise ()`` raises
    TypeError). Non-functional as written; restore from the original
    google.appengine.api.datastore module.
    """
    if (not (types, (list, tuple))):
        types = (types,)
    if (not ((list not in types) and (tuple not in types))):
        raise ()
    if (arg, types):
        return ([arg], False)
    else:
        if (arg, str):
            raise (('Expected an instance or iterable of %s; received %s (a %s).' % (types, arg, (arg))))
        try:
            arg_list = (arg)
        except TypeError:
            raise (('Expected an instance or iterable of %s; received %s (a %s).' % (types, arg, (arg))))
        for val in arg_list:
            if (not (val, types)):
                raise (('Expected one of %s; received %s (a %s).' % (types, val, (val))))
        return (arg_list, True)
def NormalizeAndTypeCheckKeys(keys):
    """Normalize *keys* (str/Entity/Key or a sequence) to a list of Keys;
    intended to return (keys, was_sequence).

    NOTE(review): call expressions stripped (decompiler artifact);
    non-functional as written.
    """
    (keys, multiple) = (keys, (str, Entity, Key))
    keys = [(key) for key in keys]
    return (keys, multiple)
def _GetConfigFromKwargs(kwargs, convert_rpc=False, config_class=datastore_rpc.Configuration):
    """Extract a Configuration (or legacy UserRPC) from public-API kwargs.

    NOTE(review): corrupted decompiler output — ``('rpc', None)`` was
    presumably ``kwargs.pop('rpc', None)`` and the bare tuples were
    calls; non-functional as written.
    """
    if (not kwargs):
        return None
    rpc = ('rpc', None)
    if (rpc is not None):
        if (not (rpc, apiproxy_stub_map.UserRPC)):
            raise ('rpc= argument should be None or a UserRPC instance')
        if ('config' in kwargs):
            raise ('Expected rpc= or config= argument but not both')
        if (not convert_rpc):
            if kwargs:
                raise (('Unexpected keyword arguments: %s' % (kwargs)))
            return rpc
        read_policy = (rpc, 'read_policy', None)
        kwargs['config'] = ()
    return ()
class _BaseIndex(object):
    """A datastore composite-index description (id, kind, ancestor, props).

    NOTE(review): corrupted decompiler output — ``((4))`` below was
    presumably ``range(4)`` and the bare tuples in __init__ were
    validation calls. Non-functional as written.
    """
    (BUILDING, SERVING, DELETING, ERROR) = ((4))
    ASCENDING = datastore_query.PropertyOrder.ASCENDING
    DESCENDING = datastore_query.PropertyOrder.DESCENDING

    def __init__(self, index_id, kind, has_ancestor, properties):
        """Store index id, kind, ancestor flag and (name, direction) props."""
        argument_error = datastore_errors.BadArgumentError
        (index_id, 'index_id', argument_error)
        (kind, 'kind', argument_error)
        if (not (properties, (list, tuple))):
            raise ('properties must be a list or a tuple')
        for (idx, index_property) in (properties):
            if (not (index_property, (list, tuple))):
                raise (('property[%d] must be a list or a tuple' % idx))
            if ((index_property) != 2):
                raise (('property[%d] length should be 2 but was %d' % (idx, (index_property))))
            (index_property[0], 'property name', argument_error)
            (index_property[1], (self.ASCENDING, self.DESCENDING), 'sort direction')
        self.__id = (index_id)
        self.__kind = kind
        self.__has_ancestor = (has_ancestor)
        self.__properties = properties

    @staticmethod
    def __ValidateEnum(value, accepted_values, name='value', exception=datastore_errors.BadArgumentError):
        # NOTE(review): validation calls stripped here as well.
        (value, name, exception)
        if (not (value in accepted_values)):
            raise (('%s should be one of %s but was %d' % (name, (accepted_values), value)))

    def _Id(self):
        'Returns the index id, a long.'
        return self.__id

    def _Kind(self):
        "Returns the index kind, a string. Empty string ('') if none."
        return self.__kind

    def _HasAncestor(self):
        'Indicates if this is an ancestor index, a boolean.'
        return self.__has_ancestor

    def _Properties(self):
        """Returns the index properties: a sequence of
        (property name, ASCENDING|DESCENDING) tuples."""
        return self.__properties

    def __eq__(self, other):
        # Identity of an index is its id alone.
        return (self.__id == other.__id)

    def __ne__(self, other):
        return (self.__id != other.__id)

    def __hash__(self):
        # NOTE(review): presumably ``hash(self.__id)`` upstream; the call
        # was stripped, leaving the bare id expression.
        return (self.__id)
class Index(_BaseIndex):
    'A datastore index.'
    # Public aliases for the protected _BaseIndex accessors.
    Id = _BaseIndex._Id
    Kind = _BaseIndex._Kind
    HasAncestor = _BaseIndex._HasAncestor
    Properties = _BaseIndex._Properties
class DatastoreAdapter(datastore_rpc.AbstractAdapter):
    """Adapter between datatypes defined here (Entity etc.) and protobufs.

    NOTE(review): corrupted decompiler output — most method bodies
    return bare tuples where conversion calls once were; non-functional
    as written.
    """
    index_state_mappings = {entity_pb.CompositeIndex.ERROR: Index.ERROR, entity_pb.CompositeIndex.DELETED: Index.DELETING, entity_pb.CompositeIndex.READ_WRITE: Index.SERVING, entity_pb.CompositeIndex.WRITE_ONLY: Index.BUILDING}
    index_direction_mappings = {entity_pb.Index_Property.ASCENDING: Index.ASCENDING, entity_pb.Index_Property.DESCENDING: Index.DESCENDING}

    def key_to_pb(self, key):
        # Unwrap the datastore_types.Key to its underlying Reference pb.
        return key._Key__reference

    def pb_to_key(self, pb):
        return (pb)

    def entity_to_pb(self, entity):
        return ()

    def pb_to_entity(self, pb):
        return (pb)

    def pb_to_index(self, pb):
        index_def = ()
        properties = [((), (())) for property in ()]
        index = ((), (), (), properties)
        state = (())
        return (index, state)
# Module-level singletons for the connection machinery.
# NOTE(review): constructor calls stripped; upstream these are
# DatastoreAdapter() and threading.local() respectively.
_adapter = ()
_thread_local = ()
_ENV_KEY = '__DATASTORE_CONNECTION_INITIALIZED__'
def _GetConnection():
    """Retrieve a datastore connection local to the thread.

    NOTE(review): corrupted decompiler output — ``if (_ENV_KEY):`` was
    presumably ``if os.getenv(_ENV_KEY):``, and the bare ``raise`` under
    AttributeError was presumably ``pass``; non-functional as written.
    """
    connection = None
    if (_ENV_KEY):
        try:
            connection = _thread_local.connection
        except AttributeError:
            raise
    if (connection is None):
        connection = ()
        (connection)
    return connection
def _SetConnection(connection):
    """Sets the datastore connection local to the thread.

    Also marks the process environment so later lookups know a
    connection has been initialized.
    """
    _thread_local.connection = connection
    os.environ[_ENV_KEY] = '1'
def _MakeSyncCall(service, call, request, response, config=None):
    """Synchronous datastore APIProxy entry point; returns the response pb.

    NOTE(review): corrupted decompiler output — the isinstance check and
    the rpc make/wait calls were stripped to bare tuples; non-functional
    as written.
    """
    conn = ()
    if (request, datastore_pb.Query):
        (request, config)
        (request)
    rpc = (config, call, request, response)
    (rpc)
    return response
def CreateRPC(service='datastore_v3', deadline=None, callback=None, read_policy=None):
    """Create a UserRPC for configuring datastore calls (legacy API;
    prefer CreateConfig).

    NOTE(review): corrupted decompiler output — connection/config
    construction calls were stripped; non-functional as written.
    """
    if (not (service == 'datastore_v3')):
        raise ()
    conn = ()
    config = None
    if (deadline is not None):
        config = ()
    rpc = (config)
    rpc.callback = callback
    if (read_policy is not None):
        rpc.read_policy = read_policy
    return rpc
def CreateConfig(**kwds):
    """Create a Configuration object for datastore calls.

    NOTE(review): the constructor call was stripped; upstream this
    returns datastore_rpc.Configuration(**kwds). As written it returns
    an empty tuple.
    """
    return ()
def CreateTransactionOptions(**kwds):
    """Create a configuration object for transactions.

    NOTE(review): the constructor call was stripped; upstream this
    returns datastore_rpc.TransactionOptions(**kwds). As written it
    returns an empty tuple.
    """
    return ()
def PutAsync(entities, **kwargs):
    """Asynchronously store one or more entities (async variant of Put).

    NOTE(review): corrupted decompiler output — bare tuples replace the
    kwargs.pop / config / validation / async_put calls throughout;
    non-functional as written.
    """
    extra_hook = ('extra_hook', None)
    config = (kwargs)
    if ((config, 'read_policy', None) == EVENTUAL_CONSISTENCY):
        raise ('read_policy is only supported on read operations.')
    (entities, multiple) = (entities, Entity)
    for entity in entities:
        if ():
            raise (('Cannot put a partial entity: %s' % entity))
        if ((not ()) or (not ())):
            raise (('App and kind must not be empty, in entity: %s' % entity))

    def local_extra_hook(keys):
        # Sanity-check the RPC result and map the returned keys back onto
        # the input entities before handing off to the caller's hook.
        num_keys = (keys)
        num_entities = (entities)
        if (num_keys != num_entities):
            raise (('Put accepted %d entities but returned %d keys.' % (num_entities, num_keys)))
        for (entity, key) in (entities, keys):
            if (entity._Entity__key._Key__reference != key._Key__reference):
                if (not (not ())):
                    raise ()
                (key._Key__reference)
        if multiple:
            result = keys
        else:
            result = keys[0]
        if extra_hook:
            return (result)
        return result

    return (config, entities, local_extra_hook)
def Put(entities, **kwargs):
    """Store one or more entities; returns a Key or list of Keys.

    NOTE(review): the body was stripped to ``return ()``; upstream this
    is ``PutAsync(entities, **kwargs).get_result()``.
    """
    return ()
def GetAsync(keys, **kwargs):
    """Asynchronously retrieve one or more entities (async variant of Get).

    NOTE(review): corrupted decompiler output — bare tuples replace the
    kwargs.pop / config / normalization / async_get calls; non-functional
    as written.
    """
    extra_hook = ('extra_hook', None)
    config = (kwargs)
    (keys, multiple) = (keys)

    def local_extra_hook(entities):
        # Collapse the result list to a single entity for scalar input.
        if multiple:
            result = entities
        else:
            if ((not entities) or (entities[0] is None)):
                raise ()
            result = entities[0]
        if extra_hook:
            return (result)
        return result

    return (config, keys, local_extra_hook)
def Get(keys, **kwargs):
    """Retrieve one or more entities from the datastore.

    NOTE(review): the body was stripped to ``return ()``; upstream this
    is ``GetAsync(keys, **kwargs).get_result()``.
    """
    return ()
def GetIndexesAsync(**kwargs):
    """Asynchronously retrieve application indexes and their states.

    NOTE(review): corrupted decompiler output — the kwargs.pop / config /
    async_get_indexes calls were stripped; non-functional as written.
    """
    extra_hook = ('extra_hook', None)
    config = (kwargs)

    def local_extra_hook(result):
        if extra_hook:
            return (result)
        return result

    return (config, local_extra_hook)
def GetIndexes(**kwargs):
    """Retrieve the application indexes and their states.

    NOTE(review): the body was stripped to ``return ()``; upstream this
    is ``GetIndexesAsync(**kwargs).get_result()``.
    """
    return ()
def DeleteAsync(keys, **kwargs):
    """Asynchronously delete one or more entities (async variant of Delete).

    NOTE(review): corrupted decompiler output — the config / normalization /
    async_delete calls were stripped; non-functional as written.
    """
    config = (kwargs)
    if ((config, 'read_policy', None) == EVENTUAL_CONSISTENCY):
        raise ('read_policy is only supported on read operations.')
    (keys, _) = (keys)
    return (config, keys)
def Delete(keys, **kwargs):
    """Delete one or more entities from the datastore. Use with care!

    NOTE(review): the body was stripped to ``return ()``; upstream this
    is ``DeleteAsync(keys, **kwargs).get_result()``.
    """
    return ()
class Entity(dict):
'A datastore entity.\n\n Includes read-only accessors for app id, kind, and primary key. Also\n provides dictionary-style access to properties.\n '
__projection = False
def __init__(self, kind, parent=None, _app=None, name=None, id=None, unindexed_properties=[], namespace=None, **kwds):
"Constructor. Takes the kind and transaction root, which cannot be\n changed after the entity is constructed, and an optional parent. Raises\n BadArgumentError or BadKeyError if kind is invalid or parent is not an\n existing Entity or Key in the datastore.\n\n Args:\n # this entity's kind\n kind: string\n # if provided, this entity's parent. Its key must be complete.\n parent: Entity or Key\n # if provided, this entity's name.\n name: string\n # if provided, this entity's id.\n id: integer\n # if provided, a sequence of property names that should not be indexed\n # by the built-in single property indices.\n unindexed_properties: list or tuple of strings\n namespace: string\n # if provided, overrides the default namespace_manager setting.\n "
ref = ()
_app = (_app)
(_app)
_namespace = ('_namespace', None)
if kwds:
raise (('Excess keyword arguments ' + (kwds)))
if (namespace is None):
namespace = _namespace
elif (_namespace is not None):
raise ('Must not set both _namespace and namespace parameters.')
(kind, 'kind', datastore_errors.BadArgumentError)
if (parent is not None):
parent = (parent)
if (_app != ()):
raise ((" %s doesn't match parent's app %s" % (_app, ())))
if (namespace is None):
namespace = ()
elif (namespace != ()):
raise ((" %s doesn't match parent's namespace %s" % (namespace, ())))
(parent._Key__reference)
namespace = (namespace)
(ref, namespace)
last_path = ()
(('utf-8'))
if ((name is not None) and (id is not None)):
raise ('Cannot set both name and id on an Entity')
if (name is not None):
(name, 'name')
(('utf-8'))
if (id is not None):
(id, 'id')
(id)
(unindexed_properties)
self.__key = (ref)
def app(self):
'Returns the name of the application that created this entity, a\n string or None if not set.\n '
return ()
def namespace(self):
'Returns the namespace of this entity, a string or None.'
return ()
def kind(self):
"Returns this entity's kind, a string."
return ()
def is_saved(self):
'Returns if this entity has been saved to the datastore.'
last_path = ()[(- 1)]
return ((() ^ ()) and ())
def is_projection(self):
'Returns if this entity is a projection from full entity.\n\n Projected entities:\n - may not contain all properties from the original entity;\n - only contain single values for lists;\n - may not contain values with the same type as the original entity.\n '
return self.__projection
def key(self):
"Returns this entity's primary key, a Key instance."
return self.__key
def parent(self):
"Returns this entity's parent, as a Key. If this entity has no parent,\n returns None.\n "
return ()
def entity_group(self):
"Returns this entity's entity group as a Key.\n\n Note that the returned Key will be incomplete if this is a a root entity\n and its key is incomplete.\n "
return ()
def unindexed_properties(self):
"Returns this entity's unindexed properties, as a frozenset of strings."
return (self, '_Entity__unindexed_properties', [])
def set_unindexed_properties(self, unindexed_properties):
(unindexed_properties, multiple) = (unindexed_properties, str)
if (not multiple):
raise (('unindexed_properties must be a sequence; received %s (a %s).' % (unindexed_properties, (unindexed_properties))))
for prop in unindexed_properties:
(prop, None)
self.__unindexed_properties = (unindexed_properties)
def __setitem__(self, name, value):
'Implements the [] operator. Used to set property value(s).\n\n If the property name is the empty string or not a string, raises\n BadPropertyError. If the value is not a supported type, raises\n BadValueError.\n '
(name, value)
(self, name, value)
def setdefault(self, name, value):
'If the property exists, returns its value. Otherwise sets it to value.\n\n If the property name is the empty string or not a string, raises\n BadPropertyError. If the value is not a supported type, raises\n BadValueError.\n '
(name, value)
return (self, name, value)
def update(self, other):
"Updates this entity's properties from the values in other.\n\n If any property name is the empty string or not a string, raises\n BadPropertyError. If any value is not a supported type, raises\n BadValueError.\n "
for (name, value) in (()):
(name, value)
def copy(self):
'The copy method is not supported.'
raise ('Entity does not support the copy() method.')
def ToXml(self):
"Returns an XML representation of this entity. Atom and gd:namespace\n properties are converted to XML according to their respective schemas. For\n more information, see:\n\n http://www.atomenabled.org/developers/syndication/\n http://code.google.com/apis/gdata/common-elements.html\n\n This is *not* optimized. It shouldn't be used anywhere near code that's\n performance-critical.\n "
xml = ('<entity kind=%s' % (()))
if ():
xml += (' key=%s' % ((self.__key)))
xml += '>'
if ():
xml += ('\n <key>%s</key>' % ())
properties = (())
if properties:
()
xml += ('\n ' + ((properties)))
xml += '\n</entity>\n'
return xml
def _PropertiesToXml(self, properties):
"Returns a list of the XML representations of each of the given\n properties. Ignores properties that don't exist in this entity.\n\n Arg:\n properties: string or list of strings\n\n Returns:\n list of strings\n "
xml_properties = []
for propname in properties:
if (propname not in self):
continue
propname_xml = (propname)
values = self[propname]
if (not (values, list)):
values = [values]
proptype = (values[0])
proptype_xml = (proptype)
escaped_values = (propname)
open_tag = ('<property name=%s type=%s>' % (propname_xml, proptype_xml))
close_tag = '</property>'
xml_properties += [((open_tag + val) + close_tag) for val in escaped_values]
return xml_properties
def _XmlEscapeValues(self, property):
"Returns a list of the XML-escaped string values for the given property.\n Raises an AssertionError if the property doesn't exist.\n\n Arg:\n property: string\n\n Returns:\n list of strings\n "
if (not (property in self)):
raise ()
xml = []
values = self[property]
if (not (values, list)):
values = [values]
for val in values:
if (val, 'ToXml'):
(())
elif (val is None):
('')
else:
(((val)))
return xml
def ToPb(self):
'Converts this Entity to its protocol buffer representation.\n\n Returns:\n entity_pb.Entity\n '
return (False)
def _ToPb(self, mark_key_as_saved=True):
'Converts this Entity to its protocol buffer representation. Not\n intended to be used by application developers.\n\n Returns:\n entity_pb.Entity\n '
pb = ()
(())
last_path = ()[(- 1)]
if (mark_key_as_saved and () and ()):
()
group = ()
if ():
root = (0)
(root)
properties = (())
()
for (name, values) in properties:
properties = (name, values)
if (not (properties, list)):
properties = [properties]
for prop in properties:
if ((() and (() in datastore_types._RAW_PROPERTY_MEANINGS)) or (name in ())):
(prop)
else:
(prop)
if (() > _MAX_INDEXED_PROPERTIES):
raise (('Too many indexed properties for entity %r.' % ()))
return pb
@staticmethod
def FromPb(pb, validate_reserved_properties=True, default_kind='<not specified>'):
'Static factory method. Returns the Entity representation of the\n given protocol buffer (datastore_pb.Entity).\n\n Args:\n pb: datastore_pb.Entity or str encoding of a datastore_pb.Entity\n validate_reserved_properties: deprecated\n default_kind: str, the kind to use if the pb has no key.\n\n Returns:\n Entity: the Entity representation of pb\n '
if (pb, str):
real_pb = ()
(pb)
pb = real_pb
return (pb)
@staticmethod
def _FromPb(pb, require_valid_key=True, default_kind='<not specified>'):
"Static factory method. Returns the Entity representation of the\n given protocol buffer (datastore_pb.Entity). Not intended to be used by\n application developers.\n\n The Entity PB's key must be complete. If it isn't, an AssertionError is\n raised.\n\n Args:\n # a protocol buffer Entity\n pb: datastore_pb.Entity\n default_kind: str, the kind to use if the pb has no key.\n\n Returns:\n # the Entity representation of the argument\n Entity\n "
if (not ()):
(())
last_path = ()[(- 1)]
if require_valid_key:
if (not (() ^ ())):
raise ()
if ():
if (not (() != 0)):
raise ()
else:
if (not ()):
raise ()
if (not ()):
raise ()
unindexed_properties = [((), 'utf-8') for p in ()]
if ():
namespace = ()
else:
namespace = ''
e = (((), 'utf-8'))
ref = e.__key._Key__reference
(())
temporary_values = {}
for prop_list in ((), ()):
for prop in prop_list:
if (() == entity_pb.Property.INDEX_VALUE):
e.__projection = True
try:
value = (prop)
except (AssertionError, AttributeError, TypeError, ValueError) as e:
raise (('Property %s is corrupt in the datastore:\n%s' % ((), ())))
multiple = ()
if multiple:
value = [value]
name = ()
cur_value = (name)
if (cur_value is None):
temporary_values[name] = value
elif ((not multiple) or (not (cur_value, list))):
raise (('Property %s is corrupt in the datastore; it has multiple values, but is not marked as multiply valued.' % name))
else:
(value)
for (name, value) in ():
decoded_name = (name, 'utf-8')
(decoded_name, value)
(e, decoded_name, value)
return e
class Query(dict):
'A datastore query.\n\n (Instead of this, consider using appengine.ext.gql.Query! It provides a\n query language interface on top of the same functionality.)\n\n Queries are used to retrieve entities that match certain criteria, including\n app id, kind, and property filters. Results may also be sorted by properties.\n\n App id and kind are required. Only entities from the given app, of the given\n type, are returned. If an ancestor is set, with Ancestor(), only entities\n with that ancestor are returned.\n\n Property filters are used to provide criteria based on individual property\n values. A filter compares a specific property in each entity to a given\n value or list of possible values.\n\n An entity is returned if its property values match *all* of the query\'s\n filters. In other words, filters are combined with AND, not OR. If an\n entity does not have a value for a property used in a filter, it is not\n returned.\n\n Property filters map filter strings of the form \'<property name> <operator>\'\n to filter values. Use dictionary accessors to set property filters, like so:\n\n > query = Query(\'Person\')\n > query[\'name =\'] = \'Ryan\'\n > query[\'age >=\'] = 21\n\n This query returns all Person entities where the name property is \'Ryan\',\n \'Ken\', or \'Bret\', and the age property is at least 21.\n\n Another way to build this query is:\n\n > query = Query(\'Person\')\n > query.update({\'name =\': \'Ryan\', \'age >=\': 21})\n\n The supported operators are =, >, <, >=, and <=. Only one inequality\n filter may be used per query. Any number of equals filters may be used in\n a single Query.\n\n A filter value may be a list or tuple of values. This is interpreted as\n multiple filters with the same filter string and different values, all ANDed\n together. 
For example, this query returns everyone with the tags "google"\n and "app engine":\n\n > Query(\'Person\', {\'tag =\': (\'google\', \'app engine\')})\n\n Result entities can be returned in different orders. Use the Order()\n method to specify properties that results will be sorted by, and in which\n direction.\n\n Note that filters and orderings may be provided at any time before the query\n is run. When the query is fully specified, Run() runs the query and returns\n an iterator. The query results can be accessed through the iterator.\n\n A query object may be reused after it\'s been run. Its filters and\n orderings can be changed to create a modified query.\n\n If you know how many result entities you need, use Get() to fetch them:\n\n > query = Query(\'Person\', {\'age >\': 21})\n > for person in query.Get(4):\n > print \'I have four pints left. Have one on me, %s!\' % person[\'name\']\n\n If you don\'t know how many results you need, or if you need them all, you\n can get an iterator over the results by calling Run():\n\n > for person in Query(\'Person\', {\'age >\': 21}).Run():\n > print \'Have a pint on me, %s!\' % person[\'name\']\n\n Get() is more efficient than Run(), so use Get() whenever possible.\n\n Finally, the Count() method returns the number of result entities matched by\n the query. The returned count is cached; successive Count() calls will not\n re-scan the datastore unless the query is changed.\n '
ASCENDING = datastore_query.PropertyOrder.ASCENDING
DESCENDING = datastore_query.PropertyOrder.DESCENDING
ORDER_FIRST = datastore_query.QueryOptions.ORDER_FIRST
ANCESTOR_FIRST = datastore_query.QueryOptions.ANCESTOR_FIRST
FILTER_FIRST = datastore_query.QueryOptions.FILTER_FIRST
OPERATORS = {'==': datastore_query.PropertyFilter._OPERATORS['=']}
(datastore_query.PropertyFilter._OPERATORS)
INEQUALITY_OPERATORS = datastore_query.PropertyFilter._INEQUALITY_OPERATORS
UPPERBOUND_INEQUALITY_OPERATORS = (['<', '<='])
FILTER_REGEX = (('^\\s*([^\\s]+)(\\s+(%s)\\s*)?$' % (OPERATORS)), (re.IGNORECASE | re.UNICODE))
__kind = None
__app = None
__namespace = None
__orderings = None
__ancestor_pb = None
__distinct = False
__group_by = None
__index_list_source = None
__cursor_source = None
__compiled_query_source = None
__filter_order = None
__filter_counter = 0
__inequality_prop = None
__inequality_count = 0
def __init__(self, kind=None, filters={}, _app=None, keys_only=False, compile=True, cursor=None, namespace=None, end_cursor=None, projection=None, distinct=None, _namespace=None):
'Constructor.\n\n Raises BadArgumentError if kind is not a string. Raises BadValueError or\n BadFilterError if filters is not a dictionary of valid filters.\n\n Args:\n namespace: string, the namespace to query.\n kind: string, the kind of entities to query, or None.\n filters: dict, initial set of filters.\n keys_only: boolean, if keys should be returned instead of entities.\n projection: iterable of property names to project.\n distinct: boolean, if projection should be distinct.\n compile: boolean, if the query should generate cursors.\n cursor: datastore_query.Cursor, the start cursor to use.\n end_cursor: datastore_query.Cursor, the end cursor to use.\n _namespace: deprecated, use namespace instead.\n '
if (namespace is None):
namespace = _namespace
elif (_namespace is not None):
raise ('Must not set both _namespace and namespace parameters.')
if (kind is not None):
(kind, 'kind', datastore_errors.BadArgumentError)
self.__kind = kind
self.__orderings = []
self.__filter_order = {}
(filters)
self.__app = (_app)
self.__namespace = (namespace)
self.__query_options = ()
if distinct:
if (not self.__query_options.projection):
raise ('cannot specify distinct without a projection')
self.__distinct = True
self.__group_by = self.__query_options.projection
def Order(self, *orderings):
"Specify how the query results should be sorted.\n\n Result entities will be sorted by the first property argument, then by the\n second, and so on. For example, this:\n\n > query = Query('Person')\n > query.Order('bday', ('age', Query.DESCENDING))\n\n sorts everyone in order of their birthday, starting with January 1.\n People with the same birthday are sorted by age, oldest to youngest.\n\n The direction for each sort property may be provided; if omitted, it\n defaults to ascending.\n\n Order() may be called multiple times. Each call resets the sort order\n from scratch.\n\n If an inequality filter exists in this Query it must be the first property\n passed to Order. Any number of sort orders may be used after the\n inequality filter property. Without inequality filters, any number of\n filters with different orders may be specified.\n\n Entities with multiple values for an order property are sorted by their\n lowest value.\n\n Note that a sort order implies an existence filter! In other words,\n Entities without the sort order property are filtered out, and *not*\n included in the query results.\n\n If the sort order property has different types in different entities - ie,\n if bob['id'] is an int and fred['id'] is a string - the entities will be\n grouped first by the property type, then sorted within type. No attempt is\n made to compare property values across types.\n\n Raises BadArgumentError if any argument is of the wrong format.\n\n Args:\n # the properties to sort by, in sort order. each argument may be either a\n # string or (string, direction) 2-tuple.\n\n Returns:\n # this query\n Query\n "
orderings = (orderings)
for (order, i) in (orderings, (((orderings)))):
if (not ((order, str) or ((order, tuple) and ((order) in [2, 3])))):
raise (('Order() expects strings or 2- or 3-tuples; received %s (a %s). ' % (order, (order))))
if (order, str):
order = (order,)
(order[0], 'sort order property', datastore_errors.BadArgumentError)
property = order[0]
direction = order[(- 1)]
if (direction not in (Query.ASCENDING, Query.DESCENDING)):
if ((order) == 3):
raise (('Order() expects Query.ASCENDING or DESCENDING; received %s' % (direction)))
direction = Query.ASCENDING
if ((self.__kind is None) and ((property != datastore_types.KEY_SPECIAL_PROPERTY) or (direction != Query.ASCENDING))):
raise (('Only %s ascending orders are supported on kindless queries' % datastore_types.KEY_SPECIAL_PROPERTY))
orderings[i] = (property, direction)
if (orderings and self.__inequality_prop and (orderings[0][0] != self.__inequality_prop)):
raise (('First ordering property must be the same as inequality filter property, if specified for this query; received %s, expected %s' % (orderings[0][0], self.__inequality_prop)))
self.__orderings = orderings
return self
def Hint(self, hint):
"Sets a hint for how this query should run.\n\n The query hint gives us information about how best to execute your query.\n Currently, we can only do one index scan, so the query hint should be used\n to indicates which index we should scan against.\n\n Use FILTER_FIRST if your first filter will only match a few results. In\n this case, it will be most efficient to scan against the index for this\n property, load the results into memory, and apply the remaining filters\n and sort orders there.\n\n Similarly, use ANCESTOR_FIRST if the query's ancestor only has a few\n descendants. In this case, it will be most efficient to scan all entities\n below the ancestor and load them into memory first.\n\n Use ORDER_FIRST if the query has a sort order and the result set is large\n or you only plan to fetch the first few results. In that case, we\n shouldn't try to load all of the results into memory; instead, we should\n scan the index for this property, which is in sorted order.\n\n Note that hints are currently ignored in the v3 datastore!\n\n Arg:\n one of datastore.Query.[ORDER_FIRST, ANCESTOR_FIRST, FILTER_FIRST]\n\n Returns:\n # this query\n Query\n "
if (hint is not self.__query_options.hint):
self.__query_options = ()
return self
def Ancestor(self, ancestor):
"Sets an ancestor for this query.\n\n This restricts the query to only return result entities that are descended\n from a given entity. In other words, all of the results will have the\n ancestor as their parent, or parent's parent, or etc.\n\n Raises BadArgumentError or BadKeyError if parent is not an existing Entity\n or Key in the datastore.\n\n Args:\n # the key must be complete\n ancestor: Entity or Key\n\n Returns:\n # this query\n Query\n "
self.__ancestor_pb = ()
return self
def IsKeysOnly(self):
'Returns True if this query is keys only, false otherwise.'
return self.__query_options.keys_only
def GetQueryOptions(self):
'Returns a datastore_query.QueryOptions for the current instance.'
return self.__query_options
def GetQuery(self):
'Returns a datastore_query.Query for the current instance.'
return ()
def GetOrder(self):
'Gets a datastore_query.Order for the current instance.\n\n Returns:\n datastore_query.Order or None if there are no sort orders set on the\n current Query.\n '
orders = [(property, direction) for (property, direction) in self.__orderings]
if orders:
return (orders)
return None
def GetFilterPredicate(self):
'Returns a datastore_query.FilterPredicate for the current instance.\n\n Returns:\n datastore_query.FilterPredicate or None if no filters are set on the\n current Query.\n '
ordered_filters = [(i, f) for (f, i) in ()]
()
property_filters = []
for (_, filter_str) in ordered_filters:
if (filter_str not in self):
continue
values = self[filter_str]
match = (filter_str, values)
name = (1)
op = (3)
if ((op is None) or (op == '==')):
op = '='
((name, op, values))
if property_filters:
return (datastore_query.CompositeFilter.AND, property_filters)
return None
def GetDistinct(self):
'Returns True if the current instance is distinct.\n\n Returns:\n A boolean indicating if the distinct flag is set.\n '
return self.__distinct
def GetIndexList(self):
'Get the index list from the last run of this query.\n\n Returns:\n A list of indexes used by the last run of this query.\n\n Raises:\n AssertionError: The query has not yet been run.\n '
index_list_function = self.__index_list_source
if index_list_function:
return ()
raise ('No index list available because this query has not been executed')
def GetCursor(self):
'Get the cursor from the last run of this query.\n\n The source of this cursor varies depending on what the last call was:\n - Run: A cursor that points immediately after the last result pulled off\n the returned iterator.\n - Get: A cursor that points immediately after the last result in the\n returned list.\n - Count: A cursor that points immediately after the last result counted.\n\n Returns:\n A datastore_query.Cursor object that can be used in subsequent query\n requests.\n\n Raises:\n AssertionError: The query has not yet been run or cannot be compiled.\n '
cursor_function = self.__cursor_source
if cursor_function:
cursor = ()
if cursor:
return cursor
raise ('No cursor available, either this query has not been executed or there is no compilation available for this kind of query')
def GetBatcher(self, config=None):
'Runs this query and returns a datastore_query.Batcher.\n\n This is not intended to be used by application developers. Use Get()\n instead!\n\n Args:\n config: Optional Configuration to use for this request.\n\n Returns:\n # an iterator that provides access to the query results\n Iterator\n '
query_options = (config)
if (self.__distinct and (query_options.projection != self.__group_by)):
raise ('cannot override projection when distinct is set')
return ((), query_options)
def Run(self, **kwargs):
"Runs this query.\n\n If a filter string is invalid, raises BadFilterError. If a filter value is\n invalid, raises BadValueError. If an IN filter is provided, and a sort\n order on another property is provided, raises BadQueryError.\n\n If you know in advance how many results you want, use limit=#. It's\n more efficient.\n\n Args:\n kwargs: Any keyword arguments accepted by datastore_query.QueryOptions().\n\n Returns:\n # an iterator that provides access to the query results\n Iterator\n "
config = (kwargs)
itr = (())
self.__index_list_source = itr.GetIndexList
self.__cursor_source = itr.cursor
self.__compiled_query_source = itr._compiled_query
return itr
def Get(self, limit, offset=0, **kwargs):
'Deprecated, use list(Run(...)) instead.\n\n Args:\n limit: int or long representing the maximum number of entities to return.\n offset: int or long representing the number of entities to skip\n kwargs: Any keyword arguments accepted by datastore_query.QueryOptions().\n\n Returns:\n # a list of entities\n [Entity, ...]\n '
if (limit is None):
('batch_size', _MAX_INT_32)
return (())
def Count(self, limit=1000, **kwargs):
'Returns the number of entities that this query matches.\n\n Args:\n limit, a number or None. If there are more results than this, stop short\n and just return this number. Providing this argument makes the count\n operation more efficient.\n config: Optional Configuration to use for this request.\n\n Returns:\n The number of results.\n '
original_offset = ('offset', 0)
if (limit is None):
offset = _MAX_INT_32
else:
offset = ((limit + original_offset), _MAX_INT_32)
kwargs['limit'] = 0
kwargs['offset'] = offset
config = (kwargs)
batch = (())
self.__index_list_source = (lambda : [index for (index, state) in batch.index_list])
self.__cursor_source = (lambda : (0))
self.__compiled_query_source = (lambda : batch._compiled_query)
return (0, (batch.skipped_results - original_offset))
def __iter__(self):
raise ('Query objects should not be used as iterators. Call Run() first.')
def __getstate__(self):
state = ()
state['_Query__index_list_source'] = None
state['_Query__cursor_source'] = None
state['_Query__compiled_query_source'] = None
return state
def __setstate__(self, state):
if ('_Query__query_options' not in state):
state['_Query__query_options'] = ()
self.__dict__ = state
def __setitem__(self, filter, value):
'Implements the [] operator. Used to set filters.\n\n If the filter string is empty or not a string, raises BadFilterError. If\n the value is not a supported type, raises BadValueError.\n '
if (value, tuple):
value = (value)
(' ', value)
match = (filter, value)
property = (1)
operator = (3)
(self, filter, value)
if ((operator in self.INEQUALITY_OPERATORS) and (property != datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY)):
if (self.__inequality_prop is None):
self.__inequality_prop = property
elif (not (self.__inequality_prop == property)):
raise ()
self.__inequality_count += 1
if (filter not in self.__filter_order):
self.__filter_order[filter] = self.__filter_counter
self.__filter_counter += 1
def setdefault(self, filter, value):
'If the filter exists, returns its value. Otherwise sets it to value.\n\n If the property name is the empty string or not a string, raises\n BadPropertyError. If the value is not a supported type, raises\n BadValueError.\n '
(' ', value)
(filter, value)
return (self, filter, value)
def __delitem__(self, filter):
'Implements the del [] operator. Used to remove filters.'
(self, filter)
del self.__filter_order[filter]
match = (filter)
property = (1)
operator = (3)
if (operator in self.INEQUALITY_OPERATORS):
if (not (self.__inequality_count >= 1)):
raise ()
if (not (property == self.__inequality_prop)):
raise ()
self.__inequality_count -= 1
if (self.__inequality_count == 0):
self.__inequality_prop = None
def update(self, other):
"Updates this query's filters from the ones in other.\n\n If any filter string is invalid, raises BadFilterError. If any value is\n not a supported type, raises BadValueError.\n "
for (filter, value) in (()):
(filter, value)
def copy(self):
'The copy method is not supported.'
raise ('Query does not support the copy() method.')
def _CheckFilter(self, filter, values):
"Type check a filter string and list of values.\n\n Raises BadFilterError if the filter string is empty, not a string, or\n invalid. Raises BadValueError if the value type is not supported.\n\n Args:\n filter: String containing the filter text.\n values: List of associated filter values.\n\n Returns:\n re.MatchObject (never None) that matches the 'filter'. Group 1 is the\n property name, group 3 is the operator. (Group 2 is unused.)\n "
try:
match = (filter)
if (not match):
raise (('Could not parse filter string: %s' % (filter)))
except TypeError:
raise (('Could not parse filter string: %s' % (filter)))
property = (1)
operator = (3)
if (operator is None):
operator = '='
if (values, tuple):
values = (values)
elif (not (values, list)):
values = [values]
if (values[0], datastore_types._RAW_PROPERTY_TYPES):
raise (('Filtering on %s properties is not supported.' % (values[0])))
if ((operator in self.INEQUALITY_OPERATORS) and (property != datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY)):
if (self.__inequality_prop and (property != self.__inequality_prop)):
raise (('Only one property per query may have inequality filters (%s).' % (self.INEQUALITY_OPERATORS)))
elif (((self.__orderings) >= 1) and (self.__orderings[0][0] != property)):
raise (('Inequality operators (%s) must be on the same property as the first sort order, if any sort orders are supplied' % (self.INEQUALITY_OPERATORS)))
if ((self.__kind is None) and (property != datastore_types.KEY_SPECIAL_PROPERTY) and (property != datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY)):
raise (('Only %s filters are allowed on kindless queries.' % datastore_types.KEY_SPECIAL_PROPERTY))
if (property == datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY):
if self.__kind:
raise (('Only kindless queries can have %s filters.' % datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY))
if (not (operator in self.UPPERBOUND_INEQUALITY_OPERATORS)):
raise (('Only %s operators are supported with %s filters.' % (self.UPPERBOUND_INEQUALITY_OPERATORS, datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY)))
if (property in datastore_types._SPECIAL_PROPERTIES):
if (property == datastore_types.KEY_SPECIAL_PROPERTY):
for value in values:
if (not (value, Key)):
raise (('%s filter value must be a Key; received %s (a %s)' % (datastore_types.KEY_SPECIAL_PROPERTY, value, (value))))
return match
def _Run(self, limit=None, offset=None, prefetch_count=None, next_count=None, **kwargs):
'Deprecated, use Run() instead.'
return ()
def _ToPb(self, limit=None, offset=None, count=None):
query_options = ()
return ((), query_options)
def _GetCompiledQuery(self):
'Returns the internal-only pb representation of the last query run.\n\n Do not use.\n\n Raises:\n AssertionError: Query not compiled or not yet executed.\n '
compiled_query_function = self.__compiled_query_source
if compiled_query_function:
compiled_query = ()
if compiled_query:
return compiled_query
raise ('No compiled query available, either this query has not been executed or there is no compilation available for this kind of query')
GetCompiledQuery = _GetCompiledQuery
GetCompiledCursor = GetCursor
def AllocateIdsAsync(model_key, size=None, **kwargs):
'Asynchronously allocates a range of IDs.\n\n Identical to datastore.AllocateIds() except returns an asynchronous object.\n Call get_result() on the return value to block on the call and get the\n results.\n '
max = ('max', None)
config = (kwargs)
if ((config, 'read_policy', None) == EVENTUAL_CONSISTENCY):
raise ('read_policy is only supported on read operations.')
(keys, _) = (model_key)
if ((keys) > 1):
raise ('Cannot allocate IDs for more than one model key at a time')
rpc = (config, keys[0], size, max)
return rpc
def AllocateIds(model_key, size=None, **kwargs):
"Allocates a range of IDs of size or with max for the given key.\n\n Allocates a range of IDs in the datastore such that those IDs will not\n be automatically assigned to new entities. You can only allocate IDs\n for model keys from your app. If there is an error, raises a subclass of\n datastore_errors.Error.\n\n Either size or max must be provided but not both. If size is provided then a\n range of the given size is returned. If max is provided then the largest\n range of ids that are safe to use with an upper bound of max is returned (can\n be an empty range).\n\n Max should only be provided if you have an existing numeric id range that you\n want to reserve, e.g. bulk loading entities that already have IDs. If you\n don't care about which IDs you receive, use size instead.\n\n Args:\n model_key: Key or string to serve as a model specifying the ID sequence\n in which to allocate IDs\n size: integer, number of IDs to allocate.\n max: integer, upper bound of the range of IDs to allocate.\n config: Optional Configuration to use for this request.\n\n Returns:\n (start, end) of the allocated range, inclusive.\n "
return ()
class MultiQuery(Query):
"Class representing a query which requires multiple datastore queries.\n\n This class is actually a subclass of datastore.Query as it is intended to act\n like a normal Query object (supporting the same interface).\n\n Does not support keys only queries, since it needs whole entities in order\n to merge sort them. (That's not true if there are no sort orders, or if the\n sort order is on __key__, but allowing keys only queries in those cases, but\n not in others, would be confusing.)\n "
def __init__(self, bound_queries, orderings):
if ((bound_queries) > MAX_ALLOWABLE_QUERIES):
raise (('Cannot satisfy query -- too many subqueries (max: %d, got %d). Probable cause: too many IN/!= filters in query.' % (MAX_ALLOWABLE_QUERIES, (bound_queries))))
projection = (bound_queries and ().projection)
for query in bound_queries:
if (projection != ().projection):
raise ('All queries must have the same projection.')
if ():
raise ('MultiQuery does not support keys_only.')
self.__projection = projection
self.__bound_queries = bound_queries
self.__orderings = orderings
self.__compile = False
def __str__(self):
res = 'MultiQuery: '
for query in self.__bound_queries:
res = ('%s %s' % (res, (query)))
return res
def Get(self, limit, offset=0, **kwargs):
'Deprecated, use list(Run(...)) instead.\n\n Args:\n limit: int or long representing the maximum number of entities to return.\n offset: int or long representing the number of entities to skip\n kwargs: Any keyword arguments accepted by datastore_query.QueryOptions().\n\n Returns:\n A list of entities with at most "limit" entries (less if the query\n completes before reading limit values).\n '
if (limit is None):
('batch_size', _MAX_INT_32)
return (())
class SortOrderEntity(object):
'Allow entity comparisons using provided orderings.\n\n The iterator passed to the constructor is eventually consumed via\n calls to GetNext(), which generate new SortOrderEntity s with the\n same orderings.\n '
def __init__(self, entity_iterator, orderings):
'Ctor.\n\n Args:\n entity_iterator: an iterator of entities which will be wrapped.\n orderings: an iterable of (identifier, order) pairs. order\n should be either Query.ASCENDING or Query.DESCENDING.\n '
self.__entity_iterator = entity_iterator
self.__entity = None
self.__min_max_value_cache = {}
try:
self.__entity = (entity_iterator)
except StopIteration:
raise
else:
self.__orderings = orderings
def __str__(self):
return (self.__entity)
def GetEntity(self):
'Gets the wrapped entity.'
return self.__entity
def GetNext(self):
'Wrap and return the next entity.\n\n The entity is retrieved from the iterator given at construction time.\n '
return (self.__entity_iterator, self.__orderings)
def CmpProperties(self, that):
'Compare two entities and return their relative order.\n\n Compares self to that based on the current sort orderings and the\n key orders between them. Returns negative, 0, or positive depending on\n whether self is less, equal to, or greater than that. This\n comparison returns as if all values were to be placed in ascending order\n (highest value last). Only uses the sort orderings to compare (ignores\n keys).\n\n Args:\n that: SortOrderEntity\n\n Returns:\n Negative if self < that\n Zero if self == that\n Positive if self > that\n '
if (not self.__entity):
return (self.__entity, that.__entity)
for (identifier, order) in self.__orderings:
value1 = (self, identifier, order)
value2 = (that, identifier, order)
result = (value1, value2)
if (order == Query.DESCENDING):
result = (- result)
if result:
return result
return 0
def __GetValueForId(self, sort_order_entity, identifier, sort_order):
value = (sort_order_entity.__entity, identifier)
if (value, list):
entity_key = ()
if ((entity_key, identifier) in self.__min_max_value_cache):
value = self.__min_max_value_cache[(entity_key, identifier)]
elif (sort_order == Query.DESCENDING):
value = (value)
else:
value = (value)
self.__min_max_value_cache[(entity_key, identifier)] = value
return value
def __cmp__(self, that):
'Compare self to that w.r.t. values defined in the sort order.\n\n Compare an entity with another, using sort-order first, then the key\n order to break ties. This can be used in a heap to have faster min-value\n lookup.\n\n Args:\n that: other entity to compare to\n Returns:\n negative: if self is less than that in sort order\n zero: if self is equal to that in sort order\n positive: if self is greater than that in sort order\n '
property_compare = (that)
if property_compare:
return property_compare
else:
return ((), ())
def _ExtractBounds(self, config):
'This function extracts the range of results to consider.\n\n Since MultiQuery dedupes in memory, we must apply the offset and limit in\n memory. The results that should be considered are\n results[lower_bound:upper_bound].\n\n We also pass the offset=0 and limit=upper_bound to the base queries to\n optimize performance.\n\n Args:\n config: The base datastore_query.QueryOptions.\n\n Returns:\n a tuple consisting of the lower_bound and upper_bound to impose in memory\n and the config to use with each bound query. The upper_bound may be None.\n '
if (config is None):
return (0, None, None)
lower_bound = (config.offset or 0)
upper_bound = config.limit
if lower_bound:
if (upper_bound is not None):
upper_bound = ((lower_bound + upper_bound), _MAX_INT_32)
config = ()
return (lower_bound, upper_bound, config)
def __GetProjectionOverride(self, config):
'Returns a tuple of (original projection, projeciton override).\n\n If projection is None, there is no projection. If override is None,\n projection is sufficent for this query.\n '
projection = (config)
if (projection is None):
projection = self.__projection
else:
projection = projection
if (not projection):
return (None, None)
override = ()
for (prop, _) in self.__orderings:
if (prop not in projection):
(prop)
if (not override):
return (projection, None)
return (projection, (projection + (override)))
  def Run(self, **kwargs):
    """Return an iterable output with all results in order.

    Merge sort the results. First create a list of iterators, then walk
    through them and yield results in order.

    Args:
      kwargs: Any keyword arguments accepted by
        datastore_query.QueryOptions().

    Returns:
      An iterator for the result set.
    """
    # NOTE(review): most call targets in this method have been stripped
    # (e.g. `(kwargs)`, `(result_heap)` placeholders); the merge-sort logic
    # cannot run as written. Kept byte-identical; restore from the original
    # google.appengine SDK source.
    config = (kwargs)
    if (config and config.keys_only):
      raise ('keys only queries are not supported by multi-query.')
    (lower_bound, upper_bound, config) = (config)
    (projection, override) = (config)
    if override:
      config = ()
    results = []
    count = 1
    log_level = (logging.DEBUG - 1)
    for bound_query in self.__bound_queries:
      (log_level, ('Running query #%i' % count))
      (())
      count += 1
    def GetDedupeKey(sort_order_entity):
      # Key used to de-duplicate entities across the merged subqueries.
      if projection:
        return ((), ((())))
      else:
        return ()
    def IterateResults(results):
      """Iterator function to return all results in sorted order.

      Iterate over the array of results, yielding the next element, in
      sorted order. This function is destructive (results will be empty
      when the operation is complete).

      Args:
        results: list of result iterators to merge and iterate through

      Yields:
        The next result in sorted order.
      """
      result_heap = []
      for result in results:
        heap_value = (result, self.__orderings)
        if ():
          (result_heap, heap_value)
      used_keys = ()
      while result_heap:
        if ((upper_bound is not None) and ((used_keys) >= upper_bound)):
          break
        top_result = (result_heap)
        dedupe_key = (top_result)
        if (dedupe_key not in used_keys):
          result = ()
          if override:
            # Strip properties that were fetched only so the merge sort
            # could compare entities -- presumably; verify against SDK.
            for key in (()):
              if (key not in projection):
                del result[key]
          (yield result)
        else:
          pass
        (dedupe_key)
        results_to_push = []
        while result_heap:
          next = (result_heap)
          if (dedupe_key != (next)):
            (next)
            break
          else:
            (())
          (())
        for popped_result in results_to_push:
          if ():
            (result_heap, popped_result)
    it = (results)
    try:
      # Skip `lower_bound` results to apply the offset in memory.
      for _ in (lower_bound):
        (it)
    except StopIteration:
      raise
    return it
  def Count(self, limit=1000, **kwargs):
    """Return the number of matched entities for this query.

    Will return the de-duplicated count of results. Will call the more
    efficient Get() function if a limit is given.

    Args:
      limit: maximum number of entries to count (for any result > limit,
        return limit).
      config: Optional Configuration to use for this request.

    Returns:
      count of the number of entries returned.
    """
    # NOTE(review): call targets stripped (e.g. `(kwargs)`, `(dedupe_key)`
    # placeholders); the counting logic cannot run as written. Restore from
    # the original google.appengine SDK source.
    kwargs['limit'] = limit
    config = (kwargs)
    (projection, override) = (config)
    if (not projection):
      config = ()
    elif override:
      config = ()
    (lower_bound, upper_bound, config) = (config)
    used_keys = ()
    for bound_query in self.__bound_queries:
      for result in ():
        if projection:
          dedupe_key = ((), (()))
        else:
          dedupe_key = result
        (dedupe_key)
        if (upper_bound and ((used_keys) >= upper_bound)):
          return (upper_bound - lower_bound)
    # Presumably max(0, len(used_keys) - lower_bound) -- verify.
    return (0, ((used_keys) - lower_bound))
  def GetIndexList(self):
    # MultiQuery cannot report a single index list; always raises.
    # NOTE(review): exception class stripped from the `raise` -- as written
    # this raises a plain str, which is invalid.
    raise ('No index_list available for a MultiQuery (queries using "IN" or "!=" operators)')
  def GetCursor(self):
    # Cursors are not supported across merged subqueries; always raises.
    # NOTE(review): exception class stripped from the `raise`.
    raise ('No cursor available for a MultiQuery (queries using "IN" or "!=" operators)')
  def _GetCompiledQuery(self):
    'Internal only, do not use.'
    # NOTE(review): exception class stripped from the `raise`.
    raise ('No compilation available for a MultiQuery (queries using "IN" or "!=" operators)')
  def __setitem__(self, query_filter, value):
    """Add a new filter by setting it on all subqueries.

    If any of the setting operations raise an exception, the ones
    that succeeded are undone and the exception is propagated
    upward.

    Args:
      query_filter: a string of the form "property operand".
      value: the value that the given property is compared against.
    """
    # NOTE(review): stripped calls -- `(self.__bound_queries)` was
    # presumably enumerate(...), `((query_filter, None))` an append saving
    # the old value, and the rollback `(...)` a zip(...). Broken as
    # written; restore from the SDK source.
    saved_items = []
    for (index, query) in (self.__bound_queries):
      ((query_filter, None))
      try:
        query[query_filter] = value
      except:
        # Roll back subqueries updated before the failure, then re-raise.
        for (q, old_value) in (self.__bound_queries[:index], saved_items):
          if (old_value is not None):
            q[query_filter] = old_value
          else:
            del q[query_filter]
        raise
  def __delitem__(self, query_filter):
    """Delete a filter by deleting it from all subqueries.

    If a KeyError is raised during the attempt, it is ignored, unless
    every subquery raised a KeyError. If any other exception is
    raised, any deletes will be rolled back.

    Args:
      query_filter: the filter to delete.

    Raises:
      KeyError: No subquery had an entry containing query_filter.
    """
    # NOTE(review): stripped calls (len/enumerate/append/zip presumed)
    # leave this method inoperable as written; restore from the SDK source.
    subquery_count = (self.__bound_queries)
    keyerror_count = 0
    saved_items = []
    for (index, query) in (self.__bound_queries):
      try:
        ((query_filter, None))
        del query[query_filter]
      except KeyError:
        keyerror_count += 1
      except:
        # Any other failure: restore the filters deleted so far, re-raise.
        for (q, old_value) in (self.__bound_queries[:index], saved_items):
          if (old_value is not None):
            q[query_filter] = old_value
        raise
    if (keyerror_count == subquery_count):
      # Every subquery lacked the filter: surface a single KeyError.
      raise (query_filter)
  def __iter__(self):
    # Iterates over the underlying bound subqueries.
    # NOTE(review): presumably iter(self.__bound_queries); call stripped.
    return (self.__bound_queries)
  # Deprecated aliases kept so legacy callers keep working.
  GetCompiledCursor = GetCursor
  GetCompiledQuery = _GetCompiledQuery
def RunInTransaction(function, *args, **kwargs):
  """Runs a function inside a datastore transaction.

  Runs the user-provided function inside transaction, retries default
  number of times.

  Args:
    function: a function to be run inside the transaction on all remaining
      arguments
    *args: positional arguments for function.
    **kwargs: keyword arguments for function.

  Returns:
    the function's return value, if any

  Raises:
    TransactionFailedError, if the transaction could not be committed.
  """
  # NOTE(review): delegate call stripped -- presumably
  # RunInTransactionOptions(None, function, *args, **kwargs); as written
  # this returns a tuple and silently drops kwargs.
  return (None, function, *args)
def RunInTransactionCustomRetries(retries, function, *args, **kwargs):
  """Runs a function inside a datastore transaction.

  Runs the user-provided function inside transaction, with a specified
  number of retries.

  Args:
    retries: number of retries (not counting the initial try)
    function: a function to be run inside the transaction on all remaining
      arguments
    *args: positional arguments for function.
    **kwargs: keyword arguments for function.

  Returns:
    the function's return value, if any

  Raises:
    TransactionFailedError, if the transaction could not be committed.
  """
  # NOTE(review): calls stripped -- `options = ()` presumably built a
  # TransactionOptions(retries=retries) and the return delegated to
  # RunInTransactionOptions; broken as written.
  options = ()
  return (options, function, *args)
def RunInTransactionOptions(options, function, *args, **kwargs):
  """Runs a function inside a full-featured, ACID datastore transaction.

  Every Put, Get, and Delete call in the function is made within the
  transaction; all entities involved must belong to the same entity group.
  The function may be run more than once on commit collisions, so it should
  be idempotent. Raising any exception rolls the transaction back and
  re-raises it up to this function's caller; raising
  datastore_errors.Rollback rolls back without re-raising. Nested
  transactions are not supported.

  Args:
    options: TransactionOptions specifying options (number of retries, etc)
      for this transaction
    function: a function to be run inside the transaction on all remaining
      arguments
    *args: positional arguments for function.
    **kwargs: keyword arguments for function.

  Returns:
    the function's return value, if any

  Raises:
    TransactionFailedError, if the transaction could not be committed.
  """
  # NOTE(review): nearly every call target in this function has been
  # stripped (`()`, `(options)`, `(*args)` placeholders; `(*args)` is not
  # even valid syntax); the retry loop cannot run as written. Kept
  # byte-identical; restore from the original google.appengine SDK source.
  options = (options)
  if ():
    if (options.propagation in (None, datastore_rpc.TransactionOptions.NESTED)):
      raise ('Nested transactions are not supported.')
    elif (options.propagation is datastore_rpc.TransactionOptions.INDEPENDENT):
      # INDEPENDENT: pause the current transaction, run a fresh one.
      txn_connection = ()
      (_thread_local.old_connection)
      try:
        return (options, function, *args)
      finally:
        (txn_connection)
    return (*args)
  if (options.propagation is datastore_rpc.TransactionOptions.MANDATORY):
    raise ('Requires an existing transaction.')
  retries = options.retries
  if (retries is None):
    retries = DEFAULT_TRANSACTION_RETRIES
  _thread_local.old_connection = ()
  for _ in (0, (retries + 1)):
    new_connection = (options)
    (new_connection)
    try:
      (ok, result) = (new_connection, function, args, kwargs)
      if ok:
        return result
    finally:
      # Always restore the pre-transaction connection.
      (_thread_local.old_connection)
  raise ('The transaction could not be committed. Please try again.')
def _DoOneTry(new_connection, function, args, kwargs):
  """Helper to call a function in a transaction, once.

  Args:
    new_connection: The new, transactional, connection object.
    function: The function to call.
    args: Tuple of positional arguments.
    kwargs: Dict of keyword arguments.
  """
  # NOTE(review): call targets stripped (function invocation, rollback,
  # commit, isinstance check, logging); cannot run as written. Restore from
  # the original google.appengine SDK source.
  try:
    result = (*args)
  except:
    original_exception = ()
    try:
      ()
    except Exception:
      ('Exception sending Rollback:')
    (type, value, trace) = original_exception
    if (value, datastore_errors.Rollback):
      # A deliberate Rollback is treated as success with no result.
      return (True, None)
    else:
      raise (trace)
  else:
    if ():
      return (True, result)
    else:
      # Commit collision: signal the caller to retry.
      ('Transaction collision. Retrying... %s', '')
      return (False, None)
def _MaybeSetupTransaction(request, keys):
  """Begin a transaction, if necessary, and populate it in the request.

  This API exists for internal backwards compatibility, primarily with
  api/taskqueue/taskqueue.py.

  Args:
    request: A protobuf with a mutable_transaction() method.
    keys: Unused.

  Returns:
    A transaction if we're inside a transaction, otherwise None
  """
  # NOTE(review): delegate call stripped; presumably forwarded `request` to
  # the active connection's transaction-setup helper.
  return (request)
def IsInTransaction():
  """Determine whether already running in transaction.

  Returns:
    True if already running in transaction, else False.
  """
  # NOTE(review): stripped -- presumably
  # isinstance(<current connection>, datastore_rpc.TransactionalConnection);
  # as written it returns an always-truthy tuple.
  return ((), datastore_rpc.TransactionalConnection)
def Transactional(_func=None, **kwargs):
  """A decorator that makes sure a function is run in a transaction.

  Defaults propagation to datastore_rpc.TransactionOptions.ALLOWED, which
  means any existing transaction will be used in place of creating a new
  one.

  WARNING: Reading from the datastore while in a transaction will not see
  any changes made in the same transaction. If the function being decorated
  relies on seeing all changes made in the calling scope, set
  propagation=datastore_rpc.TransactionOptions.NESTED.

  Args:
    _func: do not use.
    **kwargs: TransactionOptions configuration options.

  Returns:
    A wrapper for the given function that creates a new transaction if
    needed.
  """
  # NOTE(review): stripped calls (kwargs pop/setdefault, TransactionOptions
  # construction, RunInTransactionOptions delegate); broken as written.
  if (_func is not None):
    return (_func)
  if (not ('require_new', None)):
    ('propagation', datastore_rpc.TransactionOptions.ALLOWED)
  options = ()
  def outer_wrapper(func):
    def inner_wrapper(*args, **kwds):
      return (options, func, *args)
    return inner_wrapper
  return outer_wrapper
@(1)
def NonTransactional(_func=None, allow_existing=True):
  """A decorator that ensures a function is run outside a transaction.

  If there is an existing transaction (and allow_existing=True), the
  existing transaction is paused while the function is executed.

  Args:
    _func: do not use
    allow_existing: If false, throw an exception if called from within a
      transaction

  Returns:
    A wrapper for the decorated function that ensures it runs outside a
    transaction.
  """
  # NOTE(review): the decorator expression `@(1)` is a stripped call
  # (presumably datastore_rpc._positional(1)), and the calls below
  # (IsInTransaction, connection push/pop) are also stripped; inoperable as
  # written. Restore from the original google.appengine SDK source.
  if (_func is not None):
    return (_func)
  def outer_wrapper(func):
    def inner_wrapper(*args, **kwds):
      if (not ()):
        # Not in a transaction: just call through.
        return (*args)
      if (not allow_existing):
        raise ('Function cannot be called from within a transaction.')
      # Pause the current transactional connection around the call.
      txn_connection = ()
      (_thread_local.old_connection)
      try:
        return (*args)
      finally:
        (txn_connection)
    return inner_wrapper
  return outer_wrapper
def _GetCompleteKeyOrError(arg):
  """Expects an Entity or a Key, and returns the corresponding Key.

  Raises BadArgumentError or BadKeyError if arg is a different type or is
  incomplete.

  Args:
    arg: Entity or Key

  Returns:
    Key
  """
  # NOTE(review): the isinstance(...) calls have been stripped -- e.g.
  # `if (arg, Key):` is an always-truthy tuple, so every branch below is
  # wrong as written. Restore from the original google.appengine SDK
  # source.
  if (arg, Key):
    key = arg
  elif (arg, str):
    key = (arg)
  elif (arg, Entity):
    key = ()
  elif (not (arg, Key)):
    raise (('Expects argument to be an Entity or Key; received %s (a %s).' % (arg, (arg))))
  if (not (key, Key)):
    raise ()
  if (not ()):
    # Presumably key.has_id_or_name() -- incomplete keys are rejected.
    raise (('Key %r is not complete.' % key))
  return key
def _GetPropertyValue(entity, property):
  """Returns an entity's value for a given property name.

  Handles special properties like __key__ as well as normal properties.

  Args:
    entity: datastore.Entity
    property: str; the property name

  Returns:
    property value. For __key__, a datastore_types.Key.

  Raises:
    KeyError, if the entity does not have the given property.
  """
  # NOTE(review): stripped calls -- `raise (property)` presumably raised
  # KeyError(property) and `return ()` presumably returned entity.key();
  # broken as written.
  if (property in datastore_types._SPECIAL_PROPERTIES):
    if (property == datastore_types._UNAPPLIED_LOG_TIMESTAMP_SPECIAL_PROPERTY):
      raise (property)
    if (not (property == datastore_types.KEY_SPECIAL_PROPERTY)):
      raise ()
    return ()
  else:
    return entity[property]
def _AddOrAppend(dictionary, key, value):
"Adds the value to the existing values in the dictionary, if any.\n\n If dictionary[key] doesn't exist, sets dictionary[key] to value.\n\n If dictionary[key] is not a list, sets dictionary[key] to [old_value, value].\n\n If dictionary[key] is a list, appends value to that list.\n\n Args:\n dictionary: a dict\n key, value: anything\n "
if (key in dictionary):
existing_value = dictionary[key]
if (existing_value, list):
(value)
else:
dictionary[key] = [existing_value, value]
else:
dictionary[key] = value
class Iterator(datastore_query.ResultsIterator):
  """Thin wrapper of datastore_query.ResultsIterator.

  Deprecated, do not use, only for backwards compatability.
  """
  def _Next(self, count=None):
    # Fetch up to `count` results (default 20) from this iterator.
    # NOTE(review): calls stripped -- `(r)` was presumably
    # result.append(r) and `(result)` was len(result); broken as written.
    if (count is None):
      count = 20
    result = []
    for r in self:
      if ((result) >= count):
        break
      (r)
    return result
  def GetCompiledCursor(self, query):
    # NOTE(review): return expression stripped; presumably returned this
    # iterator's cursor -- verify against the SDK source.
    return ()
  def GetIndexList(self):
    'Returns the list of indexes used to perform the query.'
    # NOTE(review): `()` presumably fetched (index, state) pairs from the
    # underlying iterator; call stripped.
    tuple_index_list = ()
    return [index for (index, state) in tuple_index_list]
  # Deprecated aliases kept for old callers.
  _Get = _Next
  index_list = GetIndexList
# Deprecated backwards-compatibility aliases: older code imports these names
# from this module even though the implementations live elsewhere.
DatastoreRPC = apiproxy_stub_map.UserRPC
GetRpcFromKwargs = _GetConfigFromKwargs
_CurrentTransactionKey = IsInTransaction
_ToDatastoreError = datastore_rpc._ToDatastoreError
_DatastoreExceptionFromErrorCodeAndDetail = datastore_rpc._DatastoreExceptionFromErrorCodeAndDetail
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""PageSpeed configuration tools.
Library for parsing pagespeed configuration data from app.yaml and working
with these in memory.
"""
import google
from google.appengine.api import validation
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_listener
from google.appengine.api import yaml_object
# Blacklist entries: an http(s) scheme followed by up to 499 further
# non-whitespace characters.
_URL_BLACKLIST_REGEX = r"http(s)?://\S{0,499}"
# Rewriter names are simple identifiers, e.g. 'CombineCss'.
_REWRITER_NAME_REGEX = r"[a-zA-Z0-9_]+"
# Domain patterns: optional scheme, name with '*' wildcards, optional port.
_DOMAINS_TO_REWRITE_REGEX = r"(http(s)?://)?[-a-zA-Z0-9_.*]+(:\d+)?"
# Keys of the pagespeed section in app.yaml.
URL_BLACKLIST = "url_blacklist"
ENABLED_REWRITERS = "enabled_rewriters"
DISABLED_REWRITERS = "disabled_rewriters"
DOMAINS_TO_REWRITE = "domains_to_rewrite"
class MalformedPagespeedConfiguration(Exception):
  """Raised when a PageSpeed API configuration file cannot be parsed as a
  single valid configuration section."""
class PagespeedEntry(validation.Validated):
  """Describes the format of a pagespeed configuration from a yaml file.

  URL blacklist entries are patterns (with '?' and '*' as wildcards). Any
  URLs that match a pattern on the blacklist will not be optimized by
  PageSpeed.

  Rewriter names are strings (like 'CombineCss' or 'RemoveComments')
  describing individual PageSpeed rewriters. A full list of valid rewriter
  names can be found in the PageSpeed documentation.

  The domains-to-rewrite list is a whitelist of domain name patterns with
  '*' as a wildcard, optionally starting with 'http://' or 'https://'. If no
  protocol is given, 'http://' is assumed. A resource will only be rewritten
  if it is on the same domain as the HTML that references it, or if its
  domain is on the domains-to-rewrite list.
  """
  # Every attribute is optional; each is a repeated value validated against
  # the corresponding module-level regex.
  ATTRIBUTES = {
      URL_BLACKLIST: validation.Optional(
          validation.Repeated(validation.Regex(_URL_BLACKLIST_REGEX))
      ),
      ENABLED_REWRITERS: validation.Optional(
          validation.Repeated(validation.Regex(_REWRITER_NAME_REGEX))
      ),
      DISABLED_REWRITERS: validation.Optional(
          validation.Repeated(validation.Regex(_REWRITER_NAME_REGEX))
      ),
      DOMAINS_TO_REWRITE: validation.Optional(
          validation.Repeated(validation.Regex(_DOMAINS_TO_REWRITE_REGEX))
      ),
  }
def LoadPagespeedEntry(pagespeed_entry, open_fn=None):
  """Parse pagespeed configuration yaml into a PagespeedEntry.

  Args:
    pagespeed_entry: The contents of a pagespeed entry from a yaml file
      as a string, or an open file object.
    open_fn: Function for opening files. Unused.

  Returns:
    A PagespeedEntry instance which represents the contents of the parsed
    yaml.

  Raises:
    yaml_errors.EventError: An error occured while parsing the yaml.
    MalformedPagespeedConfiguration: The configuration is parseable but
      invalid.
  """
  object_builder = yaml_object.ObjectBuilder(PagespeedEntry)
  builder_handler = yaml_builder.BuilderHandler(object_builder)
  yaml_listener.EventListener(builder_handler).Parse(pagespeed_entry)

  parsed_entries = builder_handler.GetResults()
  if not parsed_entries:
    # Empty input: hand back a default (all-optional) entry.
    return PagespeedEntry()
  if len(parsed_entries) > 1:
    raise MalformedPagespeedConfiguration(
        "Multiple configuration sections in the yaml"
    )
  return parsed_entries[0]
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Search API module."""
from search import AtomField
from search import Cursor
from search import DateField
from search import DeleteError
from search import DeleteResult
from search import Document
from search import DOCUMENT_ID_FIELD_NAME
from search import Error
from search import ExpressionError
from search import Field
from search import FieldExpression
from search import GeoField
from search import GeoPoint
from search import get_indexes
from search import GetResponse
from search import HtmlField
from search import Index
from search import InternalError
from search import InvalidRequest
from search import LANGUAGE_FIELD_NAME
from search import MatchScorer
from search import MAXIMUM_DOCUMENT_ID_LENGTH
from search import MAXIMUM_DOCUMENTS_PER_PUT_REQUEST
from search import MAXIMUM_DOCUMENTS_RETURNED_PER_SEARCH
from search import MAXIMUM_EXPRESSION_LENGTH
from search import MAXIMUM_FIELD_ATOM_LENGTH
from search import MAXIMUM_FIELD_NAME_LENGTH
from search import MAXIMUM_FIELD_VALUE_LENGTH
from search import MAXIMUM_FIELDS_RETURNED_PER_SEARCH
from search import MAXIMUM_GET_INDEXES_OFFSET
from search import MAXIMUM_INDEX_NAME_LENGTH
from search import MAXIMUM_INDEXES_RETURNED_PER_GET_REQUEST
from search import MAXIMUM_NUMBER_FOUND_ACCURACY
from search import MAXIMUM_QUERY_LENGTH
from search import MAXIMUM_SEARCH_OFFSET
from search import MAXIMUM_SORTED_DOCUMENTS
from search import NumberField
from search import OperationResult
from search import PutError
from search import PutResult
from search import Query
from search import QueryError
from search import QueryOptions
from search import RANK_FIELD_NAME
from search import RescoringMatchScorer
from search import SCORE_FIELD_NAME
from search import ScoredDocument
from search import SearchResults
from search import SortExpression
from search import SortOptions
from search import TextField
from search import TIMESTAMP_FIELD_NAME
from search import TransientError
|
"Stub version of the Task Queue API.\n\nThis stub stores tasks and runs them via dev_appserver's AddEvent capability.\nIt also validates the tasks by checking their queue name against the queue.yaml.\n\nAs well as implementing Task Queue API functions, the stub exposes various other\nfunctions that are used by the dev_appserver's admin console to display the\napplication's queues and tasks.\n"
__all__ = []
import base64
import bisect
import calendar
import datetime
import logging
import os
import random
import string
import threading
import time
import taskqueue_service_pb
import taskqueue
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import queueinfo
from google.appengine.api import request_info
from google.appengine.api.taskqueue import taskqueue
from google.appengine.runtime import apiproxy_errors
# Default refill rate for automatically-created queues (string form plus the
# matching float used for arithmetic).
DEFAULT_RATE = '5.00/s'
DEFAULT_RATE_FLOAT = 5.0
DEFAULT_BUCKET_SIZE = 5
# NOTE(review): value stripped -- MAX_ETA was presumably a
# datetime.timedelta bound on how far in the future a task ETA may lie;
# `()` is wrong as written.
MAX_ETA = ()
# Size limits: 1 MiB for pull tasks, 100 KiB for push tasks.
MAX_PULL_TASK_SIZE_BYTES = (2 ** 20)
MAX_PUSH_TASK_SIZE_BYTES = (100 * (2 ** 10))
MAX_TASK_SIZE = MAX_PUSH_TASK_SIZE_BYTES
MAX_REQUEST_SIZE = (32 << 20)
# Headers the stub injects itself; user-supplied copies are filtered out.
# NOTE(review): the wrapping call (presumably frozenset) was stripped, so
# this is currently a plain list.
BUILT_IN_HEADERS = (['x-appengine-queuename', 'x-appengine-taskname', 'x-appengine-taskexecutioncount', 'x-appengine-taskpreviousresponse', 'x-appengine-taskretrycount', 'x-appengine-tasketa', 'x-appengine-development-payload', 'content-length'])
DEFAULT_QUEUE_NAME = 'default'
# Effectively +infinity for ETA comparisons (overflows float to inf).
INF = 1e309
QUEUE_MODE = taskqueue_service_pb.TaskQueueMode
# Queues created on first access: name -> (rate, bucket_size, display rate).
AUTOMATIC_QUEUES = {DEFAULT_QUEUE_NAME: (0.2, DEFAULT_BUCKET_SIZE, DEFAULT_RATE), '__cron': (1, 1, '1/s')}
def _GetAppId(request):
  """Returns the app id to use for the given request.

  Args:
    request: A protocol buffer that has an app_id field.

  Returns:
    A string containing the app id or None if no app id was specified.
  """
  # NOTE(review): accessor calls stripped -- presumably
  # request.has_app_id() / request.app_id(); broken as written.
  if ():
    return ()
  else:
    return None
def _SecToUsec(t):
'Converts a time in seconds since the epoch to usec since the epoch.\n\n Args:\n t: Time in seconds since the unix epoch\n\n Returns:\n An integer containing the number of usec since the unix epoch.\n '
return ((t * 1000000.0))
def _UsecToSec(t):
'Converts a time in usec since the epoch to seconds since the epoch.\n\n Args:\n t: Time in usec since the unix epoch\n\n Returns:\n A float containing the number of seconds since the unix epoch.\n '
return (t / 1000000.0)
def _FormatEta(eta_usec):
  'Formats a task ETA as a date string in UTC.'
  # NOTE(review): calls stripped -- presumably
  # datetime.datetime.utcfromtimestamp(_UsecToSec(eta_usec)) followed by
  # eta.strftime('%Y/%m/%d %H:%M:%S'); as written this returns the raw
  # format string.
  eta = ((eta_usec))
  return ('%Y/%m/%d %H:%M:%S')
def _TruncDelta(timedelta):
'Strips the microseconds field from a timedelta.\n\n Args:\n timedelta: a datetime.timedelta.\n\n Returns:\n A datetime.timedelta with the microseconds field not filled.\n '
return ()
def _EtaDelta(eta_usec, now):
  """Formats a task ETA as a relative time string.

  Args:
    eta_usec: Task ETA in usec since the unix epoch.
    now: A datetime.datetime containing the current time in UTC.

  Returns:
    A string like '0:00:05 from now' or '0:00:05 ago' (microseconds
    truncated via _TruncDelta).
  """
  # The stripped original never converted eta_usec to a datetime, making
  # the `eta > now` comparison invalid; restore the conversion chain.
  eta = datetime.datetime.utcfromtimestamp(_UsecToSec(eta_usec))
  if eta > now:
    return '%s from now' % _TruncDelta(eta - now)
  else:
    return '%s ago' % _TruncDelta(now - eta)
def QueryTasksResponseToDict(queue_name, task_response, now):
  """Converts a TaskQueueQueryTasksResponse_Task protobuf group into a dict.

  Args:
    queue_name: The name of the queue this task came from.
    task_response: An instance of TaskQueueQueryTasksResponse_Task.
    now: A datetime.datetime object containing the current time in UTC.

  Returns:
    A dict containing the fields used by the dev appserver's admin console.

  Raises:
    ValueError: A task response contains an unknown HTTP method type.
  """
  # NOTE(review): accessor/append calls have been stripped throughout
  # (`()` placeholders), so the dict below cannot be populated as written.
  # Restore from the original google.appengine SDK source.
  task = {}
  task['name'] = ()
  task['queue_name'] = queue_name
  task['url'] = ()
  method = ()
  if (method == taskqueue_service_pb.TaskQueueQueryTasksResponse_Task.GET):
    task['method'] = 'GET'
  elif (method == taskqueue_service_pb.TaskQueueQueryTasksResponse_Task.POST):
    task['method'] = 'POST'
  elif (method == taskqueue_service_pb.TaskQueueQueryTasksResponse_Task.HEAD):
    task['method'] = 'HEAD'
  elif (method == taskqueue_service_pb.TaskQueueQueryTasksResponse_Task.PUT):
    task['method'] = 'PUT'
  elif (method == taskqueue_service_pb.TaskQueueQueryTasksResponse_Task.DELETE):
    task['method'] = 'DELETE'
  else:
    raise (('Unexpected method: %d' % method))
  task['eta'] = (())
  task['eta_usec'] = ()
  task['eta_delta'] = ((), now)
  task['body'] = (())
  # Pass through user headers, excluding those the stub generates itself.
  headers = [((), ()) for header in () if (() not in BUILT_IN_HEADERS)]
  (('X-AppEngine-QueueName', queue_name))
  (('X-AppEngine-TaskName', ()))
  (('X-AppEngine-TaskRetryCount', (())))
  (('X-AppEngine-TaskETA', ((()))))
  (('X-AppEngine-Development-Payload', '1'))
  (('Content-Length', ((task['body']))))
  if ('content-type' not in ((() for (key, _) in headers))):
    (('Content-Type', 'application/octet-stream'))
  (('X-AppEngine-TaskExecutionCount', (())))
  if (() and ()):
    (('X-AppEngine-TaskPreviousResponse', (())))
  task['headers'] = headers
  return task
class _Group(object):
'A taskqueue group.\n\n This class contains all of the queues for an application.\n '
def __init__(self, queue_yaml_parser=None, app_id=None, _all_queues_valid=False, _update_newest_eta=None, _testing_validate_state=False):
'Constructor.\n\n Args:\n queue_yaml_parser: A function that takes no parameters and returns the\n parsed results of the queue.yaml file. If this queue is not based on a\n queue.yaml file use None.\n app_id: The app id this Group is representing or None if it is the\n currently running application.\n _all_queues_valid: Automatically generate queues on first access.\n _update_newest_eta: Callable for automatically executing tasks.\n Takes the ETA of the task in seconds since the epoch, the queue_name\n and a task name. May be None if automatic task running is disabled.\n _testing_validate_state: Should this _Group and all of its _Queues\n validate their state after each operation? This should only be used\n during testing of the taskqueue_stub.\n '
self._queues = {}
self._queue_yaml_parser = queue_yaml_parser
self._all_queues_valid = _all_queues_valid
self._next_task_id = 1
self._app_id = app_id
if (_update_newest_eta is None):
self._update_newest_eta = (lambda x: None)
else:
self._update_newest_eta = _update_newest_eta
self._testing_validate_state = _testing_validate_state
  def GetQueuesAsDicts(self):
    """Gets all the application's queues.

    Returns:
      A list of dictionaries, where each dictionary contains one queue's
      attributes. E.g.:
        [{'name': 'some-queue',
          'max_rate': '1/s',
          'bucket_size': 5,
          'oldest_task': '2009/02/02 05:37:42',
          'eta_delta': '0:00:06.342511 ago',
          'tasks_in_queue': 12,
          'acl': ['user1@gmail.com']}, ...]
      The list of queues always includes the default queue.
    """
    # NOTE(review): call targets stripped throughout -- e.g. `(queue_dict)`
    # was presumably queues.append(queue_dict), so `queues` is never
    # populated as written. Restore from the SDK source.
    ()
    now = ()
    queues = []
    for (queue_name, queue) in (()):
      queue_dict = {}
      (queue_dict)
      queue_dict['name'] = queue_name
      queue_dict['bucket_size'] = queue.bucket_capacity
      if (queue.user_specified_rate is not None):
        queue_dict['max_rate'] = queue.user_specified_rate
      else:
        queue_dict['max_rate'] = ''
      if (queue.queue_mode == QUEUE_MODE.PULL):
        queue_dict['mode'] = 'pull'
      else:
        queue_dict['mode'] = 'push'
      queue_dict['acl'] = queue.acl
      if ():
        queue_dict['oldest_task'] = (())
        queue_dict['eta_delta'] = ((), now)
      else:
        queue_dict['oldest_task'] = ''
        queue_dict['eta_delta'] = ''
      queue_dict['tasks_in_queue'] = ()
      if queue.retry_parameters:
        retry_proto = queue.retry_parameters
        retry_dict = {}
        if ():
          retry_dict['retry_limit'] = ()
        if ():
          retry_dict['age_limit_sec'] = ()
        if ():
          retry_dict['min_backoff_sec'] = ()
        if ():
          retry_dict['max_backoff_sec'] = ()
        if ():
          retry_dict['max_doublings'] = ()
        queue_dict['retry_parameters'] = retry_dict
    return queues
  def HasQueue(self, queue_name):
    """Check if the specified queue_name references a valid queue.

    Args:
      queue_name: The name of the queue to check.

    Returns:
      True if the queue exists, False otherwise.
    """
    # NOTE(review): `()` was presumably self._ReloadQueuesFromYaml();
    # call stripped. A None entry marks a tombstoned queue.
    ()
    return ((queue_name in self._queues) and (self._queues[queue_name] is not None))
  def GetQueue(self, queue_name):
    """Gets the _Queue instance for the specified queue.

    Args:
      queue_name: The name of the queue to fetch.

    Returns:
      The _Queue instance for the specified queue.

    Raises:
      KeyError if the queue does not exist.
    """
    # NOTE(review): `()` was presumably self._ReloadQueuesFromYaml();
    # call stripped.
    ()
    return self._queues[queue_name]
  def GetNextPushTask(self):
    """Finds the task with the lowest eta.

    Returns:
      A tuple containing the queue and task instance for the task with the
      lowest eta, or (None, None) if there are no tasks.
    """
    # NOTE(review): stripped calls (queue-values iteration, oldest-task
    # lookup and eta accessors); broken as written.
    min_eta = INF
    result = (None, None)
    for queue in ():
      if (queue.queue_mode == QUEUE_MODE.PULL):
        # Pull queues never auto-execute; skip them.
        continue
      task = ()
      if (not task):
        continue
      if (() < min_eta):
        result = (queue, task)
        min_eta = ()
    return result
  def _ConstructQueue(self, queue_name, *args, **kwargs):
    # Creates and registers a queue, forcing this group's validate-state
    # testing flag onto it.
    # NOTE(review): the constructor call has been stripped --
    # `(queue_name, *args)` builds a tuple instead of a _Queue, and kwargs
    # (including the flag set above) are dropped. Broken as written.
    if ('_testing_validate_state' in kwargs):
      raise ('_testing_validate_state should not be passed to _ConstructQueue')
    kwargs['_testing_validate_state'] = self._testing_validate_state
    self._queues[queue_name] = (queue_name, *args)
  def _ConstructAutomaticQueue(self, queue_name):
    # Creates one of the built-in queues ('default', '__cron') with its
    # canonical settings, or any queue when _all_queues_valid is set.
    # NOTE(review): the self._ConstructQueue(...) calls and the exception
    # class on the raise have been stripped; broken as written.
    if (queue_name in AUTOMATIC_QUEUES):
      (queue_name, *AUTOMATIC_QUEUES[queue_name])
    else:
      if (not self._all_queues_valid):
        raise ()
      (queue_name)
  def _ReloadQueuesFromYaml(self):
    """Update the queue map with the contents of the queue.yaml file.

    This function will remove queues that no longer exist in the queue.yaml
    file.

    If no queue yaml parser has been defined, this function is a no-op.
    """
    # NOTE(review): call targets stripped throughout (parser invocation,
    # set construction/adds, logging warnings, queue construction); the
    # reload cannot run as written. Restore from the SDK source.
    if (not self._queue_yaml_parser):
      return
    queue_info = ()
    if (queue_info and queue_info.queue):
      queues = queue_info.queue
    else:
      queues = []
    old_queues = (self._queues)
    new_queues = ()
    for entry in queues:
      queue_name = entry.name
      (queue_name)
      retry_parameters = None
      if entry.bucket_size:
        bucket_size = entry.bucket_size
      else:
        bucket_size = DEFAULT_BUCKET_SIZE
      if entry.retry_parameters:
        retry_parameters = (entry.retry_parameters)
      if (entry.mode == 'pull'):
        mode = QUEUE_MODE.PULL
        if (entry.rate is not None):
          ('Refill rate must not be specified for pull-based queue. Please check queue.yaml file.')
      else:
        mode = QUEUE_MODE.PUSH
        if (entry.rate is None):
          ('Refill rate must be specified for push-based queue. Please check queue.yaml file.')
      max_rate = entry.rate
      if (entry.acl is not None):
        acl = ()
        for acl_entry in entry.acl:
          (acl_entry.user_email)
      else:
        acl = None
      if ((queue_name) is None):
        (queue_name)
      else:
        # Queue already exists: update its settings in place.
        queue = self._queues[queue_name]
        queue.bucket_size = bucket_size
        queue.user_specified_rate = max_rate
        queue.acl = acl
        queue.queue_mode = mode
        queue.retry_parameters = retry_parameters
        if (mode == QUEUE_MODE.PUSH):
          eta = ()
          if eta:
            ((eta))
    if (DEFAULT_QUEUE_NAME not in self._queues):
      (DEFAULT_QUEUE_NAME)
    (DEFAULT_QUEUE_NAME)
    if (not self._all_queues_valid):
      # Drop queues that disappeared from queue.yaml.
      for queue_name in (old_queues - new_queues):
        del self._queues[queue_name]
  def _ValidateQueueName(self, queue_name):
    """Tests if the specified queue exists and creates it if needed.

    This function replicates the behaviour of the taskqueue service by
    automatically creating the 'automatic' queues when they are first
    accessed.

    Args:
      queue_name: The name queue of the queue to check.

    Returns:
      If there are no problems, returns TaskQueueServiceError.OK. Otherwise
      returns the correct constant from TaskQueueServiceError.
    """
    # NOTE(review): `(queue_name)` was presumably
    # self._ConstructAutomaticQueue(queue_name); call stripped.
    if (not queue_name):
      return taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME
    elif (queue_name not in self._queues):
      if ((queue_name in AUTOMATIC_QUEUES) or self._all_queues_valid):
        (queue_name)
      else:
        return taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE
    elif (self._queues[queue_name] is None):
      # A None entry marks a deleted (tombstoned) queue.
      return taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE
    return taskqueue_service_pb.TaskQueueServiceError.OK
def _CheckQueueForRpc(self, queue_name):
"Ensures the specified queue exists and creates it if needed.\n\n This function replicates the behaviour of the taskqueue service by\n automatically creating the 'automatic' queues when they are first accessed.\n\n Args:\n queue_name: The name queue of the queue to check\n\n Raises:\n ApplicationError: If the queue name is invalid, tombstoned or does not\n exist.\n "
()
response = (queue_name)
if (response != taskqueue_service_pb.TaskQueueServiceError.OK):
raise (response)
def _ChooseTaskName(self):
'Returns a string containing a unique task name.'
self._next_task_id += 1
return ('task%d' % (self._next_task_id - 1))
def _VerifyTaskQueueAddRequest(self, request, now):
'Checks that a TaskQueueAddRequest is valid.\n\n Checks that a TaskQueueAddRequest specifies a valid eta and a valid queue.\n\n Args:\n request: The taskqueue_service_pb.TaskQueueAddRequest to validate.\n now: A datetime.datetime object containing the current time in UTC.\n\n Returns:\n A taskqueue_service_pb.TaskQueueServiceError indicating any problems with\n the request or taskqueue_service_pb.TaskQueueServiceError.OK if it is\n valid.\n '
if (() < 0):
return taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA
eta = ((()))
max_eta = (now + MAX_ETA)
if (eta > max_eta):
return taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA
queue_name_response = (())
if (queue_name_response != taskqueue_service_pb.TaskQueueServiceError.OK):
return queue_name_response
if (() and (self._app_id is None)):
return taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED
if (() == QUEUE_MODE.PULL):
max_task_size_bytes = MAX_PULL_TASK_SIZE_BYTES
else:
max_task_size_bytes = MAX_PUSH_TASK_SIZE_BYTES
if (() > max_task_size_bytes):
return taskqueue_service_pb.TaskQueueServiceError.TASK_TOO_LARGE
return taskqueue_service_pb.TaskQueueServiceError.OK
def BulkAdd_Rpc(self, request, response):
'Add many tasks to a queue using a single request.\n\n Args:\n request: The taskqueue_service_pb.TaskQueueBulkAddRequest. See\n taskqueue_service.proto.\n response: The taskqueue_service_pb.TaskQueueBulkAddResponse. See\n taskqueue_service.proto.\n '
()
if (not ()):
raise (taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
error_found = False
task_results_with_chosen_names = ()
now = (())
for add_request in ():
task_result = ()
result = (add_request, now)
if (result == taskqueue_service_pb.TaskQueueServiceError.OK):
if (not ()):
chosen_name = ()
(chosen_name)
((task_result))
(taskqueue_service_pb.TaskQueueServiceError.SKIPPED)
else:
error_found = True
(result)
if error_found:
return
if ():
(request)
else:
(request, response, now)
for (add_request, task_result) in ((), ()):
if (() == taskqueue_service_pb.TaskQueueServiceError.SKIPPED):
(taskqueue_service_pb.TaskQueueServiceError.OK)
if ((task_result) in task_results_with_chosen_names):
(())
def _TransactionalBulkAdd(self, request):
'Uses datastore.AddActions to associate tasks with a transaction.\n\n Args:\n request: The taskqueue_service_pb.TaskQueueBulkAddRequest containing the\n tasks to add. N.B. all tasks in the request have been validated and\n assigned unique names.\n '
try:
('datastore_v3', 'AddActions', request, ())
except apiproxy_errors.ApplicationError as e:
raise ((e.application_error + taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR), e.error_detail)
def _NonTransactionalBulkAdd(self, request, response, now):
'Adds tasks to the appropriate _Queue instance.\n\n Args:\n request: The taskqueue_service_pb.TaskQueueBulkAddRequest containing the\n tasks to add. N.B. all tasks in the request have been validated and\n those with empty names have been assigned unique names.\n response: The taskqueue_service_pb.TaskQueueBulkAddResponse to populate\n with the results. N.B. the chosen_task_name field in the response will\n not be filled-in.\n now: A datetime.datetime object containing the current time in UTC.\n '
queue_mode = ()
queue_name = ()
store = self._queues[queue_name]
if (store.queue_mode != queue_mode):
raise (taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_MODE)
for (add_request, task_result) in ((), ()):
try:
(add_request, now)
except apiproxy_errors.ApplicationError as e:
(e.application_error)
else:
(taskqueue_service_pb.TaskQueueServiceError.OK)
if ((store.queue_mode == QUEUE_MODE.PUSH) and (() == ())):
((()))
def UpdateQueue_Rpc(self, request, response):
'Implementation of the UpdateQueue RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueUpdateQueueRequest.\n response: A taskqueue_service_pb.TaskQueueUpdateQueueResponse.\n '
queue_name = ()
response = (queue_name)
is_unknown_queue = (response == taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
if ((response != taskqueue_service_pb.TaskQueueServiceError.OK) and (not is_unknown_queue)):
raise (response)
if is_unknown_queue:
self._queues[queue_name] = (())
if (self._app_id is not None):
((10, 100))
(request, response)
def FetchQueues_Rpc(self, request, response):
'Implementation of the FetchQueues RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueFetchQueuesRequest.\n response: A taskqueue_service_pb.TaskQueueFetchQueuesResponse.\n '
()
for queue_name in (self._queues):
if (() > ()):
break
if (self._queues[queue_name] is None):
continue
(request, response)
def FetchQueueStats_Rpc(self, request, response):
"Implementation of the FetchQueueStats rpc which returns 'random' data.\n\n This implementation loads some stats from the task store, the rest are\n random numbers.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueFetchQueueStatsRequest.\n response: A taskqueue_service_pb.TaskQueueFetchQueueStatsResponse.\n "
for queue_name in ():
stats = ()
if (queue_name not in self._queues):
(0)
((- 1))
continue
store = self._queues[queue_name]
(())
if (() == 0):
((- 1))
else:
(())
if ((0, 9) > 0):
scanner_info = ()
((0, 10))
((() + (0, 100)))
((() * 10000.0))
((0, 10))
def QueryTasks_Rpc(self, request, response):
'Implementation of the QueryTasks RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueQueryTasksRequest.\n response: A taskqueue_service_pb.TaskQueueQueryTasksResponse.\n '
(())
(request, response)
def FetchTask_Rpc(self, request, response):
'Implementation of the FetchTask RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueFetchTaskRequest.\n response: A taskqueue_service_pb.TaskQueueFetchTaskResponse.\n '
()
(())
(request, response)
def Delete_Rpc(self, request, response):
'Implementation of the Delete RPC.\n\n Deletes tasks from the task store.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueDeleteRequest.\n response: A taskqueue_service_pb.TaskQueueDeleteResponse.\n '
()
def _AddResultForAll(result):
for _ in ():
(result)
if (() not in self._queues):
(taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
elif (self._queues[()] is None):
(taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE)
else:
(request, response)
def DeleteQueue_Rpc(self, request, response):
'Implementation of the DeleteQueue RPC.\n\n Tombstones the queue.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueDeleteQueueRequest.\n response: A taskqueue_service_pb.TaskQueueDeleteQueueResponse.\n '
(())
self._queues[()] = None
def PauseQueue_Rpc(self, request, response):
'Implementation of the PauseQueue RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueuePauseQueueRequest.\n response: A taskqueue_service_pb.TaskQueuePauseQueueResponse.\n '
(())
self._queues[()].paused = ()
def PurgeQueue_Rpc(self, request, response):
'Implementation of the PurgeQueue RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueuePurgeQueueRequest.\n response: A taskqueue_service_pb.TaskQueuePurgeQueueResponse.\n '
(())
()
def QueryAndOwnTasks_Rpc(self, request, response):
'Implementation of the QueryAndOwnTasks RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueQueryAndOwnTasksRequest.\n response: A taskqueue_service_pb.TaskQueueQueryAndOwnTasksResponse.\n '
(())
(request, response)
def ModifyTaskLease_Rpc(self, request, response):
'Implementation of the ModifyTaskLease RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueModifyTaskLeaseRequest.\n response: A taskqueue_service_pb.TaskQueueModifyTaskLeaseResponse.\n '
(())
(request, response)
class Retry(object):
'Task retry caclulator class.\n\n Determines if and when a task should next be run\n '
_default_params = ()
def __init__(self, task, queue):
'Constructor.\n\n Args:\n task: A taskqueue_service_pb.TaskQueueQueryTasksResponse_Task instance.\n May be None.\n queue: A _Queue instance. May be None.\n '
if ((task is not None) and ()):
self._params = ()
elif ((queue is not None) and (queue.retry_parameters is not None)):
self._params = queue.retry_parameters
else:
self._params = self._default_params
def CanRetry(self, retry_count, age_usec):
'Computes whether a task can be retried.\n\n Args:\n retry_count: An integer specifying which retry this is.\n age_usec: An integer specifying the microseconds since the first try.\n\n Returns:\n True if a task is eligible for retrying.\n '
if (() and ()):
return ((() >= retry_count) or (() >= (age_usec)))
if ():
return (() >= retry_count)
if ():
return (() >= (age_usec))
return True
def CalculateBackoffUsec(self, retry_count):
'Calculates time before the specified retry.\n\n Args:\n retry_count: An integer specifying which retry this is.\n\n Returns:\n The number of microseconds before a task should be retried.\n '
exponent = ((retry_count - 1), ())
linear_steps = (retry_count - exponent)
min_backoff_usec = (())
max_backoff_usec = (())
backoff_usec = min_backoff_usec
if (exponent > 0):
backoff_usec *= (2 ** (1023, exponent))
if (linear_steps > 1):
backoff_usec *= linear_steps
return ((max_backoff_usec, backoff_usec))
class _Queue(object):
'A Taskqueue Queue.\n\n This class contains all of the properties of a queue and a sorted list of\n tasks.\n '
def __init__(self, queue_name, bucket_refill_per_second=DEFAULT_RATE_FLOAT, bucket_capacity=DEFAULT_BUCKET_SIZE, user_specified_rate=DEFAULT_RATE, retry_parameters=None, max_concurrent_requests=None, paused=False, queue_mode=QUEUE_MODE.PUSH, acl=None, _testing_validate_state=None):
self.queue_name = queue_name
self.bucket_refill_per_second = bucket_refill_per_second
self.bucket_capacity = bucket_capacity
self.user_specified_rate = user_specified_rate
self.retry_parameters = retry_parameters
self.max_concurrent_requests = max_concurrent_requests
self.paused = paused
self.queue_mode = queue_mode
self.acl = acl
self._testing_validate_state = _testing_validate_state
self.task_name_archive = ()
self._sorted_by_name = []
self._sorted_by_eta = []
self._sorted_by_tag = []
self._lock = ()
def VerifyIndexes(self):
'Ensures that all three indexes are in a valid state.\n\n This method is used by internal tests and should not need to be called in\n any other circumstances.\n\n Raises:\n AssertionError: if the indexes are not in a valid state.\n '
if (not (self._sorted_by_name)):
raise ()
if (not (self._sorted_by_eta)):
raise ()
if (not (self._sorted_by_tag)):
raise ()
tasks_by_name = ()
tasks_with_tags = ()
for (name, task) in self._sorted_by_name:
if (not (name == ())):
raise ()
if (not (name not in tasks_by_name)):
raise ()
(name)
if ():
(name)
tasks_by_eta = ()
for (eta, name, task) in self._sorted_by_eta:
if (not (name == ())):
raise ()
if (not (eta == ())):
raise ()
if (not (name not in tasks_by_eta)):
raise ()
(name)
if (not (tasks_by_eta == tasks_by_name)):
raise ()
tasks_by_tag = ()
for (tag, eta, name, task) in self._sorted_by_tag:
if (not (name == ())):
raise ()
if (not (eta == ())):
raise ()
if (not (() and ())):
raise ()
if (not (tag == ())):
raise ()
if (not (name not in tasks_by_tag)):
raise ()
(name)
if (not (tasks_by_tag == tasks_with_tags)):
raise ()
@staticmethod
def _IsInOrder(l):
'Determine if the specified list is in ascending order.\n\n Args:\n l: The list to check\n\n Returns:\n True if the list is in order, False otherwise\n '
sorted_list = (l)
return (l == sorted_list)
def _WithLock(f):
'Runs the decorated function within self._lock.\n\n Args:\n f: The function to be delegated to. Must be a member function (take self\n as the first parameter).\n\n Returns:\n The result of f.\n '
def _Inner(self, *args, **kwargs):
with self._lock:
ret = (self, *args)
if self._testing_validate_state:
()
return ret
_Inner.__doc__ = f.__doc__
return _Inner
@_WithLock
def UpdateQueue_Rpc(self, request, response):
'Implementation of the UpdateQueue RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueUpdateQueueRequest.\n response: A taskqueue_service_pb.TaskQueueUpdateQueueResponse.\n '
if (not (() == self.queue_name)):
raise ()
self.bucket_refill_per_second = ()
self.bucket_capacity = ()
if ():
self.user_specified_rate = ()
else:
self.user_specified_rate = None
if ():
self.retry_parameters = ()
else:
self.retry_parameters = None
if ():
self.max_concurrent_requests = ()
else:
self.max_concurrent_requests = None
self.queue_mode = ()
if ():
self.acl = ()
else:
self.acl = None
@_WithLock
def FetchQueues_Rpc(self, request, response):
'Fills out a queue message on the provided TaskQueueFetchQueuesResponse.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueFetchQueuesRequest.\n response: A taskqueue_service_pb.TaskQueueFetchQueuesResponse.\n '
response_queue = ()
(self.queue_name)
(self.bucket_refill_per_second)
(self.bucket_capacity)
if (self.user_specified_rate is not None):
(self.user_specified_rate)
if (self.max_concurrent_requests is not None):
(self.max_concurrent_requests)
if (self.retry_parameters is not None):
(self.retry_parameters)
(self.paused)
if (self.queue_mode is not None):
(self.queue_mode)
if (self.acl is not None):
(self.acl)
@_WithLock
def QueryTasks_Rpc(self, request, response):
'Implementation of the QueryTasks RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueQueryTasksRequest.\n response: A taskqueue_service_pb.TaskQueueQueryTasksResponse.\n '
if (not (not ())):
raise ()
if ():
tasks = (())
else:
tasks = (())
for task in tasks:
(task)
@_WithLock
def FetchTask_Rpc(self, request, response):
'Implementation of the FetchTask RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueFetchTaskRequest.\n response: A taskqueue_service_pb.TaskQueueFetchTaskResponse.\n '
task_name = ()
pos = (task_name)
if (pos is None):
if (task_name in self.task_name_archive):
error = taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK
else:
error = taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK
raise (error)
(_, task) = self._sorted_by_name[pos]
(task)
@_WithLock
def Delete_Rpc(self, request, response):
'Implementation of the Delete RPC.\n\n Deletes tasks from the task store. We mimic a 1/20 chance of a\n TRANSIENT_ERROR when the request has an app_id.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueDeleteRequest.\n response: A taskqueue_service_pb.TaskQueueDeleteResponse.\n '
for taskname in ():
if (() and (() <= 0.05)):
(taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR)
else:
((taskname))
def _QueryAndOwnTasksGetTaskList(self, max_rows, group_by_tag, now_eta_usec, tag=None):
if (not ()):
raise ()
if (group_by_tag and tag):
return (self._sorted_by_tag)
elif group_by_tag:
tasks = (self._sorted_by_eta)
if (not tasks):
return []
if ():
tag = ()
return (max_rows, True, now_eta_usec, tag)
else:
return [task for task in tasks if (not ())]
else:
return (self._sorted_by_eta)
@_WithLock
def QueryAndOwnTasks_Rpc(self, request, response):
'Implementation of the QueryAndOwnTasks RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueQueryAndOwnTasksRequest.\n response: A taskqueue_service_pb.TaskQueueQueryAndOwnTasksResponse.\n '
if (self.queue_mode != QUEUE_MODE.PULL):
raise (taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_MODE)
lease_seconds = ()
if (lease_seconds < 0):
raise (taskqueue_service_pb.TaskQueueServiceError.INVALID_REQUEST)
max_tasks = ()
if (max_tasks <= 0):
raise (taskqueue_service_pb.TaskQueueServiceError.INVALID_REQUEST)
if (() and (not ())):
raise (taskqueue_service_pb.TaskQueueServiceError.INVALID_REQUEST, 'Tag specified, but group_by_tag was not.')
now_eta_usec = (())
tasks = (max_tasks, (), now_eta_usec, ())
tasks_to_delete = []
for task in tasks:
retry = (task, self)
if (not ((() + 1), 0)):
('Task %s in queue %s cannot be leased again after %d leases.', (), self.queue_name, ())
(task)
continue
(task, (now_eta_usec + (lease_seconds)))
task_response = ()
(())
(())
(())
if ():
(())
(())
for task in tasks_to_delete:
(())
@_WithLock
def ModifyTaskLease_Rpc(self, request, response):
'Implementation of the ModifyTaskLease RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueQueryAndOwnTasksRequest.\n response: A taskqueue_service_pb.TaskQueueQueryAndOwnTasksResponse.\n '
if (self.queue_mode != QUEUE_MODE.PULL):
raise (taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_MODE)
if self.paused:
raise (taskqueue_service_pb.TaskQueueServiceError.QUEUE_PAUSED)
lease_seconds = ()
if (lease_seconds < 0):
raise (taskqueue_service_pb.TaskQueueServiceError.INVALID_REQUEST)
pos = (())
if (pos is None):
if (() in self.task_name_archive):
raise (taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK)
else:
raise (taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK)
(_, task) = self._sorted_by_name[pos]
if (() != ()):
raise (taskqueue_service_pb.TaskQueueServiceError.TASK_LEASE_EXPIRED)
now_usec = (())
if (() < now_usec):
raise (taskqueue_service_pb.TaskQueueServiceError.TASK_LEASE_EXPIRED)
future_eta_usec = (now_usec + (lease_seconds))
(task, future_eta_usec)
(future_eta_usec)
@_WithLock
def IncRetryCount(self, task_name):
'Increment the retry count of a task by 1.\n\n Args:\n task_name: The name of the task to update.\n '
pos = (task_name)
if (not (pos is not None)):
raise ('Task does not exist when trying to increase retry count.')
task = self._sorted_by_name[pos][1]
(task)
def _IncRetryCount(self, task):
if (not ()):
raise ()
retry_count = ()
((retry_count + 1))
((() + 1))
@_WithLock
def GetTasksAsDicts(self):
"Gets all of the tasks in this queue.\n\n Returns:\n A list of dictionaries, where each dictionary contains one task's\n attributes. E.g.\n [{'name': 'task-123',\n 'queue_name': 'default',\n 'url': '/update',\n 'method': 'GET',\n 'eta': '2009/02/02 05:37:42',\n 'eta_delta': '0:00:06.342511 ago',\n 'body': '',\n 'headers': [('user-header', 'some-value')\n ('X-AppEngine-QueueName': 'update-queue'),\n ('X-AppEngine-TaskName': 'task-123'),\n ('X-AppEngine-TaskExecutionCount': '1'),\n ('X-AppEngine-TaskRetryCount': '1'),\n ('X-AppEngine-TaskETA': '1234567890.123456'),\n ('X-AppEngine-Development-Payload': '1'),\n ('X-AppEngine-TaskPreviousResponse': '300'),\n ('Content-Length': 0),\n ('Content-Type': 'application/octet-stream')]\n\n Raises:\n ValueError: A task request contains an unknown HTTP method type.\n "
tasks = []
now = ()
for (_, _, task_response) in self._sorted_by_eta:
((self.queue_name, task_response, now))
return tasks
@_WithLock
def GetTaskAsDict(self, task_name):
"Gets a specific task from this queue.\n\n Returns:\n A dictionary containing one task's attributes. E.g.\n [{'name': 'task-123',\n 'queue_name': 'default',\n 'url': '/update',\n 'method': 'GET',\n 'eta': '2009/02/02 05:37:42',\n 'eta_delta': '0:00:06.342511 ago',\n 'body': '',\n 'headers': [('user-header', 'some-value')\n ('X-AppEngine-QueueName': 'update-queue'),\n ('X-AppEngine-TaskName': 'task-123'),\n ('X-AppEngine-TaskExecutionCount': '1'),\n ('X-AppEngine-TaskRetryCount': '1'),\n ('X-AppEngine-TaskETA': '1234567890.123456'),\n ('X-AppEngine-Development-Payload': '1'),\n ('X-AppEngine-TaskPreviousResponse': '300'),\n ('Content-Length': 0),\n ('Content-Type': 'application/octet-stream')]\n\n Raises:\n ValueError: A task request contains an unknown HTTP method type.\n "
task_responses = ()
if (not task_responses):
return
(task_response,) = task_responses
if (() != task_name):
return
now = ()
return (self.queue_name, task_response, now)
@_WithLock
def PurgeQueue(self):
'Removes all content from the queue.'
self._sorted_by_name = []
self._sorted_by_eta = []
self._sorted_by_tag = []
@_WithLock
def _GetTasks(self):
'Helper method for tests returning all tasks sorted by eta.\n\n Returns:\n A list of taskqueue_service_pb.TaskQueueQueryTasksResponse_Task objects\n sorted by eta.\n '
return ()
def _GetTasksNoAcquireLock(self):
'Helper method for tests returning all tasks sorted by eta.\n\n Returns:\n A list of taskqueue_service_pb.TaskQueueQueryTasksResponse_Task objects\n sorted by eta.\n '
if (not ()):
raise ()
tasks = []
for (eta, task_name, task) in self._sorted_by_eta:
(task)
return tasks
def _InsertTask(self, task):
'Insert a task into the store, keeps lists sorted.\n\n Args:\n task: the new task.\n '
if (not ()):
raise ()
eta = ()
name = ()
(self._sorted_by_eta, (eta, name, task))
if ():
(self._sorted_by_tag, ((), eta, name, task))
(self._sorted_by_name, (name, task))
(name)
@_WithLock
def RunTaskNow(self, task):
'Change the eta of a task to now.\n\n Args:\n task: The TaskQueueQueryTasksResponse_Task run now. This must be\n stored in this queue (otherwise an AssertionError is raised).\n '
(task, 0)
@_WithLock
def PostponeTask(self, task, new_eta_usec):
'Postpone the task to a future time and increment the retry count.\n\n Args:\n task: The TaskQueueQueryTasksResponse_Task to postpone. This must be\n stored in this queue (otherwise an AssertionError is raised).\n new_eta_usec: The new eta to set on the task. This must be greater then\n the current eta on the task.\n '
if (not (new_eta_usec > ())):
raise ()
(task, new_eta_usec)
def _PostponeTaskNoAcquireLock(self, task, new_eta_usec, increase_retries=True):
if (not ()):
raise ()
if increase_retries:
(task)
name = ()
eta = ()
if (not (self._sorted_by_eta, (eta, name, None), task)):
raise ()
if ():
if (not (self._sorted_by_tag, ((), eta, name, None), task)):
raise ()
(task, new_eta_usec)
def _PostponeTaskInsertOnly(self, task, new_eta_usec):
if (not ()):
raise ()
(new_eta_usec)
name = ()
(self._sorted_by_eta, (new_eta_usec, name, task))
if ():
tag = ()
(self._sorted_by_tag, (tag, new_eta_usec, name, task))
@_WithLock
def Lookup(self, maximum, name=None, eta=None):
"Lookup a number of sorted tasks from the store.\n\n If 'eta' is specified, the tasks are looked up in a list sorted by 'eta',\n then 'name'. Otherwise they are sorted by 'name'. We need to be able to\n sort by 'eta' and 'name' because tasks can have identical eta. If you had\n 20 tasks with the same ETA, you wouldn't be able to page past them, since\n the 'next eta' would give the first one again. Names are unique, though.\n\n Args:\n maximum: the maximum number of tasks to return.\n name: a task name to start with.\n eta: an eta to start with.\n\n Returns:\n A list of up to 'maximum' tasks.\n\n Raises:\n ValueError: if the task store gets corrupted.\n "
return (maximum, name, eta)
def _IndexScan(self, index, start_key, end_key=None, max_rows=None):
"Return the result of a 'scan' over the given index.\n\n The scan is inclusive of start_key and exclusive of end_key. It returns at\n most max_rows from the index.\n\n Args:\n index: One of the index lists, eg self._sorted_by_tag.\n start_key: The key to start at.\n end_key: Optional end key.\n max_rows: The maximum number of rows to yield.\n\n Returns:\n a list of up to 'max_rows' TaskQueueQueryTasksResponse_Task instances from\n the given index, in sorted order.\n "
if (not ()):
raise ()
start_pos = (index, start_key)
end_pos = INF
if (end_key is not None):
end_pos = (index, end_key)
if (max_rows is not None):
end_pos = (end_pos, (start_pos + max_rows))
end_pos = (end_pos, (index))
tasks = []
for pos in (start_pos, end_pos):
(index[pos][(- 1)])
return tasks
def _LookupNoAcquireLock(self, maximum, name=None, eta=None, tag=None):
if (not ()):
raise ()
if (tag is not None):
return (self._sorted_by_tag)
elif (eta is not None):
return (self._sorted_by_eta)
else:
return (self._sorted_by_name)
@_WithLock
def Count(self):
'Returns the number of tasks in the store.'
return (self._sorted_by_name)
@_WithLock
def OldestTask(self):
'Returns the task with the oldest eta in the store.'
if self._sorted_by_eta:
return self._sorted_by_eta[0][2]
return None
@_WithLock
def Oldest(self):
'Returns the oldest eta in the store, or None if no tasks.'
if self._sorted_by_eta:
return self._sorted_by_eta[0][0]
return None
def _LocateTaskByName(self, task_name):
'Locate the index of a task in _sorted_by_name list.\n\n If the task does not exist in the list, return None.\n\n Args:\n task_name: Name of task to be located.\n\n Returns:\n Index of the task in _sorted_by_name list if task exists,\n None otherwise.\n '
if (not ()):
raise ()
pos = (self._sorted_by_name, (task_name,))
if ((pos >= (self._sorted_by_name)) or (self._sorted_by_name[pos][0] != task_name)):
return None
return pos
@_WithLock
def Add(self, request, now):
'Inserts a new task into the store.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueAddRequest.\n now: A datetime.datetime object containing the current time in UTC.\n\n Raises:\n apiproxy_errors.ApplicationError: If a task with the same name is already\n in the store, or the task is tombstoned.\n '
if ((()) is not None):
raise (taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS)
if (() in self.task_name_archive):
raise (taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK)
now_sec = (())
task = ()
(())
(())
((now_sec))
(0)
(())
if ():
(())
for keyvalue in ():
header = ()
(())
(())
if ():
(())
if ():
(())
if ():
(())
(())
if ():
(())
if ():
(())
(task)
@_WithLock
def Delete(self, name):
'Deletes a task from the store by name.\n\n Args:\n name: the name of the task to delete.\n\n Returns:\n TaskQueueServiceError.UNKNOWN_TASK: if the task is unknown.\n TaskQueueServiceError.INTERNAL_ERROR: if the store is corrupted.\n TaskQueueServiceError.TOMBSTONED: if the task was deleted.\n TaskQueueServiceError.OK: otherwise.\n '
return (name)
def _RemoveTaskFromIndex(self, index, index_tuple, task):
'Remove a task from the specified index.\n\n Args:\n index: The index list that needs to be mutated.\n index_tuple: The tuple to search for in the index.\n task: The task instance that is expected to be stored at this location.\n\n Returns:\n True if the task was successfully removed from the index, False otherwise.\n '
if (not ()):
raise ()
pos = (index, index_tuple)
if (index[pos][(- 1)] is not task):
('Expected %s, found %s', task, index[pos][(- 1)])
return False
(pos)
return True
def _DeleteNoAcquireLock(self, name):
if (not ()):
raise ()
pos = (name)
if (pos is None):
if (name in self.task_name_archive):
return taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK
else:
return taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK
old_task = (pos)[(- 1)]
eta = ()
if (not (self._sorted_by_eta, (eta, name, None), old_task)):
return taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERRROR
if ():
tag = ()
if (not (self._sorted_by_tag, (tag, eta, name, None), old_task)):
return taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERRROR
return taskqueue_service_pb.TaskQueueServiceError.OK
@_WithLock
def Populate(self, num_tasks):
'Populates the store with a number of tasks.\n\n Args:\n num_tasks: the number of tasks to insert.\n '
def RandomTask():
'Creates a new task and randomly populates values.'
if (not ()):
raise ()
task = ()
((((string.ascii_lowercase) for x in (20))))
((now_usec + (((- 10)), (600))))
(((now_usec, ()) - (0, (20))))
((['/a', '/b', '/c', '/d']))
if (() < 0.2):
(taskqueue_service_pb.TaskQueueQueryTasksResponse_Task.POST)
(('A' * 2000))
else:
(taskqueue_service_pb.TaskQueueQueryTasksResponse_Task.GET)
retry_count = (0, ((- 10), 5))
(retry_count)
(retry_count)
if (() < 0.3):
random_headers = [('nexus', 'one'), ('foo', 'bar'), ('content-type', 'text/plain'), ('from', 'user@email.com')]
for _ in ((1, 4)):
elem = (0, ((random_headers) - 1))
(key, value) = (elem)
header_proto = ()
(key)
(value)
return task
now_usec = (())
for _ in (num_tasks):
(())
class _TaskExecutor(object):
'Executor for a task object.\n\n Converts a TaskQueueQueryTasksResponse_Task into a http request, then uses the\n httplib library to send it to the http server.\n '
def __init__(self, default_host, request_data):
'Constructor.\n\n Args:\n default_host: a string to use as the host/port to connect to if the host\n header is not specified in the task.\n request_data: A request_info.RequestInfo instance used to look up state\n associated with the request that generated an API call.\n '
self._default_host = default_host
self._request_data = request_data
def _HeadersFromTask(self, task, queue):
'Constructs the http headers for the given task.\n\n This function will remove special headers (values in BUILT_IN_HEADERS) and\n add the taskqueue headers.\n\n Args:\n task: The task, a TaskQueueQueryTasksResponse_Task instance.\n queue: The queue that this task belongs to, an _Queue instance.\n\n Returns:\n A tuple of (header_dict, headers), where:\n header_dict: A mapping from lowercase header name to a list of values.\n headers: a list of tuples containing the http header and value. There\n may be be mutiple entries with the same key.\n '
headers = []
header_dict = {}
for header in ():
header_key_lower = ()
if (header_key_lower not in BUILT_IN_HEADERS):
(((), ()))
(())
(('X-AppEngine-QueueName', queue.queue_name))
(('X-AppEngine-TaskName', ()))
(('X-AppEngine-TaskRetryCount', (())))
(('X-AppEngine-TaskETA', ((()))))
(('X-AppEngine-Fake-Is-Admin', '1'))
(('Content-Length', ((()))))
if ('content-type' not in header_dict):
(('Content-Type', 'application/octet-stream'))
(('X-AppEngine-TaskExecutionCount', (())))
if (() and ()):
(('X-AppEngine-TaskPreviousResponse', (())))
return (header_dict, headers)
def ExecuteTask(self, task, queue):
"Construct a http request from the task and dispatch it.\n\n Args:\n task: The task to convert to a http request and then send. An instance of\n taskqueue_service_pb.TaskQueueQueryTasksResponse_Task\n queue: The queue that this task belongs to. An instance of _Queue.\n\n Returns:\n Http Response code from the task's execution, 0 if an exception occurred.\n "
method = (())
(header_dict, headers) = (task, queue)
(connection_host,) = ('host', [self._default_host])
if (connection_host is None):
('Could not determine where to send the task "%s" (Url: "%s") in queue "%s". Treating as an error.', (), (), queue.queue_name)
return False
else:
header_dict['Host'] = connection_host
dispatcher = ()
try:
response = (method, (), headers, (() if () else ''), '0.1.0.2')
except request_info.ServerDoesNotExistError:
('Failed to dispatch task')
return 0
return ((' ', 1)[0])
# NOTE(review): This class is decompiler residue -- every call expression has
# been stripped to a bare parenthesized form such as `()` or `(task, queue)`,
# which Python parses as empty/ordinary tuples. The code therefore no longer
# performs the calls its structure implies (constructing threading primitives,
# reading the clock, dispatching tasks). It is kept byte-for-byte below with
# annotations only; recover the original App Engine taskqueue_stub module to
# restore behavior.
class _BackgroundTaskScheduler(object):
    'The task scheduler class.\n\n This class is designed to be run in a background thread.\n\n Note: There must not be more than one instance of _BackgroundTaskScheduler per\n group.\n '

    def __init__(self, group, task_executor, retry_seconds, **kwargs):
        'Constructor.\n\n Args:\n group: The group that we will automatically execute tasks from. Must be an\n instance of _Group.\n task_executor: The class used to convert a task into a http request. Must\n be an instance of _TaskExecutor.\n retry_seconds: The number of seconds to delay a task by if its execution\n fails.\n _get_time: a callable that returns the current time in seconds since the\n epoch. This argument may only be passed in by keyword. If unset, use\n time.time.\n '
        self._group = group
        self._should_exit = False
        # INF sentinel: no wakeup currently scheduled.
        self._next_wakeup = INF
        # NOTE(review): stripped call -- presumably threading.Event(); as
        # written this assigns an empty tuple.
        self._event = ()
        # NOTE(review): stripped call -- presumably threading.Lock().
        self._wakeup_lock = ()
        self.task_executor = task_executor
        self.default_retry_seconds = retry_seconds
        # NOTE(review): stripped call -- presumably
        # kwargs.pop('_get_time', time.time); as written this assigns a 2-tuple,
        # not a callable.
        self._get_time = ('_get_time', time.time)
        if kwargs:
            # NOTE(review): stripped constructor -- presumably TypeError(...);
            # raising a bare str is itself a TypeError at runtime.
            raise (('Unknown parameters: %s' % (kwargs)))

    def UpdateNextEventTime(self, next_event_time):
        'Notify the TaskExecutor of the closest event it needs to process.\n\n Args:\n next_event_time: The time of the event in seconds since the epoch.\n '
        with self._wakeup_lock:
            if (next_event_time < self._next_wakeup):
                self._next_wakeup = next_event_time
                # NOTE(review): stripped call -- presumably self._event.set().
                ()

    def Shutdown(self):
        'Request this TaskExecutor to exit.'
        self._should_exit = True
        # NOTE(review): stripped call -- presumably self._event.set().
        ()

    def _ProcessQueues(self):
        # Reset the wakeup marker before draining runnable tasks.
        with self._wakeup_lock:
            self._next_wakeup = INF
        # NOTE(review): stripped calls below -- `()` stands in for the original
        # expressions (clock reads, queue lookups, task accessors), so the
        # intended behavior can only be inferred from the surviving structure
        # and log-message strings.
        now = ()
        (queue, task) = ()
        while (task and ((()) <= now)):
            if (() == 0):
                ((now))
            response_code = (task, queue)
            if response_code:
                (response_code)
            else:
                ('An error occured while sending the task "%s" (Url: "%s") in queue "%s". Treating as a task error.', (), (), queue.queue_name)
            now = ()
            if (200 <= response_code < 300):
                (())
            else:
                retry = (task, queue)
                age_usec = ((now) - ())
                if ((() + 1), age_usec):
                    retry_usec = ((() + 1))
                    ('Task %s failed to execute. This task will retry in %.3f seconds', (), (retry_usec))
                    (task, ((now) + retry_usec))
                else:
                    ('Task %s failed to execute. The task has no remaining retries. Failing permanently after %d retries and %d seconds', (), (), (age_usec))
                    (())
            (queue, task) = ()
        if task:
            with self._wakeup_lock:
                eta = (())
                if (eta < self._next_wakeup):
                    self._next_wakeup = eta

    def _Wait(self):
        'Block until we need to process a task or we need to exit.'
        now = ()
        while ((not self._should_exit) and (self._next_wakeup > now)):
            timeout = (self._next_wakeup - now)
            # NOTE(review): stripped calls -- presumably waiting on the event
            # with the computed timeout, clearing it, then re-reading the clock.
            (timeout)
            ()
            now = ()

    def MainLoop(self):
        'The main loop of the scheduler.'
        while (not self._should_exit):
            # NOTE(review): stripped calls -- presumably self._ProcessQueues()
            # followed by self._Wait().
            ()
            ()
# NOTE(review): This class is decompiler residue -- call expressions have been
# stripped to bare tuples such as `()` or `(request, response)`, so the methods
# parse but no longer invoke anything. Kept byte-for-byte with annotations
# only; the original is the App Engine SDK's taskqueue_stub.py.
class TaskQueueServiceStub(apiproxy_stub.APIProxyStub):
    "Python only task queue service stub.\n\n This stub executes tasks when enabled by using the dev_appserver's AddEvent\n capability. When task running is disabled this stub will store tasks for\n display on a console, where the user may manually execute the tasks.\n "

    def __init__(self, service_name='taskqueue', root_path=None, auto_task_running=False, task_retry_seconds=30, _all_queues_valid=False, default_http_server=None, _testing_validate_state=False, request_data=None):
        "Constructor.\n\n Args:\n service_name: Service name expected for all calls.\n root_path: Root path to the directory of the application which may contain\n a queue.yaml file. If None, then it's assumed no queue.yaml file is\n available.\n auto_task_running: When True, the dev_appserver should automatically\n run tasks after they are enqueued.\n task_retry_seconds: How long to wait between task executions after a\n task fails.\n _testing_validate_state: Should this stub and all of its _Groups (and\n thus and all of its _Queues) validate their state after each\n operation? This should only be used during testing of the\n taskqueue_stub.\n request_data: A request_info.RequestInfo instance used to look up state\n associated with the request that generated an API call.\n "
        # NOTE(review): stripped call -- presumably the superclass __init__.
        (service_name)
        self._queues = {}
        self._all_queues_valid = _all_queues_valid
        self._root_path = root_path
        self._testing_validate_state = _testing_validate_state
        # NOTE(review): stripped call -- presumably a _Group(...) constructor;
        # as written this stores the bound method itself.
        self._queues[None] = (self._ParseQueueYaml)
        self._auto_task_running = auto_task_running
        self._started = False
        # NOTE(review): stripped call -- presumably a _BackgroundTaskScheduler
        # constructor; as written this stores a tuple.
        self._task_scheduler = (self._queues[None], (default_http_server, self.request_data))
        self._yaml_last_modified = None

    def StartBackgroundExecution(self):
        'Start automatic task execution.'
        if ((not self._started) and self._auto_task_running):
            # NOTE(review): stripped calls -- presumably creating a background
            # thread for the scheduler loop, marking it daemon, and starting it.
            task_scheduler_thread = ()
            (True)
            ()
            self._started = True

    def Shutdown(self):
        'Requests the task scheduler to shutdown.'
        # NOTE(review): stripped call -- presumably self._task_scheduler.Shutdown().
        ()

    def _ParseQueueYaml(self):
        "Loads the queue.yaml file and parses it.\n\n Returns:\n None if queue.yaml doesn't exist, otherwise a queueinfo.QueueEntry object\n populated from the queue.yaml.\n "
        # NOTE(review): stripped call -- presumably hasattr(self, 'queue_yaml_parser');
        # as written a non-empty tuple is always truthy.
        if (self, 'queue_yaml_parser'):
            return (self._root_path)
        if (self._root_path is None):
            return None
        for queueyaml in ('queue.yaml', 'queue.yml'):
            try:
                # NOTE(review): stripped calls -- presumably os.path.join,
                # os.stat, and open.
                path = (self._root_path, queueyaml)
                modified = (path).st_mtime
                if (self._yaml_last_modified and (self._yaml_last_modified == modified)):
                    return self._last_queue_info
                fh = (path, 'r')
            except (IOError, OSError):
                continue
            try:
                # NOTE(review): stripped call -- presumably the queueinfo yaml loader.
                queue_info = (fh)
                self._last_queue_info = queue_info
                self._yaml_last_modified = modified
                return queue_info
            finally:
                # NOTE(review): stripped call -- presumably fh.close().
                ()
        return None

    def _UpdateNextEventTime(self, callback_time):
        'Enqueue a task to be automatically scheduled.\n\n Note: If auto task running is disabled, this function is a no-op.\n\n Args:\n callback_time: The earliest time this task may be run, in seconds since\n the epoch.\n '
        # NOTE(review): stripped call -- presumably
        # self._task_scheduler.UpdateNextEventTime(callback_time).
        (callback_time)

    def _GetGroup(self, app_id=None):
        'Get the _Group instance for app_id, creating a new one if needed.\n\n Args:\n app_id: The app id in question. Note: This field is not validated.\n '
        if (app_id not in self._queues):
            # NOTE(review): stripped call -- presumably a _Group(...) constructor.
            self._queues[app_id] = ()
        return self._queues[app_id]

    def _Dynamic_Add(self, request, response):
        "Add a single task to a queue.\n\n This method is a wrapper around the BulkAdd RPC request.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: The taskqueue_service_pb.TaskQueueAddRequest. See\n taskqueue_service.proto.\n response: The taskqueue_service_pb.TaskQueueAddResponse. See\n taskqueue_service.proto.\n "
        # NOTE(review): stripped calls throughout -- presumably building a
        # BulkAdd request/response pair, delegating to _Dynamic_BulkAdd, and
        # copying the single result back out.
        bulk_request = ()
        bulk_response = ()
        (request)
        (bulk_request, bulk_response)
        if (not (() == 1)):
            raise ()
        result = ()
        if (result != taskqueue_service_pb.TaskQueueServiceError.OK):
            raise (result)
        elif ():
            (())

    def _Dynamic_BulkAdd(self, request, response):
        "Add many tasks to a queue using a single request.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: The taskqueue_service_pb.TaskQueueBulkAddRequest. See\n taskqueue_service.proto.\n response: The taskqueue_service_pb.TaskQueueBulkAddResponse. See\n taskqueue_service.proto.\n "
        if (not ()):
            # NOTE(review): stripped constructor -- raising a bare str fails at
            # runtime; presumably an internal-error exception type.
            raise ('taskqueue should prevent empty requests')
        # NOTE(review): stripped call -- presumably delegating to the group.
        (request, response)

    def GetQueues(self):
        "Gets all the application's queues.\n\n Returns:\n A list of dictionaries, where each dictionary contains one queue's\n attributes. E.g.:\n [{'name': 'some-queue',\n 'max_rate': '1/s',\n 'bucket_size': 5,\n 'oldest_task': '2009/02/02 05:37:42',\n 'eta_delta': '0:00:06.342511 ago',\n 'tasks_in_queue': 12}, ...]\n The list of queues always includes the default queue.\n "
        # NOTE(review): stripped call -- presumably the group's GetQueuesAsDicts().
        return ()

    def GetTasks(self, queue_name):
        "Gets a queue's tasks.\n\n Args:\n queue_name: Queue's name to return tasks for.\n\n Returns:\n A list of dictionaries, where each dictionary contains one task's\n attributes. E.g.\n [{'name': 'task-123',\n 'queue_name': 'default',\n 'url': '/update',\n 'method': 'GET',\n 'eta': '2009/02/02 05:37:42',\n 'eta_delta': '0:00:06.342511 ago',\n 'body': '',\n 'headers': [('user-header', 'some-value')\n ('X-AppEngine-QueueName': 'update-queue'),\n ('X-AppEngine-TaskName': 'task-123'),\n ('X-AppEngine-TaskRetryCount': '0'),\n ('X-AppEngine-TaskETA': '1234567890.123456'),\n ('X-AppEngine-Development-Payload': '1'),\n ('Content-Length': 0),\n ('Content-Type': 'application/octet-stream')]\n\n Raises:\n ValueError: A task request contains an unknown HTTP method type.\n KeyError: An invalid queue name was specified.\n "
        # NOTE(review): stripped call -- presumably the queue's task listing.
        return ()

    def DeleteTask(self, queue_name, task_name):
        'Deletes a task from a queue, without leaving a tombstone.\n\n Args:\n queue_name: the name of the queue to delete the task from.\n task_name: the name of the task to delete.\n '
        # NOTE(review): stripped calls -- presumably checking the queue exists,
        # fetching it, deleting the task, and discarding its archived name.
        if (queue_name):
            queue = (queue_name)
            (task_name)
            (task_name)

    def FlushQueue(self, queue_name):
        'Removes all tasks from a queue, without leaving tombstones.\n\n Args:\n queue_name: the name of the queue to remove tasks from.\n '
        # NOTE(review): stripped calls -- presumably purging the named queue and
        # clearing its task-name archive.
        if (queue_name):
            ()
            ()

    def _Dynamic_UpdateQueue(self, request, unused_response):
        "Local implementation of the UpdateQueue RPC in TaskQueueService.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueUpdateQueueRequest.\n unused_response: A taskqueue_service_pb.TaskQueueUpdateQueueResponse.\n Not used.\n "
        # NOTE(review): stripped call -- presumably delegating to the app's group.
        (request, unused_response)

    def _Dynamic_FetchQueues(self, request, response):
        "Local implementation of the FetchQueues RPC in TaskQueueService.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueFetchQueuesRequest.\n response: A taskqueue_service_pb.TaskQueueFetchQueuesResponse.\n "
        # NOTE(review): stripped call -- presumably delegating to the app's group.
        (request, response)

    def _Dynamic_FetchQueueStats(self, request, response):
        "Local 'random' implementation of the TaskQueueService.FetchQueueStats.\n\n This implementation loads some stats from the task store, the rest with\n random numbers.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueFetchQueueStatsRequest.\n response: A taskqueue_service_pb.TaskQueueFetchQueueStatsResponse.\n "
        # NOTE(review): stripped call -- presumably delegating to the app's group.
        (request, response)

    def _Dynamic_QueryTasks(self, request, response):
        "Local implementation of the TaskQueueService.QueryTasks RPC.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueQueryTasksRequest.\n response: A taskqueue_service_pb.TaskQueueQueryTasksResponse.\n "
        # NOTE(review): stripped call -- presumably delegating to the app's group.
        (request, response)

    def _Dynamic_FetchTask(self, request, response):
        "Local implementation of the TaskQueueService.FetchTask RPC.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueFetchTaskRequest.\n response: A taskqueue_service_pb.TaskQueueFetchTaskResponse.\n "
        # NOTE(review): stripped call -- presumably delegating to the app's group.
        (request, response)

    def _Dynamic_Delete(self, request, response):
        "Local delete implementation of TaskQueueService.Delete.\n\n Deletes tasks from the task store. A 1/20 chance of a transient error.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueDeleteRequest.\n response: A taskqueue_service_pb.TaskQueueDeleteResponse.\n "
        # NOTE(review): stripped call -- presumably delegating to the app's group.
        (request, response)

    def _Dynamic_ForceRun(self, request, response):
        "Local force run implementation of TaskQueueService.ForceRun.\n\n Forces running of a task in a queue. This will fail randomly for testing if\n the app id is non-empty.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueForceRunRequest.\n response: A taskqueue_service_pb.TaskQueueForceRunResponse.\n "
        # NOTE(review): stripped calls -- the thresholds 0.05/0.052 suggest
        # random.random() draws used to simulate transient/internal failures.
        if ((request) is not None):
            if (() <= 0.05):
                (taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR)
            elif (() <= 0.052):
                (taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERROR)
            else:
                (taskqueue_service_pb.TaskQueueServiceError.OK)
        else:
            group = (None)
            if (not (())):
                (taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
                return
            queue = (())
            task = (1)
            if (not task):
                (taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK)
                return
            (task[0])
            (0)
            (taskqueue_service_pb.TaskQueueServiceError.OK)

    def _Dynamic_DeleteQueue(self, request, response):
        "Local delete implementation of TaskQueueService.DeleteQueue.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueDeleteQueueRequest.\n response: A taskqueue_service_pb.TaskQueueDeleteQueueResponse.\n "
        # NOTE(review): stripped call -- presumably extracting the app id.
        app_id = (request)
        if (app_id is None):
            # NOTE(review): stripped constructor -- presumably an ApplicationError;
            # raising the bare enum value fails at runtime.
            raise (taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED)
        (request, response)

    def _Dynamic_PauseQueue(self, request, response):
        "Local pause implementation of TaskQueueService.PauseQueue.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueuePauseQueueRequest.\n response: A taskqueue_service_pb.TaskQueuePauseQueueResponse.\n "
        app_id = (request)
        if (app_id is None):
            raise (taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED)
        (request, response)

    def _Dynamic_PurgeQueue(self, request, response):
        "Local purge implementation of TaskQueueService.PurgeQueue.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueuePurgeQueueRequest.\n response: A taskqueue_service_pb.TaskQueuePurgeQueueResponse.\n "
        # NOTE(review): stripped call -- presumably delegating to the app's group.
        (request, response)

    def _Dynamic_DeleteGroup(self, request, response):
        "Local delete implementation of TaskQueueService.DeleteGroup.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueDeleteGroupRequest.\n response: A taskqueue_service_pb.TaskQueueDeleteGroupResponse.\n "
        app_id = (request)
        if (app_id is None):
            raise (taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED)
        if (app_id in self._queues):
            del self._queues[app_id]
        else:
            raise (taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)

    def _Dynamic_UpdateStorageLimit(self, request, response):
        "Local implementation of TaskQueueService.UpdateStorageLimit.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueUpdateStorageLimitRequest.\n response: A taskqueue_service_pb.TaskQueueUpdateStorageLimitResponse.\n "
        if ((request) is None):
            raise (taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED)
        # The upper bound is 1000 TiB (1000 * 1024**4 bytes).
        if ((() < 0) or (() > (1000 * (1024 ** 4)))):
            raise (taskqueue_service_pb.TaskQueueServiceError.INVALID_REQUEST)
        (())

    def _Dynamic_QueryAndOwnTasks(self, request, response):
        "Local implementation of TaskQueueService.QueryAndOwnTasks.\n\n Must adhere to the '_Dynamic_' naming convention for stubbing to work.\n See taskqueue_service.proto for a full description of the RPC.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueQueryAndOwnTasksRequest.\n response: A taskqueue_service_pb.TaskQueueQueryAndOwnTasksResponse.\n\n Raises:\n InvalidQueueModeError: If target queue is not a pull queue.\n "
        # NOTE(review): stripped call -- presumably delegating to the app's group.
        (request, response)

    def _Dynamic_ModifyTaskLease(self, request, response):
        'Local implementation of TaskQueueService.ModifyTaskLease.\n\n Args:\n request: A taskqueue_service_pb.TaskQueueModifyTaskLeaseRequest.\n response: A taskqueue_service_pb.TaskQueueModifyTaskLeaseResponse.\n\n Raises:\n InvalidQueueModeError: If target queue is not a pull queue.\n '
        # NOTE(review): stripped call -- presumably delegating to the app's group.
        (request, response)

    def get_filtered_tasks(self, url=None, name=None, queue_names=None):
        'Get the tasks in the task queue with filters.\n\n Args:\n url: A URL that all returned tasks should point at.\n name: The name of all returned tasks.\n queue_names: A list of queue names to retrieve tasks from. If left blank\n this will get default to all queues available.\n\n Returns:\n A list of taskqueue.Task objects.\n '
        all_queue_names = [queue['name'] for queue in ()]
        # NOTE(review): stripped call -- presumably isinstance(queue_names, str).
        if (queue_names, str):
            queue_names = [queue_names]
        if (queue_names is None):
            queue_names = all_queue_names
        task_dicts = []
        for queue_name in queue_names:
            if (queue_name in all_queue_names):
                for task in (queue_name):
                    if ((url is not None) and (task['url'] != url)):
                        continue
                    if ((name is not None) and (task['name'] != name)):
                        continue
                    # NOTE(review): stripped call -- presumably task_dicts.append(task).
                    (task)
        tasks = []
        for task in task_dicts:
            # NOTE(review): stripped calls -- presumably decoding the body,
            # rebuilding the header dict, sizing Content-Length, parsing the
            # eta with the shown strptime format, and building a taskqueue.Task.
            payload = (task['body'])
            headers = (task['headers'])
            headers['Content-Length'] = ((payload))
            eta = (task['eta'], '%Y/%m/%d %H:%M:%S')
            eta = ()
            task_object = ()
            (task_object)
        return tasks
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Implementation of scheduling for Groc format schedules.
A Groc schedule looks like '1st,2nd monday 9:00', or 'every 20 mins'. This
module takes a parsed schedule (produced by Antlr) and creates objects that
can produce times that match this schedule.
A parsed schedule is one of two types - an Interval or a Specific Time.
See the class docstrings for more.
Extensions to be considered:
allowing a comma separated list of times to run
"""
import calendar
import datetime
try:
import pytz
except ImportError:
pytz = None
import groc
# Period-unit constants produced by the Groc parser's period_string.
HOURS = "hours"
MINUTES = "minutes"
try:
    from pytz import NonExistentTimeError
    from pytz import AmbiguousTimeError
except ImportError:
    # pytz is optional; define stand-in exception types so the 'except'
    # clauses elsewhere in this module still parse and run (they will simply
    # never be raised without pytz installed).
    class NonExistentTimeError(Exception):
        pass

    class AmbiguousTimeError(Exception):
        pass
def GrocTimeSpecification(schedule, timezone=None):
    """Factory: turn a Groc schedule string into a TimeSpecification.

    Arguments:
      schedule: the schedule specification, as a string
      timezone: the optional timezone as a string for this specification.
        Defaults to 'UTC' - valid entries are things like 'Australia/Victoria'
        or 'PST8PDT'.

    Returns:
      a TimeSpecification instance
    """
    parser = groc.CreateParser(schedule)
    parser.timespec()
    if not parser.period_string:
        # Ordinal/weekday/month style, e.g. '1st,2nd monday 9:00'.
        return SpecificTimeSpecification(
            parser.ordinal_set,
            parser.weekday_set,
            parser.month_set,
            parser.monthday_set,
            parser.time_string,
            timezone,
        )
    # Interval style, e.g. 'every 20 mins'.
    return IntervalTimeSpecification(
        parser.interval_mins,
        parser.period_string,
        parser.synchronized,
        parser.start_time_string,
        parser.end_time_string,
        timezone,
    )
class TimeSpecification(object):
    """Base class for time specifications."""

    def GetMatches(self, start, n):
        """Return the next n times matching the schedule, after time 'start'.

        Arguments:
          start: a datetime to start from. Matches will start from after this
            time.
          n: the number of matching times to return

        Returns:
          a list of n datetime objects
        """
        matches = []
        current = start
        while len(matches) < n:
            current = self.GetMatch(current)
            matches.append(current)
        return matches

    def GetMatch(self, start):
        """Return the next match after time 'start'; subclasses must override.

        Arguments:
          start: a datetime to start from. Matches will start from after this
            time. This may be in any pytz time zone, or it may be
            timezone-naive (interpreted as UTC).

        Returns:
          a datetime object in the timezone of the input 'start'
        """
        raise NotImplementedError
def _GetTimezone(timezone_string):
    """Convert a timezone name string into a pytz timezone object.

    Arguments:
      timezone_string: a string representing a timezone, or None

    Returns:
      a pytz timezone object, or None if the input timezone_string is None

    Raises:
      ValueError: if timezone_string is not None and the pytz module could
        not be loaded
    """
    if not timezone_string:
        return None
    if pytz is None:
        raise ValueError("need pytz in order to specify a timezone")
    return pytz.timezone(timezone_string)
def _ToTimeZone(t, tzinfo):
    """Convert datetime 't' into the time zone 'tzinfo'.

    Arguments:
      t: a datetime object. It may be in any pytz time zone, or it may be
        timezone-naive (interpreted as UTC).
      tzinfo: a pytz timezone object, or None (interpreted as UTC).

    Returns:
      a datetime object in the time zone 'tzinfo'
    """
    if pytz is None:
        # Without pytz we cannot convert; just stamp the requested zone on.
        return t.replace(tzinfo=tzinfo)
    if tzinfo:
        # Localize naive input as UTC, then convert into the target zone.
        aware = t if t.tzinfo else pytz.utc.localize(t)
        return tzinfo.normalize(aware.astimezone(tzinfo))
    if t.tzinfo:
        # Target is naive UTC: convert to UTC and drop the tzinfo marker.
        return pytz.utc.normalize(t.astimezone(pytz.utc)).replace(tzinfo=None)
    return t
def _GetTime(time_string):
    """Parse an 'hours:minutes' string into a datetime.time object.

    Arguments:
      time_string: a string representing a time ('hours:minutes')

    Returns:
      a datetime.time object
    """
    hour, minute = (int(part) for part in time_string.split(":"))
    return datetime.time(hour, minute)
class IntervalTimeSpecification(TimeSpecification):
    """A time specification for a given interval.

    An Interval type spec runs at the given fixed interval. It has the
    following attributes:
      period - the type of interval, either 'hours' or 'minutes'
      interval - the number of units of type period.
      synchronized - whether to synchronize the times to be locked to a fixed
        period (midnight in the specified timezone).
      start_time, end_time - restrict matches to a given range of times every
        day. If these are None, there is no restriction. Otherwise, they are
        datetime.time objects.
      timezone - the time zone in which start_time and end_time should be
        interpreted, or None (defaults to UTC). This is a pytz timezone
        object.
    """

    def __init__(
        self,
        interval,
        period,
        synchronized=False,
        start_time_string="",
        end_time_string="",
        timezone=None,
    ):
        """Initialize the interval specification and validate its arguments.

        Arguments:
          interval: number of period units between matches (must be >= 1)
          period: HOURS or MINUTES
          synchronized: lock matches to a fixed grid anchored at midnight
          start_time_string, end_time_string: optional daily window bounds
            ('hours:minutes'); both or neither must be given
          timezone: optional timezone name for the window bounds

        Raises:
          groc.GrocException: if interval < 1, or if a synchronized period
            does not divide evenly into 24 hours.
          ValueError: if the window bounds are combined with synchronized, or
            only one bound is given.
        """
        super(IntervalTimeSpecification, self).__init__()
        if interval < 1:
            raise groc.GrocException("interval must be greater than zero")
        self.interval = interval
        self.period = period
        self.synchronized = synchronized
        # Normalize the interval to seconds regardless of the period unit.
        if self.period == HOURS:
            self.seconds = self.interval * 3600
        else:
            self.seconds = self.interval * 60
        self.timezone = _GetTimezone(timezone)
        if self.synchronized:
            if start_time_string:
                raise ValueError(
                    "start_time_string may not be specified if synchronized is true"
                )
            if end_time_string:
                raise ValueError(
                    "end_time_string may not be specified if synchronized is true"
                )
            if (self.seconds > 86400) or ((86400 % self.seconds) != 0):
                raise groc.GrocException(
                    "can only use synchronized for periods that"
                    " divide evenly into 24 hours"
                )
            # A synchronized schedule covers the whole day, anchored at
            # midnight in the configured timezone.
            self.start_time = datetime.time(0, 0).replace(tzinfo=self.timezone)
            self.end_time = datetime.time(23, 59).replace(tzinfo=self.timezone)
        elif start_time_string:
            if not end_time_string:
                raise ValueError(
                    "end_time_string must be specified if start_time_string is"
                )
            self.start_time = _GetTime(start_time_string).replace(
                tzinfo=self.timezone
            )
            self.end_time = _GetTime(end_time_string).replace(tzinfo=self.timezone)
        else:
            if end_time_string:
                raise ValueError(
                    "start_time_string must be specified if end_time_string is"
                )
            self.start_time = None
            self.end_time = None

    def GetMatch(self, start):
        """Returns the next match after 'start'.

        Arguments:
          start: a datetime to start from. Matches will start from after this
            time. This may be in any pytz time zone, or it may be
            timezone-naive (interpreted as UTC).

        Returns:
          a datetime object in the timezone of the input 'start'
        """
        if self.start_time is None:
            # Unrestricted interval: simply step forward one interval.
            return start + datetime.timedelta(seconds=self.seconds)
        t = _ToTimeZone(start, self.timezone)
        start_time = self._GetPreviousDateTime(t, self.start_time)
        t_delta = t - start_time
        # BUG FIX: convert the timedelta to seconds using 86400 (24 * 60 * 60)
        # seconds per day; the previous factor, 60 * 24, counted minutes.
        t_delta_seconds = t_delta.days * 24 * 60 * 60 + t_delta.seconds
        # BUG FIX: use floor division so num_intervals is an integer and
        # interval_time stays aligned to the interval grid; under Python 3
        # true division would produce a fractional interval count.
        num_intervals = (t_delta_seconds + self.seconds) // self.seconds
        interval_time = start_time + datetime.timedelta(
            seconds=(num_intervals * self.seconds)
        )
        if self.timezone:
            interval_time = self.timezone.normalize(interval_time)
        next_start_time = self._GetNextDateTime(t, self.start_time)
        # Use the grid-aligned time while we remain inside today's window;
        # otherwise fall through to the next window's start.
        if (
            self._TimeIsInRange(t)
            and self._TimeIsInRange(interval_time)
            and interval_time < next_start_time
        ):
            result = interval_time
        else:
            result = next_start_time
        return _ToTimeZone(result, start.tzinfo)

    def _TimeIsInRange(self, t):
        """Returns true if 't' falls between start_time and end_time, inclusive.

        Arguments:
          t: a datetime object, in self.timezone

        Returns:
          a boolean
        """
        previous_start_time = self._GetPreviousDateTime(t, self.start_time)
        previous_end_time = self._GetPreviousDateTime(t, self.end_time)
        if previous_start_time > previous_end_time:
            # The window opened more recently than it closed: t is inside it.
            return True
        else:
            # Otherwise t is in range only if it sits exactly on the close.
            return t == previous_end_time

    @staticmethod
    def _GetPreviousDateTime(t, target_time):
        """Returns the latest datetime <= 't' that has the time target_time.

        Arguments:
          t: a datetime.datetime object, in self.timezone
          target_time: a datetime.time object, in self.timezone

        Returns:
          a datetime.datetime object, in self.timezone
        """
        date = t.date()
        while True:
            result = IntervalTimeSpecification._CombineDateAndTime(date, target_time)
            if result <= t:
                return result
            date -= datetime.timedelta(days=1)

    @staticmethod
    def _GetNextDateTime(t, target_time):
        """Returns the earliest datetime > 't' that has the time target_time.

        Arguments:
          t: a datetime.datetime object, in self.timezone
          target_time: a time object, in self.timezone

        Returns:
          a datetime.datetime object, in self.timezone
        """
        date = t.date()
        while True:
            result = IntervalTimeSpecification._CombineDateAndTime(date, target_time)
            if result > t:
                return result
            date += datetime.timedelta(days=1)

    @staticmethod
    def _CombineDateAndTime(date, time):
        """Creates a datetime object from date and time objects.

        This is similar to the datetime.combine method, but its timezone
        calculations are designed to work with pytz.

        Arguments:
          date: a datetime.date object, in any timezone
          time: a datetime.time object, in any timezone

        Returns:
          a datetime.datetime object, in the timezone of the input 'time'
        """
        if time.tzinfo:
            naive_result = datetime.datetime(
                date.year, date.month, date.day, time.hour, time.minute, time.second
            )
            try:
                return time.tzinfo.localize(naive_result, is_dst=None)
            except AmbiguousTimeError:
                # DST fall-back: both interpretations exist; take the earlier.
                return min(
                    time.tzinfo.localize(naive_result, is_dst=True),
                    time.tzinfo.localize(naive_result, is_dst=False),
                )
            except NonExistentTimeError:
                # DST spring-forward gap: advance minute by minute until the
                # wall-clock time exists again.
                while True:
                    naive_result += datetime.timedelta(minutes=1)
                    try:
                        return time.tzinfo.localize(naive_result, is_dst=None)
                    except NonExistentTimeError:
                        pass
        else:
            return datetime.datetime.combine(date, time)
class SpecificTimeSpecification(TimeSpecification):
    """Specific time specification.
    A Specific interval is more complex, but defines a certain time to run and
    the days that it should run. It has the following attributes:
    time - the time of day to run, as 'HH:MM'
    ordinals - first, second, third &c, as a set of integers in 1..5
    months - the months that this should run, as a set of integers in 1..12
    weekdays - the days of the week that this should run, as a set of integers,
    0=Sunday, 6=Saturday
    timezone - the optional timezone as a string for this specification.
    Defaults to UTC - valid entries are things like Australia/Victoria
    or PST8PDT.
    A specific time schedule can be quite complex. A schedule could look like
    this:
    '1st,third sat,sun of jan,feb,mar 09:15'
    In this case, ordinals would be {1,3}, weekdays {0,6}, months {1,2,3} and
    time would be '09:15'.
    """

    def __init__(
        self,
        ordinals=None,
        weekdays=None,
        months=None,
        monthdays=None,
        timestr="00:00",
        timezone=None,
    ):
        """Initialize and validate the day/month/time constraint sets.

        Missing sets default to "all" (every ordinal, weekday, month).
        weekdays and monthdays are mutually exclusive.

        Raises:
          ValueError: on out-of-range ordinals/weekdays/months/monthdays, or
            if both weekdays and monthdays are supplied.
        """
        super(SpecificTimeSpecification, self).__init__()
        if weekdays and monthdays:
            raise ValueError("cannot supply both monthdays and weekdays")
        if ordinals is None:
            # No ordinals specified: match every occurrence (1st through 5th).
            self.ordinals = set(range(1, 6))
        else:
            self.ordinals = set(ordinals)
            if self.ordinals and (min(self.ordinals) < 1 or max(self.ordinals) > 5):
                raise ValueError(
                    "ordinals must be between 1 and 5 inclusive, " "got %r" % ordinals
                )
        if weekdays is None:
            self.weekdays = set(range(7))
        else:
            self.weekdays = set(weekdays)
            if self.weekdays and (min(self.weekdays) < 0 or max(self.weekdays) > 6):
                raise ValueError(
                    "weekdays must be between "
                    "0 (sun) and 6 (sat) inclusive, "
                    "got %r" % weekdays
                )
        if months is None:
            self.months = set(range(1, 13))
        else:
            self.months = set(months)
            if self.months and (min(self.months) < 1 or max(self.months) > 12):
                raise ValueError(
                    "months must be between "
                    "1 (jan) and 12 (dec) inclusive, "
                    "got %r" % months
                )
        if not monthdays:
            self.monthdays = set()
        else:
            if min(monthdays) < 1:
                raise ValueError("day of month must be greater than 0")
            if max(monthdays) > 31:
                raise ValueError("day of month must be less than 32")
            if self.months:
                for month in self.months:
                    # Year 4 is a leap year, so February is checked with its
                    # maximum possible length (29 days).
                    _, ndays = calendar.monthrange(4, month)
                    if min(monthdays) <= ndays:
                        break
                else:
                    # No allowed month is long enough for the smallest monthday.
                    raise ValueError(
                        "invalid day of month, "
                        "got day %r of month %r" % (max(monthdays), month)
                    )
            self.monthdays = set(monthdays)
        self.time = _GetTime(timestr)
        self.timezone = _GetTimezone(timezone)

    def _MatchingDays(self, year, month):
        """Returns matching days for the given year and month.
        For the given year and month, return the days that match this instance's
        day specification, based on either (a) the ordinals and weekdays, or
        (b) the explicitly specified monthdays. If monthdays are specified,
        dates that fall outside the range of the month will not be returned.
        Arguments:
        year: the year as an integer
        month: the month as an integer, in range 1-12
        Returns:
        a list of matching days, as ints in range 1-31
        """
        start_day, last_day = calendar.monthrange(year, month)
        if self.monthdays:
            return sorted([day for day in self.monthdays if day <= last_day])
        out_days = []
        # calendar.monthrange reports Monday=0; shift to this class's
        # Sunday=0 convention.
        start_day = (start_day + 1) % 7
        for ordinal in self.ordinals:
            for weekday in self.weekdays:
                # Day of month of the ordinal-th occurrence of this weekday.
                day = ((weekday - start_day) % 7) + 1
                day += 7 * (ordinal - 1)
                if day <= last_day:
                    out_days.append(day)
        return sorted(out_days)

    def _NextMonthGenerator(self, start, matches):
        """Creates a generator that produces results from the set 'matches'.
        Matches must be >= 'start'. If none match, the wrap counter is incremented,
        and the result set is reset to the full set. Yields a 2-tuple of (match,
        wrapcount).
        Arguments:
        start: first set of matches will be >= this value (an int)
        matches: the set of potential matches (a sequence of ints)
        Yields:
        a two-tuple of (match, wrap counter). match is an int in range (1-12),
        wrapcount is a int indicating how many times we've wrapped around.
        """
        potential = matches = sorted(matches)
        # after starts one below 'start' so the first filter keeps >= start.
        after = start - 1
        wrapcount = 0
        while True:
            potential = [x for x in potential if x > after]
            if not potential:
                # Wrapped past December: start over and count the year change.
                wrapcount += 1
                potential = matches
            after = potential[0]
            yield (after, wrapcount)

    def GetMatch(self, start):
        """Returns the next match after time start.
        Arguments:
        start: a datetime to start from. Matches will start from after this time.
        This may be in any pytz time zone, or it may be timezone-naive
        (interpreted as UTC).
        Returns:
        a datetime object in the timezone of the input 'start'
        """
        # Work in this spec's timezone as a naive datetime.
        start_time = _ToTimeZone(start, self.timezone).replace(tzinfo=None)
        if self.months:
            months = self._NextMonthGenerator(start_time.month, self.months)
        while True:
            month, yearwraps = next(months)
            candidate_month = start_time.replace(
                day=1, month=month, year=start_time.year + yearwraps
            )
            day_matches = self._MatchingDays(candidate_month.year, month)
            if (candidate_month.year, candidate_month.month) == (
                start_time.year,
                start_time.month,
            ):
                # Same month as 'start': drop days already past, and drop
                # today if its scheduled time has already gone by.
                day_matches = [x for x in day_matches if x >= start_time.day]
                while (
                    day_matches
                    and day_matches[0] == start_time.day
                    and start_time.time() >= self.time
                ):
                    day_matches.pop(0)
            while day_matches:
                out = candidate_month.replace(
                    day=day_matches[0],
                    hour=self.time.hour,
                    minute=self.time.minute,
                    second=0,
                    microsecond=0,
                )
                if self.timezone and pytz is not None:
                    try:
                        out = self.timezone.localize(out, is_dst=None)
                    except AmbiguousTimeError:
                        # DST fall-back: accept pytz's default interpretation.
                        out = self.timezone.localize(out)
                    except NonExistentTimeError:
                        # DST spring-forward gap: advance in one-hour steps
                        # (bounded to 24) until the wall-clock time exists.
                        for _ in range(24):
                            out += datetime.timedelta(minutes=60)
                            try:
                                out = self.timezone.localize(out)
                            except NonExistentTimeError:
                                continue
                            break
                return _ToTimeZone(out, start.tzinfo)
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""This module supports asynchronous I/O on multiple file descriptors."""
from google.appengine.api.remote_socket._remote_socket import select, error
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Repository for all builtin handlers information.
On initialization, this file generates a list of builtin handlers that have
associated app.yaml information. This file can then be called to read that
information and make it available.
"""
import logging
import os
# Directory this module lives in; builtin-handler subdirectories sit beside it.
DEFAULT_DIR = os.path.join(os.path.dirname(__file__))
# Module-level state, initialized lazily by set_builtins_dir() on first use.
_handler_dir = None
_available_builtins = None
# BUILTINS_NOT_AVAIABLE_IN_PYTHON27 = set(['datastore_admin', 'mapreduce'])
# Filename of a builtin's yaml: runtime-specific template and generic default.
INCLUDE_FILENAME_TEMPLATE = "include-%s.yaml"
DEFAULT_INCLUDE_FILENAME = "include.yaml"
class InvalidBuiltinName(Exception):
    """Raised whenever a builtin handler name is specified that is not found."""
def reset_builtins_dir():
    """Restore the builtins directory to its default location and rescan."""
    set_builtins_dir(DEFAULT_DIR)
def set_builtins_dir(path):
    """Point the module at *path* and reinitialize the builtins registry."""
    global _handler_dir, _available_builtins
    # Reset both pieces of module state before rescanning the directory.
    _handler_dir = path
    _available_builtins = []
    _initialize_builtins()
def _initialize_builtins():
    """Scan the immediate subdirectories of the builtins module.

    Any entry of _handler_dir that has an associated yaml file is recorded
    in _available_builtins.
    """
    _available_builtins.extend(
        entry
        for entry in os.listdir(_handler_dir)
        if os.path.isfile(_get_yaml_path(entry, ""))
    )
def _get_yaml_path(builtin_name, runtime):
    """Expected path to a builtin's yaml file; no existence check on the default.

    Prefers the runtime-specific include file when a runtime is given and the
    file exists, otherwise falls back to the generic include.yaml.
    """
    candidate = os.path.join(
        _handler_dir, builtin_name, INCLUDE_FILENAME_TEMPLATE % runtime
    )
    if runtime and os.path.exists(candidate):
        return candidate
    return os.path.join(_handler_dir, builtin_name, DEFAULT_INCLUDE_FILENAME)
def get_yaml_path(builtin_name, runtime=""):
    """Returns the full path to a yaml file by giving the builtin module's name.

    Args:
        builtin_name: single word name of builtin handler
        runtime: name of the runtime

    Raises:
        InvalidBuiltinName: if the handler does not exist in the expected
            directory.

    Returns:
        the absolute path to a valid builtin handler include.yaml file
    """
    # Lazily initialize module state on first call.
    if _handler_dir is None:
        set_builtins_dir(DEFAULT_DIR)
    known = set(_available_builtins)
    if builtin_name in known:
        return _get_yaml_path(builtin_name, runtime)
    raise InvalidBuiltinName(
        "%s is not the name of a valid builtin.\n"
        "Available handlers are: %s" % (builtin_name, ", ".join(sorted(known)))
    )
def get_yaml_basepath():
    """Returns the full path of the directory in which builtins are located."""
    # Lazily initialize module state on first call.
    if _handler_dir is None:
        set_builtins_dir(DEFAULT_DIR)
    return _handler_dir
|
'Simple, schema-based database abstraction layer for the datastore.\n\nModeled after Django\'s abstraction layer on top of SQL databases,\nhttp://www.djangoproject.com/documentation/mode_api/. Ours is a little simpler\nand a lot less code because the datastore is so much simpler than SQL\ndatabases.\n\nThe programming model is to declare Python subclasses of the Model class,\ndeclaring datastore properties as class members of that class. So if you want to\npublish a story with title, body, and created date, you would do it like this:\n\n class Story(db.Model):\n title = db.StringProperty()\n body = db.TextProperty()\n created = db.DateTimeProperty(auto_now_add=True)\n\nYou can create a new Story in the datastore with this usage pattern:\n\n story = Story(title=\'My title\')\n story.body = \'My body\'\n story.put()\n\nYou query for Story entities using built in query interfaces that map directly\nto the syntax and semantics of the datastore:\n\n stories = Story.all().filter(\'date >=\', yesterday).order(\'-date\')\n for story in stories:\n print story.title\n\nThe Property declarations enforce types by performing validation on assignment.\nFor example, the DateTimeProperty enforces that you assign valid datetime\nobjects, and if you supply the "required" option for a property, you will not\nbe able to assign None to that property.\n\nWe also support references between models, so if a story has comments, you\nwould represent it like this:\n\n class Comment(db.Model):\n story = db.ReferenceProperty(Story)\n body = db.TextProperty()\n\nWhen you get a story out of the datastore, the story reference is resolved\nautomatically the first time it is referenced, which makes it easy to use\nmodel instances without performing additional queries by hand:\n\n comment = Comment.get(key)\n print comment.story.title\n\nLikewise, you can access the set of comments that refer to each story through\nthis property through a reverse reference called comment_set, which is a 
Query\npreconfigured to return all matching comments:\n\n story = Story.get(key)\n for comment in story.comment_set:\n print comment.body\n\n'
import copy
import datetime
import logging
import re
import time
import urllib.parse
import warnings
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import namespace_manager
from google.appengine.api import users
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import datastore_query
# Re-exported datastore exception types, so callers can catch them via the
# db module without importing datastore_errors directly.
Error = datastore_errors.Error
BadValueError = datastore_errors.BadValueError
BadPropertyError = datastore_errors.BadPropertyError
BadRequestError = datastore_errors.BadRequestError
EntityNotFoundError = datastore_errors.EntityNotFoundError
BadArgumentError = datastore_errors.BadArgumentError
QueryNotFoundError = datastore_errors.QueryNotFoundError
TransactionNotFoundError = datastore_errors.TransactionNotFoundError
Rollback = datastore_errors.Rollback
TransactionFailedError = datastore_errors.TransactionFailedError
BadFilterError = datastore_errors.BadFilterError
BadQueryError = datastore_errors.BadQueryError
BadKeyError = datastore_errors.BadKeyError
InternalError = datastore_errors.InternalError
NeedIndexError = datastore_errors.NeedIndexError
ReferencePropertyResolveError = datastore_errors.ReferencePropertyResolveError
Timeout = datastore_errors.Timeout
CommittedButStillApplying = datastore_errors.CommittedButStillApplying
# Historical alias: property validation failures raise BadValueError.
ValidationError = BadValueError
# Re-exported datastore value types usable as model property values.
Key = datastore_types.Key
Category = datastore_types.Category
Link = datastore_types.Link
Email = datastore_types.Email
GeoPt = datastore_types.GeoPt
IM = datastore_types.IM
PhoneNumber = datastore_types.PhoneNumber
PostalAddress = datastore_types.PostalAddress
Rating = datastore_types.Rating
Text = datastore_types.Text
Blob = datastore_types.Blob
ByteString = datastore_types.ByteString
BlobKey = datastore_types.BlobKey
# Re-exported capability and read-consistency constants.
READ_CAPABILITY = datastore.READ_CAPABILITY
WRITE_CAPABILITY = datastore.WRITE_CAPABILITY
STRONG_CONSISTENCY = datastore.STRONG_CONSISTENCY
EVENTUAL_CONSISTENCY = datastore.EVENTUAL_CONSISTENCY
# Re-exported transaction propagation policies.
NESTED = datastore_rpc.TransactionOptions.NESTED
MANDATORY = datastore_rpc.TransactionOptions.MANDATORY
ALLOWED = datastore_rpc.TransactionOptions.ALLOWED
INDEPENDENT = datastore_rpc.TransactionOptions.INDEPENDENT
KEY_RANGE_EMPTY = 'Empty'
"Indicates the given key range is empty and the datastore's\nautomatic ID allocator will not assign keys in this range to new\nentities.\n"
KEY_RANGE_CONTENTION = 'Contention'
"Indicates the given key range is empty but the datastore's\nautomatic ID allocator may assign new entities keys in this range.\nHowever it is safe to manually assign keys in this range\nif either of the following is true:\n\n - No other request will insert entities with the same kind and parent\n as the given key range until all entities with manually assigned\n keys from this range have been written.\n - Overwriting entities written by other requests with the same kind\n and parent as the given key range is acceptable.\n\nThe datastore's automatic ID allocator will not assign a key to a new\nentity that will overwrite an existing entity, so once the range is\npopulated there will no longer be any contention.\n"
KEY_RANGE_COLLISION = 'Collision'
"Indicates that entities with keys inside the given key range\nalready exist and writing to this range will overwrite those entities.\nAdditionally the implications of KEY_RANGE_COLLISION apply. If\noverwriting entities that exist in this range is acceptable it is safe\nto use the given range.\n\nThe datastore's automatic ID allocator will never assign a key to\na new entity that will overwrite an existing entity so entities\nwritten by the user to this range will never be overwritten by\nan entity with an automatically assigned key.\n"
_kind_map = {}
_SELF_REFERENCE = ()
_RESERVED_WORDS = (['key_name'])
class NotSavedError(Error):
    """Raised when a saved-object action is performed on a non-saved object."""
class KindError(BadValueError):
    """Raised when an entity is used with an incorrect Model class."""
class PropertyError(Error):
    """Raised when a non-existent property is referenced."""
class DuplicatePropertyError(Error):
    """Raised when a property is duplicated in a model definition."""
class ConfigurationError(Error):
    """Raised when a property or model is improperly configured."""
class ReservedWordError(Error):
    """Raised when a property is defined for a reserved word."""
class DerivedPropertyError(Error):
    """Raised when attempting to assign a value to a derived property."""
# Value types that may be stored in a (non-Expando) model property.
# NOTE(review): the decompiled original listed str/int multiple times (Python 2
# basestring/unicode/long residue); deduplicated here for Python 3.
_ALLOWED_PROPERTY_TYPES = set([
    str, bool, int, float, Key,
    datetime.datetime, datetime.date, datetime.time,
    Blob, datastore_types.EmbeddedEntity, ByteString, Text,
    users.User, Category, Link, Email, GeoPt, IM, PhoneNumber,
    PostalAddress, Rating, BlobKey,
])

# Expando models additionally accept lists, tuples and None.
_ALLOWED_EXPANDO_PROPERTY_TYPES = _ALLOWED_PROPERTY_TYPES | set(
    [list, tuple, type(None)])

# Comparison operators accepted in query filter strings.
_OPERATORS = ['<', '<=', '>', '>=', '=', '==', '!=', 'in']

# Matches "<property> [<operator>]" in a filter string, e.g. "age >=".
_FILTER_REGEX = re.compile(
    '^\\s*([^\\s]+)(\\s+(%s)\\s*)?$' % '|'.join(_OPERATORS),
    re.IGNORECASE | re.UNICODE)
def class_for_kind(kind):
    """Return base-class responsible for implementing kind.

    Args:
        kind: Entity kind string.

    Returns:
        Class implementation for kind.

    Raises:
        KindError: when there is no implementation for kind.
    """
    try:
        return _kind_map[kind]
    except KeyError:
        # Original code raised a bare string (invalid); raise the documented
        # KindError instead.
        raise KindError("No implementation for kind '%s'" % kind)
def check_reserved_word(attr_name):
    """Raise an exception if attribute name is a reserved word.

    Args:
        attr_name: Name to check to see if it is a reserved word.

    Raises:
        ReservedWordError: when attr_name is determined to be a reserved word.
    """
    # Names of the form __...__ are reserved (dunder names).
    if datastore_types.RESERVED_PROPERTY_NAME.match(attr_name):
        raise ReservedWordError(
            "Cannot define property. All names both beginning and ending "
            "with '__' are reserved.")
    # Reject explicit reserved words and anything that would shadow a Model
    # attribute or method.
    if attr_name in _RESERVED_WORDS or attr_name in dir(Model):
        raise ReservedWordError(
            "Cannot define property using reserved word '%(attr_name)s'. "
            "If you would like to use this name in the datastore consider "
            "using a different name like %(attr_name)s_ and adding "
            "name='%(attr_name)s' to the parameter list of the property "
            "definition." % locals())
def query_descendants(model_instance):
    """Returns a query for all the descendants of a model instance.

    Args:
        model_instance: Model instance to find the descendants of.

    Returns:
        Query that will retrieve all entities that have the given model
        instance as an ancestor. Unlike normal ancestor queries, this does
        not include the ancestor itself.
    """
    # A key-inequality filter on "> ancestor key" excludes the ancestor while
    # keeping every entity below it in the key ordering.
    result = Query().ancestor(model_instance)
    result.filter(datastore_types.KEY_SPECIAL_PROPERTY + ' >',
                  model_instance.key())
    return result
def model_to_protobuf(model_instance, _entity_class=datastore.Entity):
    """Encodes a model instance as a protocol buffer.

    Args:
        model_instance: Model instance to encode.

    Returns:
        entity_pb.EntityProto representation of the model instance.
    """
    # Populate the backing datastore Entity, then serialize it.
    return model_instance._populate_internal_entity().ToPb()
def model_from_protobuf(pb, _entity_class=datastore.Entity):
    """Decodes a model instance from a protocol buffer.

    Args:
        pb: The protocol buffer representation of the model instance. Can be
            an entity_pb.EntityProto or str encoding of an entity_pb.EntityProto.

    Returns:
        Model instance resulting from decoding the protocol buffer.
    """
    entity = _entity_class.FromPb(pb)
    # Dispatch to the registered Model subclass for the entity's kind.
    return class_for_kind(entity.kind()).from_entity(entity)
def model_is_projection(model_instance):
    """Returns true if the given db.Model instance only contains a projection
    of the full entity.
    """
    # Short-circuits to a falsy value when no backing entity exists yet.
    return model_instance._entity and model_instance._entity.is_projection()
def _initialize_properties(model_class, name, bases, dct):
    """Initialize Property attributes for Model-class.

    Collects every Property descriptor from the base classes and from the
    new class definition into model_class._properties, rejecting duplicates.

    Args:
        model_class: Model class to initialize properties for.
        name: Name of the new class.
        bases: Tuple of base classes.
        dct: Class definition dictionary.

    Raises:
        DuplicatePropertyError: if a property is defined both in the new
            class and a base, or separately in two unrelated bases.
    """
    model_class._properties = {}
    property_source = {}

    def get_attr_source(name, cls):
        # Find the class in the MRO that actually defines `name`.
        for src_cls in cls.mro():
            if name in src_cls.__dict__:
                return src_cls

    defined = set()
    for base in bases:
        if hasattr(base, '_properties'):
            property_keys = set(base._properties.keys())
            duplicate_property_keys = defined & property_keys
            for dupe_prop_name in duplicate_property_keys:
                # A property inherited twice is only an error when it
                # originates from two distinct defining classes (a shared
                # ancestor in a diamond is fine).
                old_source = property_source[dupe_prop_name] = get_attr_source(
                    dupe_prop_name, property_source[dupe_prop_name])
                new_source = get_attr_source(dupe_prop_name, base)
                if old_source != new_source:
                    raise DuplicatePropertyError(
                        'Duplicate property, %s, is inherited from both %s and %s.'
                        % (dupe_prop_name, old_source.__name__,
                           new_source.__name__))
            property_keys -= duplicate_property_keys
            if property_keys:
                defined |= property_keys
                property_source.update(dict.fromkeys(property_keys, base))
                model_class._properties.update(base._properties)

    for attr_name in dct.keys():
        attr = dct[attr_name]
        if isinstance(attr, Property):
            check_reserved_word(attr_name)
            if attr_name in defined:
                raise DuplicatePropertyError('Duplicate property: %s' % attr_name)
            defined.add(attr_name)
            model_class._properties[attr_name] = attr
            # Tell the descriptor which class/attribute it now belongs to.
            attr.__property_config__(model_class, attr_name)

    model_class._all_properties = frozenset(
        prop.name for name, prop in model_class._properties.items())
    model_class._unindexed_properties = frozenset(
        prop.name for name, prop in model_class._properties.items()
        if not prop.indexed)
def _coerce_to_key(value):
    """Returns the value's key.

    Args:
        value: a Model or Key instance or string encoded key or None

    Returns:
        The corresponding key, or None if value is None.

    Raises:
        BadArgumentError: if more than one model or key is supplied.
    """
    if value is None:
        return None
    # NormalizeAndTypeCheck returns a (sequence, was_multiple) pair.
    value, multiple = datastore.NormalizeAndTypeCheck(
        value, (Model, Key, str))
    if len(value) > 1:
        raise BadArgumentError('Expected only one model or key')
    value = value[0]
    if isinstance(value, Model):
        return value.key()
    elif isinstance(value, str):
        # String-encoded keys are decoded into Key objects.
        return Key(value)
    else:
        return value
class PropertiedClass(type):
    """Meta-class for initializing Model classes properties.

    By using this meta-class, Property descriptors are notified about which
    class they belong to and what attribute they are associated with, and can
    do appropriate initialization via __property_config__. Duplicate
    properties are not permitted.
    """

    def __init__(cls, name, bases, dct, map_kind=True):
        """Initializes a class that might have property definitions.

        Loads all properties for this model and its base classes into a
        dictionary for easy reflection, and configures each property defined
        in the new class.

        Args:
            cls: Class being initialized.
            name: Name of new class.
            bases: Base classes of new class.
            dct: Dictionary of new definitions for class.
            map_kind: Whether to register the class in the kind map.

        Raises:
            DuplicatePropertyError: when a property is duplicated either in
                the new class or separately in two base classes.
            ReservedWordError: when a property is given a reserved name.
        """
        super(PropertiedClass, cls).__init__(name, bases, dct)
        _initialize_properties(cls, name, bases, dct)
        if map_kind:
            # Register so class_for_kind()/model_from_protobuf() can find us.
            _kind_map[cls.kind()] = cls
# Sentinel returned by Property.get_updated_value_for_datastore() when a
# property does not auto-update on put(). Must be a unique object(), not ():
# the interned empty tuple would compare identical to legitimate values.
AUTO_UPDATE_UNCHANGED = object()
class Property(object):
    """A Property is an attribute of a Model.

    It defines the type of the attribute, which determines how it is stored
    in the datastore and how the property values are validated. The simplest
    example of a property is a StringProperty:

        class Story(db.Model):
            title = db.StringProperty()
    """

    # Global counter used to record declaration order of properties.
    creation_counter = 0

    def __init__(self, verbose_name=None, name=None, default=None,
                 required=False, validator=None, choices=None, indexed=True):
        """Initializes this Property with the given options.

        Args:
            verbose_name: User friendly name of property.
            name: Storage name for property. By default, uses the attribute
                name as assigned in the Model sub-class.
            default: Default value for property if none is assigned.
            required: Whether property is required.
            validator: User provided method used for validation.
            choices: User provided set of valid property values.
            indexed: Whether property is indexed.
        """
        self.verbose_name = verbose_name
        self.name = name
        self.default = default
        self.required = required
        self.validator = validator
        self.choices = choices
        self.indexed = indexed
        # Snapshot then bump the class-wide counter to preserve declaration
        # order across all Property instances.
        self.creation_counter = Property.creation_counter
        Property.creation_counter += 1

    def __property_config__(self, model_class, property_name):
        """Configure property, connecting it to its model.

        Args:
            model_class: Model class which Property will belong to.
            property_name: Attribute name this property was assigned to;
                used as the storage name unless one was given explicitly.
        """
        self.model_class = model_class
        if self.name is None:
            self.name = property_name

    def __get__(self, model_instance, model_class):
        """Returns the value for this property on the given model instance."""
        if model_instance is None:
            # Accessed on the class itself: return the descriptor.
            return self
        try:
            return getattr(model_instance, self._attr_name())
        except AttributeError:
            return None

    def __set__(self, model_instance, value):
        """Sets the value for this property on the given model instance."""
        # Validate before storing so invalid values never reach the instance.
        value = self.validate(value)
        setattr(model_instance, self._attr_name(), value)

    def default_value(self):
        """Default value for unassigned values.

        Returns:
            Default value as provided by __init__(default).
        """
        return self.default

    def validate(self, value):
        """Assert that provided value is compatible with this property.

        Args:
            value: Value to validate against this Property.

        Returns:
            A valid value, either the input unchanged or adapted to the
            required type.

        Raises:
            BadValueError: if the value is not appropriate for this property.
        """
        if self.empty(value):
            if self.required:
                raise BadValueError('Property %s is required' % self.name)
        else:
            if self.choices:
                if value not in self.choices:
                    raise BadValueError(
                        'Property %s is %r; must be one of %r'
                        % (self.name, value, self.choices))
        if self.validator is not None:
            self.validator(value)
        return value

    def empty(self, value):
        """Determine if value is empty in the context of this property.

        Subclasses (e.g. for bool) override this when plain falsiness is not
        the right test.
        """
        return not value

    def get_value_for_datastore(self, model_instance):
        """Datastore representation of this property.

        Args:
            model_instance: Instance to fetch datastore value from.

        Returns:
            Datastore representation of the model value in a form that is
            appropriate for storing in the datastore.
        """
        return self.__get__(model_instance, model_instance.__class__)

    def get_updated_value_for_datastore(self, model_instance):
        """Determine new value for an auto-updated property.

        Properties that auto-update on put() override this; for all others
        the sentinel AUTO_UPDATE_UNCHANGED is returned.
        """
        return AUTO_UPDATE_UNCHANGED

    def make_value_from_datastore_index_value(self, index_value):
        """Convert a projection-index value into a model attribute value."""
        value = datastore_types.RestoreFromIndexValue(index_value, self.data_type)
        return self.make_value_from_datastore(value)

    def make_value_from_datastore(self, value):
        """Native representation of this property.

        Returns the value unchanged; subclasses override when the model
        datatype differs from the entity datatype.
        """
        return value

    def _require_parameter(self, kwds, parameter, value):
        """Sets kwds[parameter] to value.

        Raises:
            ConfigurationError: if kwds[parameter] exists and is not value.
        """
        if parameter in kwds and kwds[parameter] != value:
            raise ConfigurationError('%s must be %s.' % (parameter, value))
        kwds[parameter] = value

    def _attr_name(self):
        """Attribute name we use for this property in model instances.

        DO NOT USE THIS METHOD.
        """
        return '_' + self.name

    # Python type stored by this property; subclasses override.
    data_type = str

    def datastore_type(self):
        """Deprecated backwards-compatible accessor method for self.data_type."""
        return self.data_type
class Index(datastore._BaseIndex):
    """A datastore index."""
    # Expose the private _BaseIndex accessors under public names.
    id = datastore._BaseIndex._Id
    kind = datastore._BaseIndex._Kind
    has_ancestor = datastore._BaseIndex._HasAncestor
    properties = datastore._BaseIndex._Properties
class Model(object, metaclass=PropertiedClass):
'Model is the superclass of all object entities in the datastore.\n\n The programming model is to declare Python subclasses of the Model class,\n declaring datastore properties as class members of that class. So if you want\n to publish a story with title, body, and created date, you would do it like\n this:\n\n class Story(db.Model):\n title = db.StringProperty()\n body = db.TextProperty()\n created = db.DateTimeProperty(auto_now_add=True)\n\n A model instance can have a single parent. Model instances without any\n parent are root entities. It is possible to efficiently query for\n instances by their shared parent. All descendents of a single root\n instance also behave as a transaction group. This means that when you\n work one member of the group within a transaction all descendents of that\n root join the transaction. All operations within a transaction on this\n group are ACID.\n '
def __new__(*args, **unused_kwds):
"Allow subclasses to call __new__() with arguments.\n\n Do NOT list 'cls' as the first argument, or in the case when\n the 'unused_kwds' dictionary contains the key 'cls', the function\n will complain about multiple argument values for 'cls'.\n\n Raises:\n TypeError if there are no positional arguments.\n "
if args:
cls = args[0]
else:
raise ('object.__new__(): not enough arguments')
return (cls)
def __init__(self, parent=None, key_name=None, _app=None, _from_entity=False, **kwds):
"Creates a new instance of this model.\n\n To create a new entity, you instantiate a model and then call put(),\n which saves the entity to the datastore:\n\n person = Person()\n person.name = 'Bret'\n person.put()\n\n You can initialize properties in the model in the constructor with keyword\n arguments:\n\n person = Person(name='Bret')\n\n We initialize all other properties to the default value (as defined by the\n properties in the model definition) if they are not provided in the\n constructor.\n\n Args:\n parent: Parent instance for this instance or None, indicating a top-\n level instance.\n key_name: Name for new model instance.\n _from_entity: Intentionally undocumented.\n kwds: Keyword arguments mapping to properties of model. Also:\n key: Key instance for this instance, if provided makes parent and\n key_name redundant (they do not need to be set but if they are\n they must match the key).\n "
namespace = None
if (_app, tuple):
if ((_app) != 2):
raise ('_app must have 2 values if type is tuple.')
(_app, namespace) = _app
key = ('key', None)
if (key is not None):
if (key, (tuple, list)):
key = (*key)
if (key, str):
key = ()
if (not (key, Key)):
raise (('Expected Key type; received %s (is %s)' % (key, key.__class__.__name__)))
if (not ()):
raise ('Key must have an id or name')
if (() != ()):
raise (('Expected Key kind to be %s; received %s' % ((), ())))
if ((_app is not None) and (() != _app)):
raise (('Expected Key app to be %s; received %s' % (_app, ())))
if ((namespace is not None) and (() != namespace)):
raise (('Expected Key namespace to be %s; received %s' % (namespace, ())))
if (key_name and (key_name != ())):
raise ('Cannot use key and key_name at the same time with different values')
if (parent and (parent != ())):
raise ('Cannot use key and parent at the same time with different values')
namespace = ()
self._key = key
self._key_name = None
self._parent = None
self._parent_key = None
else:
if (key_name == ''):
raise ('Name cannot be empty.')
elif ((key_name is not None) and (not (key_name, str))):
raise (('Name must be string type, not %s' % key_name.__class__.__name__))
if (parent is not None):
if (not (parent, (Model, Key))):
raise (('Expected Model type; received %s (is %s)' % (parent, parent.__class__.__name__)))
if ((parent, Model) and (not ())):
raise (('%s instance must have a complete key before it can be used as a parent.' % ()))
if (parent, Key):
self._parent_key = parent
self._parent = None
else:
self._parent_key = ()
self._parent = parent
else:
self._parent_key = None
self._parent = None
self._key_name = key_name
self._key = None
if (self._parent_key is not None):
if ((namespace is not None) and (() != namespace)):
raise (('Expected parent namespace to be %r; received %r' % (namespace, ())))
namespace = ()
self._entity = None
if ((_app is not None) and (_app, Key)):
raise (("_app should be a string; received Key('%s'):\n This may be the result of passing 'key' as a positional parameter in SDK 1.2.6. Please only pass 'key' as a keyword parameter." % _app))
if (namespace is None):
namespace = ()
self._app = _app
self.__namespace = namespace
is_projection = False
if ((_from_entity, datastore.Entity) and ()):
self._entity = _from_entity
is_projection = ()
del self._key_name
del self._key
for prop in (()):
if (prop.name in kwds):
value = kwds[prop.name]
elif is_projection:
continue
else:
value = ()
try:
(self, value)
except DerivedPropertyError:
if ((prop.name in kwds) and (not _from_entity)):
raise
def key(self):
'Unique key for this entity.\n\n This property is only available if this entity is already stored in the\n datastore or if it has a full key, so it is available if this entity was\n fetched returned from a query, or after put() is called the first time\n for new entities, or if a complete key was given when constructed.\n\n Returns:\n Datastore key of persisted entity.\n\n Raises:\n NotSavedError when entity is not persistent.\n '
if ():
return ()
elif self._key:
return self._key
elif self._key_name:
parent = (self._parent_key or (self._parent and ()))
self._key = ((), self._key_name)
return self._key
else:
raise ()
def __set_property(self, entity, name, datastore_value):
if (datastore_value == []):
(name, None)
else:
entity[name] = datastore_value
def _to_entity(self, entity):
'Copies information from this model to provided entity.\n\n Args:\n entity: Entity to save information on.\n '
for prop in (()):
(entity, prop.name, (self))
set_unindexed_properties = (entity, 'set_unindexed_properties', None)
if set_unindexed_properties:
(self._unindexed_properties)
def _populate_internal_entity(self, _entity_class=datastore.Entity):
'Populates self._entity, saving its state to the datastore.\n\n After this method is called, calling is_saved() will return True.\n\n Returns:\n Populated self._entity\n '
self._entity = ()
for prop in (()):
new_value = (self)
if (new_value is not AUTO_UPDATE_UNCHANGED):
(self._entity, prop.name, new_value)
for attr in ('_key_name', '_key'):
try:
(self, attr)
except AttributeError:
raise
return self._entity
def put(self, **kwargs):
'Writes this model instance to the datastore.\n\n If this instance is new, we add an entity to the datastore.\n Otherwise, we update this instance, and the key will remain the\n same.\n\n Args:\n config: datastore_rpc.Configuration to use for this request.\n\n Returns:\n The key of the instance (either the existing key or a new key).\n\n Raises:\n TransactionFailedError if the data could not be committed.\n '
()
return (self._entity)
save = put
def _populate_entity(self, _entity_class=datastore.Entity):
'Internal helper -- Populate self._entity or create a new one\n if that one does not exist. Does not change any state of the instance\n other than the internal state of the entity.\n\n This method is separate from _populate_internal_entity so that it is\n possible to call to_xml without changing the state of an unsaved entity\n to saved.\n\n Returns:\n self._entity or a new Entity which is not stored on the instance.\n '
if ():
entity = self._entity
else:
kwds = {'_app': self._app, 'namespace': self.__namespace, 'unindexed_properties': self._unindexed_properties}
if (self._key is not None):
if ():
kwds['id'] = ()
else:
kwds['name'] = ()
if ():
kwds['parent'] = ()
else:
if (self._key_name is not None):
kwds['name'] = self._key_name
if (self._parent_key is not None):
kwds['parent'] = self._parent_key
elif (self._parent is not None):
kwds['parent'] = self._parent._entity
entity = (())
(entity)
return entity
def delete(self, **kwargs):
'Deletes this entity from the datastore.\n\n Args:\n config: datastore_rpc.Configuration to use for this request.\n\n Raises:\n TransactionFailedError if the data could not be committed.\n '
(())
self._key = ()
self._key_name = None
self._parent_key = None
self._entity = None
def is_saved(self):
'Determine if entity is persisted in the datastore.\n\n New instances of Model do not start out saved in the data. Objects which\n are saved to or loaded from the Datastore will have a True saved state.\n\n Returns:\n True if object has been persisted to the datastore, otherwise False.\n '
return (self._entity is not None)
def has_key(self):
'Determine if this model instance has a complete key.\n\n When not using a fully self-assigned Key, ids are not assigned until the\n data is saved to the Datastore, but instances with a key name always have\n a full key.\n\n Returns:\n True if the object has been persisted to the datastore or has a key\n or has a key_name, otherwise False.\n '
return (() or self._key or self._key_name)
def dynamic_properties(self):
'Returns a list of all dynamic properties defined for instance.'
return []
def instance_properties(self):
'Alias for dyanmic_properties.'
return ()
def parent(self):
'Get the parent of the model instance.\n\n Returns:\n Parent of contained entity or parent provided in constructor, None if\n instance has no parent.\n '
if (self._parent is None):
parent_key = ()
if (parent_key is not None):
self._parent = (parent_key)
return self._parent
# NOTE(review): the fallback branches return bare `()` (empty tuples) because
# call targets were stripped -- presumably self._parent.key(),
# self._entity.parent(), and self._key.parent() respectively. Only the
# cached self._parent_key path and the final None are still meaningful.
def parent_key(self):
"Get the parent's key.\n\n This method is useful for avoiding a potential fetch from the datastore\n but still get information about the instances parent.\n\n Returns:\n Parent key of entity, None if there is no parent.\n "
if (self._parent_key is not None):
return self._parent_key
elif (self._parent is not None):
return ()
elif (self._entity is not None):
return ()
elif (self._key is not None):
return ()
else:
return None
# NOTE(review): call targets stripped -- `(_entity_class)` just evaluates the
# class object and `return ()` returns an empty tuple; presumably the original
# built an entity via self._populate_entity(_entity_class) and returned
# entity.ToXml(). Non-functional as written.
def to_xml(self, _entity_class=datastore.Entity):
'Generate an XML representation of this model instance.\n\n atom and gd:namespace properties are converted to XML according to their\n respective schemas. For more information, see:\n\n http://www.atomenabled.org/developers/syndication/\n http://code.google.com/apis/gdata/common-elements.html\n '
entity = (_entity_class)
return ()
# NOTE(review): heavily mangled. `(keys)` is not a fetch call, `(results, Model)`
# and `(instance, cls)` are 2-tuples (always truthy) where isinstance checks
# presumably stood, and the raise builds a bare tuple instead of a KindError.
# As written, the single/multiple distinction and the kind check do not work.
@classmethod
def get(cls, keys, **kwargs):
"Fetch instance from the datastore of a specific Model type using key.\n\n We support Key objects and string keys (we convert them to Key objects\n automatically).\n\n Useful for ensuring that specific instance types are retrieved from the\n datastore. It also helps that the source code clearly indicates what\n kind of object is being retreived. Example:\n\n story = Story.get(story_key)\n\n Args:\n keys: Key within datastore entity collection to find; or string key;\n or list of Keys or string keys.\n config: datastore_rpc.Configuration to use for this request.\n\n Returns:\n If a single key was given: a Model instance associated with key\n for the provided class if it exists in the datastore, otherwise\n None. If a list of keys was given: a list where list[i] is the\n Model instance for keys[i], or None if no instance exists.\n\n Raises:\n KindError if any of the retreived objects are not instances of the\n type associated with call to 'get'.\n "
results = (keys)
if (results is None):
return None
if (results, Model):
instances = [results]
else:
instances = results
for instance in instances:
if (not ((instance is None) or (instance, cls))):
raise (('Kind %r is not a subclass of kind %r' % ((), ())))
return results
# NOTE(review): call targets stripped. `(parent)` is not a key-coercion call,
# `(key_names, multiple)` is not the normalization helper, and the Key
# construction inside the list comprehension is gone -- presumably
# datastore.Key.from_path(cls.kind(), name, parent=parent). Non-functional.
@classmethod
def get_by_key_name(cls, key_names, parent=None, **kwargs):
"Get instance of Model class by its key's name.\n\n Args:\n key_names: A single key-name or a list of key-names.\n parent: Parent of instances to get. Can be a model or key.\n config: datastore_rpc.Configuration to use for this request.\n "
try:
parent = (parent)
except BadKeyError as e:
raise ((e))
(key_names, multiple) = (key_names, str)
keys = [((), name) for name in key_names]
if multiple:
return (keys)
else:
return (keys[0])
# NOTE(review): mangled. `if (parent, Model):` tests a 2-tuple, which is
# always truthy -- presumably isinstance(parent, Model). `(ids, multiple)`
# and the key construction were likewise stripped. The duplicated `int` in
# `(int, int)` suggests a former (int, long) pair from the Python 2 original.
@classmethod
def get_by_id(cls, ids, parent=None, **kwargs):
'Get instance of Model class by id.\n\n Args:\n key_names: A single id or a list of ids.\n parent: Parent of instances to get. Can be a model or key.\n config: datastore_rpc.Configuration to use for this request.\n '
if (parent, Model):
parent = ()
(ids, multiple) = (ids, (int, int))
keys = [((), id) for id in ids]
if multiple:
return (keys)
else:
return (keys[0])
# NOTE(review): the transactional body was stripped: `(key_name)` is not a
# get_by_key_name call, `()` after construction is a no-op where entity.put()
# presumably ran, and `return (txn)` returns the function object instead of
# run_in_transaction(txn). Non-functional as written.
@classmethod
def get_or_insert(cls, key_name, **kwds):
"Transactionally retrieve or create an instance of Model class.\n\n This acts much like the Python dictionary setdefault() method, where we\n first try to retrieve a Model instance with the given key name and parent.\n If it's not present, then we create a new instance (using the *kwds\n supplied) and insert that with the supplied key name.\n\n Subsequent calls to this method with the same key_name and parent will\n always yield the same entity (though not the same actual object instance),\n regardless of the *kwds supplied. If the specified entity has somehow\n been deleted separately, then the next call will create a new entity and\n return it.\n\n If the 'parent' keyword argument is supplied, it must be a Model instance.\n It will be used as the parent of the new instance of this Model class if\n one is created.\n\n This method is especially useful for having just one unique entity for\n a specific identifier. Insertion/retrieval is done transactionally, which\n guarantees uniqueness.\n\n Example usage:\n\n class WikiTopic(db.Model):\n creation_date = db.DatetimeProperty(auto_now_add=True)\n body = db.TextProperty(required=True)\n\n # The first time through we'll create the new topic.\n wiki_word = 'CommonIdioms'\n topic = WikiTopic.get_or_insert(wiki_word,\n body='This topic is totally new!')\n assert topic.key().name() == 'CommonIdioms'\n assert topic.body == 'This topic is totally new!'\n\n # The second time through will just retrieve the entity.\n overwrite_topic = WikiTopic.get_or_insert(wiki_word,\n body='A totally different message!')\n assert topic.key().name() == 'CommonIdioms'\n assert topic.body == 'This topic is totally new!'\n\n Args:\n key_name: Key name to retrieve or create.\n **kwds: Keyword arguments to pass to the constructor of the model class\n if an instance for the specified key name does not already exist. 
If\n an instance with the supplied key_name and parent already exists, the\n rest of these arguments will be discarded.\n\n Returns:\n Existing instance of Model class with the specified key_name and parent\n or a new one that has just been created.\n\n Raises:\n TransactionFailedError if the specified Model instance could not be\n retrieved or created transactionally (due to high contention, etc).\n "
def txn():
entity = (key_name)
if (entity is None):
entity = ()
()
return entity
return (txn)
# NOTE(review): `return (cls)` returns the class itself, not a query --
# presumably the original returned Query(cls, **kwds).
@classmethod
def all(cls, **kwds):
'Returns a query over all instances of this model from the datastore.\n\n Returns:\n Query that will retrieve all instances from entity collection.\n '
return (cls)
# NOTE(review): the GqlQuery constructor call was stripped; the inner `()`
# where the kind name belonged (presumably cls.kind()) is now an empty tuple,
# so the format string would render 'SELECT * FROM () ...'. Non-functional.
@classmethod
def gql(cls, query_string, *args, **kwds):
"Returns a query using GQL query string.\n\n See appengine/ext/gql for more information about GQL.\n\n Args:\n query_string: properly formatted GQL query string with the\n 'SELECT * FROM <entity>' part omitted\n *args: rest of the positional arguments used to bind numeric references\n in the query.\n **kwds: dictionary-based arguments (for named parameters).\n "
return (('SELECT * FROM %s %s' % ((), query_string)), *args)
# NOTE(review): mangled. `for prop in (()):` iterates an empty tuple, so the
# loop body never executes and this always returns {} -- presumably the
# original iterated cls.properties().values() and converted each stored value
# via the property's make_value_from_datastore hook.
@classmethod
def _load_entity_values(cls, entity):
'Load dynamic properties from entity.\n\n Loads attributes which are not defined as part of the entity in\n to the model instance.\n\n Args:\n entity: Entity which contain values to search dyanmic properties for.\n '
entity_values = {}
for prop in (()):
if (prop.name in entity):
try:
value = entity[prop.name]
except KeyError:
entity_values[prop.name] = []
else:
if ():
value = (value)
else:
value = (value)
entity_values[prop.name] = value
return entity_values
# NOTE(review): mangled. The kind check compares `()` with `()` (never
# unequal, so KindError can never fire), and `return (None)` returns None
# instead of constructing cls(None, _from_entity=entity_values) as the
# upstream implementation presumably did. Non-functional.
@classmethod
def from_entity(cls, entity):
'Converts the entity representation of this model to an instance.\n\n Converts datastore.Entity instance to an instance of cls.\n\n Args:\n entity: Entity loaded directly from datastore.\n\n Raises:\n KindError when cls is incorrect model for entity.\n '
if (() != ()):
raise (("Class %s cannot handle kind '%s'" % ((cls), ())))
entity_values = (entity)
if ():
entity_values['key'] = ()
return (None)
@classmethod
def kind(cls):
    """Return the datastore kind used for this model.

    The kind is simply the class name; potential collisions between
    identically named classes are ignored.
    """
    model_kind = cls.__name__
    return model_kind
# NOTE(review): `return ()` returns an empty tuple; presumably this delegated
# to cls.kind() before the transform stripped the call.
@classmethod
def entity_type(cls):
'Soon to be removed alias for kind.'
return ()
# NOTE(review): `(cls._properties)` is just the attribute itself; the upstream
# version presumably wrapped it in dict(...) to hand back a copy -- as written
# callers receive (and could mutate) the shared class-level mapping.
@classmethod
def properties(cls):
'Returns a dictionary of all the properties defined for this model.'
return (cls._properties)
# NOTE(review): `return ()` returns an empty tuple; presumably this delegated
# to cls.properties() before the transform stripped the call.
@classmethod
def fields(cls):
'Soon to be removed alias for properties.'
return ()
# NOTE(review): body stripped to `return ()` -- presumably it constructed a
# datastore.CreateRPC(deadline=..., callback=..., read_policy=...). Also note
# the default references STRONG_CONSISTENCY, which must be defined earlier in
# the module (outside this chunk) for the def statement to evaluate.
def create_rpc(deadline=None, callback=None, read_policy=STRONG_CONSISTENCY):
"Create an rpc for use in configuring datastore calls.\n\n NOTE: This functions exists for backwards compatibility. Please use\n create_config() instead. NOTE: the latter uses 'on_completion',\n which is a function taking an argument, wherease create_rpc uses\n 'callback' which is a function without arguments.\n\n Args:\n deadline: float, deadline for calls in seconds.\n callback: callable, a callback triggered when this rpc completes,\n accepts one argument: the returned rpc.\n read_policy: flag, set to EVENTUAL_CONSISTENCY to enable eventually\n consistent reads\n\n Returns:\n A datastore.DatastoreRPC instance.\n "
return ()
# NOTE(review): mangled. The key-normalization call, the per-entity class
# lookup (`cls1 = (())` binds an empty tuple), the model construction, the
# `models.append(model)` (now a bare `(model)` no-op), and the final async
# datastore.GetAsync call were all stripped. The extra_hook shape (single vs
# multiple results) is still visible but cannot work as written.
def get_async(keys, **kwargs):
'Asynchronously fetch the specified Model instance(s) from the datastore.\n\n Identical to db.get() except returns an asynchronous object. Call\n get_result() on the return value to block on the call and get the results.\n '
(keys, multiple) = (keys)
def extra_hook(entities):
if ((not multiple) and (not entities)):
return None
models = []
for entity in entities:
if (entity is None):
model = None
else:
cls1 = (())
model = (entity)
(model)
if multiple:
return models
if (not ((models) == 1)):
raise ()
return models[0]
return (keys)
# NOTE(review): stripped to `return ()`; presumably the original was
# get_async(keys, **kwargs).get_result(). Non-functional as written.
def get(keys, **kwargs):
'Fetch the specific Model instance with the given key from the datastore.\n\n We support Key objects and string keys (we convert them to Key objects\n automatically).\n\n Args:\n keys: Key within datastore entity collection to find; or string key;\n or list of Keys or string keys.\n config: datastore_rpc.Configuration to use for this request, must be\n specified as a keyword argument.\n\n Returns:\n If a single key was given: a Model instance associated with key\n if it exists in the datastore, otherwise None. If a list of keys was\n given: a list where list[i] is the Model instance for keys[i], or\n None if no instance exists.\n '
return ()
# NOTE(review): mangled. The normalization call, the per-model entity
# conversion inside the comprehension (`()` yields an empty tuple per model),
# the length check `((keys) == 1)` (compares a list to 1, always False), and
# the final datastore.PutAsync call were all stripped. Non-functional.
def put_async(models, **kwargs):
'Asynchronously store one or more Model instances.\n\n Identical to db.put() except returns an asynchronous object. Call\n get_result() on the return value to block on the call and get the results.\n '
(models, multiple) = (models, Model)
entities = [() for model in models]
def extra_hook(keys):
if multiple:
return keys
if (not ((keys) == 1)):
raise ()
return keys[0]
return (entities)
# NOTE(review): stripped to `return ()`; presumably the original was
# put_async(models, **kwargs).get_result().
def put(models, **kwargs):
'Store one or more Model instances.\n\n Args:\n models: Model instance or list of Model instances.\n config: datastore_rpc.Configuration to use for this request, must be\n specified as a keyword argument.\n\n Returns:\n A Key if models is an instance, a list of Keys in the same order\n as models if models is a list.\n\n Raises:\n TransactionFailedError if the data could not be committed.\n '
return ()
# Legacy alias: `save` is the historical name for `put`.
save = put
# NOTE(review): mangled. `if (models, (str, Model, Key)):` is a 2-tuple and
# always truthy (presumably isinstance), `(models)` in the try is not a list()
# coercion, `(v)` in the comprehension is not a key-extraction call, and the
# final datastore.DeleteAsync call is gone. Non-functional as written.
def delete_async(models, **kwargs):
'Asynchronous version of delete one or more Model instances.\n\n Identical to db.delete() except returns an asynchronous object. Call\n get_result() on the return value to block on the call.\n '
if (models, (str, Model, Key)):
models = [models]
else:
try:
models = (models)
except TypeError:
models = [models]
keys = [(v) for v in models]
return (keys)
# NOTE(review): stripped to a bare `()` no-op; presumably the original was
# delete_async(models, **kwargs).get_result().
def delete(models, **kwargs):
'Delete one or more Model instances.\n\n Args:\n models: Model instance, key, key string or iterable thereof.\n config: datastore_rpc.Configuration to use for this request, must be\n specified as a keyword argument.\n\n Raises:\n TransactionFailedError if the data could not be committed.\n '
()
# NOTE(review): stripped -- `((model))` is just the argument; note that `size`
# is accepted but unused, which suggests the original passed both through to
# datastore.AllocateIdsAsync(key, size, ...).
def allocate_ids_async(model, size, **kwargs):
'Asynchronously allocates a range of IDs.\n\n Identical to allocate_ids() except returns an asynchronous object. Call\n get_result() on the return value to block on the call and return the result.\n '
return ((model))
# NOTE(review): stripped to `return ()`; presumably the original was
# allocate_ids_async(model, size, **kwargs).get_result().
def allocate_ids(model, size, **kwargs):
'Allocates a range of IDs of size for the model_key defined by model.\n\n Allocates a range of IDs in the datastore such that those IDs will not\n be automatically assigned to new entities. You can only allocate IDs\n for model keys from your app. If there is an error, raises a subclass of\n datastore_errors.Error.\n\n Args:\n model: Model instance, Key or string to serve as a template specifying the\n ID sequence in which to allocate IDs. Returned ids should only be used\n in entities with the same parent (if any) and kind as this key.\n size: Number of IDs to allocate.\n config: datastore_rpc.Configuration to use for this request.\n\n Returns:\n (start, end) of the allocated range, inclusive.\n '
return ()
# NOTE(review): mangled. The key coercion, type-check helper, allocate_ids
# probe, Key.from_path constructions, and the collision query (`collision =
# (1)` binds the int 1, making `if collision:` always true, so this always
# returns KEY_RANGE_COLLISION) were all stripped. Only the argument
# validation (start/end > 0, start <= end) still behaves as documented.
def allocate_id_range(model, start, end, **kwargs):
"Allocates a range of IDs with specific endpoints.\n\n Once these IDs have been allocated they may be provided manually to\n newly created entities.\n\n Since the datastore's automatic ID allocator will never assign\n a key to a new entity that will cause an existing entity to be\n overwritten, entities written to the given key range will never be\n overwritten. However, writing entities with manually assigned keys in this\n range may overwrite existing entities (or new entities written by a\n separate request) depending on the key range state returned.\n\n This method should only be used if you have an existing numeric id\n range that you want to reserve, e.g. bulk loading entities that already\n have IDs. If you don't care about which IDs you receive, use allocate_ids\n instead.\n\n Args:\n model: Model instance, Key or string to serve as a template specifying the\n ID sequence in which to allocate IDs. Allocated ids should only be used\n in entities with the same parent (if any) and kind as this key.\n start: first id of the range to allocate, inclusive.\n end: last id of the range to allocate, inclusive.\n config: datastore_rpc.Configuration to use for this request.\n\n Returns:\n One of (KEY_RANGE_EMPTY, KEY_RANGE_CONTENTION, KEY_RANGE_COLLISION). If not\n KEY_RANGE_EMPTY, this represents a potential issue with using the allocated\n key range.\n "
key = (model)
((start, end), (int, int))
if ((start < 1) or (end < 1)):
raise (('Start %d and end %d must both be > 0.' % (start, end)))
if (start > end):
raise (('Range end %d cannot be less than start %d.' % (end, start)))
(safe_start, _) = (key)
race_condition = (safe_start > start)
start_key = ((), start)
end_key = ((), end)
collision = (1)
if collision:
return KEY_RANGE_COLLISION
elif race_condition:
return KEY_RANGE_CONTENTION
else:
return KEY_RANGE_EMPTY
# NOTE(review): stripped to a 4-tuple of empty tuples; presumably this built an
# Index(...) from the raw index proto's id, kind, ancestor flag and properties.
def _index_converter(index):
return ((), (), (), ())
# NOTE(review): the extra_hook still shows the intended shape (convert each
# (index, state) pair via _index_converter), but the converter call and the
# final datastore.GetIndexesAsync call were stripped; `return ()` yields an
# empty tuple.
def get_indexes_async(**kwargs):
'Asynchronously retrieves the application indexes and their states.\n\n Identical to get_indexes() except returns an asynchronous object. Call\n get_result() on the return value to block on the call and get the results.\n '
def extra_hook(indexes):
return [((index), state) for (index, state) in indexes]
return ()
# NOTE(review): stripped to `return ()`; presumably the original was
# get_indexes_async(**kwargs).get_result().
def get_indexes(**kwargs):
'Retrieves the application indexes and their states.\n\n Args:\n config: datastore_rpc.Configuration to use for this request, must be\n specified as a keyword argument.\n\n Returns:\n A list of (Index, Index.[BUILDING|SERVING|DELETING|ERROR]) tuples.\n An index can be in the following states:\n Index.BUILDING: Index is being built and therefore can not serve queries\n Index.SERVING: Index is ready to service queries\n Index.DELETING: Index is being deleted\n Index.ERROR: Index encounted an error in the BUILDING state\n '
return ()
# NOTE(review): this class was mangled by the call-stripping transform.
# Examples: __init__'s super().__init__ call is reduced to the bare tuple
# `(parent, key_name, _app)`, its kwds loop iterates `()` (empty -- so
# constructor keywords are silently dropped), __setattr__'s hasattr/descriptor
# probe and super().__setattr__ delegation are gone, and _to_entity's dict
# iteration calls are stripped. The attribute-precedence machinery
# (__getattribute__/__getattr__/__delattr__) is order-sensitive; do not
# restructure without restoring the original upstream calls first.
class Expando(Model):
"Dynamically expandable model.\n\n An Expando does not require (but can still benefit from) the definition\n of any properties before it can be used to store information in the\n datastore. Properties can be added to an expando object by simply\n performing an assignment. The assignment of properties is done on\n an instance by instance basis, so it is possible for one object of an\n expando type to have different properties from another or even the same\n properties with different types. It is still possible to define\n properties on an expando, allowing those properties to behave the same\n as on any other model.\n\n Example:\n import datetime\n\n class Song(db.Expando):\n title = db.StringProperty()\n\n crazy = Song(title='Crazy like a diamond',\n author='Lucy Sky',\n publish_date='yesterday',\n rating=5.0)\n\n hoboken = Song(title='The man from Hoboken',\n author=['Anthony', 'Lou'],\n publish_date=datetime.datetime(1977, 5, 3))\n\n crazy.last_minute_note=db.Text('Get a train to the station.')\n\n Possible Uses:\n\n One use of an expando is to create an object without any specific\n structure and later, when your application mature and it in the right\n state, change it to a normal model object and define explicit properties.\n\n Additional exceptions for expando:\n\n Protected attributes (ones whose names begin with '_') cannot be used\n as dynamic properties. These are names that are reserved for protected\n transient (non-persisted) attributes.\n\n Order of lookup:\n\n When trying to set or access an attribute value, any other defined\n properties, such as methods and other values in __dict__ take precedence\n over values in the datastore.\n\n 1 - Because it is not possible for the datastore to know what kind of\n property to store on an undefined expando value, setting a property to\n None is the same as deleting it from the expando.\n\n 2 - Persistent variables on Expando must not begin with '_'. 
These\n variables considered to be 'protected' in Python, and are used\n internally.\n\n 3 - Expando's dynamic properties are not able to store empty lists.\n Attempting to assign an empty list to a dynamic property will raise\n ValueError. Static properties on Expando can still support empty\n lists but like normal Model properties is restricted from using\n None.\n "
# Class-level default; each instance replaces this with its own dict.
_dynamic_properties = None
def __init__(self, parent=None, key_name=None, _app=None, **kwds):
'Creates a new instance of this expando model.\n\n Args:\n parent: Parent instance for this instance or None, indicating a top-\n level instance.\n key_name: Name for new model instance.\n _app: Intentionally undocumented.\n args: Keyword arguments mapping to properties of model.\n '
(parent, key_name, _app)
self._dynamic_properties = {}
for (prop, value) in ():
if ((prop not in self._all_properties) and (prop != 'key')):
if (not (((self), prop, None), '__set__')):
(self, prop, value)
else:
(prop)
def __setattr__(self, key, value):
'Dynamically set field values that are not defined.\n\n Tries to set the value on the object normally, but failing that\n sets the value on the contained entity.\n\n Args:\n key: Name of attribute.\n value: Value to set for attribute. Must be compatible with\n datastore.\n\n Raises:\n ValueError on attempt to assign empty list.\n '
(key)
if ((key[:1] != '_') and (not (((self), key, None), '__set__'))):
if (value == []):
raise (('Cannot store empty list to dynamic property %s' % key))
if ((value) not in _ALLOWED_EXPANDO_PROPERTY_TYPES):
raise (("Expando cannot accept values of type '%s'." % (value).__name__))
if (self._dynamic_properties is None):
self._dynamic_properties = {}
self._dynamic_properties[key] = value
else:
(key, value)
def __getattribute__(self, key):
'Get attribute from expando.\n\n Must be overridden to allow dynamic properties to obscure class attributes.\n Since all attributes are stored in self._dynamic_properties, the normal\n __getattribute__ does not attempt to access it until __setattr__ is called.\n By then, the static attribute being overwritten has already been located\n and returned from the call.\n\n This method short circuits the usual __getattribute__ call when finding a\n dynamic property and returns it to the user via __getattr__. __getattr__\n is called to preserve backward compatibility with older Expando models\n that may have overridden the original __getattr__.\n\n NOTE: Access to properties defined by Python descriptors are not obscured\n because setting those attributes are done through the descriptor and does\n not place those attributes in self._dynamic_properties.\n '
if (not ('_')):
dynamic_properties = self._dynamic_properties
if ((dynamic_properties is not None) and (key in dynamic_properties)):
return (key)
return (key)
def __getattr__(self, key):
'If no explicit attribute defined, retrieve value from entity.\n\n Tries to get the value on the object normally, but failing that\n retrieves value from contained entity.\n\n Args:\n key: Name of attribute.\n\n Raises:\n AttributeError when there is no attribute for key on object or\n contained entity.\n '
_dynamic_properties = self._dynamic_properties
if ((_dynamic_properties is not None) and (key in _dynamic_properties)):
return _dynamic_properties[key]
else:
return ((Expando, self), key)
def __delattr__(self, key):
'Remove attribute from expando.\n\n Expando is not like normal entities in that undefined fields\n can be removed.\n\n Args:\n key: Dynamic property to be deleted.\n '
if (self._dynamic_properties and (key in self._dynamic_properties)):
del self._dynamic_properties[key]
else:
(self, key)
def dynamic_properties(self):
'Determine which properties are particular to instance of entity.\n\n Returns:\n Set of names which correspond only to the dynamic properties.\n '
if (self._dynamic_properties is None):
return []
return (())
def _to_entity(self, entity):
'Store to entity, deleting dynamic properties that no longer exist.\n\n When the expando is saved, it is possible that a given property no longer\n exists. In this case, the property will be removed from the saved instance.\n\n Args:\n entity: Entity which will receive dynamic properties.\n '
(entity)
if (self._dynamic_properties is None):
self._dynamic_properties = {}
for (key, value) in ():
entity[key] = value
all_properties = (())
(self._all_properties)
for key in (()):
if (key not in all_properties):
del entity[key]
@classmethod
def _load_entity_values(cls, entity):
"Load dynamic properties from entity.\n\n Expando needs to do a second pass to add the entity values which were\n ignored by Model because they didn't have an corresponding predefined\n property on the model.\n\n Args:\n entity: Entity which contain values to search dyanmic properties for.\n "
entity_values = (entity)
for (key, value) in ():
if (key not in entity_values):
entity_values[(key)] = value
return entity_values
# NOTE(review): abstract query base shared by Query and GqlQuery. The concrete
# methods were mangled by the call-stripping transform -- e.g. run()'s
# `raw_query = ()` / `iterator = ()` bind empty tuples where _get_query() and
# raw_query.Run(...) presumably stood, count()/fetch()/cursor() likewise, and
# the AssertionError raises in index_list()/cursor() now raise bare strings
# ('raise <str>' is a TypeError at runtime in Python 3). The abstract methods
# (is_keys_only, projection, is_distinct, _get_query) and __getitem__'s slice
# validation logic are still intact.
class _BaseQuery(object):
'Base class for both Query and GqlQuery.'
# Cached state from the most recent execution; None until run.
_last_raw_query = None
_last_index_list = None
# Serialized start/end cursors set via with_cursor().
_cursor = None
_end_cursor = None
def __init__(self, model_class=None):
'Constructor.\n\n Args:\n model_class: Model class from which entities are constructed.\n keys_only: Whether the query should return full entities or only keys.\n compile: Whether the query should also return a compiled query.\n cursor: A compiled query from which to resume.\n namespace: The namespace to query.\n '
self._model_class = model_class
def is_keys_only(self):
'Returns whether this query is keys only.\n\n Returns:\n True if this query returns keys, False if it returns entities.\n '
raise NotImplementedError
def projection(self):
'Returns the tuple of properties in the projection or None.\n\n Projected results differ from normal results in multiple ways:\n - they only contain a portion of the original entity and cannot be put;\n - properties defined on the model, but not included in the projections will\n have a value of None, even if the property is required or has a default\n value;\n - multi-valued properties (such as a ListProperty) will only contain a single\n value.\n - dynamic properties not included in the projection will not appear\n on the model instance.\n - dynamic properties included in the projection are deserialized into\n their indexed type. Specifically one of str, bool, long, float, GeoPt, Key\n or User. If the original type is known, it can be restored using\n datastore_types.RestoreFromIndexValue.\n\n However, projection queries are significantly faster than normal queries.\n\n Projection queries on entities with multi-valued properties will return the\n same entity multiple times, once for each unique combination of values for\n properties included in the order, an inequaly property, or the projected\n properties.\n\n Returns:\n The list of properties in the projection, or None if no projection is\n set on this query.\n '
raise NotImplementedError
def is_distinct(self):
'Returns true if the projection query should be distinct.\n\n This is equivalent to the SQL syntax: SELECT DISTINCT. It is only available\n for projection queries, it is not valid to specify distinct without also\n specifying projection properties.\n\n Distinct projection queries on entities with multi-valued properties will\n return the same entity multiple times, once for each unique combination of\n properties included in the projection.\n\n Returns:\n True if this projection query is distinct.\n '
raise NotImplementedError
def _get_query(self):
'Subclass must override (and not call their super method).\n\n Returns:\n A datastore.Query instance representing the query.\n '
raise NotImplementedError
def run(self, **kwargs):
"Iterator for this query.\n\n If you know the number of results you need, use run(limit=...) instead,\n or use a GQL query with a LIMIT clause. It's more efficient. If you want\n all results use run(batch_size=<large number>).\n\n Args:\n kwargs: Any keyword arguments accepted by datastore_query.QueryOptions().\n\n Returns:\n Iterator for this query.\n "
raw_query = ()
iterator = ()
self._last_raw_query = raw_query
keys_only = ('keys_only')
if (keys_only is None):
keys_only = ()
if keys_only:
return iterator
else:
return (self._model_class, (iterator))
def __iter__(self):
"Iterator for this query.\n\n If you know the number of results you need, consider fetch() instead,\n or use a GQL query with a LIMIT clause. It's more efficient.\n "
return ()
def __getstate__(self):
# Pickling support: the raw query handle is not picklable / not worth
# persisting, so it is dropped from the state dict (the `state = ()`
# binding where a dict copy presumably stood was stripped).
state = ()
state['_last_raw_query'] = None
return state
def get(self, **kwargs):
'Get first result from this.\n\n Beware: get() ignores the LIMIT clause on GQL queries.\n\n Args:\n kwargs: Any keyword arguments accepted by datastore_query.QueryOptions().\n\n Returns:\n First result from running the query if there are any, else None.\n '
results = ()
try:
return (results)
except StopIteration:
return None
def count(self, limit=1000, **kwargs):
'Number of entities this query fetches.\n\n Beware: count() ignores the LIMIT clause on GQL queries.\n\n Args:\n limit: A number. If there are more results than this, stop short and\n just return this number. Providing this argument makes the count\n operation more efficient.\n kwargs: Any keyword arguments accepted by datastore_query.QueryOptions().\n\n Returns:\n Number of entities this query fetches.\n '
raw_query = ()
result = ()
self._last_raw_query = raw_query
return result
def fetch(self, limit, offset=0, **kwargs):
"Return a list of items selected using SQL-like limit and offset.\n\n Always use run(limit=...) instead of fetch() when iterating over a query.\n\n Beware: offset must read and discard all skipped entities. Use\n cursor()/with_cursor() instead.\n\n Args:\n limit: Maximum number of results to return.\n offset: Optional number of results to skip first; default zero.\n kwargs: Any keyword arguments accepted by datastore_query.QueryOptions().\n\n Returns:\n A list of db.Model instances. There may be fewer than 'limit'\n results if there aren't enough results to satisfy the request.\n "
if (limit is None):
('batch_size', datastore._MAX_INT_32)
return (())
def index_list(self):
'Get the index list for an already executed query.\n\n Returns:\n A list of indexes used by the query.\n\n Raises:\n AssertionError: If the query has not been executed.\n '
if (self._last_raw_query is None):
raise ('No index list because query has not been run.')
if (self._last_index_list is None):
raw_index_list = ()
self._last_index_list = [(raw_index) for raw_index in raw_index_list]
return self._last_index_list
def cursor(self):
'Get a serialized cursor for an already executed query.\n\n The returned cursor effectively lets a future invocation of a similar\n query to begin fetching results immediately after the last returned\n result from this query invocation.\n\n Returns:\n A base64-encoded serialized cursor.\n\n Raises:\n AssertionError: If the query has not been executed.\n '
if (self._last_raw_query is None):
raise ('No cursor available.')
cursor = ()
return (cursor)
def with_cursor(self, start_cursor=None, end_cursor=None):
'Set the start and end of this query using serialized cursors.\n\n Conceptually cursors point to the position between the last result returned\n and the next result so running a query with each of the following cursors\n combinations will return all results in four chunks with no duplicate\n results:\n\n query.with_cursor(end_cursor=cursor1)\n query.with_cursors(cursor1, cursor2)\n query.with_cursors(cursor2, cursor3)\n query.with_cursors(start_cursor=cursor3)\n\n For example if the cursors pointed to:\n cursor: 1 2 3\n result: a b c d e f g h\n\n The results returned by these queries would be [a, b], [c, d], [e, f],\n [g, h] respectively.\n\n Cursors are pinned to the position just after the previous result (last\n result, exclusive), so if results are inserted or deleted between the time\n the cursor was made and these queries are executed, the cursors stay pinned\n to these positions. For example:\n\n delete(b, f, g, h)\n put(a1, b1, c1, d1)\n cursor: 1(b) 2(d) 3(f)\n result: a a1 b1 c c1 d d1 e\n\n The results returned by these queries would now be: [a, a1], [b1, c, c1, d],\n [d1, e], [] respectively.\n\n Args:\n start_cursor: The cursor position at which to start or None\n end_cursor: The cursor position at which to end or None\n\n Returns:\n This Query instance, for chaining.\n\n Raises:\n BadValueError when cursor is not valid.\n '
if (start_cursor is None):
self._cursor = None
else:
self._cursor = (start_cursor)
if (end_cursor is None):
self._end_cursor = None
else:
self._end_cursor = (end_cursor)
return self
def __getitem__(self, arg):
'Support for query[index] and query[start:stop].\n\n Beware: this ignores the LIMIT clause on GQL queries.\n\n Args:\n arg: Either a single integer, corresponding to the query[index]\n syntax, or a Python slice object, corresponding to the\n query[start:stop] or query[start:stop:step] syntax.\n\n Returns:\n A single Model instance when the argument is a single integer.\n A list of Model instances when the argument is a slice.\n '
if (arg, slice):
(start, stop, step) = (arg.start, arg.stop, arg.step)
if (start is None):
start = 0
if (stop is None):
raise ('Open-ended slices are not supported')
if (step is None):
step = 1
if ((start < 0) or (stop < 0) or (step != 1)):
raise ('Only slices with start>=0, stop>=0, step==1 are supported')
limit = (stop - start)
if (limit < 0):
return []
return (limit, start)
elif (arg, int):
if (arg < 0):
raise ('Only indices >= 0 are supported')
results = (1, arg)
if results:
return results[0]
else:
raise (('The query returned fewer than %d results' % (arg + 1)))
else:
raise ('Only integer indices and slices are supported')
# NOTE(review): __init__ and __iter__ are intact, but __next__ was mangled:
# `((self.__iterator))` is the raw iterator object rather than
# model_class.from_entity(next(self.__iterator)), the kind lookup is reduced
# to `(())`, and the KindError skip check is a bare `()` (always falsy, so
# the `continue` branch is dead). Non-functional as written.
class _QueryIterator(object):
'Wraps the datastore iterator to return Model instances.\n\n The datastore returns entities. We wrap the datastore iterator to\n return Model instances instead.\n '
def __init__(self, model_class, datastore_iterator):
'Iterator constructor\n\n Args:\n model_class: Model class from which entities are constructed.\n datastore_iterator: Underlying datastore iterator.\n '
self.__model_class = model_class
self.__iterator = datastore_iterator
def __iter__(self):
'Iterator on self.\n\n Returns:\n Self.\n '
return self
def __next__(self):
'Get next Model instance in query results.\n\n Returns:\n Next model instance.\n\n Raises:\n StopIteration when there are no more results in query.\n '
if (self.__model_class is not None):
return ((self.__iterator))
else:
while True:
entity = (self.__iterator)
try:
model_class = (())
except KindError:
if (()):
continue
raise
else:
return (entity)
def _normalize_query_parameter(value):
    """Make any necessary type conversions to a query parameter.

    The following conversions are made:
    - Model instances are converted to Key instances, so that querying
      reference properties will work.
    - datetime.date objects are converted to datetime.datetime objects
      (see _date_to_datetime), so that querying date properties with date
      objects will work.
    - datetime.time objects are converted to datetime.datetime objects
      (see _time_to_datetime), so that querying time properties with time
      objects will work.

    Args:
      value: The query parameter value.

    Returns:
      The input value, or a converted value if value matches one of the
      conversions specified above.
    """
    if isinstance(value, Model):
        value = value.key()
    # datetime is a subclass of date, so exclude it from the date branch.
    if isinstance(value, datetime.date) and not isinstance(value, datetime.datetime):
        value = _date_to_datetime(value)
    elif isinstance(value, datetime.time):
        value = _time_to_datetime(value)
    return value
class Query(_BaseQuery):
    """A Query instance queries over instances of Models.

    You construct a query with a model class, modify it with filter()/
    order()/ancestor(), and iterate over it to get results. All of the
    filtering and ordering methods return self, so calls can be chained:

        for story in Query(Story).filter('title =', 'Foo').order('-date'):
            print story.title

    NOTE(review): this block was round-tripped through an AST dump that
    stripped the function name off every call — bare parenthesized
    expressions such as `(model_class)`, `(cursor)` and `(*self.__orderings)`
    below were originally calls (presumably `super(...).__init__(...)`,
    `self.with_cursor(...)`, `query.Order(...)` etc. — confirm against the
    original SDK source before use). The code is preserved verbatim.
    """

    _keys_only = False
    _distinct = False
    _projection = None
    _namespace = None
    _app = None
    __ancestor = None

    def __init__(self, model_class=None, keys_only=False, cursor=None, namespace=None, _app=None, distinct=False, projection=None):
        """Constructs a query over instances of the given Model.

        Args:
          model_class: Model class to build query for.
          keys_only: Whether the query should return full entities or only keys.
          projection: A tuple of property names to include in the projection,
            or None.
          distinct: A boolean, true if the projection should be distinct.
          cursor: A compiled query from which to resume.
          namespace: The namespace to use for this query.
        """
        # NOTE(review): stripped call — was presumably the base-class __init__.
        (model_class)
        if keys_only:
            self._keys_only = True
        if projection:
            self._projection = projection
        if (namespace is not None):
            self._namespace = namespace
        if (_app is not None):
            self._app = _app
        if distinct:
            self._distinct = True
        # One filter dict per subquery; disjunctions multiply this list.
        self.__query_sets = [{}]
        self.__orderings = []
        # NOTE(review): stripped call — presumably cursor initialization.
        (cursor)

    def is_keys_only(self):
        # True when the query returns bare keys instead of entities.
        return self._keys_only

    def projection(self):
        # Tuple of projected property names, or None.
        return self._projection

    def is_distinct(self):
        # True when the projection is distinct.
        return self._distinct

    def _get_query(self, _query_class=datastore.Query, _multi_query_class=datastore.MultiQuery):
        # Build one low-level datastore query per filter set; wrap several
        # subqueries in a MultiQuery. NOTE(review): call targets stripped
        # throughout — each bare tuple/paren below was originally a call.
        queries = []
        for query_set in self.__query_sets:
            if (self._model_class is not None):
                kind = ()
            else:
                kind = None
            query = (kind, query_set)
            (*self.__orderings)
            if (self.__ancestor is not None):
                (self.__ancestor)
            (query)
        if ((_query_class != datastore.Query) and (_multi_query_class == datastore.MultiQuery)):
            ('Custom _query_class specified without corresponding custom _query_multi_class. Things will break if you use queries with the "IN" or "!=" operators.', RuntimeWarning)
            if ((queries) > 1):
                raise ('Query requires multiple subqueries to satisfy. If _query_class is overridden, _multi_query_class must also be overridden.')
        elif ((_query_class == datastore.Query) and (_multi_query_class != datastore.MultiQuery)):
            raise ('_query_class must also be overridden if _multi_query_class is overridden.')
        if ((queries) == 1):
            return queries[0]
        else:
            return (queries, self.__orderings)

    def __filter_disjunction(self, operations, values):
        """Add a disjunction of several filters and several values to the query.

        Implemented by duplicating query sets and combining the results later.

        Args:
          operations: a string or list of strings, each a property name plus
            operator. The operators must not themselves require multiple
            queries ('in' and '!=' are invalid here).
          values: a value or list of filter values, normalized by
            _normalize_query_parameter.
        """
        if (not (operations, (list, tuple))):
            operations = [operations]
        if (not (values, (list, tuple))):
            values = [values]
        new_query_sets = []
        for operation in operations:
            if (('in') or ('!=')):
                raise ('Cannot use "in" or "!=" in a disjunction.')
            for query_set in self.__query_sets:
                for value in values:
                    # Cartesian product: every existing set gets a copy per value.
                    new_query_set = (query_set)
                    (new_query_set, operation, value)
                    (new_query_set)
        self.__query_sets = new_query_sets

    def filter(self, property_operator, value):
        """Add filter to query.

        Args:
          property_operator: string with the property and operator to filter by.
          value: the filter value.

        Returns:
          Self to support method chaining.

        Raises:
          PropertyError if invalid property is provided.
        """
        # NOTE(review): stripped regex-match calls — `match`, `prop` and
        # `operator` were originally extracted from a filter-syntax regex.
        match = (property_operator)
        prop = (1)
        if ((3) is not None):
            operator = (3)
        else:
            operator = '=='
        if (self._model_class is None):
            # Kindless queries may only filter on the special key property.
            if (prop != datastore_types.KEY_SPECIAL_PROPERTY):
                raise (('Only %s filters are allowed on kindless queries.' % datastore_types.KEY_SPECIAL_PROPERTY))
        elif (prop in self._model_class._unindexed_properties):
            raise (("Property '%s' is not indexed" % prop))
        if (() == 'in'):
            if self._keys_only:
                raise ('Keys only queries do not support IN filters.')
            elif (not (value, (list, tuple))):
                raise ('Argument to the "in" operator must be a list')
            values = [(v) for v in value]
            ((prop + ' ='), values)
        else:
            if (value, (list, tuple)):
                raise ('Filtering on lists is not supported')
            if (operator == '!='):
                if self._keys_only:
                    raise ('Keys only queries do not support != filters.')
                # != is implemented as the disjunction of < and >.
                ([(prop + ' <'), (prop + ' >')], (value))
            else:
                value = (value)
                for query_set in self.__query_sets:
                    (query_set, property_operator, value)
        return self

    def order(self, property):
        """Set order of query result.

        To use descending order, prepend '-' (minus) to the property name,
        e.g. '-date' rather than 'date'.

        Args:
          property: Property to sort on.

        Returns:
          Self to support method chaining.

        Raises:
          PropertyError if invalid property is provided.
        """
        # NOTE(review): stripped call — was presumably property.startswith('-').
        if ('-'):
            property = property[1:]
            order = datastore.Query.DESCENDING
        else:
            order = datastore.Query.ASCENDING
        if (self._model_class is None):
            # Kindless queries only support ascending order on the key.
            if ((property != datastore_types.KEY_SPECIAL_PROPERTY) or (order != datastore.Query.ASCENDING)):
                raise (('Only %s ascending orders are supported on kindless queries' % datastore_types.KEY_SPECIAL_PROPERTY))
        else:
            if (not (self._model_class, Expando)):
                if ((property not in self._model_class._all_properties) and (property not in datastore_types._SPECIAL_PROPERTIES)):
                    raise (("Invalid property name '%s'" % property))
            if (property in self._model_class._unindexed_properties):
                raise (("Property '%s' is not indexed" % property))
        ((property, order))
        return self

    def ancestor(self, ancestor):
        """Sets an ancestor for this query.

        Restricts the query to results that descend from the given model
        instance (the ancestor itself is also a possible result).

        Args:
          ancestor: Model or Key (that has already been saved).

        Returns:
          Self to support method chaining.

        Raises:
          TypeError if the argument isn't a Key or Model; NotSavedError
          if it is, but isn't saved yet.
        """
        # NOTE(review): stripped isinstance()/has_id_or_name()/key() calls.
        if (ancestor, datastore.Key):
            if ():
                self.__ancestor = ancestor
            else:
                raise ()
        elif (ancestor, Model):
            if ():
                self.__ancestor = ()
            else:
                raise ()
        else:
            raise ('ancestor should be Key or Model')
        return self
class GqlQuery(_BaseQuery):
    """A Query class that uses GQL query syntax instead of .filter() etc.

    NOTE(review): as with Query above, every call in this block had its
    function name stripped by an AST round-trip — bare parenthesized
    expressions were originally calls (e.g. `('_app', None)` was presumably
    `kwds.pop('_app', None)`, `(query_string)` a gql.GQL(...) construction).
    The code is preserved verbatim; confirm against the original SDK source.
    """

    def __init__(self, query_string, *args, **kwds):
        """Constructor.

        Args:
          query_string: Properly formatted GQL query string.
          *args: Positional arguments used to bind numeric references in
            the query.
          **kwds: Dictionary-based arguments for named references.

        Raises:
          PropertyError if the query filters or sorts on a property that's
          not indexed.
        """
        # Imported here to avoid a circular import at module load time.
        from google.appengine.ext import gql
        app = ('_app', None)
        namespace = None
        if (app, tuple):
            # An (app, namespace) pair may be smuggled through _app.
            if ((app) != 2):
                raise ('_app must have 2 values if type is tuple.')
            (app, namespace) = app
        self._proto_query = (query_string)
        if (self._proto_query._kind is not None):
            model_class = (self._proto_query._kind)
        else:
            model_class = None
        (model_class)
        if (model_class is not None):
            # Reject filters/orderings on unindexed properties up front.
            for (property, unused) in ((()) + ()):
                if (property in model_class._unindexed_properties):
                    raise (("Property '%s' is not indexed" % property))
        (*args)

    def is_keys_only(self):
        # Keys-only flag is parsed from the GQL string itself.
        return self._proto_query._keys_only

    def projection(self):
        # NOTE(review): stripped call — presumably self._proto_query.projection().
        return ()

    def is_distinct(self):
        # NOTE(review): stripped call — presumably self._proto_query.is_distinct().
        return ()

    def bind(self, *args, **kwds):
        """Bind arguments (positional or keyword) to the query.

        Each call to bind() replaces the previous argument set; reuse the
        GqlQuery object and re-bind when running the same query with
        different arguments.

        Args:
          *args: Positional arguments used to bind numeric references in
            the query.
          **kwds: Dictionary-based arguments for named references.
        """
        self._args = []
        for arg in args:
            ((arg))
        self._kwds = {}
        for (name, arg) in ():
            self._kwds[name] = (arg)

    def run(self, **kwargs):
        """Iterator for this query that handles the LIMIT clause property.

        If the GQL query string contains a LIMIT clause, all results are
        fetched before returning an iterator; otherwise results are
        retrieved in batches by the iterator.

        Args:
          kwargs: Any keyword arguments accepted by
            datastore_query.QueryOptions().

        Returns:
          Iterator for this query.
        """
        if (() > 0):
            ('limit', ())
            ('offset', ())
        return (self)

    def _get_query(self):
        # Delegate to the parsed GQL proto-query with the bound arguments.
        return (self._args, self._kwds, self._cursor, self._end_cursor)
class UnindexedProperty(Property):
    """A property that isn't indexed by either built-in or composite indices.

    TextProperty and BlobProperty derive from this class.
    """

    def __init__(self, *args, **kwds):
        """Construct property. See the Property class for details.

        Raises:
          ConfigurationError if indexed=True.
        """
        # Callers may not request indexing for this property type; the
        # internal flag is then forced so validation machinery stays uniform.
        self._require_parameter(kwds, 'indexed', False)
        kwds['indexed'] = True
        super(UnindexedProperty, self).__init__(*args, **kwds)

    def validate(self, value):
        """Validate property.

        Returns:
          A valid value.

        Raises:
          BadValueError if property is not an instance of data_type.
        """
        # Attempt coercion to the concrete data_type before base validation.
        if value is not None and not isinstance(value, self.data_type):
            try:
                value = self.data_type(value)
            except TypeError as err:
                raise BadValueError(
                    'Property %s must be convertible to a %s instance (%s)' %
                    (self.name, self.data_type.__name__, err))
        value = super(UnindexedProperty, self).validate(value)
        if value is not None and not isinstance(value, self.data_type):
            raise BadValueError('Property %s must be a %s instance' %
                                (self.name, self.data_type.__name__))
        return value
class TextProperty(UnindexedProperty):
    """A string that can be longer than 500 bytes."""
    # Unindexed (see UnindexedProperty); values are stored as Text.
    data_type = Text
class StringProperty(Property):
    """A textual property, which can be multi- or single-line."""

    def __init__(self, verbose_name=None, multiline=False, **kwds):
        """Construct string property.

        Args:
          verbose_name: Verbose name is always first parameter.
          multiline: Carriage returns permitted in property.
        """
        super(StringProperty, self).__init__(verbose_name, **kwds)
        self.multiline = multiline

    def validate(self, value):
        """Validate string property.

        Returns:
          A valid value.

        Raises:
          BadValueError if property is not multi-line but value is, if the
          value is not a string, or if it exceeds MAX_LENGTH characters.
        """
        value = super(StringProperty, self).validate(value)
        if value is not None and not isinstance(value, str):
            raise BadValueError(
                'Property %s must be a str or unicode instance, not a %s' %
                (self.name, type(value).__name__))
        if not self.multiline and value and value.find('\n') != -1:
            raise BadValueError('Property %s is not multi-line' % self.name)
        if value is not None and len(value) > self.MAX_LENGTH:
            raise BadValueError(
                'Property %s is %d characters long; it must be %d or less.' %
                (self.name, len(value), self.MAX_LENGTH))
        return value

    MAX_LENGTH = 500
    data_type = str
class _CoercingProperty(Property):
    """A Property subclass that extends validate() to coerce to self.data_type."""

    def validate(self, value):
        """Coerce values (except None) to self.data_type.

        Args:
          value: The value to be validated and coerced.

        Returns:
          The coerced and validated value. It is guaranteed that this is
          either None or an instance of self.data_type; otherwise an
          exception is raised.

        Raises:
          BadValueError if the value could not be validated or coerced.
        """
        value = super(_CoercingProperty, self).validate(value)
        if value is not None and not isinstance(value, self.data_type):
            # data_type's constructor performs the coercion (and may raise).
            value = self.data_type(value)
        return value
class CategoryProperty(_CoercingProperty):
    """A property whose values are Category instances."""
    # Coercion to Category is inherited from _CoercingProperty.validate().
    data_type = Category
class LinkProperty(_CoercingProperty):
    """A property whose values are Link instances."""

    def validate(self, value):
        """Validate that the value is a well-formed absolute URL.

        Returns:
          A valid value.

        Raises:
          BadValueError if the value lacks a scheme or network location.
        """
        # Local import keeps this reconstruction self-contained; the original
        # module imported the URL-parsing module at the top of the file.
        from urllib.parse import urlsplit
        value = super(LinkProperty, self).validate(value)
        if value is not None:
            scheme, netloc, path, query, fragment = urlsplit(value)
            if not scheme or not netloc:
                raise BadValueError("Property %s must be a full URL ('%s')" %
                                    (self.name, value))
        return value

    data_type = Link

# Backwards-compatible alias.
URLProperty = LinkProperty
class EmailProperty(_CoercingProperty):
    """A property whose values are Email instances."""
    # Coercion to Email is inherited from _CoercingProperty.validate().
    data_type = Email
class GeoPtProperty(_CoercingProperty):
    """A property whose values are GeoPt instances."""
    # Coercion to GeoPt is inherited from _CoercingProperty.validate().
    data_type = GeoPt
class IMProperty(_CoercingProperty):
    """A property whose values are IM instances."""
    # Coercion to IM is inherited from _CoercingProperty.validate().
    data_type = IM
class PhoneNumberProperty(_CoercingProperty):
    """A property whose values are PhoneNumber instances."""
    # Coercion to PhoneNumber is inherited from _CoercingProperty.validate().
    data_type = PhoneNumber
class PostalAddressProperty(_CoercingProperty):
    """A property whose values are PostalAddress instances."""
    # Coercion to PostalAddress is inherited from _CoercingProperty.validate().
    data_type = PostalAddress
class BlobProperty(UnindexedProperty):
    """A byte string that can be longer than 500 bytes."""
    # Unindexed (see UnindexedProperty); values are stored as Blob.
    data_type = Blob
class ByteStringProperty(Property):
    """A short (<=500 bytes) byte string.

    This type should be used for short binary values that need to be
    indexed. If you do not require indexing (regardless of length), use
    BlobProperty instead.
    """

    def validate(self, value):
        """Validate ByteString property.

        Returns:
          A valid value.

        Raises:
          BadValueError if property is not instance of 'ByteString', cannot
          be coerced to one, or exceeds MAX_LENGTH bytes.
        """
        # Attempt coercion to ByteString before base validation.
        if value is not None and not isinstance(value, ByteString):
            try:
                value = ByteString(value)
            except TypeError as err:
                raise BadValueError(
                    'Property %s must be convertible to a ByteString instance (%s)' %
                    (self.name, err))
        value = super(ByteStringProperty, self).validate(value)
        if value is not None and not isinstance(value, ByteString):
            raise BadValueError('Property %s must be a ByteString instance' % self.name)
        if value is not None and len(value) > self.MAX_LENGTH:
            raise BadValueError(
                'Property %s is %d bytes long; it must be %d or less.' %
                (self.name, len(value), self.MAX_LENGTH))
        return value

    MAX_LENGTH = 500
    data_type = ByteString
class DateTimeProperty(Property):
    """The base class of all of our date/time properties.

    We handle common operations, like converting between time tuples and
    datetime instances.
    """

    def __init__(self, verbose_name=None, auto_now=False, auto_now_add=False, **kwds):
        """Construct a DateTimeProperty.

        Args:
          verbose_name: Verbose name is always first parameter.
          auto_now: Date/time property is updated with the current time every
            time it is saved to the datastore. Useful for properties that
            want to track the modification time of an instance.
          auto_now_add: Date/time is set to the when its instance is created.
            Useful for properties that record the creation time of an entity.
        """
        super(DateTimeProperty, self).__init__(verbose_name, **kwds)
        self.auto_now = auto_now
        self.auto_now_add = auto_now_add

    def validate(self, value):
        """Validate datetime.

        Returns:
          A valid value.

        Raises:
          BadValueError if property is not instance of 'datetime'.
        """
        value = super(DateTimeProperty, self).validate(value)
        if value and not isinstance(value, self.data_type):
            raise BadValueError('Property %s must be a %s, but was %r' %
                                (self.name, self.data_type.__name__, value))
        return value

    def default_value(self):
        """Default value for datetime.

        Returns:
          value of now() as appropriate to the date-time instance if auto_now
          or auto_now_add is set, else user configured default value
          implementation.
        """
        if self.auto_now or self.auto_now_add:
            return self.now()
        return Property.default_value(self)

    def get_updated_value_for_datastore(self, model_instance):
        """Get new value for property to send to datastore.

        Returns:
          now() as appropriate to the date-time instance in the odd case
          where auto_now is set to True, else AUTO_UPDATE_UNCHANGED.
        """
        if self.auto_now:
            return self.now()
        return AUTO_UPDATE_UNCHANGED

    data_type = datetime.datetime

    @staticmethod
    def now():
        """Get now as a full datetime value.

        Returns:
          'now' as a whole timestamp, including both time and date.
        """
        return datetime.datetime.now()
def _date_to_datetime(value):
    """Convert a date to a datetime for datastore storage.

    Args:
      value: A datetime.date object.

    Returns:
      A datetime object with time set to 0:00.

    Raises:
      TypeError if value is not a datetime.date.
    """
    if not isinstance(value, datetime.date):
        raise TypeError('expected a datetime.date, got %r' % (value,))
    return datetime.datetime(value.year, value.month, value.day)
def _time_to_datetime(value):
    """Convert a time to a datetime for datastore storage.

    Args:
      value: A datetime.time object.

    Returns:
      A datetime object with date set to 1970-01-01.

    Raises:
      TypeError if value is not a datetime.time.
    """
    if not isinstance(value, datetime.time):
        raise TypeError('expected a datetime.time, got %r' % (value,))
    return datetime.datetime(1970, 1, 1,
                             value.hour, value.minute, value.second,
                             value.microsecond)
class DateProperty(DateTimeProperty):
    """A date property, which stores a date without a time."""

    @staticmethod
    def now():
        """Get now as a date datetime value.

        Returns:
          'date' part of 'now' only.
        """
        return datetime.datetime.now().date()

    def validate(self, value):
        """Validate date.

        Returns:
          A valid value.

        Raises:
          BadValueError if property is not instance of 'date', or if it is
          an instance of 'datetime' (which is a subclass of 'date', but for
          all practical purposes a different type).
        """
        value = super(DateProperty, self).validate(value)
        if isinstance(value, datetime.datetime):
            raise BadValueError('Property %s must be a %s, not a datetime' %
                                (self.name, self.data_type.__name__))
        return value

    def get_updated_value_for_datastore(self, model_instance):
        """Get new value for property to send to datastore.

        Returns:
          now() as appropriate to the date instance in the odd case where
          auto_now is set to True, else AUTO_UPDATE_UNCHANGED.
        """
        if self.auto_now:
            # The datastore stores dates as datetimes; widen before sending.
            return _date_to_datetime(self.now())
        return AUTO_UPDATE_UNCHANGED

    def get_value_for_datastore(self, model_instance):
        """Get value from property to send to datastore.

        We retrieve a datetime.date from the model instance and return a
        datetime.datetime instance with the time set to zero.

        See base class method documentation for details.
        """
        value = super(DateProperty, self).get_value_for_datastore(model_instance)
        if value is not None:
            if not isinstance(value, datetime.date):
                raise TypeError('expected a datetime.date, got %r' % (value,))
            value = _date_to_datetime(value)
        return value

    def make_value_from_datastore(self, value):
        """Native representation of this property.

        We receive a datetime.datetime retrieved from the entity and return
        a datetime.date instance representing its date portion.

        See base class method documentation for details.
        """
        if value is not None:
            if not isinstance(value, datetime.datetime):
                raise TypeError('expected a datetime.datetime, got %r' % (value,))
            value = value.date()
        return value

    data_type = datetime.date
class TimeProperty(DateTimeProperty):
    """A time property, which stores a time without a date."""

    @staticmethod
    def now():
        """Get now as a time datetime value.

        Returns:
          'time' part of 'now' only.
        """
        return datetime.datetime.now().time()

    def empty(self, value):
        """Is time property empty.

        "0:0" (midnight) is not an empty value.

        Returns:
          True if value is None, else False.
        """
        return value is None

    def get_updated_value_for_datastore(self, model_instance):
        """Get new value for property to send to datastore.

        Returns:
          now() as appropriate to the time instance in the odd case where
          auto_now is set to True, else AUTO_UPDATE_UNCHANGED.
        """
        if self.auto_now:
            # The datastore stores times as datetimes; widen before sending.
            return _time_to_datetime(self.now())
        return AUTO_UPDATE_UNCHANGED

    def get_value_for_datastore(self, model_instance):
        """Get value from property to send to datastore.

        We retrieve a datetime.time from the model instance and return a
        datetime.datetime instance with the date set to 1/1/1970.

        See base class method documentation for details.
        """
        value = super(TimeProperty, self).get_value_for_datastore(model_instance)
        if value is not None:
            if not isinstance(value, datetime.time):
                raise TypeError('expected a datetime.time, got %r' % (value,))
            value = _time_to_datetime(value)
        return value

    def make_value_from_datastore(self, value):
        """Native representation of this property.

        We receive a datetime.datetime retrieved from the entity and return
        a datetime.time instance representing its time portion.

        See base class method documentation for details.
        """
        if value is not None:
            if not isinstance(value, datetime.datetime):
                raise TypeError('expected a datetime.datetime, got %r' % (value,))
            value = value.time()
        return value

    data_type = datetime.time
class IntegerProperty(Property):
    """An integer property."""

    def validate(self, value):
        """Validate integer property.

        Returns:
          A valid value.

        Raises:
          BadValueError if value is not an integer or long instance, or
          does not fit in a signed 64-bit integer.
        """
        value = super(IntegerProperty, self).validate(value)
        if value is None:
            return value
        # bool is a subclass of int but is rejected here; use BooleanProperty.
        if not isinstance(value, int) or isinstance(value, bool):
            raise BadValueError('Property %s must be an int or long, not a %s' %
                                (self.name, type(value).__name__))
        if value < -9223372036854775808 or value > 9223372036854775807:
            raise BadValueError('Property %s must fit in 64 bits' % self.name)
        return value

    data_type = int

    def empty(self, value):
        """Is integer property empty.

        0 is not an empty value.

        Returns:
          True if value is None, else False.
        """
        return value is None
class RatingProperty(_CoercingProperty, IntegerProperty):
    """A property whose values are Rating instances."""
    # Coerces via _CoercingProperty and range-checks via IntegerProperty (MRO).
    data_type = Rating
class FloatProperty(Property):
    """A float property."""

    def validate(self, value):
        """Validate float.

        Returns:
          A valid value.

        Raises:
          BadValueError if property is not instance of 'float'.
        """
        value = super(FloatProperty, self).validate(value)
        if value is not None and not isinstance(value, float):
            raise BadValueError('Property %s must be a float' % self.name)
        return value

    data_type = float

    def empty(self, value):
        """Is float property empty.

        0.0 is not an empty value.

        Returns:
          True if value is None, else False.
        """
        return value is None
class BooleanProperty(Property):
    """A boolean property."""

    def validate(self, value):
        """Validate boolean.

        Returns:
          A valid value.

        Raises:
          BadValueError if property is not instance of 'bool'.
        """
        value = super(BooleanProperty, self).validate(value)
        if value is not None and not isinstance(value, bool):
            raise BadValueError('Property %s must be a bool' % self.name)
        return value

    data_type = bool

    def empty(self, value):
        """Is boolean property empty.

        False is not an empty value.

        Returns:
          True if value is None, else False.
        """
        return value is None
class UserProperty(Property):
    """A user property."""

    def __init__(self, verbose_name=None, name=None, required=False,
                 validator=None, choices=None, auto_current_user=False,
                 auto_current_user_add=False, indexed=True):
        """Initializes this Property with the given options.

        Note: this does *not* support the 'default' keyword argument.
        Use auto_current_user_add=True instead.

        Args:
          verbose_name: User friendly name of property.
          name: Storage name for property. By default, uses attribute name
            as it is assigned in the Model sub-class.
          required: Whether property is required.
          validator: User provided method used for validation.
          choices: User provided set of valid property values.
          auto_current_user: If true, the value is set to the current user
            each time the entity is written to the datastore.
          auto_current_user_add: If true, the value is set to the current
            user the first time the entity is written to the datastore.
          indexed: Whether property is indexed.
        """
        super(UserProperty, self).__init__(verbose_name, name,
                                           required=required,
                                           validator=validator,
                                           choices=choices,
                                           indexed=indexed)
        self.auto_current_user = auto_current_user
        self.auto_current_user_add = auto_current_user_add

    def validate(self, value):
        """Validate user.

        Returns:
          A valid value.

        Raises:
          BadValueError if property is not instance of 'User'.
        """
        value = super(UserProperty, self).validate(value)
        if value is not None and not isinstance(value, users.User):
            raise BadValueError('Property %s must be a User' % self.name)
        return value

    def default_value(self):
        """Default value for user.

        Returns:
          Value of users.get_current_user() if auto_current_user or
          auto_current_user_add is set; else None. (But *not* the default
          implementation, since we don't support the 'default' keyword
          argument.)
        """
        if self.auto_current_user or self.auto_current_user_add:
            return users.get_current_user()
        return None

    def get_updated_value_for_datastore(self, model_instance):
        """Get new value for property to send to datastore.

        Returns:
          Value of users.get_current_user() if auto_current_user is set;
          else AUTO_UPDATE_UNCHANGED.
        """
        if self.auto_current_user:
            return users.get_current_user()
        return AUTO_UPDATE_UNCHANGED

    data_type = users.User
class ListProperty(Property):
    """A property that stores a list of things.

    This is a parameterized property; the parameter must be a valid
    non-list data type, and all items must conform to this type.

    NOTE(review): this block was round-tripped through an AST dump that
    stripped every call's function name AND collapsed Python-2 types via a
    2to3-style pass — e.g. `(int, int)` below was presumably `(int, long)`
    and `if (item_type is str): item_type = str` was presumably
    `if item_type is unicode: item_type = basestring`. The code is
    preserved verbatim; confirm against the original SDK source.
    """

    def __init__(self, item_type, verbose_name=None, default=None, **kwds):
        """Construct ListProperty.

        Args:
          item_type: Type for the list items; must be one of the allowed
            property types.
          verbose_name: Optional verbose name.
          default: Optional default value; if omitted, an empty list is used.
          **kwds: Optional additional keyword arguments, passed to base class.

        Note that the only permissible value for 'required' is True.
        """
        # NOTE(review): degenerate after 2to3 — originally a unicode/basestring
        # normalization.
        if (item_type is str):
            item_type = str
        if (not (item_type, type)):
            raise ('Item type should be a type object')
        if (item_type not in _ALLOWED_PROPERTY_TYPES):
            raise (('Item type %s is not acceptable' % item_type.__name__))
        if (item_type, (Blob, Text)):
            # Unindexable item types force the internal indexed flag, as in
            # UnindexedProperty.
            (kwds, 'indexed', False)
            kwds['indexed'] = True
        (kwds, 'required', True)
        if (default is None):
            default = []
        self.item_type = item_type
        (verbose_name)

    def validate(self, value):
        """Validate list.

        Returns:
          A valid value.

        Raises:
          BadValueError if property is not a list whose items are instances
          of the item_type given to the constructor.
        """
        value = (value)
        if (value is not None):
            if (not (value, list)):
                raise (('Property %s must be a list' % self.name))
            value = (value)
        return value

    def _load(self, model_instance, value):
        # A scalar coming back from the datastore is wrapped into a
        # single-element list before loading.
        if (not (value, list)):
            value = [value]
        return (model_instance, value)

    def validate_list_contents(self, value):
        """Validates that all items in the list are of the correct type.

        Returns:
          The validated list.

        Raises:
          BadValueError if the list has items that are not instances of the
          item_type given to the constructor.
        """
        # NOTE(review): `(int, int)` is degenerate — originally `(int, long)`.
        if (self.item_type in (int, int)):
            item_type = (int, int)
        else:
            item_type = self.item_type
        for item in value:
            if (not (item, item_type)):
                if (item_type == (int, int)):
                    raise (('Items in the %s list must all be integers.' % self.name))
                else:
                    raise (('Items in the %s list must all be %s instances' % (self.name, self.item_type.__name__)))
        return value

    def empty(self, value):
        """Is list property empty.

        [] is not an empty value.

        Returns:
          True if value is None, else false.
        """
        return (value is None)

    data_type = list

    def default_value(self):
        """Default value for list.

        Because the property supplied to 'default' is a static value, that
        value must be shallow copied to prevent all fields with default
        values from sharing the same instance.

        Returns:
          Copy of the default value.
        """
        return (())

    def get_value_for_datastore(self, model_instance):
        """Get value from property to send to datastore.

        Returns:
          validated list appropriate to save in the datastore.
        """
        value = (model_instance)
        if (not value):
            return value
        value = (value)
        if self.validator:
            (value)
        # Dates and times are widened to datetimes for storage.
        if (self.item_type == datetime.date):
            value = ((_date_to_datetime, value))
        elif (self.item_type == datetime.time):
            value = ((_time_to_datetime, value))
        return value

    def make_value_from_datastore(self, value):
        """Native representation of this property.

        If this list is a list of datetime.date or datetime.time, we convert
        the list of datetime.datetime retrieved from the entity into
        datetime.date or datetime.time.

        See base class method documentation for details.
        """
        if (self.item_type == datetime.date):
            for v in value:
                if (not (v, datetime.datetime)):
                    raise ()
            value = [() for x in value]
        elif (self.item_type == datetime.time):
            for v in value:
                if (not (v, datetime.datetime)):
                    raise ()
            value = [() for x in value]
        return value

    def make_value_from_datastore_index_value(self, index_value):
        # A projected index value is restored as a one-element list.
        value = [(index_value, self.item_type)]
        return (value)
class StringListProperty(ListProperty):
    """A property that stores a list of strings.

    A shorthand for the most common type of ListProperty.
    """

    def __init__(self, verbose_name=None, default=None, **kwds):
        """Construct StringListProperty.

        Args:
          verbose_name: Optional verbose name.
          default: Optional default value; if omitted, an empty list is used.
          **kwds: Optional additional keyword arguments, passed to
            ListProperty().
        """
        super(StringListProperty, self).__init__(str,
                                                 verbose_name=verbose_name,
                                                 default=default,
                                                 **kwds)
class ReferenceProperty(Property):
'A property that represents a many-to-one reference to another model.\n\n For example, a reference property in model A that refers to model B forms\n a many-to-one relationship from A to B: every instance of A refers to a\n single B instance, and every B instance can have many A instances refer\n to it.\n '
def __init__(self, reference_class=None, verbose_name=None, collection_name=None, **attrs):
    """Construct ReferenceProperty.

    Args:
      reference_class: Which model class this property references.
      verbose_name: User friendly name of property.
      collection_name: If provided, alternate name of collection on
        reference_class to store back references. Use this to allow
        a Model to have multiple fields which refer to the same class.
    """
    super(ReferenceProperty, self).__init__(verbose_name, **attrs)
    self.collection_name = collection_name
    if reference_class is None:
        reference_class = Model
    # _SELF_REFERENCE is resolved to the owning class in __property_config__.
    if not ((isinstance(reference_class, type) and
             issubclass(reference_class, Model)) or
            reference_class is _SELF_REFERENCE):
        raise KindError('reference_class must be Model or _SELF_REFERENCE')
    self.reference_class = self.data_type = reference_class
def make_value_from_datastore_index_value(self, index_value):
    """Restore a projected index value to the property's native form.

    The index stores the referenced entity's Key; restore it and convert
    through the normal datastore-value path.
    """
    # NOTE(review): reconstructed from stripped calls — the restore helper
    # was presumably datastore_types.RestoreFromIndexValue; confirm against
    # the original SDK source.
    value = datastore_types.RestoreFromIndexValue(index_value, Key)
    return self.make_value_from_datastore(value)
def __property_config__(self, model_class, property_name):
    """Loads all of the references that point to this model.

    We need to do this to create the ReverseReferenceProperty properties for
    this model and create the <reference>_set attributes on the referenced
    model, e.g.:

      class Story(db.Model):
        title = db.StringProperty()
      class Comment(db.Model):
        story = db.ReferenceProperty(Story)
      story = Story.get(id)
      print [c for c in story.comment_set]

    In this example, the comment_set property was created based on the
    reference from Comment to Story (which is inherently one to many).

    Args:
      model_class: Model class which will have its reference properties
        initialized.
      property_name: Name of property being configured.

    Raises:
      DuplicatePropertyError if referenced class already has the provided
      collection name as a property.
    """
    super(ReferenceProperty, self).__property_config__(model_class, property_name)
    if self.reference_class is _SELF_REFERENCE:
        self.reference_class = self.data_type = model_class
    if self.collection_name is None:
        self.collection_name = '%s_set' % (model_class.__name__.lower())
    existing_prop = getattr(self.reference_class, self.collection_name, None)
    if existing_prop is not None:
        # Re-configuring the same back-reference is fine; anything else is a
        # genuine collision on the referenced class.
        if not (isinstance(existing_prop, _ReverseReferenceProperty) and
                existing_prop._prop_name == property_name and
                existing_prop._model.__name__ == model_class.__name__ and
                existing_prop._model.__module__ == model_class.__module__):
            raise DuplicatePropertyError('Class %s already has property %s ' %
                                         (self.reference_class.__name__,
                                          self.collection_name))
    setattr(self.reference_class, self.collection_name,
            _ReverseReferenceProperty(model_class, property_name))
def __get__(self, model_instance, model_class):
    """Get the referenced object, fetching it from the datastore if needed.

    Returns:
        The descriptor itself when accessed on the class; otherwise the
        referenced Model instance if the property is set, else None.

    Raises:
        ReferencePropertyResolveError: if the referenced entity is missing.
    """
    if (model_instance is None):
        return self
    # NOTE(review): stripped call targets below look like hasattr/getattr/
    # setattr on the private id/resolved attribute names plus a get() on the
    # referenced Key -- confirm against the original SDK source.
    if (model_instance, ()):
        reference_id = (model_instance, ())
    else:
        reference_id = None
    if (reference_id is not None):
        resolved = (model_instance, ())
        if (resolved is not None):
            # Already resolved once on this instance; reuse the cached copy.
            return resolved
        else:
            instance = (reference_id)
            if (instance is None):
                raise (('ReferenceProperty failed to be resolved: %s' % ()))
            # Cache the resolved instance so later reads skip the datastore.
            (model_instance, (), instance)
            return instance
    else:
        return None
def __set__(self, model_instance, value):
    """Set the reference, storing the Key and caching the resolved entity.

    Accepts a datastore Key, a model instance, or None.
    """
    # NOTE(review): stripped call targets below appear to be self.validate()
    # and setattr on the private id/resolved attribute names -- confirm
    # against the original SDK source.
    value = (value)
    if (value is not None):
        if (value, datastore.Key):
            # Raw Key: store it, and clear any cached resolved instance.
            (model_instance, (), value)
            (model_instance, (), None)
        else:
            # Model instance: store its key and cache the instance itself.
            (model_instance, (), ())
            (model_instance, (), value)
    else:
        # Clearing the reference clears both the key and the cache.
        (model_instance, (), None)
        (model_instance, (), None)
def get_value_for_datastore(self, model_instance):
    """Get key of reference rather than reference itself."""
    # NOTE(review): stripped call target -- presumably getattr on the
    # private id attribute name; confirm against the original SDK source.
    return (model_instance, ())
def validate(self, value):
    """Validate reference.

    Returns:
        A valid value.

    Raises:
        BadValueError if the referenced entity has no complete key (i.e. is
        unsaved) or the value is not an instance of the expected model.
    """
    # NOTE(review): stripped call targets below (isinstance checks, the
    # has_key()/is_saved() test and BadValueError constructors) -- confirm
    # against the original google.appengine.ext.db source.
    if (value, datastore.Key):
        return value
    if ((value is not None) and (not ())):
        raise (('%s instance must have a complete key before it can be stored as a reference' % ()))
    value = (value)
    if ((value is not None) and (not (value, self.reference_class))):
        raise (('Property %s must be an instance of %s' % (self.name, ())))
    return value
def __id_attr_name(self):
    """Get the instance attribute name that stores the referenced id."""
    # NOTE(review): call target stripped by decompilation; presumably
    # self._attr_name() -- confirm against the original SDK source.
    return ()
def __resolved_attr_name(self):
    """Get the instance attribute name that caches the resolved entity."""
    # NOTE(review): call target stripped by decompilation; presumably
    # '_RESOLVED' + self._attr_name() -- confirm against the SDK source.
    return ('_RESOLVED' + ())
# Backwards-compatible short alias.
Reference = ReferenceProperty
def SelfReferenceProperty(verbose_name=None, collection_name=None, **attrs):
    """Create a self-referencing ReferenceProperty on a model.

    Example:
        class HtmlNode(db.Model):
            parent = db.SelfReferenceProperty('Parent', 'children')

    Args:
        verbose_name: User friendly name of property.
        collection_name: Name of collection on model.

    Raises:
        ConfigurationError if reference_class is provided as a parameter.
    """
    if ('reference_class' in attrs):
        # NOTE(review): stripped constructor -- presumably ConfigurationError.
        raise ('Do not provide reference_class to self-reference.')
    # NOTE(review): stripped call -- presumably ReferenceProperty(
    # _SELF_REFERENCE, verbose_name, collection_name, **attrs); confirm.
    return (_SELF_REFERENCE, verbose_name, collection_name)
# Backwards-compatible alias.
SelfReference = SelfReferenceProperty
class _ReverseReferenceProperty(Property):
    """The inverse of the Reference property above.

    Constructed automatically on the referenced model: for a Reference
    property in model A pointing at model B, B gains an a_set property that
    queries all A instances referring to a given B instance.
    """
    def __init__(self, model, prop):
        """Constructor for reverse reference.

        Does not take the standard values of other property types.

        Args:
            model: Model class that this property is a collection of.
            prop: Name of foreign property on the referring model that
                points back to this property's entity.
        """
        self.__model = model
        self.__property = prop
    @property
    def _model(self):
        """Internal helper to access the model class, read-only."""
        return self.__model
    @property
    def _prop_name(self):
        """Internal helper to access the property name, read-only."""
        return self.__property
    def __get__(self, model_instance, model_class):
        """Fetches collection of model instances of this collection property."""
        if (model_instance is not None):
            # NOTE(review): stripped calls -- presumably Query(self.__model)
            # filtered on "<prop> =" model_instance.key(); confirm against
            # the original SDK source.
            query = (self.__model)
            return ((self.__property + ' ='), ())
        else:
            return self
    def __set__(self, model_instance, value):
        """Not possible to set a new collection."""
        # Reverse references are derived from the forward reference;
        # assignment is always an error (presumably ValueError -- stripped).
        raise ('Virtual property is read-only')
class ComputedProperty(Property):
    """Property whose value is derived from other values of the entity.

    The value is recalculated via the wrapped function whenever the
    property is read; attempting to assign it raises DerivedPropertyError,
    which Model knows to ignore during entity loading.

    Example:
        class Person(Model):
            name = StringProperty(required=True)

            @db.ComputedProperty
            def lower_case_name(self):
                return self.name.lower()
    """
    def __init__(self, value_function, indexed=True):
        """Constructor.

        Args:
            value_function: Callable f(model_instance) -> value used to derive
                the persistent property value stored in the datastore.
            indexed: Whether or not the attribute should be indexed.
        """
        # NOTE(review): stripped call -- presumably the superclass
        # constructor, e.g. super(...).__init__(indexed=indexed); confirm.
        ()
        self.__value_function = value_function
    def __set__(self, *args):
        """Disallow setting this value.

        Raises:
            DerivedPropertyError when a developer attempts to set the
            attribute manually (constructor stripped by decompilation).
        """
        raise (('Computed property %s cannot be set.' % self.name))
    def __get__(self, model_instance, model_class):
        """Derive property value.

        Returns:
            self when accessed on the class; otherwise the result of the
            wrapped value function applied to model_instance.
        """
        if (model_instance is None):
            return self
        # NOTE(review): stripped call -- presumably
        # self.__value_function(model_instance); confirm.
        return (model_instance)
def to_dict(model_instance, dictionary=None):
    """Convert model to dictionary.

    Args:
        model_instance: Model instance for which to make the dictionary.
        dictionary: dict instance (or compatible) to receive model values;
            it is not cleared first (similar to dictionary.update). When
            None, a new dict is created and returned.

    Returns:
        The populated dictionary.
    """
    if (dictionary is None):
        dictionary = {}
    # NOTE(review): stripped call -- presumably dictionary.update(...) over
    # the model's properties; confirm against the original SDK source.
    (dictionary)
    return dictionary
# Module-level aliases re-exporting datastore/query helpers under both
# lower_snake_case and CamelCase names for backwards compatibility.
run_in_transaction = datastore.RunInTransaction
run_in_transaction_custom_retries = datastore.RunInTransactionCustomRetries
run_in_transaction_options = datastore.RunInTransactionOptions
RunInTransaction = run_in_transaction
RunInTransactionCustomRetries = run_in_transaction_custom_retries
websafe_encode_cursor = datastore_query.Cursor.to_websafe_string
websafe_decode_cursor = datastore_query.Cursor.from_websafe_string
is_in_transaction = datastore.IsInTransaction
transactional = datastore.Transactional
non_transactional = datastore.NonTransactional
create_config = datastore.CreateConfig
create_transaction_options = datastore.CreateTransactionOptions
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Status page handler for mapreduce framework."""
import os
import time
from google.appengine.api import validation
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_errors
from google.appengine.api import yaml_listener
from google.appengine.api import yaml_object
from google.appengine.ext import db
from google.appengine.ext.mapreduce import base_handler
from google.appengine.ext.mapreduce import errors
from google.appengine.ext.mapreduce import model
# Candidate configuration file names, checked in this order.
MR_YAML_NAMES = ["mapreduce.yaml", "mapreduce.yml"]
class BadStatusParameterError(Exception):
    """Raised when a status handler receives an invalid parameter."""
class UserParam(validation.Validated):
    """A user-supplied parameter to a mapreduce job."""

    # name is required; default/value are optional free-form strings.
    ATTRIBUTES = {
        "name": r"[a-zA-Z0-9_\.]+",
        "default": validation.Optional(r".*"),
        "value": validation.Optional(r".*"),
    }
class MapperInfo(validation.Validated):
    """Configuration parameters for the mapper part of the job."""

    # handler and input_reader are required dotted names; the rest optional.
    ATTRIBUTES = {
        "handler": r".+",
        "input_reader": r".+",
        "output_writer": validation.Optional(r".+"),
        "params": validation.Optional(validation.Repeated(UserParam)),
        "params_validator": validation.Optional(r".+"),
    }
class MapreduceInfo(validation.Validated):
    """Mapreduce description in mapreduce.yaml."""

    # name is the UI-visible job name; mapper is a nested MapperInfo.
    ATTRIBUTES = {
        "name": r".+",
        "mapper": MapperInfo,
        "params": validation.Optional(validation.Repeated(UserParam)),
        "params_validator": validation.Optional(r".+"),
    }
class MapReduceYaml(validation.Validated):
    """Root class for mapreduce.yaml.

    File format:

    mapreduce:
    - name: <mapreduce_name>
      mapper:
        - input_reader: google.appengine.ext.mapreduce.DatastoreInputReader
        - handler: path_to_my.MapperFunction
        - params:
          - name: foo
            default: bar
          - name: blah
            default: stuff
        - params_validator: path_to_my.ValidatorFunction

    Where
      mapreduce_name: The name of the mapreduce. Used for UI purposes.
      mapper_handler_spec: Full <module_name>.<function_name/class_name> of
        mapper handler. See MapreduceSpec class documentation for full handler
        specification.
      input_reader: Full <module_name>.<function_name/class_name> of the
        InputReader sub-class to use for the mapper job.
      params: A list of optional parameter names and optional default values
        that may be supplied or overridden by the user running the job.
      params_validator: Full <module_name>.<function_name/class_name> of a
        callable to validate the mapper_params after they are input by the
        user running the job.
    """

    ATTRIBUTES = {"mapreduce": validation.Optional(validation.Repeated(MapreduceInfo))}

    @staticmethod
    def _params_to_dict(params):
        """Flatten a list of UserParam into {name: default-or-value}."""
        # 'default' wins when present; 'value' is the fallback.
        return dict((param.name, param.default or param.value) for param in params)

    @staticmethod
    def to_dict(mapreduce_yaml):
        """Converts a MapReduceYaml file into a list of JSON-encodable dicts.

        For use in user-visible UI and internal methods for interfacing with
        user code (like param validation).

        Args:
          mapreduce_yaml: The Python representation of the mapreduce.yaml
            document.

        Returns:
          A list of configuration dictionaries, one per mapreduce entry.
        """
        all_configs = []
        for config in mapreduce_yaml.mapreduce:
            out = {
                "name": config.name,
                "mapper_input_reader": config.mapper.input_reader,
                "mapper_handler": config.mapper.handler,
            }
            if config.mapper.params_validator:
                out["mapper_params_validator"] = config.mapper.params_validator
            if config.mapper.params:
                out["mapper_params"] = MapReduceYaml._params_to_dict(
                    config.mapper.params)
            if config.params:
                out["params"] = MapReduceYaml._params_to_dict(config.params)
            if config.mapper.output_writer:
                out["mapper_output_writer"] = config.mapper.output_writer
            all_configs.append(out)
        return all_configs
def find_mapreduce_yaml(status_file=__file__):
    """Traverse directory trees to find mapreduce.yaml file.

    Begins with the location of status.py and then moves on to check the
    working directory.

    Args:
      status_file: location of status.py, overridable for testing purposes.

    Returns:
      the path of mapreduce.yaml file or None if not found.
    """
    seen = set()
    # Search upward from the module's directory first, then from the cwd;
    # the shared 'seen' set avoids re-walking common ancestors.
    for base in (os.path.dirname(status_file), os.getcwd()):
        found = _find_mapreduce_yaml(base, seen)
        if found:
            return found
    return None
def _find_mapreduce_yaml(start, checked):
"""Traverse the directory tree identified by start until a directory already
in checked is encountered or the path of mapreduce.yaml is found.
Checked is present both to make loop termination easy to reason about and so
that the same directories do not get rechecked.
Args:
start: the path to start in and work upward from
checked: the set of already examined directories
Returns:
the path of mapreduce.yaml file or None if not found.
"""
dir = start
while dir not in checked:
checked.add(dir)
for mr_yaml_name in MR_YAML_NAMES:
yaml_path = os.path.join(dir, mr_yaml_name)
if os.path.exists(yaml_path):
return yaml_path
dir = os.path.dirname(dir)
return None
def parse_mapreduce_yaml(contents):
    """Parses mapreduce.yaml file contents.

    Args:
      contents: mapreduce.yaml file contents.

    Returns:
      MapReduceYaml object with all the data from original file.

    Raises:
      errors.BadYamlError: when contents is not a valid mapreduce.yaml file.
    """
    try:
        event_handler = yaml_builder.BuilderHandler(
            yaml_object.ObjectBuilder(MapReduceYaml))
        yaml_listener.EventListener(event_handler).Parse(contents)
        documents = event_handler.GetResults()
    except (ValueError, yaml_errors.EventError) as e:
        raise errors.BadYamlError(e)

    if not documents:
        raise errors.BadYamlError("No configs found in mapreduce.yaml")
    if len(documents) > 1:
        raise errors.MultipleDocumentsInMrYaml("Found %d YAML documents" % len(documents))

    jobs = documents[0]
    # Job names must be unique within one file.
    unique_names = set(job.name for job in jobs.mapreduce)
    if len(unique_names) != len(jobs.mapreduce):
        raise errors.BadYamlError("Overlapping mapreduce names; names must be unique")
    return jobs
def get_mapreduce_yaml(parse=parse_mapreduce_yaml):
    """Locates mapreduce.yaml, loads and parses its info.

    Args:
      parse: Used for testing.

    Returns:
      MapReduceYaml object.

    Raises:
      errors.BadYamlError: when contents is not a valid mapreduce.yaml file
        or the file is missing.
    """
    mr_yaml_path = find_mapreduce_yaml()
    if not mr_yaml_path:
        raise errors.MissingYamlError()
    # Context manager closes the handle even if parsing raises (replaces the
    # manual try/finally of the original).
    with open(mr_yaml_path) as mr_yaml_file:
        return parse(mr_yaml_file.read())
class ResourceHandler(base_handler.BaseHandler):
    """Handler for static resources."""

    # Maps the URL-visible resource name to (file under ./static, MIME type).
    _RESOURCE_MAP = {
        "status": ("overview.html", "text/html"),
        "detail": ("detail.html", "text/html"),
        "base.css": ("base.css", "text/css"),
        "jquery.js": ("jquery-1.6.1.min.js", "text/javascript"),
        "jquery-json.js": ("jquery.json-2.2.min.js", "text/javascript"),
        "status.js": ("status.js", "text/javascript"),
    }

    def get(self, relative):
        """Serve one of the whitelisted static files, or 404.

        Args:
          relative: resource name from the URL (key into _RESOURCE_MAP).
        """
        if relative not in self._RESOURCE_MAP:
            self.response.set_status(404)
            self.response.out.write("Resource not found.")
            return
        real_path, content_type = self._RESOURCE_MAP[relative]
        path = os.path.join(os.path.dirname(__file__), "static", real_path)
        self.response.headers["Cache-Control"] = "public; max-age=300"
        self.response.headers["Content-Type"] = content_type
        # Close the file deterministically instead of leaking the handle
        # until garbage collection (the original used open(path).read()).
        with open(path) as resource_file:
            self.response.out.write(resource_file.read())
class ListConfigsHandler(base_handler.GetJsonHandler):
    """Lists mapreduce configs as JSON for users to start jobs."""

    def handle(self):
        """Load mapreduce.yaml and expose its configs in the JSON response."""
        configs = MapReduceYaml.to_dict(get_mapreduce_yaml())
        self.json_response["configs"] = configs
class ListJobsHandler(base_handler.GetJsonHandler):
    """Lists running and completed mapreduce jobs for an overview as JSON."""

    def handle(self):
        """Build a paginated overview of MapreduceState entities."""
        cursor = self.request.get("cursor")
        count = int(self.request.get("count", "50"))

        query = model.MapreduceState.all()
        if cursor:
            query.filter("__key__ >=", db.Key(cursor))
        query.order("__key__")

        # Fetch one extra row so we can tell whether another page exists.
        jobs_list = query.fetch(count + 1)
        if len(jobs_list) == (count + 1):
            self.json_response["cursor"] = str(jobs_list[-1].key())
            jobs_list = jobs_list[:-1]

        def _to_ms(dt):
            # Naive UTC datetime -> integer epoch milliseconds for the UI.
            return int(time.mktime(dt.utctimetuple()) * 1000)

        all_jobs = []
        for job in jobs_list:
            entry = {
                "name": job.mapreduce_spec.name,
                "mapreduce_id": job.mapreduce_spec.mapreduce_id,
                "active": job.active,
                "start_timestamp_ms": _to_ms(job.start_time),
                "updated_timestamp_ms": _to_ms(job.last_poll_time),
                "chart_url": job.sparkline_url,
                "chart_width": job.chart_width,
                "active_shards": job.active_shards,
                "shards": job.mapreduce_spec.mapper.shard_count,
            }
            if job.result_status:
                entry["result_status"] = job.result_status
            all_jobs.append(entry)
        self.json_response["jobs"] = all_jobs
class GetJobDetailHandler(base_handler.GetJsonHandler):
    """Retrieves the details of a mapreduce job as JSON."""

    def handle(self):
        """Populate json_response with job-level and per-shard details."""
        mapreduce_id = self.request.get("mapreduce_id")
        if not mapreduce_id:
            raise BadStatusParameterError("'mapreduce_id' was invalid")
        job = model.MapreduceState.get_by_key_name(mapreduce_id)
        if job is None:
            raise KeyError("Could not find job with ID %r" % mapreduce_id)

        def _to_ms(dt):
            # Naive UTC datetime -> integer epoch milliseconds for the UI.
            return int(time.mktime(dt.utctimetuple()) * 1000)

        self.json_response.update(job.mapreduce_spec.to_json())
        self.json_response.update(job.counters_map.to_json())
        self.json_response.update({
            "active": job.active,
            "start_timestamp_ms": _to_ms(job.start_time),
            "updated_timestamp_ms": _to_ms(job.last_poll_time),
            "chart_url": job.chart_url,
            "chart_width": job.chart_width,
        })
        self.json_response["result_status"] = job.result_status

        shards_list = model.ShardState.find_by_mapreduce_state(job)
        shards_list.sort(key=lambda state: state.shard_number)
        all_shards = []
        for shard in shards_list:
            detail = {
                "active": shard.active,
                "result_status": shard.result_status,
                "shard_number": shard.shard_number,
                "shard_id": shard.shard_id,
                "updated_timestamp_ms": _to_ms(shard.update_time),
                "shard_description": shard.shard_description,
                "last_work_item": shard.last_work_item,
            }
            detail.update(shard.counters_map.to_json())
            all_shards.append(detail)
        self.json_response["shards"] = all_shards
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""An extremely simple WSGI web application framework.
This module is an alias for the webapp2 module i.e. the following are
equivalent:
1. from google.appengine.ext import webapp
2. import webapp2 as webapp
It exports three primary classes: Request, Response, and RequestHandler. You
implement a web application by subclassing RequestHandler. As WSGI requests come
in, they are passed to instances of your RequestHandlers. The RequestHandler
class provides access to the easy-to-use Request and Response objects so you can
interpret the request and write the response with no knowledge of the esoteric
WSGI semantics. Here is a simple example:
from google.appengine.ext import webapp
import wsgiref.simple_server
class MainPage(webapp.RequestHandler):
def get(self):
self.response.out.write(
'<html><body><form action="/hello" method="post">'
'Name: <input name="name" type="text" size="20"> '
'<input type="submit" value="Say Hello"></form></body></html>')
class HelloPage(webapp.RequestHandler):
def post(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write('Hello, %s' % self.request.get('name'))
application = webapp.WSGIApplication([
('/', MainPage),
('/hello', HelloPage)
], debug=True)
The WSGIApplication class maps URI regular expressions to your RequestHandler
classes. It is a WSGI-compatible application object, so you can use it in
conjunction with wsgiref to make your web application into, e.g., a CGI
script or a simple HTTP server, as in the example above.
The framework does not support streaming output. All output from a response
is stored in memory before it is written.
"""
import logging
import os
from google.appengine.api import lib_config
def __django_version_setup():
    """Selects a particular Django version to load."""
    django_version = _config_handle.django_version
    if django_version is not None:
        # An explicit version was configured (appengine_config.py).
        from google.appengine.dist import use_library

        use_library("django", str(django_version))
    else:
        # No explicit choice: fall back to whatever is installed (0.96 by
        # default) and nag the developer to pin a version.
        from google.appengine.dist import _library

        version, explicit = _library.installed.get("django", ("0.96", False))
        if not explicit:
            # logging.warn is a deprecated alias for logging.warning.
            logging.warning(
                "You are using the default Django version (%s). "
                "The default Django version will change in an "
                "App Engine release in the near future. "
                "Please call use_library() to explicitly select a "
                "Django version. "
                "For more information see %s",
                version,
                "https://developers.google.com/appengine/docs/python/tools/"
                "libraries#Django",
            )
        try:
            import django

            if not hasattr(django, "VERSION"):
                # Pre-0.97 Django lacks VERSION; pull in the 0.96 shim.
                from django import v0_96
        except ImportError:
            pass
def _django_setup():
    """Imports and configures Django.

    This can be overridden by defining a function named
    webapp_django_setup() in the app's appengine_config.py file (see
    lib_config docs). Such a function should import and configure
    Django.

    In the Python 2.5 runtime, you can also just configure the Django version
    to be used by setting webapp_django_version in that file.

    Finally, calling use_library('django', <version>) in that file
    should also work:

        # Example taken from
        # https://developers.google.com/appengine/docs/python/tools/libraries#Django
        import os
        os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
        from google.appengine.dist import use_library
        use_library('django', '1.2')

    In the Python 2.7 runtime, the Django version is specified in your
    app.yaml file and use_library is not supported.

    If your application also imports Django directly it should ensure
    that the same code is executed before your app imports Django
    (directly or indirectly). Perhaps the simplest way to ensure that
    is to include the following in your main.py (and in each alternate
    main script):

        from google.appengine.ext.webapp import template
        import django

    This will ensure that whatever Django setup code you have included
    in appengine_config.py is executed, as a side effect of importing
    the webapp.template module.
    """
    if os.environ.get("APPENGINE_RUNTIME") != "python27":
        # Only the legacy runtimes allow picking a Django version at runtime.
        __django_version_setup()
    import django
    import django.conf
    try:
        # NOTE(review): this unconditional raise deliberately forces the
        # except branch below (see the TODO); the intended settings probe is
        # the commented-out getattr line.
        raise ImportError
    # TODO: Right now the below line raises a
    # django.core.exceptions.ImproperlyConfigured exception. Need to investigate
    # why and address accordingly.
    # getattr(django.conf.settings, 'FAKE_ATTR', None)
    except (ImportError, EnvironmentError) as e:
        # If the developer pointed DJANGO_SETTINGS_MODULE somewhere, surface
        # the failure; otherwise fall back to a minimal configuration.
        if os.getenv(django.conf.ENVIRONMENT_VARIABLE):
            logging.warning(e)
        try:
            django.conf.settings.configure(
                DEBUG=False,
                TEMPLATE_DEBUG=False,
                TEMPLATE_LOADERS=(
                    "django.template.loaders.filesystem.load_template_source",
                ),
            )
        except (EnvironmentError, RuntimeError):
            # Already configured (or environment broken); leave settings as-is.
            pass
# Configure the webapp framework at import time. On the python27 runtime
# webapp is an alias for webapp2; older runtimes fall back to the bundled
# _webapp25 implementation and support selecting a Django version.
if os.environ.get("APPENGINE_RUNTIME") == "python27":
    _config_handle = lib_config.register(
        "webapp",
        {
            "add_wsgi_middleware": lambda app: app,
        },
    )
    from webapp2 import *
else:
    _config_handle = lib_config.register(
        "webapp",
        {
            "django_setup": _django_setup,
            "django_version": None,
            "add_wsgi_middleware": lambda app: app,
        },
    )
    from google.appengine.ext.webapp._webapp25 import *
    from google.appengine.ext.webapp._webapp25 import __doc__
|
"Pure-Python application server for testing applications locally.\n\nGiven a port and the paths to a valid application directory (with an 'app.yaml'\nfile), the external library directory, and a relative URL to use for logins,\ncreates an HTTP server that can be used to test an application locally. Uses\nstubs instead of actual APIs when SetupStubs() is called first.\n\nExample:\n root_path = '/path/to/application/directory'\n login_url = '/login'\n port = 8080\n server = dev_appserver.CreateServer(root_path, login_url, port)\n server.serve_forever()\n"
from google.appengine.tools import os_compat
import builtins
import http.server
import base64
import binascii
import calendar
import io
import cgi
import cgitb
import email.Utils
import errno
import hashlib
import heapq
import http.client
import imp
import inspect
import logging
import mimetools
import mimetypes
import os
import random
import select
import shutil
import simplejson
import struct
import tempfile
import yaml
import resource
import re
import sre_compile
import sre_constants
import sre_parse
import socket
import sys
import time
import types
import urllib.parse
import urllib.request, urllib.parse, urllib.error
import zlib
import google
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import appinfo
from google.appengine.api import appinfo_includes
from google.appengine.api import app_logging
from google.appengine.api import blobstore
from google.appengine.api import croninfo
from google.appengine.api import datastore
from google.appengine.api import lib_config
from google.appengine.api import mail
from google.appengine.api import namespace_manager
from google.appengine.api import urlfetch_stub
from google.appengine.api import user_service_stub
from google.appengine.api import yaml_errors
from google.appengine.api.app_identity import app_identity_stub
from google.appengine.api.capabilities import capability_stub
from google.appengine.api.channel import channel_service_stub
from google.appengine.api.files import file_service_stub
from google.appengine.api.logservice import logservice
from google.appengine.api.logservice import logservice_stub
from google.appengine.api.search import simple_search_stub
from google.appengine.api.prospective_search import prospective_search_stub
from google.appengine.api.remote_socket import _remote_socket_stub
from google.appengine.api import rdbms_mysqldb
from google.appengine.api.system import system_stub
from google.appengine.datastore import datastore_stub_util
from google.appengine.ext.cloudstorage import stub_dispatcher as gcs_dispatcher
from google.appengine import dist
from google.appengine.api.taskqueue import taskqueue_distributed
from google.appengine.api import mail_stub
from google.appengine.api.blobstore import blobstore_stub
from google.appengine.api import datastore_distributed
from google.appengine.api.blobstore import datastore_blob_storage
from google.appengine.api.memcache import memcache_distributed
from google.appengine.api.xmpp import xmpp_service_real
try:
    from google.appengine.runtime import request_environment
    from google.appengine.runtime import runtime
except ImportError:
    # These modules only ship with newer SDKs; degrade gracefully when they
    # are absent. (Narrowed from a bare 'except:' -- only import failures
    # should be swallowed here.)
    request_environment = None
    runtime = None
from google.appengine.tools import dev_appserver_apiserver
from google.appengine.tools import dev_appserver_blobimage
from google.appengine.tools import dev_appserver_blobstore
from google.appengine.tools import dev_appserver_channel
from google.appengine.tools import dev_appserver_import_hook
from google.appengine.tools import dev_appserver_login
from google.appengine.tools import dev_appserver_oauth
from google.storage.speckle.python.api import rdbms
# Re-export import-hook helpers at module level for existing callers.
CouldNotFindModuleError = dev_appserver_import_hook.CouldNotFindModuleError
FakeAccess = dev_appserver_import_hook.FakeAccess
FakeFile = dev_appserver_import_hook.FakeFile
FakeReadlink = dev_appserver_import_hook.FakeReadlink
FakeSetLocale = dev_appserver_import_hook.FakeSetLocale
FakeUnlink = dev_appserver_import_hook.FakeUnlink
GetSubmoduleName = dev_appserver_import_hook.GetSubmoduleName
HardenedModulesHook = dev_appserver_import_hook.HardenedModulesHook
SDK_ROOT = dev_appserver_import_hook.SDK_ROOT
# Placeholder expanded to the SDK's python library directory in handler paths.
PYTHON_LIB_VAR = '$PYTHON_LIB'
DEVEL_CONSOLE_PATH = (PYTHON_LIB_VAR + '/google/appengine/ext/admin')
REMOTE_API_PATH = (PYTHON_LIB_VAR + '/google/appengine/ext/remote_api/handler.py')
# errno values treated as "file does not exist" rather than real errors.
FILE_MISSING_EXCEPTIONS = ([errno.ENOENT, errno.ENOTDIR])
MAX_URL_LENGTH = 2047
DEFAULT_ENV = {'GATEWAY_INTERFACE': 'CGI/1.1', 'AUTH_DOMAIN': 'gmail.com', 'USER_ORGANIZATION': '', 'TZ': 'UTC', 'COOKIE_SECRET': 'secret', 'LOGIN_SERVER': '0.0.0.0', 'NGINX_HOST': '0.0.0.0', 'NGINX_PORT': '0'}
DEFAULT_SELECT_DELAY = 30.0
# NOTE(review): decompilation stripped this loop's iterable and call target;
# it presumably registered extra MIME types via
# mimetypes.add_type(mime_type, '.' + ext) over a mapping of extensions --
# confirm against the original dev_appserver.py.
for (ext, mime_type) in ():
    (mime_type, ('.' + ext))
MAX_RUNTIME_RESPONSE_SIZE = (32 << 20)
MAX_REQUEST_SIZE = ((1024 * 1024) * 1024)
COPY_BLOCK_SIZE = (1 << 20)
API_VERSION = '1'
VERSION_FILE = '../../VERSION'
# Headers used to mark requests generated by the dev appserver itself.
DEVEL_PAYLOAD_HEADER = 'HTTP_X_APPENGINE_DEVELOPMENT_PAYLOAD'
DEVEL_PAYLOAD_RAW_HEADER = 'X-AppEngine-Development-Payload'
DEVEL_FAKE_IS_ADMIN_HEADER = 'HTTP_X_APPENGINE_FAKE_IS_ADMIN'
DEVEL_FAKE_IS_ADMIN_RAW_HEADER = 'X-AppEngine-Fake-Is-Admin'
FILE_STUB_DEPRECATION_MESSAGE = 'The datastore file stub is deprecated, and\nwill stop being the default in a future release.\nAppend the --use_sqlite flag to use the new SQLite stub.\n\nYou can port your existing data using the --port_sqlite_data flag or\npurge your previous test data with --clear_datastore.\n'
# Cache-Control values that prevent a response from being publicly cached.
NON_PUBLIC_CACHE_CONTROLS = (['private', 'no-cache', 'no-store'])
DASHBOARD_HTTPS_PORT = '1443'
class Error(Exception):
    """Base-class for exceptions in this module."""


class InvalidAppConfigError(Error):
    """The supplied application configuration file is invalid."""


class AppConfigNotFoundError(Error):
    """Application configuration file not found."""


class CompileError(Error):
    """Application could not be compiled."""

    def __init__(self, text):
        # Compiler output describing the failure.
        self.text = text


class ExecuteError(Error):
    """Application could not be executed."""

    def __init__(self, text, log):
        # Error description plus the captured execution log.
        self.text = text
        self.log = log
def MonkeyPatchPdb(pdb):
    """Given a reference to the pdb module, fix its set_trace function.

    This will allow the standard trick of setting a breakpoint in your code
    by inserting a call to pdb.set_trace() to work properly, as long as the
    original stdin and stdout of dev_appserver.py are connected to a console
    or shell window.
    """
    def NewSetTrace():
        """Replacement for set_trace() that uses the original i/o streams.

        By the time user code runs, sys.stdin and sys.stdout are redirected
        to the HTTP request/response streams. sys.__stdin__ and
        sys.__stderr__ retain the original streams, and (as of Python 2.5)
        the Pdb class accepts stdin/stdout overrides.
        """
        # NOTE(review): decompilation stripped the calls here -- presumably
        # p = pdb.Pdb(stdin=sys.__stdin__, stdout=sys.__stderr__) followed by
        # p.set_trace(sys._getframe().f_back); confirm against the SDK source.
        p = ()
        (().f_back)
    pdb.set_trace = NewSetTrace
def MonkeyPatchThreadingLocal(_threading_local):
    """Given a reference to the _threading_local module, fix _localbase.__new__.

    Backports the Python 2.7 fix for _threading_local._localbase.__new__
    (http://bugs.python.org/issue1522237) so dev_appserver also works on
    older interpreters.
    """
    @staticmethod
    def New(cls, *args, **kw):
        # NOTE(review): several call targets were stripped by decompilation
        # (object.__new__, id(), object.__setattr__, RLock and
        # current_thread) -- confirm against CPython's _threading_local.py.
        self = (cls)
        key = ('_local__key', ('thread.local.' + ((self))))
        (self, '_local__key', key)
        (self, '_local__args', (args, kw))
        (self, '_local__lock', ())
        if ((args or kw) and (cls.__init__ is object.__init__)):
            raise ('Initialization arguments are not supported')
        dict = (self, '__dict__')
        # Stash this instance's dict on the creating thread under the key.
        ().__dict__[key] = dict
        return self
    _threading_local._localbase.__new__ = New
def SplitURL(relative_url):
    """Splits a relative URL into its path and query-string components.

    Args:
      relative_url: String containing the relative URL (often starting with
        '/') to split. Should be properly escaped as www-form-urlencoded data.

    Returns:
      Tuple (script_name, query_string) where:
        script_name: Relative URL of the script that was accessed.
        query_string: String containing everything after the '?' character.
    """
    # The original call target was lost in decompilation; the 5-tuple unpack
    # matches urlsplit's (scheme, netloc, path, query, fragment) result.
    (unused_scheme, unused_netloc, path, query, unused_fragment) = urllib.parse.urlsplit(relative_url)
    return (path, query)
def GetFullURL(server_name, server_port, relative_url):
    """Returns the full, original URL used to access the relative URL.

    Args:
      server_name: Name of the local host, or the value of the 'host' header
        from the request.
      server_port: Port on which the request was served (string or int).
      relative_url: Relative URL that was accessed, including query string.

    Returns:
      String containing the original URL.
    """
    # Only non-default ports appear in the netloc.
    if str(server_port) != '80':
        netloc = '%s:%s' % (server_name, server_port)
    else:
        netloc = server_name
    return 'http://%s%s' % (netloc, relative_url)
def CopyStreamPart(source, destination, content_size):
    """Copy a portion of a stream from one file-like object to another.

    Args:
      source: Source stream to copy from.
      destination: Destination stream to copy to.
      content_size: Maximum bytes to copy.

    Returns:
      Number of bytes actually copied.
    """
    bytes_copied = 0
    bytes_left = content_size
    while bytes_left > 0:
        # Never read more than COPY_BLOCK_SIZE at once to bound memory use.
        chunk = source.read(min(bytes_left, COPY_BLOCK_SIZE))
        bytes_read = len(chunk)
        if bytes_read == 0:
            # Source exhausted before content_size bytes were available.
            break
        destination.write(chunk)
        bytes_copied += bytes_read
        bytes_left -= bytes_read
    return bytes_copied
def AppIdWithDefaultPartition(app_id, default_partition):
    """Add a partition to an application id if necessary."""
    # Nothing to do when no default partition is configured, or when the id
    # is already partition-qualified (contains '~').
    if default_partition and '~' not in app_id:
        return default_partition + '~' + app_id
    return app_id
class AppServerRequest(object):
    """Encapsulates an app-server request.

    Container passed through the request forwarding chain and ultimately
    handed to URLDispatcher instances.

    Attributes:
      relative_url: String containing the URL accessed.
      path: Local path of the matched resource; back-references are replaced
        by values matched in relative_url.
      headers: Mapping (mimetools.Message-like) of request headers.
      infile: File-like object with input data from the request.
      force_admin: Allow admin-only URLs regardless of login/admin state.
    """

    # Fields compared by __eq__ and rendered by __repr__.
    ATTRIBUTES = ['relative_url', 'path', 'headers', 'infile', 'force_admin']

    def __init__(self, relative_url, path, headers, infile, secret_hash,
                 force_admin=False):
        """Constructor.

        Args:
          relative_url: Mapped directly to attribute.
          path: Mapped directly to attribute.
          headers: Mapped directly to attribute.
          infile: Mapped directly to attribute.
          secret_hash: Shared secret; devel headers carrying it force admin.
          force_admin: Mapped directly to attribute.
        """
        self.relative_url = relative_url
        self.path = path
        self.headers = headers
        self.infile = infile
        self.force_admin = force_admin
        # Requests carrying the development secret (e.g. task queue pushes)
        # are treated as admin regardless of the logged-in user.
        if DEVEL_PAYLOAD_RAW_HEADER in self.headers:
            if self.headers[DEVEL_PAYLOAD_RAW_HEADER] == secret_hash:
                self.force_admin = True
        if DEVEL_FAKE_IS_ADMIN_RAW_HEADER in self.headers:
            if self.headers[DEVEL_FAKE_IS_ADMIN_RAW_HEADER] == secret_hash:
                self.force_admin = True

    def __eq__(self, other):
        """Used mainly for testing.

        Returns:
          True if all fields of both requests are equal, else False.
        """
        # Fixed: objects of a different type are never equal (previously the
        # type mismatch fell through to a True result).
        if type(self) != type(other):
            return False
        for attribute in self.ATTRIBUTES:
            if getattr(self, attribute) != getattr(other, attribute):
                return False
        return True

    def __repr__(self):
        """String representation of the request, used mainly for testing."""
        results = []
        for attribute in self.ATTRIBUTES:
            results.append('%s: %s' % (attribute, getattr(self, attribute)))
        return '<AppServerRequest %s>' % ' '.join(results)
class URLDispatcher(object):
    """Base-class for handling HTTP requests."""

    def Dispatch(self, request, outfile, base_env_dict=None):
        """Dispatch and handle an HTTP request.

        base_env_dict should contain at least these CGI variables:
        REQUEST_METHOD, REMOTE_ADDR, SERVER_SOFTWARE, SERVER_NAME,
        SERVER_PROTOCOL, SERVER_PORT

        Args:
          request: AppServerRequest instance.
          outfile: File-like object where output data should be written.
          base_env_dict: Dictionary of CGI environment parameters if
            available. Defaults to None.

        Returns:
          None if request handling is complete, or a new AppServerRequest
          instance if an internal redirect is required.
        """
        raise NotImplementedError

    def EndRedirect(self, dispatched_output, original_output):
        """Process the end of an internal redirect.

        Called after all subsequent dispatch requests have finished. By
        default the output from the dispatched process is copied to the
        original. Not called on dispatchers that do not return an internal
        redirect.

        Args:
          dispatched_output: StringIO buffer with the dispatched results.
          original_output: The original output file.
        """
        original_output.write(dispatched_output.read())
class URLMatcher(object):
    """Matches an arbitrary URL using a list of URL patterns from an application.

    Each URL pattern has an associated URLDispatcher instance and path to the
    resource's location on disk. The first pattern that matches an inputted
    URL will have its associated values returned by Match().
    """

    def __init__(self):
        """Initializer."""
        self._url_patterns = []

    def AddURL(self, regex, dispatcher, path, requires_login, admin_only,
               auth_fail_action):
        """Adds a URL pattern to the list of patterns.

        Start ('^') and end ('$') anchors are added implicitly, so the
        supplied regex must not provide its own; every regex is assumed to
        consume all input from a URL.

        Args:
          regex: String containing the regular expression pattern.
          dispatcher: URLDispatcher instance handling matching requests.
          path: Path on disk for the resource; may contain back-references
            like r'\1' that are expanded from groups matched by the regex.
          requires_login: True if the user must be logged in to access.
          admin_only: True if a logged-in administrator is required.
          auth_fail_action: appinfo.AUTH_FAIL_ACTION_REDIRECT (redirect to
            login) or appinfo.AUTH_FAIL_ACTION_UNAUTHORIZED (return 401).

        Raises:
          TypeError: if dispatcher is not a URLDispatcher sub-class instance.
          InvalidAppConfigError: if regex isn't valid or is anchored.
        """
        if not isinstance(dispatcher, URLDispatcher):
            raise TypeError('dispatcher must be a URLDispatcher sub-class')
        if regex.startswith('^') or regex.endswith('$'):
            raise InvalidAppConfigError('regex starts with "^" or ends with "$"')
        adjusted_regex = '^%s$' % regex
        try:
            url_re = re.compile(adjusted_regex)
        except re.error as e:
            raise InvalidAppConfigError('regex invalid: %s' % e)
        match_tuple = (url_re, dispatcher, path, requires_login, admin_only,
                       auth_fail_action)
        self._url_patterns.append(match_tuple)

    def Match(self, relative_url, split_url=None):
        """Matches a URL from a request against the list of URL patterns.

        The supplied relative_url may include the query string (the '?'
        character and everything following).

        Args:
          relative_url: Relative URL being accessed in a request.
          split_url: Used for dependency injection; defaults to SplitURL.

        Returns:
          Tuple (dispatcher, matched_path, requires_login, admin_only,
          auth_fail_action) as passed to AddURL for the matching pattern,
          with back-references in matched_path expanded. dispatcher is None
          when no pattern matched.
        """
        if split_url is None:
            # Late binding keeps the default resolvable at call time.
            split_url = SplitURL
        adjusted_url, unused_query_string = split_url(relative_url)
        for url_tuple in self._url_patterns:
            (url_re, dispatcher, path, requires_login, admin_only,
             auth_fail_action) = url_tuple
            the_match = url_re.match(adjusted_url)
            if the_match:
                # Expand back-references (\1, \2, ...) in the on-disk path.
                adjusted_path = the_match.expand(path)
                return (dispatcher, adjusted_path, requires_login, admin_only,
                        auth_fail_action)
        return None, None, None, None, None

    def GetDispatchers(self):
        """Returns the set of URLDispatcher objects that could be matched.

        Should only be used in tests.
        """
        return set([url_tuple[1] for url_tuple in self._url_patterns])
class MatcherDispatcher(URLDispatcher):
    """Dispatcher across multiple URLMatcher instances."""
    # NOTE(review): many call expressions in this class have lost their
    # callee names (e.g. `(('cookie'))`, `(request.relative_url)`); the code
    # is syntactically valid but cannot run as written. Kept byte-identical
    # pending reconstruction from a known-good copy.

    def __init__(self, config, login_url, module_manager, url_matchers, get_user_info=dev_appserver_login.GetUserInfo, login_redirect=dev_appserver_login.LoginRedirect):
        """Initializer.

        Args:
          config: AppInfoExternal instance representing the parsed app.yaml.
          login_url: Relative URL used for handling user logins.
          module_manager: ModuleManager instance used to detect and reload
            modules if the matched Dispatcher is dynamic.
          url_matchers: Sequence of URLMatcher objects, checked in order.
          get_user_info: Used for dependency injection.
          login_redirect: Used for dependency injection.
        """
        self._config = config
        self._login_url = login_url
        self._module_manager = module_manager
        self._url_matchers = (url_matchers)  # presumably tuple(url_matchers) -- TODO confirm
        self._get_user_info = get_user_info
        self._login_redirect = login_redirect

    def Dispatch(self, request, outfile, base_env_dict=None):
        """Dispatches a request to the first matching dispatcher.

        Matchers are checked in the order they were supplied to the
        constructor. If no matcher matches, a 404 error is written to
        outfile. The value of request.path is ignored.
        """
        cookies = (('cookie'))  # NOTE(review): callee stripped; was a request-header lookup
        (email_addr, user_id, admin, valid_cookie) = (cookies)  # presumably self._get_user_info(cookies)
        for matcher in self._url_matchers:
            (dispatcher, matched_path, requires_login, admin_only, auth_fail_action) = (request.relative_url)  # presumably matcher.Match(...)
            if (dispatcher is None):
                continue
            # Debug-log which pattern matched (logging callee stripped).
            ('Matched "%s" to %s with path %s', request.relative_url, dispatcher, matched_path)
            if ((requires_login or admin_only) and (not email_addr) and (not request.force_admin)):
                ('Login required, redirecting user')
                if (auth_fail_action == appinfo.AUTH_FAIL_ACTION_REDIRECT):
                    # presumably self._login_redirect(...)
                    (self._login_url, base_env_dict['SERVER_NAME'], base_env_dict['SERVER_PORT'], request.relative_url, outfile)
                elif (auth_fail_action == appinfo.AUTH_FAIL_ACTION_UNAUTHORIZED):
                    # presumably outfile.write(...)
                    (('Status: %d Not authorized\r\n\r\nLogin required to view page.' % http.client.UNAUTHORIZED))
            elif (admin_only and (not admin) and (not request.force_admin)):
                (('Status: %d Not authorized\r\n\r\nCurrent logged in user %s is not authorized to view this page.' % (http.client.FORBIDDEN, email_addr)))
            else:
                request.path = matched_path
                # presumably: reload modified modules unless serving a static file
                if ((not (dispatcher, FileDispatcher)) and ()):
                    ()
                forward_request = (request, outfile)  # presumably dispatcher.Dispatch(...)
                while forward_request:
                    # Follow the internal-redirect chain until handling completes.
                    ('Internal redirection to %s', forward_request.relative_url)
                    new_outfile = ()
                    (forward_request, new_outfile, (base_env_dict))
                    (0)
                    forward_request = (new_outfile, outfile)  # presumably dispatcher.EndRedirect(...)
            return
        # No matcher claimed the URL: report a 404 (write callee stripped).
        (('Status: %d URL did not match\r\n\r\nNot found error: %s did not match any patterns in application configuration.' % (http.client.NOT_FOUND, request.relative_url)))
_IGNORE_REQUEST_HEADERS = (['accept-encoding', 'connection', 'keep-alive', 'proxy-authorization', 'te', 'trailer', 'transfer-encoding', 'content-type', 'content-length'])
_request_id = 0
_request_time = 0
def _generate_request_id_hash():
    """Generates a hash of the current request id."""
    # NOTE(review): the callee was stripped from this expression; the
    # original presumably hashed the module-level _request_id. Kept
    # byte-identical pending reconstruction from a known-good copy.
    return ()
def _GenerateRequestLogId():
    """Generates the request log id for the current request.

    Combines the request wall-clock time (seconds and microseconds) with a
    short request-id hash into an encoded identifier.
    """
    # NOTE(review): callee names stripped throughout; kept byte-identical
    # pending reconstruction from a known-good copy.
    sec = (_request_time)  # presumably int(_request_time)
    usec = ((1000000 * (_request_time - sec)))  # presumably int(...)
    h = ()[:4]  # presumably the first 4 bytes of the request-id hash
    packed = (sec, usec)  # presumably struct-packed (sec, usec)
    return ((packed + h))  # presumably an encoding of packed time + hash
def GetGoogleSqlOAuth2RefreshToken(oauth_file_path):
    """Reads the user's Google Cloud SQL OAuth2.0 token from disk.

    Args:
      oauth_file_path: Path of the JSON credentials file to read.

    Returns:
      The refresh-token string, or None when the file is missing, unreadable
      or malformed.
    """
    if not os.path.exists(oauth_file_path):
        return None
    try:
        with open(oauth_file_path) as oauth_file:
            token = simplejson.load(oauth_file)
            return token['refresh_token']
    except (IOError, KeyError, simplejson.decoder.JSONDecodeError):
        # Best-effort read: a missing/invalid token simply disables the
        # Cloud SQL OAuth path.
        logging.error('Could not read OAuth2.0 token from %s', oauth_file_path)
        return None
def SetupEnvironment(cgi_path, relative_url, headers, infile, split_url=SplitURL, get_user_info=dev_appserver_login.GetUserInfo):
    """Sets up environment variables for a CGI.

    Args:
      cgi_path: Full file-system path to the CGI being executed.
      relative_url: Relative URL used to access the CGI.
      headers: Instance of mimetools.Message containing request headers.
      infile: File-like object with input data from the request.
      split_url, get_user_info: Used for dependency injection.

    Returns:
      Dictionary containing CGI environment variables.
    """
    # NOTE(review): call expressions below have lost their callee names
    # (e.g. `env = ()`); code kept byte-identical pending reconstruction
    # from a known-good copy.
    env = ()  # presumably a copy of a default CGI environment dict
    (script_name, query_string) = (relative_url)  # presumably split_url(relative_url)
    env['_AH_ENCODED_SCRIPT_NAME'] = script_name
    env['SCRIPT_NAME'] = ''
    env['QUERY_STRING'] = query_string
    env['PATH_INFO'] = (script_name)  # presumably the URL-unquoted script_name
    env['PATH_TRANSLATED'] = cgi_path
    env['CONTENT_TYPE'] = ('content-type', 'application/x-www-form-urlencoded')  # presumably headers.get(...)
    env['CONTENT_LENGTH'] = ('content-length', '')  # presumably headers.get(...)
    cookies = (('cookie'))  # presumably a request-header lookup
    (email_addr, user_id, admin, valid_cookie) = (cookies)  # presumably get_user_info(cookies)
    if valid_cookie:
        env['USER_EMAIL'] = email_addr
        env['USER_ID'] = user_id
        env['USER_NICKNAME'] = user_id
    else:
        env['USER_EMAIL'] = ''
        env['USER_ID'] = ''
        env['USER_NICKNAME'] = ''
    env['USER_IS_ADMIN'] = '0'
    if (admin and valid_cookie):
        env['USER_IS_ADMIN'] = '1'
    if (env['AUTH_DOMAIN'] == '*'):
        # Derive the auth domain from the logged-in user's email address.
        auth_domain = 'gmail.com'
        parts = ('@')  # presumably email_addr.split('@')
        if (((parts) == 2) and parts[1]):  # presumably len(parts) == 2
            auth_domain = parts[1]
        env['AUTH_DOMAIN'] = auth_domain
    env['REQUEST_LOG_ID'] = ()  # presumably _GenerateRequestLogId()
    env['REQUEST_ID_HASH'] = ()  # presumably _generate_request_id_hash()
    for key in headers:
        if (key in _IGNORE_REQUEST_HEADERS):
            continue
        adjusted_name = ()  # presumably the header name upper-cased with '-' -> '_'
        env[('HTTP_' + adjusted_name)] = ((key))
    if (DEVEL_PAYLOAD_HEADER in env):
        # Strip the development payload header and rewrite the input stream.
        del env[DEVEL_PAYLOAD_HEADER]
        new_data = (())
        (0)
        ()
        (new_data)
        (0)
        env['CONTENT_LENGTH'] = ((new_data))  # presumably str(len(new_data))
    if (DEVEL_FAKE_IS_ADMIN_HEADER in env):
        del env[DEVEL_FAKE_IS_ADMIN_HEADER]
    token = ((rdbms.OAUTH_CREDENTIALS_PATH))  # presumably GetGoogleSqlOAuth2RefreshToken(expanded path)
    if token:
        env['GOOGLE_SQL_OAUTH2_REFRESH_TOKEN'] = token
    return env
def NotImplementedFake(*args, **kwargs):
    """Fake for methods/functions that are not implemented in the production
    environment.

    Raises:
      NotImplementedError: always.
    """
    raise NotImplementedError('This class/method is not available.')
class NotImplementedFakeClass(object):
    """Fake class for classes that are not implemented in the production env."""
    # Any attempt to instantiate raises via NotImplementedFake.
    __init__ = NotImplementedFake
def IsEncodingsModule(module_name):
    """Determines if the supplied module is related to encodings in any way.

    Encodings-related modules cannot be reloaded, so they need to be treated
    specially when sys.modules is modified in any way.

    Args:
      module_name: Absolute name of the module regardless of how it is
        imported into the local namespace (e.g., foo.bar.baz).

    Returns:
      True if it's an encodings-related module; False otherwise.
    """
    return (module_name in ('codecs', 'encodings') or
            module_name.startswith('encodings.'))
def ClearAllButEncodingsModules(module_dict):
    """Clear all modules in a module dictionary except for those modules that
    are in any way related to encodings.

    Args:
      module_dict: Dictionary in the form used by sys.modules.
    """
    # Iterate over a snapshot of the keys since entries are deleted inline.
    for module_name in list(module_dict.keys()):
        # 'sys' is also kept: removing it breaks interpreter state.
        if not IsEncodingsModule(module_name) and module_name != 'sys':
            del module_dict[module_name]
def ConnectAndDisconnectChildModules(old_module_dict, new_module_dict):
    """Prepares for switching from old_module_dict to new_module_dict.

    Disconnects child modules going away from parents that remain, and
    reconnects child modules that are being added back in to old parents.
    This is needed to satisfy code that follows the getattr() descendant
    chain rather than looking up the desired module directly in the module
    dict.

    Args:
      old_module_dict: The module dict being replaced (like sys.modules).
      new_module_dict: The module dict taking its place (like sys.modules).
    """
    old_keys = set(old_module_dict.keys())
    new_keys = set(new_module_dict.keys())
    # Detach children that disappear from parents that survive.
    for deleted_module_name in (old_keys - new_keys):
        if old_module_dict[deleted_module_name] is None:
            continue
        segments = deleted_module_name.rsplit('.', 1)
        if len(segments) == 2:
            parent_module = new_module_dict.get(segments[0])
            if parent_module and hasattr(parent_module, segments[1]):
                delattr(parent_module, segments[1])
    # Attach (re)appearing children to their surviving parents.
    for added_module_name in (new_keys - old_keys):
        if new_module_dict[added_module_name] is None:
            continue
        segments = added_module_name.rsplit('.', 1)
        if len(segments) == 2:
            parent_module = new_module_dict.get(segments[0])
            child_module = new_module_dict[added_module_name]
            if parent_module and (getattr(parent_module, segments[1], None)
                                  is not child_module):
                setattr(parent_module, segments[1], child_module)
# Module prefixes shared between the hardened and unhardened parts of the
# process (see SetupSharedModules).
SHARED_MODULE_PREFIXES = set([
    'google',
    'logging',
    'sys',
    'warnings',
    're',
    'sre_compile',
    'sre_constants',
    'sre_parse',
    'email',
    'wsgiref',
    'MySQLdb',
    'decimal',
])

# Sub-trees of the shared prefixes that must NOT be shared.
NOT_SHARED_MODULE_PREFIXES = set(['google.appengine.ext'])
def ModuleNameHasPrefix(module_name, prefix_set):
    """Determines if a module's name belongs to a set of prefix strings.

    Args:
      module_name: String containing the fully qualified module name.
      prefix_set: Iterable set of module name prefixes to check against.

    Returns:
      True if the module_name belongs to the prefix set or is a submodule of
      any of the modules specified in the prefix_set. Otherwise False.
    """
    for prefix in prefix_set:
        if prefix == module_name:
            return True
        # Submodule check: 'a.b' has prefix 'a' but 'ab' does not.
        if module_name.startswith(prefix + '.'):
            return True
    return False
def SetupSharedModules(module_dict):
    """Creates a module dictionary for the hardened part of the process.

    The returned dictionary contains the modules that should be shared
    between the hardened and unhardened parts of the process.

    Args:
      module_dict: Module dictionary from which existing modules should be
        pulled (usually sys.modules).

    Returns:
      A new module dictionary.
    """
    output_dict = {}
    for (module_name, module) in module_dict.items():
        if module is None:
            continue
        # Encodings modules can never be reloaded, so always share them.
        if IsEncodingsModule(module_name):
            output_dict[module_name] = module
            continue
        shared_prefix = ModuleNameHasPrefix(module_name, SHARED_MODULE_PREFIXES)
        banned_prefix = ModuleNameHasPrefix(module_name,
                                            NOT_SHARED_MODULE_PREFIXES)
        if shared_prefix and not banned_prefix:
            output_dict[module_name] = module
    return output_dict
def ModuleHasValidMainFunction(module):
    """Determines if a module has a main function that takes no arguments.

    This includes functions that have arguments with defaults that are all
    assigned, thus requiring no additional arguments in order to be called.

    Args:
      module: A types.ModuleType instance.

    Returns:
      True if the module has a valid, reusable main function; False otherwise.
    """
    import inspect
    if hasattr(module, 'main') and type(module.main) is types.FunctionType:
        # getfullargspec replaces inspect.getargspec, removed in Python 3.11.
        argspec = inspect.getfullargspec(module.main)
        arg_names = argspec.args
        default_values = argspec.defaults
        if len(arg_names) == 0:
            return True
        if default_values is not None and len(arg_names) == len(default_values):
            return True
    return False
def CheckScriptExists(cgi_path, handler_path):
    """Check that the given handler_path is a file that exists on disk.

    Args:
      cgi_path: Absolute path to the CGI script file on disk.
      handler_path: CGI path stored in the application configuration (as a
        path like 'foo/bar/baz.py'). May contain $PYTHON_LIB references.

    Raises:
      CouldNotFindModuleError: if the handler is missing but a sibling file
        with a '.py' extension exists (a likely app.yaml typo).
    """
    if handler_path.startswith(PYTHON_LIB_VAR + '/'):
        # Modules under $PYTHON_LIB are provided by the SDK itself.
        return
    if (not os.path.isdir(cgi_path) and not os.path.isfile(cgi_path) and
            os.path.isfile(cgi_path + '.py')):
        raise CouldNotFindModuleError(
            'Perhaps you meant to have the line "script: %s.py" in your '
            'app.yaml' % handler_path)
def GetScriptModuleName(handler_path):
    """Determines the fully-qualified Python module name of a script on disk.

    Args:
      handler_path: CGI path stored in the application configuration (as a
        path like 'foo/bar/baz.py'). May contain $PYTHON_LIB references.

    Returns:
      String containing the corresponding module name (e.g., 'foo.bar.baz').
    """
    if handler_path.startswith(PYTHON_LIB_VAR + '/'):
        handler_path = handler_path[len(PYTHON_LIB_VAR):]
    handler_path = os.path.normpath(handler_path)
    # Strip the trailing '.py' extension, if any.
    extension_index = handler_path.rfind('.py')
    if extension_index != -1:
        handler_path = handler_path[:extension_index]
    module_fullname = handler_path.replace(os.sep, '.')
    module_fullname = module_fullname.strip('.')
    # Collapse any runs of dots produced by the path-to-dots translation.
    module_fullname = re.sub('\\.+', '.', module_fullname)
    # A package's __init__ module is addressed by the package name itself.
    if module_fullname.endswith('.__init__'):
        module_fullname = module_fullname[:-len('.__init__')]
    return module_fullname
def FindMissingInitFiles(cgi_path, module_fullname, isfile=os.path.isfile):
    """Determines which __init__.py files are missing from a module's parent
    packages.

    Args:
      cgi_path: Absolute path of the CGI module file on disk.
      module_fullname: Fully qualified Python module name used to import the
        cgi_path module.
      isfile: Used for testing.

    Returns:
      List containing the paths to the missing __init__.py files.
    """
    missing_init_files = []
    if cgi_path.endswith('.py'):
        module_base = os.path.dirname(cgi_path)
    else:
        module_base = cgi_path
    # One parent package per dot in the module name; a package directory
    # (or __init__.py itself) needs its own __init__.py checked too.
    depth_count = module_fullname.count('.')
    if cgi_path.endswith('__init__.py') or not cgi_path.endswith('.py'):
        depth_count += 1
    for unused_index in range(depth_count):
        current_init_file = os.path.abspath(
            os.path.join(module_base, '__init__.py'))
        if not isfile(current_init_file):
            missing_init_files.append(current_init_file)
        # Walk one directory up for the next parent package.
        module_base = os.path.abspath(os.path.join(module_base, os.pardir))
    return missing_init_files
def LoadTargetModule(handler_path, cgi_path, import_hook, module_dict=sys.modules):
    """Loads a target CGI script by importing it as a Python module.

    If the module for the target CGI script has already been loaded before,
    the new module will be loaded in its place using the same module object,
    possibly overwriting existing module attributes.

    Args:
      handler_path: CGI path stored in the application configuration (as a
        path like 'foo/bar/baz.py'). Should not have $PYTHON_LIB references.
      cgi_path: Absolute path to the CGI script file on disk.
      import_hook: Instance of HardenedModulesHook to use for module loading.
      module_dict: Used for dependency injection.

    Returns:
      Tuple (module_fullname, script_module, module_code); module_code is
      None when a previously-loaded module's main() can be reused.

    Raises:
      CouldNotFindModuleError if the given handler_path is a file and doesn't
      have the expected extension.
    """
    # NOTE(review): call expressions below have lost their callee names;
    # code kept byte-identical pending reconstruction from a known-good copy.
    (cgi_path, handler_path)  # presumably CheckScriptExists(...)
    module_fullname = (handler_path)  # presumably GetScriptModuleName(...)
    script_module = (module_fullname)  # presumably module_dict.get(...)
    module_code = None
    if ((script_module is not None) and (script_module)):
        # Module already loaded with a reusable zero-argument main().
        ('Reusing main() function of module "%s"', module_fullname)
    else:
        if (script_module is None):
            script_module = (module_fullname)  # presumably a new empty module
            script_module.__loader__ = import_hook
        try:
            module_code = (module_fullname)
            (full_path, search_path, submodule) = (module_fullname)
            script_module.__file__ = full_path
            if (search_path is not None):
                script_module.__path__ = search_path
        except UnicodeDecodeError as e:
            error = ('%s please see http://www.python.org/peps/pep-0263.html for details (%s)' % (e, handler_path))
            raise (error)
        except:
            (exc_type, exc_value, exc_tb) = ()  # presumably sys.exc_info()
            import_error_message = (exc_type)  # presumably str(exc_type)
            if exc_value:
                import_error_message += (': ' + (exc_value))
            # Log the failure and diagnose missing package __init__ files.
            ('Encountered error loading module "%s": %s', module_fullname, import_error_message)
            missing_inits = (cgi_path, module_fullname)  # presumably FindMissingInitFiles(...)
            if missing_inits:
                ('Missing package initialization files: %s', (missing_inits))
            else:
                ('Parent package initialization files are present, but must be broken')
            # Fall back to compiling the script directly, bypassing packages.
            independent_load_successful = True
            if (not (cgi_path)):  # presumably os.path.isfile(cgi_path)
                independent_load_successful = False
            else:
                try:
                    source_file = (cgi_path)  # presumably open(cgi_path, ...)
                    try:
                        module_code = ((), cgi_path, 'exec')  # presumably compile(source, ...)
                        script_module.__file__ = cgi_path
                    finally:
                        ()  # presumably source_file.close()
                except OSError:
                    independent_load_successful = False
            if (not independent_load_successful):
                raise (exc_tb)  # presumably a re-raise of the original error
    module_dict[module_fullname] = script_module
    return (module_fullname, script_module, module_code)
def _WriteErrorToOutput(status, message, outfile):
"Writes an error status response to the response outfile.\n\n Args:\n status: The status to return, e.g. '411 Length Required'.\n message: A human-readable error message.\n outfile: Response outfile.\n "
(message)
(('Status: %s\r\n\r\n%s' % (status, message)))
def GetRequestSize(request, env_dict, outfile):
    """Gets the size (content length) of the given request.

    On error, this method writes an error message to the response outfile
    and returns None. Errors include the request missing a required header
    and the request being too large.

    Args:
      request: AppServerRequest instance.
      env_dict: Environment dictionary. May be None.
      outfile: Response outfile.

    Returns:
      The calculated request size, or None on error.
    """
    if 'content-length' in request.headers:
        request_size = int(request.headers['content-length'])
    elif env_dict and env_dict.get('REQUEST_METHOD', '') == 'POST':
        # A POST body cannot be read safely without a declared length.
        _WriteErrorToOutput(
            '%d Length required' % http.client.LENGTH_REQUIRED,
            'POST requests require a Content-length header.', outfile)
        return None
    else:
        request_size = 0
    if request_size <= MAX_REQUEST_SIZE:
        return request_size
    msg = ('HTTP request was too large: %d. The limit is: %d.' %
           (request_size, MAX_REQUEST_SIZE))
    _WriteErrorToOutput(
        '%d Request entity too large' % http.client.REQUEST_ENTITY_TOO_LARGE,
        msg, outfile)
    return None
def ExecuteOrImportScript(config, handler_path, cgi_path, import_hook):
    """Executes a CGI script by importing it as a new module.

    This possibly reuses the module's main() function if it is defined and
    takes no arguments. Basic technique lifted from PEP 338 and Python 2.5's
    runpy module. The module is temporarily installed as '__main__' so that
    scripts relying on that name behave as expected (which also means the
    module cannot do relative imports while running).

    Args:
      config: AppInfoExternal instance representing the parsed app.yaml file.
      handler_path: CGI path stored in the application configuration (as a
        path like 'foo/bar/baz.py'). Should not have $PYTHON_LIB references.
      cgi_path: Absolute path to the CGI script file on disk.
      import_hook: Instance of HardenedModulesHook to use for module loading.

    Returns:
      True if the response code had an error status (e.g., 404), or False if
      it did not.

    Raises:
      Any kind of exception that could have been raised when loading the
      target module, running a target script, or executing the application
      code itself.
    """
    # NOTE(review): call expressions below have lost their callee names;
    # code kept byte-identical pending reconstruction from a known-good copy.
    (module_fullname, script_module, module_code) = (handler_path, cgi_path, import_hook)  # presumably LoadTargetModule(...)
    script_module.__name__ = '__main__'
    sys.modules['__main__'] = script_module
    try:
        import pdb
        (pdb)  # presumably a set_trace monkeypatch of pdb
        if module_code:
            (module_code, script_module.__dict__)  # presumably exec(...)
        else:
            ()  # presumably script_module.main()
        ()
        (0)
        try:
            headers = (sys.stdout)  # presumably parse CGI headers from buffered stdout
        finally:
            (0, 2)  # presumably a stream seek back to the end
        status_header = ('status')  # presumably headers.get('status')
        error_response = False
        if status_header:
            try:
                status_code = ((' ', 1)[0])  # presumably int of the first token
                error_response = (status_code >= 400)
            except ValueError:
                error_response = True
        if (not error_response):
            # Reconnect the module to its parent package after a success.
            try:
                parent_package = (module_fullname)
            except Exception:
                parent_package = None
            if (parent_package is not None):
                submodule = (module_fullname)
                (parent_package, submodule, script_module)  # presumably setattr(...)
        return error_response
    finally:
        # Always restore the module's real name.
        script_module.__name__ = module_fullname
def ExecutePy27Handler(config, handler_path, cgi_path, import_hook):
    """Equivalent to ExecuteOrImportScript for the Python 2.7 runtime.

    This dispatches to google.appengine.runtime.runtime, which in turn
    dispatches to either the cgi or the wsgi module in the same package,
    depending on the form of handler_path (script reference like foo/bar.py,
    or object reference like foo.bar.app).

    Args:
      config: AppInfoExternal instance representing the parsed app.yaml file.
      handler_path: handler ("script") from the application configuration.
      cgi_path: Absolute path to the CGI script file on disk; typically the
        app dir joined with handler_path.
      import_hook: Instance of HardenedModulesHook to use for module loading.

    Returns:
      True if the response code had an error status (e.g., 404), or False if
      it did not.

    Raises:
      Any kind of exception that could have been raised when loading the
      target module, running a target script, or executing the application
      code itself.
    """
    # NOTE(review): call expressions below have lost their callee names;
    # code kept byte-identical pending reconstruction from a known-good copy.
    if ((request_environment is None) or (runtime is None)):
        raise ('Python 2.5 is too old to emulate the Python 2.7 runtime. Please use Python 2.6 or Python 2.7.')
    import os
    # Save os.environ/os.getenv so they can be swapped for a per-request
    # environment while the handler runs.
    save_environ = os.environ
    save_getenv = os.getenv
    env = (save_environ)  # presumably dict(save_environ)
    if ('_AH_THREADSAFE'):  # presumably an env lookup
        env['wsgi.multithread'] = True
    url = ('http://%s%s' % (('HTTP_HOST', 'localhost:8080'), ('_AH_ENCODED_SCRIPT_NAME', '/')))  # presumably env.get(...) lookups
    qs = ('QUERY_STRING')
    if qs:
        url += ('?' + qs)
    post_data = ()  # presumably the request body read from stdin
    # CONTENT_TYPE/CONTENT_LENGTH are moved to HTTP_* so the runtime sees
    # them as ordinary request headers.
    if ('CONTENT_TYPE' in env):
        if post_data:
            env['HTTP_CONTENT_TYPE'] = env['CONTENT_TYPE']
        del env['CONTENT_TYPE']
    if ('CONTENT_LENGTH' in env):
        if env['CONTENT_LENGTH']:
            env['HTTP_CONTENT_LENGTH'] = env['CONTENT_LENGTH']
        del env['CONTENT_LENGTH']
    if (handler_path):  # presumably cgi_path.endswith(handler_path)
        application_root = cgi_path[:(- (handler_path))]  # presumably len(handler_path)
        if (('/') and (application_root != '/')):  # presumably an endswith check
            application_root = application_root[:(- 1)]
    else:
        application_root = ''
    try:
        import pdb
        (pdb)  # presumably a set_trace monkeypatch of pdb
        import _threading_local
        (_threading_local)  # presumably MonkeyPatchThreadingLocal(...)
        # Route os.environ through the per-request environment while running.
        os.environ = (request_environment.current_request)
        os.getenv = os.environ.get
        response = (env, handler_path, url, post_data, application_root, SDK_ROOT, import_hook)  # presumably the runtime's request handler
    finally:
        os.environ = save_environ
        os.getenv = save_getenv
    error = ('error')  # presumably response.get('error')
    if error:
        status = 500
    else:
        status = 200
    status = ('response_code', status)  # presumably response.get(...)
    (('Status: %s\r\n' % status))  # presumably a stdout write
    for (key, value) in ('headers', ()):  # presumably response.get('headers', ())
        key = (())  # presumably a normalized header name
        value = ('\n', ' ')  # presumably value.replace('\n', ' ')
        (('%s: %s\r\n' % (key, value)))
    ('\r\n')
    body = ('body')  # presumably response.get('body')
    if body:
        (body)
    logs = ('logs')  # presumably response.get('logs')
    if logs:
        for (timestamp_usec, severity, message) in logs:
            # presumably logging at severity*10+10 with the original timestamp
            (((severity * 10) + 10), '@%s: %s', ((timestamp_usec * 1e-06)), message)
    return error
class LoggingStream(object):
    """A file-like stream that writes everything to the logs at level ERROR."""

    def write(self, message):
        # _log is called directly (rather than error()) with an empty args
        # tuple so no %-formatting is ever applied to the raw message text.
        logging.getLogger()._log(logging.ERROR, message, ())

    def writelines(self, lines):
        for line in lines:
            logging.getLogger()._log(logging.ERROR, line, ())

    def __getattr__(self, key):
        # Delegate every other stream attribute (flush, fileno, encoding,
        # ...) to the real stderr.
        return getattr(sys.__stderr__, key)
def ExecuteCGI(config, root_path, handler_path, cgi_path, env, infile, outfile, module_dict, exec_script=ExecuteOrImportScript, exec_py27_handler=ExecutePy27Handler):
    """Executes Python file in this process as if it were a CGI.

    Does not return an HTTP response line. CGIs should output headers
    followed by the body content. The modules in sys.modules should be the
    same before and after the CGI is executed, with the specific exception
    of encodings-related modules, which cannot be reloaded and thus must
    always stay in sys.modules.

    Args:
      config: AppInfoExternal instance representing the parsed app.yaml file.
      root_path: Path to the root of the application.
      handler_path: CGI path stored in the application configuration (as a
        path like 'foo/bar/baz.py'). May contain $PYTHON_LIB references.
      cgi_path: Absolute path to the CGI script file on disk.
      env: Dictionary of environment variables to use for the execution.
      infile: File-like object to read HTTP request input data from.
      outfile: File-like object to write HTTP response data to.
      module_dict: Dictionary in which application-loaded modules should be
        preserved between requests; must be separate from sys.modules.
      exec_script: Used for dependency injection.
      exec_py27_handler: Used for dependency injection.
    """
    # NOTE(review): call expressions below have lost their callee names;
    # code kept byte-identical pending reconstruction from a known-good copy.
    if (handler_path == '_go_app'):
        from google.appengine.ext.go import execute_go_cgi
        return (root_path, handler_path, cgi_path, env, infile, outfile)  # presumably execute_go_cgi(...)
    # Snapshot interpreter state so it can be fully restored afterwards.
    old_module_dict = ()
    old_builtin = ()
    old_argv = sys.argv
    old_stdin = sys.stdin
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    old_env = ()  # presumably os.environ.copy()
    old_cwd = ()  # presumably os.getcwd()
    old_file_type = types.FileType
    reset_modules = False
    app_log_handler = None
    try:
        (sys.modules, module_dict)
        (sys.modules)
        (module_dict)
        sys.argv = [cgi_path]
        sys.stdin = (())  # presumably a StringIO wrapping the request body
        sys.stdout = outfile
        sys.stderr = ()  # presumably a LoggingStream()
        logservice._global_buffer = ()
        app_log_handler = ()
        (app_log_handler)  # presumably attach the app log handler
        ()
        (env)  # presumably os.environ.update(env)
        cgi_dir = ((cgi_path))  # presumably dirname of the CGI path
        root_path = ((root_path))
        if (((root_path + os.sep)) and (not (config and (config.runtime == 'python27')))):
            (cgi_dir)  # presumably os.chdir(cgi_dir)
        else:
            (root_path)
        (root_path, SDK_ROOT)
        # Install the hardened import hook ahead of any existing finders.
        hook = (config, sys.modules, root_path)  # presumably HardenedModulesHook(...)
        sys.meta_path = [finder for finder in sys.meta_path if (not (finder, HardenedModulesHook))]
        (0, hook)  # presumably sys.meta_path.insert(0, hook)
        if (sys, 'path_importer_cache'):  # presumably hasattr(...)
            ()  # presumably sys.path_importer_cache.clear()
        # Replace file-like builtins with the sandboxed FakeFile.
        builtins.file = FakeFile
        builtins.open = FakeFile
        types.FileType = FakeFile
        if (not (config and (config.runtime == 'python27'))):
            builtins.buffer = NotImplementedFakeClass
        sys.modules['__builtin__'] = __builtin__
        ('Executing CGI with env:\n%s', (env))  # presumably a debug log with pretty-printed env
        try:
            if (handler_path and config and (config.runtime == 'python27')):
                reset_modules = (config, handler_path, cgi_path, hook)  # presumably exec_py27_handler(...)
            else:
                reset_modules = (config, handler_path, cgi_path, hook)  # presumably exec_script(...)
        except SystemExit as e:
            ('CGI exited with status: %s', e)
        except:
            reset_modules = True
            raise
    finally:
        # Restore every piece of interpreter state captured above.
        ()
        (sys.modules)
        (sys.modules)
        (sys.modules, old_module_dict)
        (sys.modules)
        (old_module_dict)
        (old_builtin)
        sys.argv = old_argv
        sys.stdin = old_stdin
        sys.stdout = old_stdout
        sys.stderr = old_stderr
        (app_log_handler)  # presumably detach the app log handler
        ()
        (old_env)  # presumably restore os.environ
        (old_cwd)  # presumably os.chdir(old_cwd)
        types.FileType = old_file_type
# NOTE(review): call targets inside Dispatch were stripped by the extraction;
# the bare tuples are surviving argument lists. Restore from upstream before use.
class CGIDispatcher(URLDispatcher):
    'Dispatcher that executes Python CGI scripts.'
    def __init__(self, config, module_dict, root_path, path_adjuster, setup_env=SetupEnvironment, exec_cgi=ExecuteCGI):
        'Initializer.\n\n    Args:\n      config: AppInfoExternal instance representing the parsed app.yaml file.\n      module_dict: Dictionary in which application-loaded modules should be\n        preserved between requests. This dictionary must be separate from the\n        sys.modules dictionary.\n      path_adjuster: Instance of PathAdjuster to use for finding absolute\n        paths of CGI files on disk.\n      setup_env, exec_cgi: Used for dependency injection.\n    '
        self._config = config
        self._module_dict = module_dict
        self._root_path = root_path
        self._path_adjuster = path_adjuster
        self._setup_env = setup_env
        self._exec_cgi = exec_cgi
    def Dispatch(self, request, outfile, base_env_dict=None):
        'Dispatches the Python CGI.'
        request_size = (request, base_env_dict, outfile)
        if (request_size is None):
            # Request was rejected (e.g. too large); response already written.
            return
        # Buffer the request body in memory so the CGI can seek over it.
        memory_file = ()
        (request.infile, memory_file, request_size)
        (0)
        # Preserve and restore the root logger level across the CGI run.
        before_level = logging.root.level
        try:
            env = {}
            if self._config.env_variables:
                (self._config.env_variables)
            if base_env_dict:
                (base_env_dict)
            cgi_path = (request.path)
            ((cgi_path, request.relative_url, request.headers, memory_file))
            (self._config, self._root_path, request.path, cgi_path, env, memory_file, outfile, self._module_dict)
        finally:
            logging.root.level = before_level
    def __str__(self):
        'Returns a string representation of this dispatcher.'
        return 'CGI dispatcher'
# NOTE(review): call targets stripped by the extraction (bare tuples are the
# surviving argument lists). Restore from upstream before use.
class LocalCGIDispatcher(CGIDispatcher):
    "Dispatcher that executes local functions like they're CGIs.\n\n  The contents of sys.modules will be preserved for local CGIs running this\n  dispatcher, but module hardening will still occur for any new imports. Thus,\n  be sure that any local CGIs have loaded all of their dependent modules\n  _before_ they are executed.\n  "
    def __init__(self, config, module_dict, path_adjuster, cgi_func):
        'Initializer.\n\n    Args:\n      config: AppInfoExternal instance representing the parsed app.yaml file.\n      module_dict: Passed to CGIDispatcher.\n      path_adjuster: Passed to CGIDispatcher.\n      cgi_func: Callable function taking no parameters that should be\n        executed in a CGI environment in the current process.\n    '
        self._cgi_func = cgi_func
        # Curried wrappers route the base-class CGI execution through
        # self._cgi_func instead of loading a script from disk.
        def curried_exec_script(*args, **kwargs):
            ()
            return False
        def curried_exec_cgi(*args, **kwargs):
            kwargs['exec_script'] = curried_exec_script
            return (*args)
        (self, config, module_dict, '', path_adjuster)
    def Dispatch(self, *args, **kwargs):
        'Preserves sys.modules for CGIDispatcher.Dispatch.'
        (sys.modules)
        (self, *args)
    def __str__(self):
        'Returns a string representation of this dispatcher.'
        return ('Local CGI dispatcher for %s' % self._cgi_func)
# NOTE(review): conditions/calls stripped by the extraction -- e.g. the
# "if (PYTHON_LIB_VAR):" test presumably was a path.startswith(PYTHON_LIB_VAR)
# check; verify against upstream before use.
class PathAdjuster(object):
    'Adjusts application file paths to paths relative to the application or\n  external library directories.'
    def __init__(self, root_path):
        'Initializer.\n\n    Args:\n      root_path: Path to the root of the application running on the server.\n    '
        self._root_path = (root_path)
    def AdjustPath(self, path):
        'Adjusts application file paths to relative to the application.\n\n    More precisely this method adjusts application file path to paths\n    relative to the application or external library directories.\n\n    Handler paths that start with $PYTHON_LIB will be converted to paths\n    relative to the google directory.\n\n    Args:\n      path: File path that should be adjusted.\n\n    Returns:\n      The adjusted path.\n    '
        if (PYTHON_LIB_VAR):
            # $PYTHON_LIB-prefixed paths are rebased onto the SDK root.
            path = (SDK_ROOT, path[((PYTHON_LIB_VAR) + 1):])
        else:
            path = (self._root_path, path)
        return path
# NOTE(review): call targets stripped by the extraction (bare tuples are the
# surviving argument lists). Restore from upstream before use.
class StaticFileConfigMatcher(object):
    "Keeps track of file/directory specific application configuration.\n\n  Specifically:\n  - Computes mime type based on URLMap and file extension.\n  - Decides on cache expiration time based on URLMap and default expiration.\n  - Decides what HTTP headers to add to responses.\n\n  To determine the mime type, we first see if there is any mime-type property\n  on each URLMap entry. If non is specified, we use the mimetypes module to\n  guess the mime type from the file path extension, and use\n  application/octet-stream if we can't find the mimetype.\n  "
    def __init__(self, url_map_list, default_expiration):
        'Initializer.\n\n    Args:\n      url_map_list: List of appinfo.URLMap objects.\n        If empty or None, then we always use the mime type chosen by the\n        mimetypes module.\n      default_expiration: String describing default expiration time for browser\n        based caching of static files. If set to None this disallows any\n        browser caching of static content.\n    '
        if (default_expiration is not None):
            self._default_expiration = (default_expiration)
        else:
            self._default_expiration = None
        # List of (compiled path regex, URLMap) pairs for static handlers only.
        self._patterns = []
        for url_map in (url_map_list or []):
            handler_type = ()
            if (handler_type not in (appinfo.STATIC_FILES, appinfo.STATIC_DIR)):
                # Skip non-static handlers (e.g. script handlers).
                continue
            path_re = (url_map)
            try:
                (((path_re), url_map))
            except re.error as e:
                raise (('regex %s does not compile: %s' % (path_re, e)))
    _DUMMY_URLMAP = ()
    def _FirstMatch(self, path):
        "Returns the first appinfo.URLMap that matches path, or a dummy instance.\n\n    A dummy instance is returned when no appinfo.URLMap matches path (see the\n    URLMap.static_file_path_re property). When a dummy instance is returned, it\n    is always the same one. The dummy instance is constructed simply by doing\n    the following:\n\n      appinfo.URLMap()\n\n    Args:\n      path: A string containing the file's path relative to the app.\n\n    Returns:\n      The first appinfo.URLMap (in the list that was passed to the constructor)\n      that matches path. Matching depends on whether URLMap is a static_dir\n      handler or a static_files handler. In either case, matching is done\n      according to the URLMap.static_file_path_re property.\n    "
        for (path_re, url_map) in self._patterns:
            if (path):
                return url_map
        return StaticFileConfigMatcher._DUMMY_URLMAP
    def IsStaticFile(self, path):
        'Tests if the given path points to a "static" file.\n\n    Args:\n      path: A string containing the file\'s path relative to the app.\n\n    Returns:\n      Boolean, True if the file was configured to be static.\n    '
        # Identity comparison against the shared dummy sentinel is intentional.
        return ((path) is not self._DUMMY_URLMAP)
    def GetMimeType(self, path):
        "Returns the mime type that we should use when serving the specified file.\n\n    Args:\n      path: A string containing the file's path relative to the app.\n\n    Returns:\n      String containing the mime type to use. Will be 'application/octet-stream'\n      if we have no idea what it should be.\n    "
        url_map = (path)
        if (url_map.mime_type is not None):
            return url_map.mime_type
        (unused_filename, extension) = (path)
        return (extension, 'application/octet-stream')
    def GetExpiration(self, path):
        "Returns the cache expiration duration to be users for the given file.\n\n    Args:\n      path: A string containing the file's path relative to the app.\n\n    Returns:\n      Integer number of seconds to be used for browser cache expiration time.\n    "
        if (self._default_expiration is None):
            return 0
        url_map = (path)
        if (url_map.expiration is None):
            return self._default_expiration
        return (url_map.expiration)
    def GetHttpHeaders(self, path):
        "Returns http_headers of the matching appinfo.URLMap, or an empty one.\n\n    Args:\n      path: A string containing the file's path relative to the app.\n\n    Returns:\n      A user-specified HTTP headers to be used in static content response. These\n      headers are contained in an appinfo.HttpHeadersDict, which maps header\n      names to values (both strings).\n    "
        return ((path).http_headers or ())
# NOTE(review): call targets stripped by the extraction -- e.g. the open and
# read/close calls survive only as argument tuples. Restore from upstream.
def ReadDataFile(data_path, openfile=file):
    'Reads a file on disk, returning a corresponding HTTP status and data.\n\n  Args:\n    data_path: Path to the file on disk to read.\n    openfile: Used for dependency injection.\n\n  Returns:\n    Tuple (status, data) where status is an HTTP response code, and data is\n    the data read; will be an empty string if an error occurred or the\n    file was empty.\n  '
    # Pessimistic default: only flip to OK once the read fully succeeds.
    status = http.client.INTERNAL_SERVER_ERROR
    data = ''
    try:
        data_file = (data_path, 'rb')
        try:
            data = ()
        finally:
            ()
        status = http.client.OK
    except (OSError, IOError) as e:
        ('Error encountered reading file "%s":\n%s', data_path, e)
        if (e.errno in FILE_MISSING_EXCEPTIONS):
            status = http.client.NOT_FOUND
        else:
            status = http.client.FORBIDDEN
    return (status, data)
# NOTE(review): call targets stripped by the extraction (bare tuples are the
# surviving argument lists). Restore from upstream before use.
class FileDispatcher(URLDispatcher):
    'Dispatcher that reads data files from disk.'
    def __init__(self, config, path_adjuster, static_file_config_matcher, read_data_file=ReadDataFile):
        'Initializer.\n\n    Args:\n      config: AppInfoExternal instance representing the parsed app.yaml file.\n      path_adjuster: Instance of PathAdjuster to use for finding absolute\n        paths of data files on disk.\n      static_file_config_matcher: StaticFileConfigMatcher object.\n      read_data_file: Used for dependency injection.\n    '
        self._config = config
        self._path_adjuster = path_adjuster
        self._static_file_config_matcher = static_file_config_matcher
        self._read_data_file = read_data_file
    def Dispatch(self, request, outfile, base_env_dict=None):
        'Reads the file and returns the response status and data.'
        full_path = (request.path)
        (status, data) = (full_path)
        content_type = (request.path)
        static_file = (request.path)
        expiration = (request.path)
        current_etag = (data)
        if_match_etag = ('if-match', None)
        if_none_match_etag = (',')
        http_headers = (request.path)
        def WriteHeader(name, value):
            # Only emit the header if the user config did not already set it.
            if ((name) is None):
                (('%s: %s\r\n' % (name, value)))
        # Conditional-request handling: If-Match first, then If-None-Match,
        # else serve the file body with caching headers.
        if (if_match_etag and (not ((','), current_etag, False))):
            (('Status: %s\r\n' % http.client.PRECONDITION_FAILED))
            ('ETag', current_etag)
            ('\r\n')
        elif (if_none_match_etag, current_etag, True):
            (('Status: %s\r\n' % http.client.NOT_MODIFIED))
            ('ETag', current_etag)
            ('\r\n')
        else:
            (('Status: %d\r\n' % status))
            ('Content-Type', content_type)
            if expiration:
                fmt = email.Utils.formatdate
                ('Expires', ((() + expiration)))
                ('Cache-Control', ('public, max-age=%i' % expiration))
            if static_file:
                ('ETag', ('"%s"' % current_etag))
            for header in ():
                (('%s: %s\r\n' % header))
            ('\r\n')
            (data)
    def __str__(self):
        'Returns a string representation of this dispatcher.'
        return 'File dispatcher'
    @staticmethod
    def CreateEtag(data):
        'Returns string of hash of file content, unique per URL.'
        data_crc = (data)
        return ((data_crc))
    @staticmethod
    def _CheckETagMatches(supplied_etags, current_etag, allow_weak_match):
        'Checks if there is an entity tag match.\n\n    Args:\n      supplied_etags: list of input etags\n      current_etag: the calculated etag for the entity\n      allow_weak_match: Allow for weak tag comparison.\n\n    Returns:\n      True if there is a match, False otherwise.\n    '
        for tag in supplied_etags:
            # Weak validators are prefixed "W/"; the prefix test was stripped.
            if (allow_weak_match and ('W/')):
                tag = tag[2:]
            tag_data = ('"')
            if ((tag_data == '*') or (tag_data == current_etag)):
                return True
        return False
# Response headers an application is not allowed to set; stripped by
# IgnoreHeadersRewriter. NOTE(review): the wrapping call name was stripped by
# the extraction -- presumably frozenset([...]); confirm against upstream.
_IGNORE_RESPONSE_HEADERS = (['connection', 'content-encoding', 'date', 'keep-alive', 'proxy-authenticate', 'server', 'trailer', 'transfer-encoding', 'upgrade', blobstore.BLOB_KEY_HEADER])
# NOTE(review): several call targets stripped by the extraction (bare tuples
# are the surviving argument lists). Restore from upstream before use.
class AppServerResponse(object):
    "Development appserver response object.\n\n  Object used to hold the full appserver response. Used as a container\n  that is passed through the request rewrite chain and ultimately sent\n  to the web client.\n\n  Attributes:\n    status_code: Integer HTTP response status (e.g., 200, 302, 404, 500)\n    status_message: String containing an informational message about the\n      response code, possibly derived from the 'status' header, if supplied.\n    headers: mimetools.Message containing the HTTP headers of the response.\n    body: File-like object containing the body of the response.\n    large_response: Indicates that response is permitted to be larger than\n      MAX_RUNTIME_RESPONSE_SIZE.\n  "
    __slots__ = ['status_code', 'status_message', 'headers', 'body', 'large_response']
    def __init__(self, response_file=None, **kwds):
        'Initializer.\n\n    Args:\n      response_file: A file-like object that contains the full response\n        generated by the user application request handler. If present\n        the headers and body are set from this value, although the values\n        may be further overridden by the keyword parameters.\n      kwds: All keywords are mapped to attributes of AppServerResponse.\n    '
        self.status_code = 200
        self.status_message = 'Good to go'
        self.large_response = False
        if response_file:
            (response_file)
        else:
            self.headers = (())
            self.body = None
        # Keyword overrides win over values parsed from response_file.
        for (name, value) in ():
            (self, name, value)
    def SetResponse(self, response_file):
        'Sets headers and body from the response file.\n\n    Args:\n      response_file: File like object to set body and headers from.\n    '
        self.headers = (response_file)
        self.body = response_file
    @property
    def header_data(self):
        'Get header data as a string.\n\n    Returns:\n      String representation of header with line breaks cleaned up.\n    '
        header_list = []
        for header in self.headers.headers:
            header = ('\n\r')
            (header)
        # A Content-Type is always guaranteed in the serialized headers.
        if (not ('Content-Type')):
            ('Content-Type: text/html')
        return ((header_list) + '\r\n')
def IgnoreHeadersRewriter(response):
    """Strip response headers an application is not allowed to set.

    Certain headers (connection management, transfer encoding, the blobstore
    key header, ...) are owned by the server, not the app; see
    https://developers.google.com/appengine/docs/python/tools/webapp/responseclass#Disallowed_HTTP_Response_Headers
    This rewriter deletes every such header present on the response.
    """
    disallowed = [name for name in _IGNORE_RESPONSE_HEADERS
                  if name in response.headers]
    for name in disallowed:
        del response.headers[name]
# NOTE(review): call targets stripped -- the two "('ascii')" lines presumably
# were key/value .decode('ascii') probes; confirm against upstream.
def ValidHeadersRewriter(response):
    'Remove invalid response headers.\n\n  Response headers must be printable ascii characters. This is enforced in\n  production by http_proto.cc IsValidHeader.\n\n  This rewriter will remove headers that contain non ascii characters.\n  '
    for (key, value) in (()):
        try:
            ('ascii')
            ('ascii')
        except UnicodeDecodeError:
            # Non-ASCII header: drop it entirely rather than mangle it.
            del response.headers[key]
# NOTE(review): the two header lookups were stripped -- presumably
# response.headers.getheader('location'/'status'); confirm against upstream.
def ParseStatusRewriter(response):
    "Parse status header, if it exists.\n\n  Handles the server-side 'status' header, which instructs the server to change\n  the HTTP response code accordingly. Handles the 'location' header, which\n  issues an HTTP 302 redirect to the client. Also corrects the 'content-length'\n  header to reflect actual content length in case extra information has been\n  appended to the response body.\n\n  If the 'status' header supplied by the client is invalid, this method will\n  set the response to a 500 with an error message as content.\n  "
    location_value = ('location')
    status_value = ('status')
    if status_value:
        response_status = status_value
        # 'status' is an internal instruction, never sent to the client.
        del response.headers['status']
    elif location_value:
        response_status = ('%d Redirecting' % http.client.FOUND)
    else:
        # Neither header present: leave the response untouched.
        return response
    # "<code> <message>" -- pad so a bare code still unpacks cleanly.
    status_parts = (' ', 1)
    (response.status_code, response.status_message) = (status_parts + [''])[:2]
    try:
        response.status_code = (response.status_code)
    except ValueError:
        response.status_code = 500
        response.body = ('Error: Invalid "status" header value returned.')
# NOTE(review): both the header-iteration call and the per-line value
# extraction were stripped by the extraction; restore from upstream.
def GetAllHeaders(message, name):
    'Get all headers of a given name in a message.\n\n  Args:\n    message: A mimetools.Message object.\n    name: The name of the header.\n\n  Yields:\n    A sequence of values of all headers with the given name.\n  '
    for header_line in (name):
        (yield ())
# NOTE(review): call targets stripped throughout (date parsing/formatting,
# list appends, join); restore from upstream before use.
def CacheRewriter(response):
    'Update the cache header.'
    if (response.status_code == http.client.NOT_MODIFIED):
        # 304 responses keep whatever cache headers they already carry.
        return
    if (not ('Cache-Control' in response.headers)):
        response.headers['Cache-Control'] = 'no-cache'
    if (not ('Expires' in response.headers)):
        response.headers['Expires'] = 'Fri, 01 Jan 1990 00:00:00 GMT'
    if ('Set-Cookie' in response.headers):
        # Cookie-bearing responses must not be publicly cacheable: force the
        # Expires date into the past and strip any 'public' directive.
        current_date = ()
        expires = ('Expires')
        reset_expires = True
        if expires:
            expires_time = (expires)
            if expires_time:
                reset_expires = ((expires_time) >= current_date)
        if reset_expires:
            response.headers['Expires'] = ('%a, %d %b %Y %H:%M:%S GMT', (current_date))
        cache_directives = []
        for header in (response.headers, 'Cache-Control'):
            ((() for v in (',')))
        cache_directives = [d for d in cache_directives if (d != 'public')]
        if (not (cache_directives)):
            ('private')
        response.headers['Cache-Control'] = (cache_directives)
def _RemainingDataSize(input_buffer):
'Computes how much data is remaining in the buffer.\n\n It leaves the buffer in its initial state.\n\n Args:\n input_buffer: a file-like object with seek and tell methods.\n\n Returns:\n integer representing how much data is remaining in the buffer.\n '
current_position = ()
(0, 2)
remaining_data_size = (() - current_position)
(current_position)
return remaining_data_size
# NOTE(review): the REQUEST_METHOD lookup and the body-size call were stripped
# -- presumably env_dict.get('REQUEST_METHOD', '') and
# str(_RemainingDataSize(response.body)); confirm against upstream.
def ContentLengthRewriter(response, request_headers, env_dict):
    'Rewrite the Content-Length header.\n\n  Even though Content-Length is not a user modifiable header, App Engine\n  sends a correct Content-Length to the user based on the actual response.\n  '
    if (env_dict and (('REQUEST_METHOD', '') == 'HEAD')):
        # HEAD responses keep the Content-Length the handler produced.
        return
    if (response.status_code != http.client.NOT_MODIFIED):
        response.headers['Content-Length'] = ((response.body))
    elif ('Content-Length' in response.headers):
        # 304 must not carry a Content-Length.
        del response.headers['Content-Length']
def CreateResponseRewritersChain():
    """Build the default, ordered chain of response rewriters.

    A response rewriter gets a final chance to change part of the
    dev_appserver's response after every request, regardless of which
    dispatcher handled it. Rewriters run in list order, each seeing the
    output of the previous one; each receives (and mutates) an
    AppServerResponse instance.

    Returns:
      List of response rewriter callables, in application order.
    """
    return [
        ParseStatusRewriter,
        dev_appserver_blobstore.DownloadRewriter,
        IgnoreHeadersRewriter,
        ValidHeadersRewriter,
        CacheRewriter,
        ContentLengthRewriter,
    ]
# NOTE(review): the rewriter invocations were stripped -- the three branches
# presumably called response_rewriter(...) with 1, 2 or 3 arguments; the
# co_argcount dispatch itself survives intact.
def RewriteResponse(response_file, response_rewriters=None, request_headers=None, env_dict=None):
    'Allows final rewrite of dev_appserver response.\n\n  This function receives the unparsed HTTP response from the application\n  or internal handler, parses out the basic structure and feeds that structure\n  in to a chain of response rewriters.\n\n  It also makes sure the final HTTP headers are properly terminated.\n\n  For more about response rewriters, please see documentation for\n  CreateResponeRewritersChain.\n\n  Args:\n    response_file: File-like object containing the full HTTP response including\n      the response code, all headers, and the request body.\n    response_rewriters: A list of response rewriters. If none is provided it\n      will create a new chain using CreateResponseRewritersChain.\n    request_headers: Original request headers.\n    env_dict: Environment dictionary.\n\n  Returns:\n    An AppServerResponse instance configured with the rewritten response.\n  '
    if (response_rewriters is None):
        response_rewriters = ()
    response = (response_file)
    for response_rewriter in response_rewriters:
        # Dispatch on arity so older single-argument rewriters keep working.
        if (response_rewriter.__code__.co_argcount == 1):
            (response)
        elif (response_rewriter.__code__.co_argcount == 2):
            (response, request_headers)
        else:
            (response, request_headers, env_dict)
    return response
# NOTE(review): many call targets stripped by the extraction (bare tuples are
# the surviving argument lists). Restore from upstream before use.
class ModuleManager(object):
    'Manages loaded modules in the runtime.\n\n  Responsible for monitoring and reporting about file modification times.\n  Modules can be loaded from source or precompiled byte-code files. When a\n  file has source code, the ModuleManager monitors the modification time of\n  the source file even if the module itself is loaded from byte-code.\n  '
    def __init__(self, modules):
        'Initializer.\n\n    Args:\n      modules: Dictionary containing monitored modules.\n    '
        self._modules = modules
        self._default_modules = ()
        # Saved so ResetModules can restore the interpreter's path hooks.
        self._save_path_hooks = sys.path_hooks[:]
        # Maps module name -> (mtime, filename); populated lazily.
        self._modification_times = {}
        self._dirty = True
    @staticmethod
    def GetModuleFile(module, is_file=os.path.isfile):
        "Helper method to try to determine modules source file.\n\n    Args:\n      module: Module object to get file for.\n      is_file: Function used to determine if a given path is a file.\n\n    Returns:\n      Path of the module's corresponding Python source file if it exists, or\n      just the module's compiled Python file. If the module has an invalid\n      __file__ attribute, None will be returned.\n    "
        module_file = (module, '__file__', None)
        if (module_file is None):
            return None
        # Prefer the .py source over a .pyc/.pyo when the source exists.
        source_file = module_file[:(('py') + 2)]
        if (source_file):
            return source_file
        return module.__file__
    def AreModuleFilesModified(self):
        'Determines if any monitored files have been modified.\n\n    Returns:\n      True if one or more files have been modified, False otherwise.\n    '
        for (name, (mtime, fname)) in ():
            if (name not in self._modules):
                continue
            module = self._modules[name]
            try:
                if (mtime != (fname)):
                    self._dirty = True
                    return True
            except OSError as e:
                # A vanished file counts as modified.
                if (e.errno in FILE_MISSING_EXCEPTIONS):
                    self._dirty = True
                    return True
                raise e
        return False
    def UpdateModuleFileModificationTimes(self):
        'Records the current modification times of all monitored modules.'
        if (not self._dirty):
            return
        ()
        for (name, module) in (()):
            if (not (module, types.ModuleType)):
                continue
            module_file = (module)
            if (not module_file):
                continue
            try:
                self._modification_times[name] = ((module_file), module_file)
            except OSError as e:
                if (e.errno not in FILE_MISSING_EXCEPTIONS):
                    raise e
        self._dirty = False
    def ResetModules(self):
        'Clear modules so that when request is run they are reloaded.'
        ()
        ()
        (self._default_modules)
        sys.path_hooks[:] = self._save_path_hooks
        # Drop any sandbox import hooks left installed by a previous request.
        sys.meta_path = []
        ()
        ()
# NOTE(review): path-join, open and YAML-parse call targets were stripped;
# restore from upstream before use.
def GetVersionObject(isfile=os.path.isfile, open_fn=open):
    'Gets the version of the SDK by parsing the VERSION file.\n\n  Args:\n    isfile: used for testing.\n    open_fn: Used for testing.\n\n  Returns:\n    A Yaml object or None if the VERSION file does not exist.\n  '
    version_filename = ((google.appengine.__file__), VERSION_FILE)
    if (not (version_filename)):
        ('Could not find version file at %s', version_filename)
        return None
    version_fh = (version_filename, 'r')
    try:
        version = (version_fh)
    finally:
        ()
    return version
# NOTE(review): the module lookup and cache-clear calls were stripped --
# presumably module_dict.get(...) and template_module.template_cache.clear();
# confirm against upstream.
def _ClearTemplateCache(module_dict=sys.modules):
    'Clear template cache in webapp.template module.\n\n  Attempts to load template module. Ignores failure. If module loads, the\n  template cache is cleared.\n\n  Args:\n    module_dict: Used for dependency injection.\n  '
    template_module = ('google.appengine.ext.webapp.template')
    if (template_module is not None):
        ()
def CreateRequestHandler(root_path, login_url, static_caching=True, default_partition=None, interactive_console=True, secret_hash='xxx'):
"Creates a new BaseHTTPRequestHandler sub-class.\n\n This class will be used with the Python BaseHTTPServer module's HTTP server.\n\n Python's built-in HTTP server does not support passing context information\n along to instances of its request handlers. This function gets around that\n by creating a sub-class of the handler in a closure that has access to\n this context information.\n\n Args:\n root_path: Path to the root of the application running on the server.\n login_url: Relative URL which should be used for handling user logins.\n static_caching: True if browser caching of static files should be allowed.\n default_partition: Default partition to use in the application id.\n interactive_console: Whether to add the interactive console.\n\n Returns:\n Sub-class of BaseHTTPRequestHandler.\n "
application_module_dict = (sys.modules)
application_config_cache = ()
class DevAppServerRequestHandler(http.server.BaseHTTPRequestHandler):
"Dispatches URLs using patterns from a URLMatcher.\n\n The URLMatcher is created by loading an application's configuration file.\n Executes CGI scripts in the local process so the scripts can use mock\n versions of APIs.\n\n HTTP requests that correctly specify a user info cookie\n (dev_appserver_login.COOKIE_NAME) will have the 'USER_EMAIL' environment\n variable set accordingly. If the user is also an admin, the\n 'USER_IS_ADMIN' variable will exist and be set to '1'. If the user is not\n logged in, 'USER_EMAIL' will be set to the empty string.\n\n On each request, raises an InvalidAppConfigError exception if the\n application configuration file in the directory specified by the root_path\n argument is invalid.\n "
server_version = 'AppScaleServer/1.10'
module_dict = application_module_dict
module_manager = (application_module_dict)
config_cache = application_config_cache
rewriter_chain = ()
def __init__(self, *args, **kwargs):
'Initializer.\n\n Args:\n args: Positional arguments passed to the superclass constructor.\n kwargs: Keyword arguments passed to the superclass constructor.\n '
self._log_record_writer = ()
(self, *args)
def version_string(self):
"Returns server's version string used for Server HTTP header."
return self.server_version
def do_GET(self):
'Handle GET requests.'
if ('GET'):
()
def do_POST(self):
'Handles POST requests.'
()
def do_PUT(self):
'Handle PUT requests.'
()
def do_HEAD(self):
'Handle HEAD requests.'
if ('HEAD'):
()
def do_OPTIONS(self):
'Handles OPTIONS requests.'
()
def do_DELETE(self):
'Handle DELETE requests.'
()
def do_TRACE(self):
'Handles TRACE requests.'
if ('TRACE'):
()
def _HasNoBody(self, method):
'Check for request body in HTTP methods where no body is permitted.\n\n If a request body is present a 400 (Invalid request) response is sent.\n\n Args:\n method: The request method.\n\n Returns:\n True if no request body is present, False otherwise.\n '
content_length = (('content-length', 0))
if content_length:
body = (content_length)
('Request body in %s is not permitted: %s', method, body)
(http.client.BAD_REQUEST)
return False
return True
def _Dispatch(self, dispatcher, socket_infile, outfile, env_dict):
'Copy request data from socket and dispatch.\n\n Args:\n dispatcher: Dispatcher to handle request (MatcherDispatcher).\n socket_infile: Original request file stream.\n outfile: Output file to write response to.\n env_dict: Environment dictionary.\n '
(request_descriptor, request_file_name) = ('.tmp', 'request.')
try:
request_file = (request_file_name, 'wb')
try:
(self.rfile, request_file, (('content-length', 0)))
finally:
()
request_file = (request_file_name, 'rb')
try:
app_server_request = (self.path, None, self.headers, request_file, secret_hash)
(app_server_request, outfile)
finally:
()
finally:
try:
(request_descriptor)
try:
(request_file_name)
except OSError as err:
if ((err, 'winerror', 0) == os_compat.ERROR_SHARING_VIOLATION):
('Failed removing %s', request_file_name)
else:
raise
except OSError as err:
if (err.errno != errno.ENOENT):
raise
def _HandleRequest(self):
'Handles any type of request and prints exceptions if they occur.'
host_name = (('host') or self.server.server_name)
server_name = (':', 1)[0]
env_dict = {'REQUEST_METHOD': self.command, 'REMOTE_ADDR': ('X-Real-IP', self.client_address[0]), 'SERVER_SOFTWARE': self.server_version, 'SERVER_NAME': server_name, 'SERVER_PROTOCOL': self.protocol_version, 'SERVER_PORT': (self.server.server_port)}
full_url = (server_name, self.server.server_port, self.path)
if ((full_url) > MAX_URL_LENGTH):
msg = ('Requested URI too long: %s' % full_url)
(msg)
(http.client.REQUEST_URI_TOO_LONG, msg)
return
tbhandler = ().handle
try:
(config, explicit_matcher, from_cache) = (root_path, self.module_dict)
if (not from_cache):
()
implicit_matcher = (config, self.module_dict, root_path, login_url)
if ('/_ah/admin'):
if (((handler.url == '/_ah/datastore_admin.*') for handler in config.handlers)):
self.headers['X-AppEngine-Datastore-Admin-Enabled'] = 'True'
self.headers['X-AppEngine-Interactive-Console-Enabled'] = (interactive_console)
if (config.api_version != API_VERSION):
('API versions cannot be switched dynamically: %r != %r', config.api_version, API_VERSION)
(1)
(exclude, service_match) = (self.path)
if exclude:
(('Request to %s excluded because %s is not enabled in inbound_services in app.yaml' % (self.path, service_match)))
(http.client.NOT_FOUND)
return
if (config.runtime == 'go'):
from google.appengine.ext import go
go.APP_CONFIG = config
version = ()
env_dict['SDK_VERSION'] = version['release']
env_dict['CURRENT_VERSION_ID'] = (config.version + '.1')
env_dict['APPLICATION_ID'] = config.application
env_dict['DEFAULT_VERSION_HOSTNAME'] = self.server.frontend_hostport
env_dict['APPENGINE_RUNTIME'] = config.runtime
if ((config.runtime == 'python27') and config.threadsafe):
env_dict['_AH_THREADSAFE'] = '1'
global _request_time
global _request_id
_request_time = ()
_request_id += 1
request_id_hash = ()
env_dict['REQUEST_ID_HASH'] = request_id_hash
os.environ['REQUEST_ID_HASH'] = request_id_hash
cookies = (('cookie'))
(email_addr, user_id, admin, valid_cookie) = (cookies)
()
dispatcher = (config, login_url, self.module_manager, [implicit_matcher, explicit_matcher])
outfile = ()
try:
(dispatcher, self.rfile, outfile, env_dict)
finally:
()
()
(0)
response = (outfile, self.rewriter_chain, self.headers, env_dict)
runtime_response_size = (response.body)
if ((self.command == 'HEAD') and (runtime_response_size > 0)):
('Dropping unexpected body in response to HEAD request')
response.body = ('')
elif ((not response.large_response) and (runtime_response_size > MAX_RUNTIME_RESPONSE_SIZE)):
('Response too large: %d, max is %d', runtime_response_size, MAX_RUNTIME_RESPONSE_SIZE)
response.status_code = 500
response.status_message = 'Forbidden'
new_response = ('HTTP response was too large: %d. The limit is: %d.' % (runtime_response_size, MAX_RUNTIME_RESPONSE_SIZE))
response.headers['Content-Length'] = ((new_response))
response.body = (new_response)
except yaml_errors.EventListenerError as e:
title = 'Fatal error when loading application configuration'
msg = ('%s:\n%s' % (title, (e)))
(msg)
(http.client.INTERNAL_SERVER_ERROR, title)
('Content-Type: text/html\r\n\r\n')
(('<pre>%s</pre>' % (msg)))
except KeyboardInterrupt as e:
('Server interrupted by user, terminating')
()
except CompileError as e:
msg = (('Compile error:\n' + e.text) + '\n')
(msg)
(http.client.INTERNAL_SERVER_ERROR, 'Compile error')
('Content-Type: text/plain; charset=utf-8\r\n\r\n')
(msg)
except ExecuteError as e:
(e.text)
(http.client.INTERNAL_SERVER_ERROR, 'Execute error')
('Content-Type: text/html; charset=utf-8\r\n\r\n')
('<title>App failure</title>\n')
((e.text + '\n<pre>\n'))
for l in e.log:
((l))
('</pre>\n')
except:
msg = 'Exception encountered handling request'
(msg)
(http.client.INTERNAL_SERVER_ERROR, msg)
()
else:
try:
(response.status_code, response.status_message)
(response.header_data)
('\r\n')
(response.body, self.wfile, COPY_BLOCK_SIZE)
except (IOError, OSError) as e:
if (e.errno not in [errno.EPIPE, os_compat.WSAECONNABORTED]):
raise e
except socket.error as e:
(('Socket exception: %s' % (e)))
()
def log_error(self, format, *args):
    'Redirect error messages through the logging module.'
    # NOTE(review): call target stripped by extraction -- presumably a
    # logging call such as logging.error(format, *args).  Restore before use.
    (format, *args)
def log_message(self, format, *args):
    'Redirect log messages through the logging module.'
    # NOTE(review): the condition and both logging calls were stripped by
    # extraction -- the test was presumably hasattr(self, 'path') (as written
    # it is a 2-tuple and always truthy).  Restore before use.
    if (self, 'path'):
        (format, *args)
    else:
        (format, *args)
def log_request(self, code='-', size='-'):
    'Indicate that this request has completed.'
    # NOTE(review): call targets stripped by extraction -- the first call is
    # presumably the base-class log_request(self, code, size) and the last
    # pair a logging call reporting command/path/code/size.  Restore before use.
    (self, code, size)
    if (code == '-'):
        code = 0
    if (size == '-'):
        size = 0
    ()
    (self.command, self.path, code, size, self.request_version)
return DevAppServerRequestHandler
def ReadAppConfig(appinfo_path, parse_app_config=appinfo_includes.Parse):
    'Reads app.yaml file and returns its app id and list of URLMap instances.\n\n Args:\n appinfo_path: String containing the path to the app.yaml file.\n parse_app_config: Used for dependency injection.\n\n Returns:\n AppInfoExternal instance.\n\n Raises:\n If the config file could not be read or the config does not contain any\n URLMap instances, this function will raise an InvalidAppConfigError\n exception.\n '
    # NOTE(review): call targets stripped by extraction -- the shape suggests
    # open(appinfo_path, 'r'), parse_app_config(appinfo_file) and
    # appinfo_file.close(); the bare `raise (...)` presumably raised
    # InvalidAppConfigError.  Restore before use.
    try:
        appinfo_file = (appinfo_path, 'r')
    except IOError as unused_e:
        raise (('Application configuration could not be read from "%s"' % appinfo_path))
    try:
        return (appinfo_file)
    finally:
        ()
def _StaticFilePathRe(url_map):
    "Returns a regular expression string that matches static file paths.\n\n Args:\n url_map: A fully initialized static_files or static_dir appinfo.URLMap\n instance.\n\n Returns:\n The regular expression matches paths, relative to the application's root\n directory, of files that this static handler serves. re.compile should\n accept the returned string.\n\n Raises:\n AssertionError: The url_map argument was not an URLMap for a static handler.\n "
    # NOTE(review): call targets stripped by extraction -- handler_type was
    # presumably url_map.GetHandlerType(), and the static_dir branch
    # presumably rstrip'ed/escaped the path; the final `raise (...)` likely
    # raised AssertionError.  Restore before use.
    handler_type = ()
    if (handler_type == 'static_files'):
        return (url_map.upload + '$')
    elif (handler_type == 'static_dir'):
        path = (os.path.sep)
        return ((path + (os.path.sep)) + '(.*)')
    if (not False):
        raise ('This property only applies to static handlers.')
def CreateURLMatcherFromMaps(config, root_path, url_map_list, module_dict, default_expiration, create_url_matcher=URLMatcher, create_cgi_dispatcher=CGIDispatcher, create_file_dispatcher=FileDispatcher, create_path_adjuster=PathAdjuster, normpath=os.path.normpath):
    'Creates a URLMatcher instance from URLMap.\n\n Creates all of the correct URLDispatcher instances to handle the various\n content types in the application configuration.\n\n Args:\n config: AppInfoExternal instance representing the parsed app.yaml file.\n root_path: Path to the root of the application running on the server.\n url_map_list: List of appinfo.URLMap objects to initialize this\n matcher with. Can be an empty list if you would like to add patterns\n manually or use config.handlers as a default.\n module_dict: Dictionary in which application-loaded modules should be\n preserved between requests. This dictionary must be separate from the\n sys.modules dictionary.\n default_expiration: String describing default expiration time for browser\n based caching of static files. If set to None this disallows any\n browser caching of static content.\n create_url_matcher: Used for dependency injection.\n create_cgi_dispatcher: Used for dependency injection.\n create_file_dispatcher: Used for dependency injection.\n create_path_adjuster: Used for dependency injection.\n normpath: Used for dependency injection.\n\n Returns:\n Instance of URLMatcher with the supplied URLMap objects properly loaded.\n\n Raises:\n InvalidAppConfigError: if a handler is an unknown type.\n '
    # NOTE(review): this block was mangled by extraction -- most call targets
    # (e.g. create_url_matcher(), create_path_adjuster(root_path),
    # url_map.GetHandlerType(), url_matcher.AddURL(...), the regex/path
    # escaping helpers) have been stripped to bare argument tuples.
    # Restore from the original SDK before use.
    if (config and config.handlers and (not url_map_list)):
        url_map_list = config.handlers
    url_matcher = ()
    path_adjuster = (root_path)
    cgi_dispatcher = (config, module_dict, root_path, path_adjuster)
    static_file_config_matcher = (url_map_list, default_expiration)
    file_dispatcher = (config, path_adjuster, static_file_config_matcher)
    (static_file_config_matcher)
    for url_map in url_map_list:
        admin_only = (url_map.login == appinfo.LOGIN_ADMIN)
        requires_login = ((url_map.login == appinfo.LOGIN_REQUIRED) or admin_only)
        auth_fail_action = url_map.auth_fail_action
        handler_type = ()
        if (handler_type == appinfo.HANDLER_SCRIPT):
            dispatcher = cgi_dispatcher
        elif (handler_type in (appinfo.STATIC_FILES, appinfo.STATIC_DIR)):
            dispatcher = file_dispatcher
        else:
            raise (('Unknown handler type "%s"' % handler_type))
        regex = url_map.url
        path = ()
        if (handler_type == appinfo.STATIC_DIR):
            if (regex[(- 1)] == '/'):
                regex = regex[:(- 1)]
            if (path[(- 1)] == os.path.sep):
                path = path[:(- 1)]
            regex = (((regex), '(.*)'))
            if (os.path.sep == '\\'):
                backref = '\\\\1'
            else:
                backref = '\\1'
            path = ((('\\', '\\\\') + os.path.sep) + backref)
        (regex, dispatcher, path, requires_login, admin_only, auth_fail_action)
    return url_matcher
class AppConfigCache(object):
    'Cache used by LoadAppConfig.\n\n If given to LoadAppConfig instances of this class are used to cache contents\n of the app config (app.yaml or app.yml) and the Matcher created from it.\n\n Code outside LoadAppConfig should treat instances of this class as opaque\n objects and not access its members.\n '
    # Path of the app.yaml/app.yml file whose contents are cached.
    path = None
    # Modification time of the config file at the time it was cached; used
    # to detect staleness.
    mtime = None
    # Parsed AppInfoExternal instance for the cached config.
    config = None
    # URLMatcher built from the cached config.
    matcher = None
def LoadAppConfig(root_path, module_dict, cache=None, static_caching=True, read_app_config=ReadAppConfig, create_matcher=CreateURLMatcherFromMaps, default_partition=None):
    'Creates a Matcher instance for an application configuration file.\n\n Raises an InvalidAppConfigError exception if there is anything wrong with\n the application configuration file.\n\n Args:\n root_path: Path to the root of the application to load.\n module_dict: Dictionary in which application-loaded modules should be\n preserved between requests. This dictionary must be separate from the\n sys.modules dictionary.\n cache: Instance of AppConfigCache or None.\n static_caching: True if browser caching of static files should be allowed.\n read_app_config: Used for dependency injection.\n create_matcher: Used for dependency injection.\n default_partition: Default partition to use for the appid.\n\n Returns:\n tuple: (AppInfoExternal, URLMatcher, from_cache)\n\n Raises:\n AppConfigNotFound: if an app.yaml file cannot be found.\n '
    # NOTE(review): call targets stripped by extraction -- the shape suggests
    # os.path.join(root_path, 'app.yaml'), os.path.exists(appinfo_path),
    # os.path.getmtime(appinfo_path), read_app_config(...), create_matcher(...)
    # and the final raise of AppConfigNotFound.  Restore before use.
    for appinfo_path in [(root_path, 'app.yaml'), (root_path, 'app.yml')]:
        if (appinfo_path):
            if (cache is not None):
                mtime = (appinfo_path)
                if ((cache.path == appinfo_path) and (cache.mtime == mtime)):
                    return (cache.config, cache.matcher, True)
                cache.config = cache.matcher = cache.path = None
                cache.mtime = mtime
            config = (appinfo_path, appinfo_includes.Parse)
            if static_caching:
                if config.default_expiration:
                    default_expiration = config.default_expiration
                else:
                    default_expiration = '0'
            else:
                default_expiration = None
            matcher = (config, root_path, config.handlers, module_dict, default_expiration)
            (config.skip_files)
            if (cache is not None):
                cache.path = appinfo_path
                cache.config = config
                cache.matcher = matcher
            return (config, matcher, False)
    raise (('Could not find app.yaml in "%s".' % (root_path,)))
class ReservedPathFilter():
    'Checks a path against a set of inbound_services.'

    # Maps reserved URL paths to the inbound service that must be enabled
    # for an application to receive requests on them.
    reserved_paths = {'/_ah/channel/connect': 'channel_presence', '/_ah/channel/disconnect': 'channel_presence'}

    def __init__(self, inbound_services):
        self.inbound_services = inbound_services

    def ExcludePath(self, path):
        'Check to see if this is a service url and matches inbound_services.'
        # NOTE(review): the loop iterable and inner test were stripped by
        # extraction -- presumably iterating self.reserved_paths and testing
        # path.startswith(reserved_path).  `skip` is assigned but unused in
        # the surviving text.  Restore before use.
        skip = False
        for reserved_path in (()):
            if (reserved_path):
                if ((not self.inbound_services) or (self.reserved_paths[reserved_path] not in self.inbound_services)):
                    return (True, self.reserved_paths[reserved_path])
        return (False, None)
def CreateInboundServiceFilter(inbound_services):
    # NOTE(review): constructor name stripped by extraction -- presumably
    # returns ReservedPathFilter(inbound_services).  Restore before use.
    return (inbound_services)
def ReadCronConfig(croninfo_path, parse_cron_config=croninfo.LoadSingleCron):
    'Reads cron.yaml file and returns a list of CronEntry instances.\n\n Args:\n croninfo_path: String containing the path to the cron.yaml file.\n parse_cron_config: Used for dependency injection.\n\n Returns:\n A CronInfoExternal object.\n\n Raises:\n If the config file is unreadable, empty or invalid, this function will\n raise an InvalidAppConfigError or a MalformedCronConfiguration exception.\n '
    # NOTE(review): call targets stripped by extraction -- presumably
    # open(croninfo_path, 'r'), parse_cron_config(croninfo_file) and
    # croninfo_file.close().  Restore before use.
    try:
        croninfo_file = (croninfo_path, 'r')
    except IOError as e:
        raise (('Cron configuration could not be read from "%s": %s' % (croninfo_path, e)))
    try:
        return (croninfo_file)
    finally:
        ()
def _RemoveFile(file_path):
    # Best-effort removal of a file; failures are logged, not raised.
    # NOTE(review): call targets stripped by extraction -- presumably
    # os.path.exists(file_path), logging.info(...) and os.remove(file_path).
    # Restore before use.
    if (file_path and (file_path)):
        ('Attempting to remove file at %s', file_path)
        try:
            (file_path)
        except OSError as e:
            ('Removing file failed: %s', e)
def SetupStubs(app_id, **config):
    'Sets up testing stubs of APIs.\n\n Args:\n app_id: Application ID being served.\n config: keyword arguments.\n\n Keywords:\n root_path: Root path to the directory of the application which should\n contain the app.yaml, index.yaml, and queue.yaml files.\n login_url: Relative URL which should be used for handling user login/logout.\n datastore_path: Path to the file to store Datastore file stub data in.\n prospective_search_path: Path to the file to store Prospective Search stub\n data in.\n use_sqlite: Use the SQLite stub for the datastore.\n high_replication: Use the high replication consistency model\n history_path: DEPRECATED, No-op.\n clear_datastore: If the datastore should be cleared on startup.\n smtp_host: SMTP host used for sending test mail.\n smtp_port: SMTP port.\n smtp_user: SMTP user.\n smtp_password: SMTP password.\n mysql_host: MySQL host.\n mysql_port: MySQL port.\n mysql_user: MySQL user.\n mysql_password: MySQL password.\n mysql_socket: MySQL socket.\n enable_sendmail: Whether to use sendmail as an alternative to SMTP.\n show_mail_body: Whether to log the body of emails.\n remove: Used for dependency injection.\n disable_task_running: True if tasks should not automatically run after\n they are enqueued.\n task_retry_seconds: How long to wait after an auto-running task before it\n is tried again.\n trusted: True if this app can access data belonging to other apps. This\n behavior is different from the real app server and should be left False\n except for advanced uses of dev_appserver.\n port: The port that this dev_appserver is bound to. Defaults to 8080\n address: The host that this dev_appsever is running on. Defaults to\n localhost.\n search_index_path: Path to the file to store search indexes in.\n clear_search_index: If the search indeces should be cleared on startup.\n '
    # NOTE(review): this block was mangled by extraction -- the optional-config
    # reads were presumably config.get('key', default), and the stub
    # registrations presumably apiproxy_stub_map.apiproxy.RegisterStub(name,
    # <stub instance>); all those call targets have been stripped to bare
    # argument tuples.  Restore from the original SDK before use.
    root_path = ('root_path', None)
    login_url = config['login_url']
    datastore_path = config['datastore_path']
    clear_datastore = config['clear_datastore']
    prospective_search_path = ('prospective_search_path', '')
    clear_prospective_search = ('clear_prospective_search', False)
    use_sqlite = ('use_sqlite', False)
    high_replication = ('high_replication', False)
    require_indexes = ('require_indexes', False)
    mysql_host = ('mysql_host', None)
    mysql_port = ('mysql_port', 3306)
    mysql_user = ('mysql_user', None)
    mysql_password = ('mysql_password', None)
    mysql_socket = ('mysql_socket', None)
    smtp_host = ('smtp_host', None)
    smtp_port = ('smtp_port', 25)
    smtp_user = ('smtp_user', '')
    smtp_password = ('smtp_password', '')
    enable_sendmail = ('enable_sendmail', False)
    show_mail_body = ('show_mail_body', False)
    remove = ('remove', os.remove)
    disable_task_running = ('disable_task_running', False)
    task_retry_seconds = ('task_retry_seconds', 30)
    logs_path = ('logs_path', ':memory:')
    trusted = ('trusted', False)
    clear_search_index = ('clear_search_indexes', False)
    search_index_path = ('search_indexes_path', None)
    _use_atexit_for_datastore_stub = ('_use_atexit_for_datastore_stub', False)
    port_sqlite_data = ('port_sqlite_data', False)
    serve_port = (('NGINX_PORT', 8080))
    serve_address = ('NGINX_HOST', 'localhost')
    xmpp_path = config['xmpp_path']
    uaserver_path = config['uaserver_path']
    login_server = config['login_server']
    cookie_secret = config['COOKIE_SECRET']
    os.environ['APPLICATION_ID'] = app_id
    os.environ['REQUEST_ID_HASH'] = ''
    if (clear_prospective_search and prospective_search_path):
        (prospective_search_path)
    if clear_datastore:
        (datastore_path)
    if clear_search_index:
        (search_index_path)
    apiproxy_stub_map.apiproxy = ()
    ('app_identity_service', ())
    ('capability_service', ())
    datastore = (app_id, datastore_path)
    ('datastore_v3', datastore)
    ('mail', (smtp_host, smtp_port, smtp_user, smtp_password))
    ('memcache', ())
    hash_secret = ()
    ('taskqueue', (app_id, serve_address, serve_port))
    ('urlfetch', ())
    ('xmpp', ())
    from google.appengine import api
    sys.modules['google.appengine.api.rdbms'] = rdbms_mysqldb
    api.rdbms = rdbms_mysqldb
    ()
    fixed_login_url = ('%s?%s=%%s' % (login_url, dev_appserver_login.CONTINUE_PARAM))
    fixed_logout_url = ('https://%s:%s/logout?%s=%%s' % (login_server, DASHBOARD_HTTPS_PORT, dev_appserver_login.CONTINUE_PARAM))
    ('user', ())
    ('channel', ())
    ('matcher', (prospective_search_path, ('taskqueue')))
    ('remote_socket', ())
    ('search', ())
    try:
        from google.appengine.api.images import images_stub
        host_prefix = ('http://%s:%d' % (serve_address, serve_port))
        ('images', ())
    except ImportError as e:
        ('Could not initialize images API; you are likely missing the Python "PIL" module. ImportError: %s', e)
        from google.appengine.api.images import images_not_implemented_stub
        ('images', ())
    blob_storage = (app_id)
    ('blobstore', (blob_storage))
    ('file', (blob_storage))
    ('logservice', (True))
    system_service_stub = ()
    ('system', system_service_stub)
def TearDownStubs():
    """Tear down any API stubs that require explicit cleanup.

    Currently a no-op: none of the installed stubs need teardown.
    """
def CreateImplicitMatcher(config, module_dict, root_path, login_url, create_path_adjuster=PathAdjuster, create_local_dispatcher=LocalCGIDispatcher, create_cgi_dispatcher=CGIDispatcher, get_blob_storage=dev_appserver_blobstore.GetBlobStorage):
    'Creates a URLMatcher instance that handles internal URLs.\n\n Used to facilitate handling user login/logout, debugging, info about the\n currently running app, quitting the dev appserver, etc.\n\n Args:\n config: AppInfoExternal instance representing the parsed app.yaml file.\n module_dict: Dictionary in the form used by sys.modules.\n root_path: Path to the root of the application.\n login_url: Relative URL which should be used for handling user login/logout.\n create_path_adjuster: Used for dependedency injection.\n create_local_dispatcher: Used for dependency injection.\n create_cgi_dispatcher: Used for dependedency injection.\n get_blob_storage: Used for dependency injection.\n\n Returns:\n Instance of URLMatcher with appropriate dispatchers.\n '
    # NOTE(review): mangled by extraction -- the dispatcher constructors and
    # the url_matcher.AddURL(pattern, dispatcher, path, requires_login,
    # admin_only, auth_fail_action) registrations have been stripped to bare
    # argument tuples.  Restore from the original SDK before use.
    url_matcher = ()
    path_adjuster = (root_path)

    def _StatusChecker():
        'A path for the application manager to check if this application\n server is up.\n '
        pass
    status_dispatcher = (config, sys.modules, path_adjuster, _StatusChecker)
    ('/_ah/health_check', status_dispatcher, '', False, False, appinfo.AUTH_FAIL_ACTION_REDIRECT)

    def _HandleQuit():
        # Raising KeyboardInterrupt unwinds the serving loop cleanly.
        raise KeyboardInterrupt
    quit_dispatcher = (config, sys.modules, path_adjuster, _HandleQuit)
    ('/_ah/quit?', quit_dispatcher, '', True, True, appinfo.AUTH_FAIL_ACTION_REDIRECT)
    login_dispatcher = (config, sys.modules, path_adjuster, dev_appserver_login.main)
    (login_url, login_dispatcher, '', False, False, appinfo.AUTH_FAIL_ACTION_REDIRECT)
    admin_dispatcher = (config, module_dict, root_path, path_adjuster)
    ('/_ah/admin(?:/.*)?', admin_dispatcher, DEVEL_CONSOLE_PATH, True, True, appinfo.AUTH_FAIL_ACTION_REDIRECT)
    upload_dispatcher = (get_blob_storage)
    (dev_appserver_blobstore.UPLOAD_URL_PATTERN, upload_dispatcher, '', True, True, appinfo.AUTH_FAIL_ACTION_UNAUTHORIZED)
    blobimage_dispatcher = (('images'))
    (dev_appserver_blobimage.BLOBIMAGE_URL_PATTERN, blobimage_dispatcher, '', False, False, appinfo.AUTH_FAIL_ACTION_UNAUTHORIZED)
    oauth_dispatcher = ()
    (dev_appserver_oauth.OAUTH_URL_PATTERN, oauth_dispatcher, '', False, False, appinfo.AUTH_FAIL_ACTION_UNAUTHORIZED)
    channel_dispatcher = (('channel'))
    (dev_appserver_channel.CHANNEL_JSAPI_PATTERN, channel_dispatcher, '', False, False, appinfo.AUTH_FAIL_ACTION_UNAUTHORIZED)
    apiserver_dispatcher = ()
    (dev_appserver_apiserver.API_SERVING_PATTERN, apiserver_dispatcher, '', False, False, appinfo.AUTH_FAIL_ACTION_UNAUTHORIZED)
    return url_matcher
def FetchAllEntitites():
    'Returns all datastore entities from all namespaces as a list.'
    # NOTE(review): mangled by extraction -- the namespace_manager calls,
    # the datastore queries and entities_set.append(ent) have all been
    # stripped to bare argument tuples.  Restore before use.
    ns = (())
    original_ns = ()
    entities_set = []
    for namespace in ns:
        (())
        kinds_list = (())
        for kind_entity in kinds_list:
            ents = (())
            for ent in ents:
                (ent)
    (original_ns)
    return entities_set
def PutAllEntities(entities):
    'Puts all entities to the current datastore.'
    # NOTE(review): call target stripped by extraction -- presumably
    # datastore.Put(entity).  Restore before use.
    for entity in entities:
        (entity)
def PortAllEntities(datastore_path):
    'Copies entities from a DatastoreFileStub to an SQLite stub.\n\n Args:\n datastore_path: Path to the file to store Datastore file stub data is.\n '
    # NOTE(review): mangled by extraction -- the stub get/register calls
    # (apiproxy_stub_map), FetchAllEntitites/PutAllEntities invocations and
    # the final file rename/remove shuffle have been stripped to bare
    # argument tuples.  Restore before use.
    previous_stub = ('datastore_v3')
    try:
        app_id = os.environ['APPLICATION_ID']
        apiproxy_stub_map.apiproxy = ()
        datastore_stub = (app_id, datastore_path)
        ('datastore_v3', datastore_stub)
        entities = ()
        sqlite_datastore_stub = (app_id, (datastore_path + '.sqlite'))
        ('datastore_v3', sqlite_datastore_stub)
        (entities)
        ()
    finally:
        # Always restore the stub that was registered before the port.
        ('datastore_v3', previous_stub)
    (datastore_path, (datastore_path + '.filestub'))
    (datastore_path)
    ((datastore_path + '.sqlite'), datastore_path)
def CreateServer(root_path, login_url, port, template_dir=None, serve_address='', allow_skipped_files=False, static_caching=True, python_path_list=sys.path, sdk_dir=SDK_ROOT, default_partition=None, frontend_port=None, interactive_console=True, secret_hash='xxx'):
    "Creates a new HTTPServer for an application.\n\n The sdk_dir argument must be specified for the directory storing all code for\n the SDK so as to allow for the sandboxing of module access to work for any\n and all SDK code. While typically this is where the 'google' package lives,\n it can be in another location because of API version support.\n\n Args:\n root_path: String containing the path to the root directory of the\n application where the app.yaml file is.\n login_url: Relative URL which should be used for handling user login/logout.\n port: Port to start the application server on.\n template_dir: Unused.\n serve_address: Address on which the server should serve.\n allow_skipped_files: True if skipped files should be accessible.\n static_caching: True if browser caching of static files should be allowed.\n python_path_list: Used for dependency injection.\n sdk_dir: Directory where the SDK is stored.\n default_partition: Default partition to use for the appid.\n frontend_port: A frontend port (so backends can return an address for a\n frontend). If None, port will be used.\n interactive_console: Whether to add the interactive console.\n secret_hash: For TaskQueue admin rights.\n Returns:\n Instance of BaseHTTPServer.HTTPServer that's ready to start accepting.\n "
    # NOTE(review): mangled by extraction -- os.path.abspath, the handler
    # factory, python_path_list.insert, the HTTPServerWithScheduler
    # construction and the taskqueue/channel stub lookups have all been
    # stripped to bare argument tuples.  Restore before use.
    absolute_root_path = (root_path)
    (absolute_root_path, [sdk_dir])
    (allow_skipped_files)
    handler_class = (absolute_root_path, login_url, static_caching, default_partition, interactive_console)
    if (absolute_root_path not in python_path_list):
        (0, absolute_root_path)
    server = ((serve_address, port), handler_class)
    queue_stub = ('taskqueue')
    if (queue_stub and (queue_stub, 'StartBackgroundExecution')):
        ()
    channel_stub = ('channel')
    if channel_stub:
        # Wire the channel stub's event hooks into the server's scheduler.
        channel_stub._add_event = server.AddEvent
        channel_stub._update_event = server.UpdateEvent
    server.frontend_hostport = ('%s:%d' % ((serve_address or 'localhost'), (frontend_port or port)))
    return server
class HTTPServerWithScheduler(http.server.HTTPServer):
    'A BaseHTTPServer subclass that calls a method at a regular interval.'
    # NOTE(review): several call targets in this class were stripped by
    # extraction (base-class __init__, time_func(), select_func(...),
    # heapq.heappush/heappop on self._events, handle_request).  The _events
    # heap holds (eta, runnable, service, event_id) tuples.  Restore the
    # stripped calls from the original SDK before use.

    def __init__(self, server_address, request_handler_class):
        'Constructor.\n\n Args:\n server_address: the bind address of the server.\n request_handler_class: class used to handle requests.\n '
        (self, server_address, request_handler_class)
        self._events = []
        self._stopped = False

    def handle_request(self):
        'Override the base handle_request call.\n\n Python 2.6 changed the semantics of handle_request() with r61289.\n This patches it back to the Python 2.5 version, which has\n helpfully been renamed to _handle_request_noblock.\n '
        # NOTE(review): condition presumably hasattr(self, '_handle_request_noblock').
        if (self, '_handle_request_noblock'):
            ()
        else:
            (self)

    def get_request(self, time_func=time.time, select_func=select.select):
        'Overrides the base get_request call.\n\n Args:\n time_func: used for testing.\n select_func: used for testing.\n\n Returns:\n a (socket_object, address info) tuple.\n '
        while True:
            if self._events:
                current_time = ()
                next_eta = self._events[0][0]
                delay = (next_eta - current_time)
            else:
                delay = DEFAULT_SELECT_DELAY
            (readable, _, _) = ([self.socket], [], [], (delay, 0))
            if readable:
                return ()
            current_time = ()
            if (self._events and (current_time >= self._events[0][0])):
                runnable = (self._events)[1]
                request_tuple = ()
                if request_tuple:
                    return request_tuple

    def serve_forever(self):
        'Handle one request at a time until told to stop.'
        while (not self._stopped):
            ()
        ()

    def stop_serving_forever(self):
        "Stop the serve_forever() loop.\n\n Stop happens on the next handle_request() loop; it will not stop\n immediately. Since dev_appserver.py must run on py2.5 we can't\n use newer features of SocketServer (e.g. shutdown(), added in py2.6).\n "
        self._stopped = True

    def AddEvent(self, eta, runnable, service=None, event_id=None):
        'Add a runnable event to be run at the specified time.\n\n Args:\n eta: when to run the event, in seconds since epoch.\n runnable: a callable object.\n service: the service that owns this event. Should be set if id is set.\n event_id: optional id of the event. Used for UpdateEvent below.\n '
        (self._events, (eta, runnable, service, event_id))

    def UpdateEvent(self, service, event_id, eta):
        'Update a runnable event in the heap with a new eta.\n TODO(moishel): come up with something better than a linear scan to\n update items. For the case this is used for now -- updating events to\n "time out" channels -- this works fine because those events are always\n soon (within seconds) and thus found quickly towards the front of the heap.\n One could easily imagine a scenario where this is always called for events\n that tend to be at the back of the heap, of course...\n\n Args:\n service: the service that owns this event.\n event_id: the id of the event.\n eta: the new eta of the event.\n '
        # NOTE(review): loop iterable presumably range(len(self._events)) and
        # the final call presumably heapq.heappush(self._events, item).
        for id in ((self._events)):
            item = self._events[id]
            if ((item[2] == service) and (item[3] == event_id)):
                item = (eta, item[1], item[2], item[3])
                del self._events[id]
                (self._events, item)
                break
'Tests for google.apphosting.tools.devappserver2.inotify_file_watcher.'
import logging
import os
import os.path
import shutil
import sys
import tempfile
import unittest
from google.appengine.tools.devappserver2 import inotify_file_watcher
@(('linux'), 'requires linux')
class TestInotifyFileWatcher(unittest.TestCase):
    'Tests for inotify_file_watcher.InotifyFileWatcher.'
    # NOTE(review): this whole test class was mangled by extraction -- the
    # decorator (presumably unittest.skipUnless(sys.platform.startswith(
    # 'linux'), 'requires linux')), the tempfile/os/shutil helpers, the
    # watcher start/quit calls and every assertEqual have been stripped to
    # bare argument tuples.  Restore from the original SDK before use.

    def setUp(self):
        self._directory = ()
        self._junk_directory = ()
        self._watcher = (self._directory)
        ('watched directory=%r, junk directory=%r', self._directory, self._junk_directory)

    def tearDown(self):
        ()
        (self._directory)
        (self._junk_directory)

    def _create_file(self, relative_path):
        realpath = ((self._directory, relative_path))
        with (realpath, 'w'):
            pass
        return realpath

    def _create_directory(self, relative_path):
        realpath = ((self._directory, relative_path))
        (realpath)
        return realpath

    def _create_directory_tree(self, path, num_directories):
        'Create exactly num_directories subdirectories in path.'
        if (not (num_directories >= 0)):
            raise ()
        if (not num_directories):
            return
        (path)
        num_directories -= 1
        for i in (4, 0, (- 1)):
            sub_dir_size = (num_directories / i)
            ((path, ('dir%d' % i)), sub_dir_size)
            num_directories -= sub_dir_size

    def test_file_created(self):
        ()
        path = ('test')
        (([path]), ())

    def test_file_modified(self):
        path = ('test')
        ()
        with (path, 'w') as f:
            ('testing')
        (([path]), ())

    def test_file_read(self):
        path = ('test')
        with (path, 'w') as f:
            ('testing')
        ()
        with (path, 'r') as f:
            ()
        # Reading a file should not be reported as a change.
        ((), ())

    def test_file_deleted(self):
        path = ('test')
        ()
        (path)
        (([path]), ())

    def test_file_renamed(self):
        source = ('test')
        target = ((source), 'test2')
        ()
        (source, target)
        (([source, target]), ())

    def test_create_directory(self):
        ()
        directory = ('test')
        (([directory]), ())

    def test_file_created_in_directory(self):
        directory = ('test')
        ()
        path = ('test/file')
        (([path]), ())

    def test_move_directory(self):
        source = ('test')
        target = ((source), 'test2')
        ()
        (source, target)
        (([source, target]), ())

    def test_move_directory_out_of_watched(self):
        source = ('test')
        target = (self._junk_directory, 'test')
        ()
        (source, target)
        (([source]), ())
        with ((target, 'file'), 'w'):
            pass
        # Changes in the moved-away directory must no longer be reported.
        (([]), ())

    def test_move_directory_into_watched(self):
        source = (self._junk_directory, 'source')
        target = (self._directory, 'target')
        (source)
        ()
        (source, target)
        (([target]), ())
        file_path = (target, 'file')
        with (file_path, 'w+'):
            pass
        (([file_path]), ())

    def test_directory_deleted(self):
        path = ('test')
        ()
        (path)
        (([path]), ())

    def test_subdirectory_deleted(self):
        'Tests that internal _directory_to_subdirs is updated on delete.'
        path = ('test')
        sub_path = ('test/test2')
        ()
        (([sub_path]), self._watcher._directory_to_subdirs[path])
        (sub_path)
        (([sub_path]), ())
        ((), self._watcher._directory_to_subdirs[path])
        (path)
        (([path]), ())

    def test_symlink(self):
        sym_target = (self._directory, 'test')
        ((self._junk_directory, 'subdir'))
        ()
        (self._junk_directory, sym_target)
        (([sym_target]), ())
        with ((self._junk_directory, 'file1'), 'w'):
            pass
        (([(self._directory, 'test', 'file1')]), ())
        (sym_target)
        (([sym_target]), ())
        with ((self._junk_directory, 'subdir', 'file2'), 'w'):
            pass
        # After the symlink is removed, junk-directory changes are invisible.
        ((), ())

    def test_many_directories(self):
        ('bigdir')
        ()
        path = ('bigdir/dir4/dir4/file')
        (([path]), ())
# Run the watcher tests when this module is executed directly.
if (__name__ == '__main__'):
    # NOTE(review): call target stripped -- presumably unittest.main().
    ()
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for google.appengine.tools.devappserver2.shutdown."""
import os
import signal
import time
import unittest
import google
import mox
from google.appengine.tools.devappserver2 import shutdown
class ShutdownTest(unittest.TestCase):
    """Tests for the shutdown module's quit/terminate helpers."""

    def setUp(self):
        # Stub out os.abort so the triple-terminate test can record an
        # expectation for it without actually aborting the test process.
        self.mox = mox.Mox()
        self.mox.StubOutWithMock(os, "abort")
        # Reset the module-level state mutated by the functions under test.
        shutdown._shutting_down = False
        shutdown._num_terminate_requests = 0
        # Remember the original signal handlers so tearDown can restore them
        # after test_install_signal_handlers replaces them.
        self._sigint_handler = signal.getsignal(signal.SIGINT)
        self._sigterm_handler = signal.getsignal(signal.SIGTERM)

    def tearDown(self):
        self.mox.UnsetStubs()
        signal.signal(signal.SIGINT, self._sigint_handler)
        signal.signal(signal.SIGTERM, self._sigterm_handler)

    def test_async_quit(self):
        # No expectations recorded: async_quit must not call os.abort.
        self.mox.ReplayAll()
        shutdown.async_quit()
        self.assertTrue(shutdown._shutting_down)
        self.mox.VerifyAll()

    def test_async_terminate(self):
        # Two terminate requests set the shutdown flag but must not abort.
        self.mox.ReplayAll()
        shutdown._async_terminate()
        self.assertTrue(shutdown._shutting_down)
        shutdown._async_terminate()
        self.mox.VerifyAll()

    def test_async_terminate_abort(self):
        # Recorded expectation: the third terminate request hard-aborts.
        os.abort()
        self.mox.ReplayAll()
        shutdown._async_terminate()
        self.assertTrue(shutdown._shutting_down)
        shutdown._async_terminate()
        shutdown._async_terminate()
        self.mox.VerifyAll()

    def test_install_signal_handlers(self):
        shutdown.install_signal_handlers()
        self.assertEqual(shutdown._async_terminate, signal.getsignal(signal.SIGINT))
        self.assertEqual(shutdown._async_terminate, signal.getsignal(signal.SIGTERM))

    def test_wait_until_shutdown(self):
        # time.sleep is stubbed to trigger async_quit, so wait_until_shutdown
        # must return after a single simulated one-second sleep.
        self.mox.StubOutWithMock(time, "sleep")
        time.sleep(1).WithSideEffects(lambda _: shutdown.async_quit())
        self.mox.ReplayAll()
        shutdown.wait_until_shutdown()
        self.mox.VerifyAll()

    def test_wait_until_shutdown_raise_interrupted_io(self):
        # wait_until_shutdown must swallow the IOError raised when a signal
        # interrupts sleep, and still notice that shutdown was requested.
        def quit_and_raise(*_):
            shutdown.async_quit()
            raise IOError

        self.mox.StubOutWithMock(time, "sleep")
        time.sleep(1).WithSideEffects(quit_and_raise)
        self.mox.ReplayAll()
        shutdown.wait_until_shutdown()
        self.mox.VerifyAll()
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A thread-safe queue in which removed objects put back to the front."""
import logging
import queue
import threading
import time
logger = logging.getLogger("google.appengine.tools.requeue")
class ReQueue(object):
"""A special thread-safe queue.
A ReQueue allows unfinished work items to be returned with a call to
reput(). When an item is reput, task_done() should *not* be called
in addition, getting an item that has been reput does not increase
the number of outstanding tasks.
This class shares an interface with Queue.Queue and provides the
additional reput method.
"""
def __init__(
self,
queue_capacity,
requeue_capacity=None,
queue_factory=queue.Queue,
get_time=time.time,
):
"""Initialize a ReQueue instance.
Args:
queue_capacity: The number of items that can be put in the ReQueue.
requeue_capacity: The numer of items that can be reput in the ReQueue.
queue_factory: Used for dependency injection.
get_time: Used for dependency injection.
"""
if requeue_capacity is None:
requeue_capacity = queue_capacity
self.get_time = get_time
self.queue = queue_factory(queue_capacity)
self.requeue = queue_factory(requeue_capacity)
self.lock = threading.Lock()
self.put_cond = threading.Condition(self.lock)
self.get_cond = threading.Condition(self.lock)
def _DoWithTimeout(
self, action, exc, wait_cond, done_cond, lock, timeout=None, block=True
):
"""Performs the given action with a timeout.
The action must be non-blocking, and raise an instance of exc on a
recoverable failure. If the action fails with an instance of exc,
we wait on wait_cond before trying again. Failure after the
timeout is reached is propagated as an exception. Success is
signalled by notifying on done_cond and returning the result of
the action. If action raises any exception besides an instance of
exc, it is immediately propagated.
Args:
action: A callable that performs a non-blocking action.
exc: An exception type that is thrown by the action to indicate
a recoverable error.
wait_cond: A condition variable which should be waited on when
action throws exc.
done_cond: A condition variable to signal if the action returns.
lock: The lock used by wait_cond and done_cond.
timeout: A non-negative float indicating the maximum time to wait.
block: Whether to block if the action cannot complete immediately.
Returns:
The result of the action, if it is successful.
Raises:
ValueError: If the timeout argument is negative.
"""
if timeout is not None and timeout < 0.0:
raise ValueError("'timeout' must not be a negative number")
if not block:
timeout = 0.0
result = None
success = False
start_time = self.get_time()
lock.acquire()
try:
while not success:
try:
result = action()
success = True
except Exception as e:
if not isinstance(e, exc):
raise e
if timeout is not None:
elapsed_time = self.get_time() - start_time
timeout -= elapsed_time
if timeout <= 0.0:
raise e
wait_cond.wait(timeout)
finally:
if success:
done_cond.notify()
lock.release()
return result
def put(self, item, block=True, timeout=None):
"""Put an item into the requeue.
Args:
item: An item to add to the requeue.
block: Whether to block if the requeue is full.
timeout: Maximum on how long to wait until the queue is non-full.
Raises:
Queue.Full if the queue is full and the timeout expires.
"""
def PutAction():
self.queue.put(item, block=False)
self._DoWithTimeout(
PutAction,
queue.Full,
self.get_cond,
self.put_cond,
self.lock,
timeout=timeout,
block=block,
)
def reput(self, item, block=True, timeout=None):
"""Re-put an item back into the requeue.
Re-putting an item does not increase the number of outstanding
tasks, so the reput item should be uniquely associated with an
item that was previously removed from the requeue and for which
TaskDone has not been called.
Args:
item: An item to add to the requeue.
block: Whether to block if the requeue is full.
timeout: Maximum on how long to wait until the queue is non-full.
Raises:
Queue.Full is the queue is full and the timeout expires.
"""
def ReputAction():
self.requeue.put(item, block=False)
self._DoWithTimeout(
ReputAction,
queue.Full,
self.get_cond,
self.put_cond,
self.lock,
timeout=timeout,
block=block,
)
def get(self, block=True, timeout=None):
"""Get an item from the requeue.
Args:
block: Whether to block if the requeue is empty.
timeout: Maximum on how long to wait until the requeue is non-empty.
Returns:
An item from the requeue.
Raises:
Queue.Empty if the queue is empty and the timeout expires.
"""
def GetAction():
try:
result = self.requeue.get(block=False)
self.requeue.task_done()
except queue.Empty:
result = self.queue.get(block=False)
return result
return self._DoWithTimeout(
GetAction,
queue.Empty,
self.put_cond,
self.get_cond,
self.lock,
timeout=timeout,
block=block,
)
    def join(self):
        """Blocks until all of the items in the requeue have been processed."""
        # Only the main queue tracks outstanding tasks; re-put items are
        # deliberately not counted as new work (see reput()).
        self.queue.join()
    def task_done(self):
        """Indicate that a previously enqueued item has been fully processed."""
        # Forwarded to the main queue, the sole tracker of outstanding
        # tasks for join().
        self.queue.task_done()
def empty(self):
"""Returns true if the requeue is empty."""
return self.queue.empty() and self.requeue.empty()
def get_nowait(self):
"""Try to get an item from the queue without blocking."""
return self.get(block=False)
def qsize(self):
return self.queue.qsize() + self.requeue.qsize()
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""MySQL FLAG Constants.
These flags are used along with the FIELD_TYPE to indicate various
properties of columns in a result set.
"""
|
# Copyright (c) The PyAMF Project.
# See LICENSE for details.
"""
SQLAlchemy adapter module.
@see: U{SQLAlchemy homepage<http://www.sqlalchemy.org>}
@since: 0.4
"""
from sqlalchemy.orm import collections
import pyamf
from pyamf.adapters import util
# Register converters so SQLAlchemy's instrumented collection wrappers are
# serialized by PyAMF as their plain built-in equivalents.
pyamf.add_type(collections.InstrumentedList, util.to_list)
pyamf.add_type(collections.InstrumentedDict, util.to_dict)
pyamf.add_type(collections.InstrumentedSet, util.to_set)
|
"""
PyAMF Django adapter tests.

@since: 0.3.1
"""
import unittest
import sys
import os
import datetime
import pyamf
from pyamf.tests import util
import importlib
try:
    import django
except ImportError:
    # Tests degrade to skips when Django is not importable.
    django = None
if (django and (django.VERSION < (1, 0))):
    # Django releases older than 1.0 are treated as unavailable.
    django = None
try:
    # NOTE(review): decompiled residue -- this bare ``(settings)`` merely
    # probes whether the name is bound; the original call was stripped.
    (settings)
except NameError:
    from pyamf.tests.adapters.django_app import settings
# Module-level slots populated later by init_django() / setUpModule().
context = None
create_test_db = None
destroy_test_db = None
management = None
setup_test_environment = None
teardown_test_environment = None
models = None
adapter = None
def init_django():
    """Bootstrap Django and initialise this module."""
    # NOTE(review): decompiled residue -- several call sites below lost
    # their callee names (e.g. ``(settings)`` and ``(0, project_dir)`` look
    # like a path computation and ``sys.path.insert`` -- TODO confirm
    # against upstream PyAMF); as written the function no longer works.
    global django, management, create_test_db, destroy_test_db
    global setup_test_environment, teardown_test_environment
    if (not django):
        return
    from django.core import management
    project_dir = (settings)
    (0, project_dir)
    try:
        from django.test.utils import create_test_db, destroy_test_db
    except ImportError:
        # Newer Django moved the test-db helpers onto the connection.
        from django.db import connection
        create_test_db = connection.creation.create_test_db
        destroy_test_db = connection.creation.destroy_test_db
    from django.test.utils import setup_test_environment, teardown_test_environment
    return True
def setUpModule():
    """Called to set up the module by the test runner."""
    # NOTE(review): decompiled residue -- the snapshot dict values and the
    # ``if ():`` guard below are stripped call expressions, so this no
    # longer snapshots state or creates the test database.
    global context, models, adapter
    context = {'sys.path': sys.path[:], 'sys.modules': (), 'os.environ': ()}
    if ():
        from pyamf.tests.adapters.django_app.adapters import models
        from pyamf.adapters import _django_db_models_base as adapter
        ()
        settings.DATABASE_NAME = (0, True)
def teadDownModule():
    # NOTE(review): the name looks like a typo for ``tearDownModule`` --
    # unittest would never call it as spelled; confirm against upstream.
    # Also decompiled residue: the restore calls below lost their callee
    # names and are inert bare tuples.
    ()
    sys.path = context['sys.path']
    (context['sys.modules'], sys.modules)
    (context['os.environ'], os.environ)
    (settings.DATABASE_NAME, 2)
class BaseTestCase(unittest.TestCase):
    """Common base class: skips each test when Django is unavailable."""

    def setUp(self):
        # NOTE(review): decompiled residue -- the skip call lost its callee
        # name (presumably ``self.skipTest(...)`` -- confirm upstream).
        if (not django):
            ("'django' is not available")
class TypeMapTestCase(BaseTestCase):
    """Tests for basic encoding functionality."""
    # NOTE(review): decompiled residue -- encoder construction and the
    # assertion calls were stripped to bare parenthesised expressions; the
    # expected AMF byte strings survive but nothing is asserted any more.

    def test_objects_all(self):
        encoder = (pyamf.AMF0)
        (())
        ((), '\n\x00\x00\x00\x00')
        encoder = (pyamf.AMF3)
        (())
        ((), '\t\x01\x01')

    def test_NOT_PROVIDED(self):
        from django.db.models import fields
        ((), '\x06')
        encoder = (pyamf.AMF3)
        (fields.NOT_PROVIDED)
        ((), '\x00')
class ClassAliasTestCase(BaseTestCase):
    # NOTE(review): decompiled residue -- constructor, alias and assertion
    # callee names were stripped throughout this class, leaving bare
    # parenthesised tuples; inert until restored from upstream PyAMF.

    def test_time(self):
        x = ()
        x.t = (12, 12, 12)
        x.d = (2008, 3, 12)
        x.dt = (2008, 3, 12, 12, 12, 12)
        alias = (models.TimeClass, None)
        attrs = (x)
        (attrs, {'id': None, 'd': (2008, 3, 12, 0, 0), 'dt': (2008, 3, 12, 12, 12, 12), 't': (1970, 1, 1, 12, 12, 12)})
        y = ()
        (y, {'id': None, 'd': (2008, 3, 12, 0, 0), 'dt': (2008, 3, 12, 12, 12, 12), 't': (1970, 1, 1, 12, 12, 12)})
        (y.id, None)
        (y.d, (2008, 3, 12))
        (y.dt, (2008, 3, 12, 12, 12, 12))
        (y.t, (12, 12, 12))
        y = ()
        (y, {'id': None, 'd': None, 'dt': None, 't': None})
        (y.id, None)
        (y.d, None)
        (y.dt, None)
        (y.t, None)

    def test_undefined(self):
        from django.db import models
        from django.db.models import fields

        class UndefinedClass(models.Model):
            pass
        alias = (UndefinedClass, None)
        x = ()
        (x, {'id': pyamf.Undefined})
        (x.id, fields.NOT_PROVIDED)
        x.id = fields.NOT_PROVIDED
        attrs = (x)
        (attrs, {'id': pyamf.Undefined})

    def test_non_field_prop(self):
        from django.db import models

        class Book(models.Model):
            def _get_number_of_odd_pages(self):
                return 234
            numberOfOddPages = (_get_number_of_odd_pages)
        alias = (Book, 'Book')
        x = ()
        ((x), {'numberOfOddPages': 234, 'id': None})
        (x, {'numberOfOddPages': 24, 'id': None})
        (x.numberOfOddPages, 234)

    def test_dynamic(self):
        """Test for dynamic property encoding."""
        alias = (models.SimplestModel, 'Book')
        x = ()
        x.spam = 'eggs'
        ((x), {'spam': 'eggs', 'id': None})
        (x, {'spam': 'foo', 'id': None})
        (x.spam, 'foo')

    def test_properties(self):
        """See #764."""
        from django.db import models

        class Foob(models.Model):
            def _get_days(self):
                return 1

            def _set_days(self, val):
                if (not (1 == val)):
                    raise ()
            days = (_get_days, _set_days)
        alias = (Foob, 'Bar')
        x = ()
        (x.days, 1)
        ((x), {'days': 1, 'id': None})
        (x, {'id': None})
class ForeignKeyTestCase(BaseTestCase):
    # NOTE(review): decompiled residue -- ORM queries, save/delete
    # scheduling and assertion calls all lost their callee names; the
    # expected attribute dicts and AMF byte strings survive but the tests
    # are inert until restored from upstream PyAMF.

    def test_one_to_many(self):
        r = ()
        ()
        (r.delete)
        r2 = ()
        ()
        (r2.delete)
        a = ()
        ()
        (a.delete)
        (a.id, 1)
        del a
        a = ()[0]
        (('_reporter_cache' in a.__dict__))
        a.reporter
        (('_reporter_cache' in a.__dict__))
        del a
        a = ()[0]
        alias = (models.Article)
        ((alias, 'fields'))
        attrs = (a)
        (attrs, {'headline': 'This is a test', 'id': 1, 'publications': []})
        (('_reporter_cache' in a.__dict__))
        ((), '\n\x0b\x01\x11headline\x06\x1dThis is a test\x05id\x04\x01\x19publications\t\x01\x01\x01')
        del a
        a = ()[0]
        alias = (models.Article)
        ((alias, 'fields'))
        ((a), {'headline': 'This is a test', 'id': 1, 'reporter': r, 'publications': []})
        (('_reporter_cache' in a.__dict__))
        ((), '\n\x0b\x01\x11reporter\n\x0b\x01\x15first_name\x06\tJohn\x13last_name\x06\x0bSmith\x05id\x04\x01\x0bemail\x06!john@example.com\x01\x11headline\x06\x1dThis is a test\x19publications\t\x01\x01\n\x04\x01\x01')

    def test_many_to_many(self):
        p1 = ()
        ()
        p2 = ()
        ()
        p3 = ()
        ()
        (p1.delete)
        (p2.delete)
        (p3.delete)
        a1 = ()
        ()
        (a1.delete)
        (a1.id, 1)
        (p1)
        pub_alias = (models.Publication, None)
        art_alias = (models.Article, None)
        test_publication = ()[0]
        test_article = ()[0]
        attrs = (test_publication)
        (attrs, {'id': 1, 'title': 'The Python Journal'})
        attrs = (test_article)
        (attrs, {'headline': 'Django lets you build Web apps easily', 'id': 1, 'publications': [p1]})
        x = ()
        (x, {'headline': 'Test', 'id': 1, 'publications': [p1]})
        (x.headline, 'Test')
        (x.id, 1)
        ((()), [p1])
        y = ()
        attrs = (y, {'headline': 'Django lets you build Web apps easily', 'id': 0, 'publications': []})
        (attrs, {'headline': 'Django lets you build Web apps easily'})

    def test_nullable_foreign_keys(self):
        x = ()
        ()
        (x.delete)
        nfk_alias = (models.NullForeignKey, None)
        bfk_alias = (models.BlankForeignKey, None)
        nfk = ()
        attrs = (nfk)
        (attrs, {'id': None})
        bfk = ()
        attrs = (bfk)
        (attrs, {'id': None})

    def test_static_relation(self):
        """See #693."""
        from pyamf import util
        (models.StaticRelation)
        alias = (models.StaticRelation)
        ()
        (('gak' in alias.relations))
        (('gak' in alias.decodable_properties))
        (('gak' in alias.static_attrs))
        x = ()
        (x, {'id': None, 'gak': 'foo'})
class I18NTestCase(BaseTestCase):
    # NOTE(review): decompiled residue -- the encode/assert call was
    # stripped to a bare tuple; inert until restored.

    def test_encode(self):
        from django.utils.translation import ugettext_lazy
        ((), '\x06\x0bHello')
class PKTestCase(BaseTestCase):
    """See ticket #599 for this. Check to make sure that django pk fields
    are set first.
    """
    # NOTE(review): decompiled residue -- model construction, save/delete
    # scheduling and assertion calls lost their callee names; inert until
    # restored from upstream PyAMF.

    def test_behaviour(self):
        p = ()
        a = ()
        (ValueError, (lambda a, p: (p)), a, p)
        ()
        ()
        (p.delete)
        (a.delete)
        (a.id, 1)
        article_alias = (models.Article, None)
        x = ()
        (x, {'headline': 'Foo bar!', 'id': 1, 'publications': [p]})
        (x.headline, 'Foo bar!')
        (x.id, 1)
        ((()), [p])

    def test_none(self):
        """See #556. Make sure that PK fields with a value of 0 are
        actually set to C{None}.
        """
        alias = (models.SimplestModel, None)
        x = ()
        (x.id, None)
        (x, {'id': 0})
        (x.id, None)

    def test_no_pk(self):
        """Ensure that Models without a primary key are correctly
        serialized. See #691.
        """
        instances = [(), ()]
        encoded = ()
        decoded = ((encoded))
        (decoded[0]['name'], 'a')
        (decoded[1]['name'], 'b')
class ModelInheritanceTestCase(BaseTestCase):
    """Tests for L{Django model inheritance<http://docs.djangoproject.com/en/dev/topics/db/models/#model-inheritance>}"""
    # NOTE(review): decompiled residue -- alias/instance construction and
    # the assertions lost their callee names; inert until restored.

    def test_abstract(self):
        alias = (models.Student)
        x = ()
        attrs = (x)
        (attrs, {'age': None, 'home_group': '', 'id': None, 'name': ''})

    def test_concrete(self):
        alias = (models.Place)
        x = ()
        attrs = (x)
        (attrs, {'id': None, 'name': '', 'address': ''})
        alias = (models.Restaurant)
        x = ()
        attrs = (x)
        (attrs, {'id': None, 'name': '', 'address': '', 'serves_hot_dogs': False, 'serves_pizza': False})
class MockFile(object):
    """Stand-in for L{django.core.files.base.File}: an empty, zero-length
    file-like object."""

    def chunks(self):
        """Yield no chunks -- the mock file holds no content."""
        return []

    def read(self, n):
        """Return an empty string no matter how many bytes are requested."""
        return ''

    def __len__(self):
        return 0
class FieldsTestCase(BaseTestCase):
    """Tests for L{fields}."""
    # NOTE(review): decompiled residue -- file creation, cleanup and
    # assertion calls lost their callee names; inert until restored.

    def test_file(self):
        alias = (models.FileModel)
        i = ()
        ('bar', ())
        (i.file.delete)
        ()
        attrs = (i)
        (attrs, {'text': '', 'id': 1, 'file': 'file_model/bar'})
        attrs = (i, attrs)
        (attrs, {'text': ''})
class ImageTestCase(BaseTestCase):
    """Tests for L{fields}."""
    # NOTE(review): decompiled residue -- the skip call in setUp and the
    # file/assertion calls lost their callee names; inert until restored.

    def setUp(self):
        try:
            # Image fields require the PIL library; skip otherwise.
            import PIL
        except ImportError:
            ("'PIL' is not available")
        (self)

    def test_image(self):
        alias = (models.Profile)
        i = ()
        ('bar', ())
        (i.file.delete)
        ()
        (i.delete)
        attrs = (i)
        (attrs, {'text': '', 'id': 1, 'file': 'profile/bar'})
        attrs = (i, attrs)
        (attrs, {'text': ''})
class ReferenceTestCase(BaseTestCase, util.EncoderMixIn):
    """Test case to make sure that the same object from the database is
    encoded by reference."""
    # NOTE(review): decompiled residue -- setup, save/delete and assertion
    # calls were stripped to bare parenthesised expressions; inert until
    # restored from upstream PyAMF.

    # AMF version used by the EncoderMixIn.
    amf_type = pyamf.AMF3

    def setUp(self):
        (self)
        (self)

    def test_not_referenced(self):
        """Test to ensure that we observe the correct behaviour in the
        Django ORM."""
        f = ()
        f.name = 'foo'
        b = ()
        b.name = 'bar'
        ()
        b.foo = f
        ()
        f.bar = b
        ()
        (f.delete)
        (b.delete)
        (f.id, 1)
        foo = ()
        ((foo.bar.foo is foo))

    def test_referenced_encode(self):
        f = ()
        f.name = 'foo'
        b = ()
        b.name = 'bar'
        ()
        b.foo = f
        ()
        f.bar = b
        ()
        (f.delete)
        (b.delete)
        (f.id, 1)
        foo = ()
        foo.bar.foo
        (foo, '\n\x0b\x01\x07bar\n\x0b\x01\x07foo\n\x00\x05id\x04\x01\tname\x06\x00\x01\x04\x04\x01\x06\x06\x02\x01')
class AuthTestCase(BaseTestCase):
    """Tests for L{django.contrib.auth.models}."""
    # NOTE(review): decompiled residue -- the alias lookup and assertions
    # lost their callee names; inert until restored.

    def test_user(self):
        from django.contrib.auth import models
        alias = (models.User)
        (alias, 'django.contrib.auth.models.User')
        (alias.exclude_attrs, ('message_set', 'password'))
        (alias.readonly_attrs, ('username',))
class DBColumnTestCase(BaseTestCase):
    """Tests for #807."""
    # NOTE(review): decompiled residue -- alias/model construction and the
    # assertions below lost their callee names; inert until restored.

    def setUp(self):
        (self)
        self.alias = (models.DBColumnModel, None)
        self.model = ()

    def test_encodable_attrs(self):
        def attrs():
            return (self.model)
        ((), {'id': None})
        x = ()
        ()
        (x.delete)
        self.model.bar = x
        ((), {'id': None, 'bar': x})
import os
import warnings
import cherrypy
from cherrypy._cpcompat import iteritems, copykeys, builtins
class Checker(object):
    """A checker for CherryPy sites and their mounted applications.

    When this object is called at engine startup, it executes each
    of its own methods whose names start with ``check_``. If you wish
    to disable selected checks, simply add a line in your global
    config which sets the appropriate method to False::

        [global]
        checker.check_skipped_app_config = False

    You may also dynamically add or replace ``check_*`` methods in this way.
    """

    on = True
    """If True (the default), run all checks; if False, turn off all checks."""

    def __init__(self):
        self._populate_known_types()

    def __call__(self):
        """Run all check_* methods."""
        if self.on:
            # Temporarily install our terse warning formatter so checker
            # output is readable; always restore the original afterwards.
            oldformatwarning = warnings.formatwarning
            warnings.formatwarning = self.formatwarning
            try:
                for name in dir(self):
                    if name.startswith("check_"):
                        method = getattr(self, name)
                        # A check set to False in config is skipped here.
                        if method and hasattr(method, "__call__"):
                            method()
            finally:
                warnings.formatwarning = oldformatwarning

    def formatwarning(self, message, category, filename, lineno, line=None):
        """Function to format a warning."""
        return "CherryPy Checker:\n%s\n\n" % message

    # This value should be set inside _cpconfig.
    global_config_contained_paths = False

    def check_app_config_entries_dont_start_with_script_name(self):
        """Check for Application config with sections that repeat script_name."""
        for sn, app in list(cherrypy.tree.apps.items()):
            if not isinstance(app, cherrypy.Application):
                continue
            if not app.config:
                continue
            if sn == "":
                continue
            sn_atoms = sn.strip("/").split("/")
            for key in list(app.config.keys()):
                key_atoms = key.strip("/").split("/")
                # A section path that begins with the mount point's atoms
                # repeats the script name and is almost certainly a mistake.
                if key_atoms[: len(sn_atoms)] == sn_atoms:
                    warnings.warn(
                        "The application mounted at %r has config "
                        "entries that start with its script name: %r" % (sn, key)
                    )

    def check_site_config_entries_in_app_config(self):
        """Check for mounted Applications that have site-scoped config."""
        for sn, app in iteritems(cherrypy.tree.apps):
            if not isinstance(app, cherrypy.Application):
                continue
            msg = []
            for section, entries in iteritems(app.config):
                if section.startswith("/"):
                    for key, value in iteritems(entries):
                        # These namespaces only make sense site-wide.
                        for n in ("engine.", "server.", "tree.", "checker."):
                            if key.startswith(n):
                                msg.append("[%s] %s = %s" % (section, key, value))
            if msg:
                msg.insert(
                    0,
                    "The application mounted at %r contains the following "
                    "config entries, which are only allowed in site-wide "
                    "config. Move them to a [global] section and pass them "
                    "to cherrypy.config.update() instead of tree.mount()." % sn,
                )
                warnings.warn(os.linesep.join(msg))

    def check_skipped_app_config(self):
        """Check for mounted Applications that have no config."""
        for sn, app in list(cherrypy.tree.apps.items()):
            if not isinstance(app, cherrypy.Application):
                continue
            if not app.config:
                msg = "The Application mounted at %r has an empty config." % sn
                if self.global_config_contained_paths:
                    msg += (
                        " It looks like the config you passed to "
                        "cherrypy.config.update() contains application-"
                        "specific sections. You must explicitly pass "
                        "application config via "
                        "cherrypy.tree.mount(..., config=app_config)"
                    )
                warnings.warn(msg)
                # NOTE(review): returns after the first empty-config app,
                # so subsequent apps are not checked -- confirm intended.
                return

    def check_app_config_brackets(self):
        """Check for Application config with extraneous brackets in section names."""
        for sn, app in list(cherrypy.tree.apps.items()):
            if not isinstance(app, cherrypy.Application):
                continue
            if not app.config:
                continue
            for key in list(app.config.keys()):
                if key.startswith("[") or key.endswith("]"):
                    warnings.warn(
                        "The application mounted at %r has config "
                        "section names with extraneous brackets: %r. "
                        "Config *files* need brackets; config *dicts* "
                        "(e.g. passed to tree.mount) do not." % (sn, key)
                    )

    def check_static_paths(self):
        """Check Application config for incorrect static paths."""
        # Use the dummy Request object in the main thread.
        request = cherrypy.request
        for sn, app in list(cherrypy.tree.apps.items()):
            if not isinstance(app, cherrypy.Application):
                continue
            request.app = app
            for section in app.config:
                # get_resource will populate request.config
                request.get_resource(section + "/dummy.html")
                conf = request.config.get
                if conf("tools.staticdir.on", False):
                    msg = ""
                    root = conf("tools.staticdir.root")
                    dir = conf("tools.staticdir.dir")
                    if dir is None:
                        msg = "tools.staticdir.dir is not set."
                    else:
                        fulldir = ""
                        if os.path.isabs(dir):
                            fulldir = dir
                            if root:
                                msg = (
                                    "dir is an absolute path, even "
                                    "though a root is provided."
                                )
                                testdir = os.path.join(root, dir[1:])
                                if os.path.exists(testdir):
                                    msg += (
                                        "\nIf you meant to serve the "
                                        "filesystem folder at %r, remove "
                                        "the leading slash from dir." % testdir
                                    )
                        else:
                            if not root:
                                msg = "dir is a relative path and no root provided."
                            else:
                                fulldir = os.path.join(root, dir)
                                if not os.path.isabs(fulldir):
                                    msg = "%r is not an absolute path." % fulldir
                        if fulldir and not os.path.exists(fulldir):
                            if msg:
                                msg += "\n"
                            msg += (
                                "%r (root + dir) is not an existing "
                                "filesystem path." % fulldir
                            )
                    if msg:
                        warnings.warn(
                            "%s\nsection: [%s]\nroot: %r\ndir: %r"
                            % (msg, section, root, dir)
                        )

    # -------------------------- Compatibility -------------------------- #

    # Maps obsolete config keys to their modern replacement (None when no
    # direct replacement exists).
    obsolete = {
        "server.default_content_type": "tools.response_headers.headers",
        "log_access_file": "log.access_file",
        "log_config_options": None,
        "log_file": "log.error_file",
        "log_file_not_found": None,
        "log_request_headers": "tools.log_headers.on",
        "log_to_screen": "log.screen",
        "show_tracebacks": "request.show_tracebacks",
        "throw_errors": "request.throw_errors",
        "profiler.on": (
            "cherrypy.tree.mount(profiler.make_app(" "cherrypy.Application(Root())))"
        ),
    }
    deprecated = {}

    def _compat(self, config):
        """Process config and warn on each obsolete or deprecated entry."""
        for section, conf in list(config.items()):
            if isinstance(conf, dict):
                for k, v in list(conf.items()):
                    if k in self.obsolete:
                        warnings.warn(
                            "%r is obsolete. Use %r instead.\n"
                            "section: [%s]" % (k, self.obsolete[k], section)
                        )
                    elif k in self.deprecated:
                        warnings.warn(
                            "%r is deprecated. Use %r instead.\n"
                            "section: [%s]" % (k, self.deprecated[k], section)
                        )
            else:
                # Flat (non-sectioned) entry: the section name is the key.
                if section in self.obsolete:
                    warnings.warn(
                        "%r is obsolete. Use %r instead."
                        % (section, self.obsolete[section])
                    )
                elif section in self.deprecated:
                    warnings.warn(
                        "%r is deprecated. Use %r instead."
                        % (section, self.deprecated[section])
                    )

    def check_compatibility(self):
        """Process config and warn on each obsolete or deprecated entry."""
        self._compat(cherrypy.config)
        for sn, app in list(cherrypy.tree.apps.items()):
            if not isinstance(app, cherrypy.Application):
                continue
            self._compat(app.config)

    # ------------------------ Known Namespaces ------------------------ #

    # Extra namespaces a deployment may declare as valid.
    extra_config_namespaces = []

    def _known_ns(self, app):
        # Collect every namespace the app could legitimately use.
        ns = ["wsgi"]
        ns.extend(copykeys(app.toolboxes))
        ns.extend(copykeys(app.namespaces))
        ns.extend(copykeys(app.request_class.namespaces))
        ns.extend(copykeys(cherrypy.config.namespaces))
        ns += self.extra_config_namespaces
        for section, conf in list(app.config.items()):
            is_path_section = section.startswith("/")
            if is_path_section and isinstance(conf, dict):
                for k, v in list(conf.items()):
                    atoms = k.split(".")
                    if len(atoms) > 1:
                        if atoms[0] not in ns:
                            # Spit out a special warning if a known
                            # namespace is preceded by "cherrypy."
                            if atoms[0] == "cherrypy" and atoms[1] in ns:
                                msg = (
                                    "The config entry %r is invalid; "
                                    "try %r instead.\nsection: [%s]"
                                    % (k, ".".join(atoms[1:]), section)
                                )
                            else:
                                msg = (
                                    "The config entry %r is invalid, because "
                                    "the %r config namespace is unknown.\n"
                                    "section: [%s]" % (k, atoms[0], section)
                                )
                            warnings.warn(msg)
                        elif atoms[0] == "tools":
                            if atoms[1] not in dir(cherrypy.tools):
                                msg = (
                                    "The config entry %r may be invalid, "
                                    "because the %r tool was not found.\n"
                                    "section: [%s]" % (k, atoms[1], section)
                                )
                                warnings.warn(msg)

    def check_config_namespaces(self):
        """Process config and warn on each unknown config namespace."""
        for sn, app in list(cherrypy.tree.apps.items()):
            if not isinstance(app, cherrypy.Application):
                continue
            self._known_ns(app)

    # -------------------------- Config Types -------------------------- #

    # Maps "namespace.attr" config keys to the type of their default value;
    # filled in by _populate_known_types().
    known_config_types = {}

    def _populate_known_types(self):
        # All built-in classes (type(str) is the metaclass ``type``).
        b = [x for x in list(vars(builtins).values()) if type(x) is type(str)]

        def traverse(obj, namespace):
            for name in dir(obj):
                # Hack for 3.2's warning about body_params
                if name == "body_params":
                    continue
                vtype = type(getattr(obj, name, None))
                if vtype in b:
                    self.known_config_types[namespace + "." + name] = vtype

        traverse(cherrypy.request, "request")
        traverse(cherrypy.response, "response")
        traverse(cherrypy.server, "server")
        traverse(cherrypy.engine, "engine")
        traverse(cherrypy.log, "log")

    def _known_types(self, config):
        msg = (
            "The config entry %r in section %r is of type %r, "
            "which does not match the expected type %r."
        )
        for section, conf in list(config.items()):
            if isinstance(conf, dict):
                for k, v in list(conf.items()):
                    if v is not None:
                        expected_type = self.known_config_types.get(k, None)
                        vtype = type(v)
                        if expected_type and vtype != expected_type:
                            warnings.warn(
                                msg
                                % (k, section, vtype.__name__, expected_type.__name__)
                            )
            else:
                # Flat entry: the section name is the key.
                k, v = section, conf
                if v is not None:
                    expected_type = self.known_config_types.get(k, None)
                    vtype = type(v)
                    if expected_type and vtype != expected_type:
                        warnings.warn(
                            msg % (k, section, vtype.__name__, expected_type.__name__)
                        )

    def check_config_types(self):
        """Assert that config values are of the same type as default values."""
        self._known_types(cherrypy.config)
        for sn, app in list(cherrypy.tree.apps.items()):
            if not isinstance(app, cherrypy.Application):
                continue
            self._known_types(app.config)

    # -------------------- Specific config warnings -------------------- #

    def check_localhost(self):
        """Warn if any socket_host is 'localhost'. See #711."""
        for k, v in list(cherrypy.config.items()):
            if k == "server.socket_host" and v == "localhost":
                warnings.warn(
                    "The use of 'localhost' as a socket host can "
                    "cause problems on newer systems, since 'localhost' can "
                    "map to either an IPv4 or an IPv6 address. You should "
                    "use '127.0.0.1' or '[::1]' instead."
                )
|
# Deprecation shim: warn importers that this module has moved, then
# re-export everything from the new location so old imports keep working.
import warnings

warnings.warn(
    "cherrypy.lib.http has been deprecated and will be removed "
    "in CherryPy 3.3 use cherrypy.lib.httputil instead.",
    DeprecationWarning,
)
from cherrypy.lib.httputil import *
|
# This file is part of CherryPy <http://www.cherrypy.org/>
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:expandtab:fileencoding=utf-8
import cherrypy
from cherrypy.lib import auth_digest
from cherrypy.test import helper
class DigestAuthTest(helper.CPWebCase):
    """Functional tests for CherryPy's ``auth_digest`` tool."""

    def setup_server():
        class Root:
            def index(self):
                return "This is public."
            index.exposed = True

        class DigestProtected:
            def index(self):
                return "Hello %s, you've been authorized." % cherrypy.request.login
            index.exposed = True

        def fetch_users():
            return {"test": "test"}

        get_ha1 = cherrypy.lib.auth_digest.get_ha1_dict_plain(fetch_users())
        conf = {
            "/digest": {
                "tools.auth_digest.on": True,
                "tools.auth_digest.realm": "localhost",
                "tools.auth_digest.get_ha1": get_ha1,
                "tools.auth_digest.key": "a565c27146791cfb",
                "tools.auth_digest.debug": "True",
            }
        }
        root = Root()
        root.digest = DigestProtected()
        cherrypy.tree.mount(root, config=conf)
    # Decorator-free staticmethod registration (pre-2.4 idiom).
    setup_server = staticmethod(setup_server)

    def testPublic(self):
        # The root page must be reachable without any credentials.
        self.getPage("/")
        self.assertStatus("200 OK")
        self.assertHeader("Content-Type", "text/html;charset=utf-8")
        self.assertBody("This is public.")

    def testDigest(self):
        # An unauthenticated request must be challenged with a 401 and a
        # Digest WWW-Authenticate header.
        self.getPage("/digest/")
        self.assertStatus(401)
        value = None
        for k, v in self.headers:
            if k.lower() == "www-authenticate":
                if v.startswith("Digest"):
                    value = v
                    break
        if value is None:
            self._handlewebError("Digest authentification scheme was not found")
        # Strip the leading "Digest " and parse the challenge parameters
        # into a lowercase-keyed dict.
        value = value[7:]
        items = value.split(", ")
        tokens = {}
        for item in items:
            key, value = item.split("=")
            tokens[key.lower()] = value
        missing_msg = "%s is missing"
        bad_value_msg = "'%s' was expecting '%s' but found '%s'"
        nonce = None
        if "realm" not in tokens:
            self._handlewebError(missing_msg % "realm")
        elif tokens["realm"] != '"localhost"':
            self._handlewebError(
                bad_value_msg % ("realm", '"localhost"', tokens["realm"])
            )
        if "nonce" not in tokens:
            self._handlewebError(missing_msg % "nonce")
        else:
            nonce = tokens["nonce"].strip('"')
        if "algorithm" not in tokens:
            self._handlewebError(missing_msg % "algorithm")
        elif tokens["algorithm"] != '"MD5"':
            self._handlewebError(
                bad_value_msg % ("algorithm", '"MD5"', tokens["algorithm"])
            )
        if "qop" not in tokens:
            self._handlewebError(missing_msg % "qop")
        elif tokens["qop"] != '"auth"':
            self._handlewebError(bad_value_msg % ("qop", '"auth"', tokens["qop"]))
        get_ha1 = auth_digest.get_ha1_dict_plain({"test": "test"})
        # Test user agent response with a wrong value for 'realm'
        base_auth = 'Digest username="test", realm="wrong realm", nonce="%s", uri="/digest/", algorithm=MD5, response="%s", qop=auth, nc=%s, cnonce="1522e61005789929"'
        auth_header = base_auth % (
            nonce,
            "11111111111111111111111111111111",
            "00000001",
        )
        auth = auth_digest.HttpDigestAuthorization(auth_header, "GET")
        # calculate the response digest
        ha1 = get_ha1(auth.realm, "test")
        response = auth.request_digest(ha1)
        # send response with correct response digest, but wrong realm
        auth_header = base_auth % (nonce, response, "00000001")
        self.getPage("/digest/", [("Authorization", auth_header)])
        self.assertStatus(401)
        # Test that must pass
        base_auth = 'Digest username="test", realm="localhost", nonce="%s", uri="/digest/", algorithm=MD5, response="%s", qop=auth, nc=%s, cnonce="1522e61005789929"'
        auth_header = base_auth % (
            nonce,
            "11111111111111111111111111111111",
            "00000001",
        )
        auth = auth_digest.HttpDigestAuthorization(auth_header, "GET")
        # calculate the response digest
        ha1 = get_ha1("localhost", "test")
        response = auth.request_digest(ha1)
        # send response with correct response digest
        auth_header = base_auth % (nonce, response, "00000001")
        self.getPage("/digest/", [("Authorization", auth_header)])
        self.assertStatus("200 OK")
        self.assertBody("Hello test, you've been authorized.")
|