label
class label 3
classes | code_before
stringlengths 417
269k
| code_after
stringlengths 415
269k
| label_text
stringclasses 3
values | deleted
dict | added
dict | normalized_code_before
stringlengths 316
220k
| normalized_code_after
stringlengths 316
220k
| before_doc_string_pos
sequence | after_doc_string_pos
sequence |
---|---|---|---|---|---|---|---|---|---|
0CWE-22
| from __future__ import annotations
import email.utils
import errno
import os
import sys
import urllib.parse
from abc import abstractmethod
from datetime import datetime
from typing import List, Optional
import attr
import requests
import tenacity
from .Line import Line
@attr.s(auto_attribs=True)
class InputContent:
rawLines: List[str]
date: Optional[datetime.date]
@property
def lines(self) -> List[Line]:
return [Line(i, line) for i, line in enumerate(self.rawLines, 1)]
@property
def content(self) -> str:
return "".join(self.rawLines)
class InputSource:
"""Represents a thing that can produce specification input text.
Input can be read from stdin ("-"), an HTTPS URL, or a file. Other
InputSources can be found relative to URLs and files, and there's a context
manager for temporarily switching to the directory of a file InputSource.
"""
def __new__(cls, sourceName: str):
"""Dispatches to the right subclass."""
if cls != InputSource:
# Only take control of calls to InputSource(...) itself.
return super().__new__(cls)
if sourceName == "-":
return StdinInputSource(sourceName)
if sourceName.startswith("https:"):
return UrlInputSource(sourceName)
return FileInputSource(sourceName)
@abstractmethod
def __str__(self) -> str:
pass
def __repr__(self) -> str:
return "{}({!r})".format(self.__class__.__name__, str(self))
def __hash__(self):
return hash(str(self))
def __eq__(self, other):
return str(self) == str(other)
@abstractmethod
def read(self) -> InputContent:
"""Fully reads the source."""
def hasDirectory(self) -> bool:
"""Only some InputSources have a directory."""
return False
def directory(self) -> str:
"""Suitable for passing to subprocess(cwd=)."""
raise TypeError("{} instances don't have directories.".format(type(self)))
def relative(self, _) -> Optional[InputSource]:
"""Resolves relativePath relative to this InputSource.
For example, InputSource("/foo/bar/baz.txt").relative("quux/fuzzy.txt")
will be InputSource("/foo/bar/quux/fuzzy.txt").
If this source type can't find others relative to itself, returns None.
"""
return None
def mtime(self) -> Optional[float]:
"""Returns the last modification time of this source, if that's known."""
return None
def cheaplyExists(self, _) -> Optional[bool]:
"""If it's cheap to determine, returns whether relativePath exists.
Otherwise, returns None.
"""
return None
def __getattr__(self, name):
"""Hack to make pylint happy, since all the attrs are defined
on the subclasses that __new__ dynamically dispatches to.
See https://stackoverflow.com/a/60731663/455535
"""
print(f"No member '{name}' contained in InputSource.")
return ""
class StdinInputSource(InputSource):
def __init__(self, sourceName: str):
assert sourceName == "-"
self.type = "stdin"
self.sourceName = sourceName
self.content = None
def __str__(self) -> str:
return "-"
def read(self) -> InputContent:
return InputContent(sys.stdin.readlines(), None)
class UrlInputSource(InputSource):
def __init__(self, sourceName: str):
assert sourceName.startswith("https:")
self.sourceName = sourceName
self.type = "url"
def __str__(self) -> str:
return self.sourceName
@tenacity.retry(
reraise=True,
stop=tenacity.stop_after_attempt(3),
wait=tenacity.wait_random(1, 2),
)
def _fetch(self):
response = requests.get(self.sourceName, timeout=10)
if response.status_code == 404:
# This matches the OSErrors expected by older uses of
# FileInputSource. It skips the retry, since the server has given us
# a concrete, expected answer.
raise FileNotFoundError(errno.ENOENT, response.text, self.sourceName)
response.raise_for_status()
return response
def read(self) -> InputContent:
response = self._fetch()
date = None
if "Date" in response.headers:
# Use the response's Date header, although servers don't always set
# this according to the last change to the file.
date = email.utils.parsedate_to_datetime(response.headers["Date"]).date()
return InputContent(response.text.splitlines(True), date)
def relative(self, relativePath) -> UrlInputSource:
return UrlInputSource(urllib.parse.urljoin(self.sourceName, relativePath))
class FileInputSource(InputSource):
def __init__(self, sourceName: str):
self.sourceName = sourceName
self.type = "file"
self.content = None
def __str__(self) -> str:
return self.sourceName
def read(self) -> InputContent:
with open(self.sourceName, encoding="utf-8") as f:
return InputContent(
f.readlines(),
datetime.fromtimestamp(os.path.getmtime(self.sourceName)).date(),
)
def hasDirectory(self) -> bool:
return True
def directory(self) -> str:
return os.path.dirname(os.path.abspath(self.sourceName))
def relative(self, relativePath) -> FileInputSource:
return FileInputSource(os.path.join(self.directory(), relativePath))
def cheaplyExists(self, relativePath) -> bool:
return os.access(self.relative(relativePath).sourceName, os.R_OK)
def mtime(self) -> Optional[float]:
"""Returns the last modification time of this file, or None if it doesn't exist."""
try:
return os.stat(self.sourceName).st_mtime
except FileNotFoundError:
return None
| from __future__ import annotations
import email.utils
import errno
import os
import sys
import urllib.parse
from abc import abstractmethod
from datetime import datetime
from typing import List, Optional
import attr
import requests
import tenacity
from . import config
from .Line import Line
@attr.s(auto_attribs=True)
class InputContent:
rawLines: List[str]
date: Optional[datetime.date]
@property
def lines(self) -> List[Line]:
return [Line(i, line) for i, line in enumerate(self.rawLines, 1)]
@property
def content(self) -> str:
return "".join(self.rawLines)
class InputSource:
"""Represents a thing that can produce specification input text.
Input can be read from stdin ("-"), an HTTPS URL, or a file. Other
InputSources can be found relative to URLs and files, and there's a context
manager for temporarily switching to the directory of a file InputSource.
"""
def __new__(cls, sourceName: str, **kwargs):
"""Dispatches to the right subclass."""
if cls != InputSource:
# Only take control of calls to InputSource(...) itself.
return super().__new__(cls)
if sourceName == "-":
return StdinInputSource(sourceName, **kwargs)
if sourceName.startswith("https:"):
return UrlInputSource(sourceName, **kwargs)
return FileInputSource(sourceName, **kwargs)
@abstractmethod
def __str__(self) -> str:
pass
def __repr__(self) -> str:
return "{}({!r})".format(self.__class__.__name__, str(self))
def __hash__(self):
return hash(str(self))
def __eq__(self, other):
return str(self) == str(other)
@abstractmethod
def read(self) -> InputContent:
"""Fully reads the source."""
def hasDirectory(self) -> bool:
"""Only some InputSources have a directory."""
return False
def directory(self) -> str:
"""Suitable for passing to subprocess(cwd=)."""
raise TypeError("{} instances don't have directories.".format(type(self)))
def relative(self, _) -> Optional[InputSource]:
"""Resolves relativePath relative to this InputSource.
For example, InputSource("/foo/bar/baz.txt").relative("quux/fuzzy.txt")
will be InputSource("/foo/bar/quux/fuzzy.txt").
If this source type can't find others relative to itself, returns None.
"""
return None
def mtime(self) -> Optional[float]:
"""Returns the last modification time of this source, if that's known."""
return None
def cheaplyExists(self, _) -> Optional[bool]:
"""If it's cheap to determine, returns whether relativePath exists.
Otherwise, returns None.
"""
return None
def __getattr__(self, name):
"""Hack to make pylint happy, since all the attrs are defined
on the subclasses that __new__ dynamically dispatches to.
See https://stackoverflow.com/a/60731663/455535
"""
print(f"No member '{name}' contained in InputSource.")
return ""
class StdinInputSource(InputSource):
def __init__(self, sourceName: str):
assert sourceName == "-"
self.type = "stdin"
self.sourceName = sourceName
self.content = None
def __str__(self) -> str:
return "-"
def read(self) -> InputContent:
return InputContent(sys.stdin.readlines(), None)
class UrlInputSource(InputSource):
def __init__(self, sourceName: str):
assert sourceName.startswith("https:")
self.sourceName = sourceName
self.type = "url"
def __str__(self) -> str:
return self.sourceName
@tenacity.retry(
reraise=True,
stop=tenacity.stop_after_attempt(3),
wait=tenacity.wait_random(1, 2),
)
def _fetch(self):
response = requests.get(self.sourceName, timeout=10)
if response.status_code == 404:
# This matches the OSErrors expected by older uses of
# FileInputSource. It skips the retry, since the server has given us
# a concrete, expected answer.
raise FileNotFoundError(errno.ENOENT, response.text, self.sourceName)
response.raise_for_status()
return response
def read(self) -> InputContent:
response = self._fetch()
date = None
if "Date" in response.headers:
# Use the response's Date header, although servers don't always set
# this according to the last change to the file.
date = email.utils.parsedate_to_datetime(response.headers["Date"]).date()
return InputContent(response.text.splitlines(True), date)
def relative(self, relativePath) -> UrlInputSource:
return UrlInputSource(urllib.parse.urljoin(self.sourceName, relativePath))
class FileInputSource(InputSource):
def __init__(self, sourceName: str, *, chroot: bool, chrootPath: Optional[str] = None):
self.sourceName = sourceName
self.chrootPath = chrootPath
self.type = "file"
self.content = None
if chroot and self.chrootPath is None:
self.chrootPath = self.directory()
if self.chrootPath is not None:
self.sourceName = config.chrootPath(self.chrootPath, self.sourceName)
def __str__(self) -> str:
return self.sourceName
def read(self) -> InputContent:
with open(self.sourceName, encoding="utf-8") as f:
return InputContent(
f.readlines(),
datetime.fromtimestamp(os.path.getmtime(self.sourceName)).date(),
)
def hasDirectory(self) -> bool:
return True
def directory(self) -> str:
return os.path.dirname(os.path.abspath(self.sourceName))
def relative(self, relativePath) -> FileInputSource:
return FileInputSource(os.path.join(self.directory(), relativePath), chroot=False, chrootPath=self.chrootPath)
def cheaplyExists(self, relativePath) -> bool:
return os.access(self.relative(relativePath).sourceName, os.R_OK)
def mtime(self) -> Optional[float]:
"""Returns the last modification time of this file, or None if it doesn't exist."""
try:
return os.stat(self.sourceName).st_mtime
except FileNotFoundError:
return None
| path_disclosure | {
"code": [
" def __new__(cls, sourceName: str):",
" return StdinInputSource(sourceName)",
" return UrlInputSource(sourceName)",
" return FileInputSource(sourceName)",
" def __init__(self, sourceName: str):",
" return FileInputSource(os.path.join(self.directory(), relativePath))"
],
"line_no": [
41,
48,
50,
51,
160,
182
]
} | {
"code": [
"from . import config",
" return StdinInputSource(sourceName, **kwargs)",
" return UrlInputSource(sourceName, **kwargs)",
" def __init__(self, sourceName: str, *, chroot: bool, chrootPath: Optional[str] = None):",
" self.chrootPath = chrootPath",
" self.chrootPath = self.directory()",
" if self.chrootPath is not None:",
" self.sourceName = config.chrootPath(self.chrootPath, self.sourceName)",
" return FileInputSource(os.path.join(self.directory(), relativePath), chroot=False, chrootPath=self.chrootPath)"
],
"line_no": [
16,
49,
51,
161,
163,
168,
169,
170,
189
]
} | from __future__ import annotations
import email.utils
import errno
import os
import sys
import urllib.parse
from abc import abstractmethod
from datetime import .datetime
from typing import List, Optional
import attr
import requests
import tenacity
from .Line import Line
@attr.s(auto_attribs=True)
class CLASS_0:
rawLines: List[str]
VAR_7: Optional[datetime.date]
@property
def FUNC_0(self) -> List[Line]:
return [Line(i, line) for i, line in enumerate(self.rawLines, 1)]
@property
def FUNC_1(self) -> str:
return "".join(self.rawLines)
class CLASS_1:
def __new__(VAR_0, VAR_1: str):
if VAR_0 != CLASS_1:
return super().__new__(VAR_0)
if VAR_1 == "-":
return CLASS_2(VAR_1)
if VAR_1.startswith("https:"):
return CLASS_3(VAR_1)
return CLASS_4(VAR_1)
@abstractmethod
def __str__(self) -> str:
pass
def __repr__(self) -> str:
return "{}({!r})".format(self.__class__.__name__, str(self))
def __hash__(self):
return hash(str(self))
def __eq__(self, VAR_2):
return str(self) == str(VAR_2)
@abstractmethod
def FUNC_2(self) -> CLASS_0:
def FUNC_3(self) -> bool:
return False
def FUNC_4(self) -> str:
raise TypeError("{} instances don't have directories.".format(type(self)))
def FUNC_5(self, VAR_3) -> Optional[CLASS_1]:
return None
def FUNC_6(self) -> Optional[float]:
return None
def FUNC_7(self, VAR_3) -> Optional[bool]:
return None
def __getattr__(self, VAR_4):
print(f"No member '{VAR_4}' contained in CLASS_1.")
return ""
class CLASS_2(CLASS_1):
def __init__(self, VAR_1: str):
assert VAR_1 == "-"
self.type = "stdin"
self.sourceName = VAR_1
self.content = None
def __str__(self) -> str:
return "-"
def FUNC_2(self) -> CLASS_0:
return CLASS_0(sys.stdin.readlines(), None)
class CLASS_3(CLASS_1):
def __init__(self, VAR_1: str):
assert VAR_1.startswith("https:")
self.sourceName = VAR_1
self.type = "url"
def __str__(self) -> str:
return self.sourceName
@tenacity.retry(
reraise=True,
stop=tenacity.stop_after_attempt(3),
wait=tenacity.wait_random(1, 2),
)
def FUNC_8(self):
VAR_6 = requests.get(self.sourceName, timeout=10)
if VAR_6.status_code == 404:
raise FileNotFoundError(errno.ENOENT, VAR_6.text, self.sourceName)
VAR_6.raise_for_status()
return VAR_6
def FUNC_2(self) -> CLASS_0:
VAR_6 = self._fetch()
VAR_7 = None
if "Date" in VAR_6.headers:
VAR_7 = email.utils.parsedate_to_datetime(VAR_6.headers["Date"]).date()
return CLASS_0(VAR_6.text.splitlines(True), VAR_7)
def FUNC_5(self, VAR_5) -> CLASS_3:
return CLASS_3(urllib.parse.urljoin(self.sourceName, VAR_5))
class CLASS_4(CLASS_1):
def __init__(self, VAR_1: str):
self.sourceName = VAR_1
self.type = "file"
self.content = None
def __str__(self) -> str:
return self.sourceName
def FUNC_2(self) -> CLASS_0:
with open(self.sourceName, encoding="utf-8") as f:
return CLASS_0(
f.readlines(),
datetime.fromtimestamp(os.path.getmtime(self.sourceName)).date(),
)
def FUNC_3(self) -> bool:
return True
def FUNC_4(self) -> str:
return os.path.dirname(os.path.abspath(self.sourceName))
def FUNC_5(self, VAR_5) -> CLASS_4:
return CLASS_4(os.path.join(self.directory(), VAR_5))
def FUNC_7(self, VAR_5) -> bool:
return os.access(self.relative(VAR_5).sourceName, os.R_OK)
def FUNC_6(self) -> Optional[float]:
try:
return os.stat(self.sourceName).st_mtime
except FileNotFoundError:
return None
| from __future__ import annotations
import email.utils
import errno
import os
import sys
import urllib.parse
from abc import abstractmethod
from datetime import .datetime
from typing import List, Optional
import attr
import requests
import tenacity
from . import config
from .Line import Line
@attr.s(auto_attribs=True)
class CLASS_0:
rawLines: List[str]
VAR_10: Optional[datetime.date]
@property
def FUNC_0(self) -> List[Line]:
return [Line(i, line) for i, line in enumerate(self.rawLines, 1)]
@property
def FUNC_1(self) -> str:
return "".join(self.rawLines)
class CLASS_1:
def __new__(VAR_0, VAR_1: str, **VAR_2):
if VAR_0 != CLASS_1:
return super().__new__(VAR_0)
if VAR_1 == "-":
return CLASS_2(VAR_1, **VAR_2)
if VAR_1.startswith("https:"):
return CLASS_3(VAR_1, **VAR_2)
return CLASS_4(VAR_1, **VAR_2)
@abstractmethod
def __str__(self) -> str:
pass
def __repr__(self) -> str:
return "{}({!r})".format(self.__class__.__name__, str(self))
def __hash__(self):
return hash(str(self))
def __eq__(self, VAR_3):
return str(self) == str(VAR_3)
@abstractmethod
def FUNC_2(self) -> CLASS_0:
def FUNC_3(self) -> bool:
return False
def FUNC_4(self) -> str:
raise TypeError("{} instances don't have directories.".format(type(self)))
def FUNC_5(self, VAR_4) -> Optional[CLASS_1]:
return None
def FUNC_6(self) -> Optional[float]:
return None
def FUNC_7(self, VAR_4) -> Optional[bool]:
return None
def __getattr__(self, VAR_5):
print(f"No member '{VAR_5}' contained in CLASS_1.")
return ""
class CLASS_2(CLASS_1):
def __init__(self, VAR_1: str):
assert VAR_1 == "-"
self.type = "stdin"
self.sourceName = VAR_1
self.content = None
def __str__(self) -> str:
return "-"
def FUNC_2(self) -> CLASS_0:
return CLASS_0(sys.stdin.readlines(), None)
class CLASS_3(CLASS_1):
def __init__(self, VAR_1: str):
assert VAR_1.startswith("https:")
self.sourceName = VAR_1
self.type = "url"
def __str__(self) -> str:
return self.sourceName
@tenacity.retry(
reraise=True,
stop=tenacity.stop_after_attempt(3),
wait=tenacity.wait_random(1, 2),
)
def FUNC_8(self):
VAR_9 = requests.get(self.sourceName, timeout=10)
if VAR_9.status_code == 404:
raise FileNotFoundError(errno.ENOENT, VAR_9.text, self.sourceName)
VAR_9.raise_for_status()
return VAR_9
def FUNC_2(self) -> CLASS_0:
VAR_9 = self._fetch()
VAR_10 = None
if "Date" in VAR_9.headers:
VAR_10 = email.utils.parsedate_to_datetime(VAR_9.headers["Date"]).date()
return CLASS_0(VAR_9.text.splitlines(True), VAR_10)
def FUNC_5(self, VAR_6) -> CLASS_3:
return CLASS_3(urllib.parse.urljoin(self.sourceName, VAR_6))
class CLASS_4(CLASS_1):
def __init__(self, VAR_1: str, *, VAR_7: bool, VAR_8: Optional[str] = None):
self.sourceName = VAR_1
self.chrootPath = VAR_8
self.type = "file"
self.content = None
if VAR_7 and self.chrootPath is None:
self.chrootPath = self.directory()
if self.chrootPath is not None:
self.sourceName = config.chrootPath(self.chrootPath, self.sourceName)
def __str__(self) -> str:
return self.sourceName
def FUNC_2(self) -> CLASS_0:
with open(self.sourceName, encoding="utf-8") as f:
return CLASS_0(
f.readlines(),
datetime.fromtimestamp(os.path.getmtime(self.sourceName)).date(),
)
def FUNC_3(self) -> bool:
return True
def FUNC_4(self) -> str:
return os.path.dirname(os.path.abspath(self.sourceName))
def FUNC_5(self, VAR_6) -> CLASS_4:
return CLASS_4(os.path.join(self.directory(), VAR_6), VAR_7=False, VAR_8=self.chrootPath)
def FUNC_7(self, VAR_6) -> bool:
return os.access(self.relative(VAR_6).sourceName, os.R_OK)
def FUNC_6(self) -> Optional[float]:
try:
return os.stat(self.sourceName).st_mtime
except FileNotFoundError:
return None
| [
2,
11,
15,
17,
18,
23,
27,
31,
32,
35,
40,
44,
46,
52,
56,
59,
62,
65,
69,
73,
77,
80,
83,
87,
91,
94,
98,
106,
107,
114,
117,
120,
121,
127,
130,
139,
140,
141,
145,
150,
151,
154,
157,
158,
164,
167,
174,
177,
180,
183,
186,
193,
34,
35,
36,
37,
38,
39,
42,
68,
71,
75,
79,
80,
81,
82,
83,
84,
85,
89,
93,
94,
95,
96,
100,
101,
102,
103,
188
] | [
2,
11,
15,
18,
19,
24,
28,
32,
33,
36,
41,
45,
47,
53,
57,
60,
63,
66,
70,
74,
78,
81,
84,
88,
92,
95,
99,
107,
108,
115,
118,
121,
122,
128,
131,
140,
141,
142,
146,
151,
152,
155,
158,
159,
166,
171,
174,
181,
184,
187,
190,
193,
200,
35,
36,
37,
38,
39,
40,
43,
69,
72,
76,
80,
81,
82,
83,
84,
85,
86,
90,
94,
95,
96,
97,
101,
102,
103,
104,
195
] |
0CWE-22
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Proxy AMI-related calls from cloud controller to objectstore service."""
import binascii
import os
import shutil
import tarfile
import tempfile
from xml.etree import ElementTree
import boto.s3.connection
import eventlet
from nova import crypto
import nova.db.api
from nova import exception
from nova import flags
from nova import image
from nova import log as logging
from nova import utils
from nova.api.ec2 import ec2utils
LOG = logging.getLogger("nova.image.s3")
FLAGS = flags.FLAGS
flags.DEFINE_string('image_decryption_dir', '/tmp',
'parent dir for tempdir used for image decryption')
flags.DEFINE_string('s3_access_key', 'notchecked',
'access key to use for s3 server for images')
flags.DEFINE_string('s3_secret_key', 'notchecked',
'secret key to use for s3 server for images')
class S3ImageService(object):
"""Wraps an existing image service to support s3 based register."""
def __init__(self, service=None, *args, **kwargs):
self.service = service or image.get_default_image_service()
self.service.__init__(*args, **kwargs)
def get_image_uuid(self, context, image_id):
return nova.db.api.s3_image_get(context, image_id)['uuid']
def get_image_id(self, context, image_uuid):
return nova.db.api.s3_image_get_by_uuid(context, image_uuid)['id']
def _create_image_id(self, context, image_uuid):
return nova.db.api.s3_image_create(context, image_uuid)['id']
def _translate_uuids_to_ids(self, context, images):
return [self._translate_uuid_to_id(context, img) for img in images]
def _translate_uuid_to_id(self, context, image):
def _find_or_create(image_uuid):
if image_uuid is None:
return
try:
return self.get_image_id(context, image_uuid)
except exception.NotFound:
return self._create_image_id(context, image_uuid)
image_copy = image.copy()
try:
image_id = image_copy['id']
except KeyError:
pass
else:
image_copy['id'] = _find_or_create(image_id)
for prop in ['kernel_id', 'ramdisk_id']:
try:
image_uuid = image_copy['properties'][prop]
except (KeyError, ValueError):
pass
else:
image_copy['properties'][prop] = _find_or_create(image_uuid)
return image_copy
def create(self, context, metadata, data=None):
"""Create an image.
metadata['properties'] should contain image_location.
"""
image = self._s3_create(context, metadata)
return image
def delete(self, context, image_id):
image_uuid = self.get_image_uuid(context, image_id)
self.service.delete(context, image_uuid)
def update(self, context, image_id, metadata, data=None):
image_uuid = self.get_image_uuid(context, image_id)
image = self.service.update(context, image_uuid, metadata, data)
return self._translate_uuid_to_id(context, image)
def index(self, context):
#NOTE(bcwaldon): sort asc to make sure we assign lower ids
# to older images
images = self.service.index(context, sort_dir='asc')
return self._translate_uuids_to_ids(context, images)
def detail(self, context):
#NOTE(bcwaldon): sort asc to make sure we assign lower ids
# to older images
images = self.service.detail(context, sort_dir='asc')
return self._translate_uuids_to_ids(context, images)
def show(self, context, image_id):
image_uuid = self.get_image_uuid(context, image_id)
image = self.service.show(context, image_uuid)
return self._translate_uuid_to_id(context, image)
def show_by_name(self, context, name):
image = self.service.show_by_name(context, name)
return self._translate_uuid_to_id(context, image)
def get(self, context, image_id):
image_uuid = self.get_image_uuid(context, image_id)
return self.get(self, context, image_uuid)
@staticmethod
def _conn(context):
# NOTE(vish): access and secret keys for s3 server are not
# checked in nova-objectstore
access = FLAGS.s3_access_key
secret = FLAGS.s3_secret_key
calling = boto.s3.connection.OrdinaryCallingFormat()
return boto.s3.connection.S3Connection(aws_access_key_id=access,
aws_secret_access_key=secret,
is_secure=False,
calling_format=calling,
port=FLAGS.s3_port,
host=FLAGS.s3_host)
@staticmethod
def _download_file(bucket, filename, local_dir):
key = bucket.get_key(filename)
local_filename = os.path.join(local_dir, filename)
key.get_contents_to_filename(local_filename)
return local_filename
def _s3_parse_manifest(self, context, metadata, manifest):
manifest = ElementTree.fromstring(manifest)
image_format = 'ami'
image_type = 'machine'
try:
kernel_id = manifest.find('machine_configuration/kernel_id').text
if kernel_id == 'true':
image_format = 'aki'
image_type = 'kernel'
kernel_id = None
except Exception:
kernel_id = None
try:
ramdisk_id = manifest.find('machine_configuration/ramdisk_id').text
if ramdisk_id == 'true':
image_format = 'ari'
image_type = 'ramdisk'
ramdisk_id = None
except Exception:
ramdisk_id = None
try:
arch = manifest.find('machine_configuration/architecture').text
except Exception:
arch = 'x86_64'
# NOTE(yamahata):
# EC2 ec2-budlne-image --block-device-mapping accepts
# <virtual name>=<device name> where
# virtual name = {ami, root, swap, ephemeral<N>}
# where N is no negative integer
# device name = the device name seen by guest kernel.
# They are converted into
# block_device_mapping/mapping/{virtual, device}
#
# Do NOT confuse this with ec2-register's block device mapping
# argument.
mappings = []
try:
block_device_mapping = manifest.findall('machine_configuration/'
'block_device_mapping/'
'mapping')
for bdm in block_device_mapping:
mappings.append({'virtual': bdm.find('virtual').text,
'device': bdm.find('device').text})
except Exception:
mappings = []
properties = metadata['properties']
properties['project_id'] = context.project_id
properties['architecture'] = arch
def _translate_dependent_image_id(image_key, image_id):
image_id = ec2utils.ec2_id_to_id(image_id)
image_uuid = self.get_image_uuid(context, image_id)
properties['image_id'] = image_uuid
if kernel_id:
_translate_dependent_image_id('kernel_id', kernel_id)
if ramdisk_id:
_translate_dependent_image_id('ramdisk_id', ramdisk_id)
if mappings:
properties['mappings'] = mappings
metadata.update({'disk_format': image_format,
'container_format': image_format,
'status': 'queued',
'is_public': False,
'properties': properties})
metadata['properties']['image_state'] = 'pending'
#TODO(bcwaldon): right now, this removes user-defined ids.
# We need to re-enable this.
image_id = metadata.pop('id', None)
image = self.service.create(context, metadata)
# extract the new uuid and generate an int id to present back to user
image_uuid = image['id']
image['id'] = self._create_image_id(context, image_uuid)
# return image_uuid so the caller can still make use of image_service
return manifest, image, image_uuid
def _s3_create(self, context, metadata):
"""Gets a manifext from s3 and makes an image."""
image_path = tempfile.mkdtemp(dir=FLAGS.image_decryption_dir)
image_location = metadata['properties']['image_location']
bucket_name = image_location.split('/')[0]
manifest_path = image_location[len(bucket_name) + 1:]
bucket = self._conn(context).get_bucket(bucket_name)
key = bucket.get_key(manifest_path)
manifest = key.get_contents_as_string()
manifest, image, image_uuid = self._s3_parse_manifest(context,
metadata,
manifest)
def delayed_create():
"""This handles the fetching and decrypting of the part files."""
log_vars = {'image_location': image_location,
'image_path': image_path}
metadata['properties']['image_state'] = 'downloading'
self.service.update(context, image_uuid, metadata)
try:
parts = []
elements = manifest.find('image').getiterator('filename')
for fn_element in elements:
part = self._download_file(bucket,
fn_element.text,
image_path)
parts.append(part)
# NOTE(vish): this may be suboptimal, should we use cat?
enc_filename = os.path.join(image_path, 'image.encrypted')
with open(enc_filename, 'w') as combined:
for filename in parts:
with open(filename) as part:
shutil.copyfileobj(part, combined)
except Exception:
LOG.exception(_("Failed to download %(image_location)s "
"to %(image_path)s"), log_vars)
metadata['properties']['image_state'] = 'failed_download'
self.service.update(context, image_uuid, metadata)
return
metadata['properties']['image_state'] = 'decrypting'
self.service.update(context, image_uuid, metadata)
try:
hex_key = manifest.find('image/ec2_encrypted_key').text
encrypted_key = binascii.a2b_hex(hex_key)
hex_iv = manifest.find('image/ec2_encrypted_iv').text
encrypted_iv = binascii.a2b_hex(hex_iv)
# FIXME(vish): grab key from common service so this can run on
# any host.
cloud_pk = crypto.key_path(context.project_id)
dec_filename = os.path.join(image_path, 'image.tar.gz')
self._decrypt_image(enc_filename, encrypted_key,
encrypted_iv, cloud_pk,
dec_filename)
except Exception:
LOG.exception(_("Failed to decrypt %(image_location)s "
"to %(image_path)s"), log_vars)
metadata['properties']['image_state'] = 'failed_decrypt'
self.service.update(context, image_uuid, metadata)
return
metadata['properties']['image_state'] = 'untarring'
self.service.update(context, image_uuid, metadata)
try:
unz_filename = self._untarzip_image(image_path, dec_filename)
except Exception:
LOG.exception(_("Failed to untar %(image_location)s "
"to %(image_path)s"), log_vars)
metadata['properties']['image_state'] = 'failed_untar'
self.service.update(context, image_uuid, metadata)
return
metadata['properties']['image_state'] = 'uploading'
self.service.update(context, image_uuid, metadata)
try:
with open(unz_filename) as image_file:
self.service.update(context, image_uuid,
metadata, image_file)
except Exception:
LOG.exception(_("Failed to upload %(image_location)s "
"to %(image_path)s"), log_vars)
metadata['properties']['image_state'] = 'failed_upload'
self.service.update(context, image_uuid, metadata)
return
metadata['properties']['image_state'] = 'available'
metadata['status'] = 'active'
self.service.update(context, image_uuid, metadata)
shutil.rmtree(image_path)
eventlet.spawn_n(delayed_create)
return image
@staticmethod
def _decrypt_image(encrypted_filename, encrypted_key, encrypted_iv,
cloud_private_key, decrypted_filename):
key, err = utils.execute('openssl',
'rsautl',
'-decrypt',
'-inkey', '%s' % cloud_private_key,
process_input=encrypted_key,
check_exit_code=False)
if err:
raise exception.Error(_('Failed to decrypt private key: %s')
% err)
iv, err = utils.execute('openssl',
'rsautl',
'-decrypt',
'-inkey', '%s' % cloud_private_key,
process_input=encrypted_iv,
check_exit_code=False)
if err:
raise exception.Error(_('Failed to decrypt initialization '
'vector: %s') % err)
_out, err = utils.execute('openssl', 'enc',
'-d', '-aes-128-cbc',
'-in', '%s' % (encrypted_filename,),
'-K', '%s' % (key,),
'-iv', '%s' % (iv,),
'-out', '%s' % (decrypted_filename,),
check_exit_code=False)
if err:
raise exception.Error(_('Failed to decrypt image file '
'%(image_file)s: %(err)s') %
{'image_file': encrypted_filename,
'err': err})
@staticmethod
def _untarzip_image(path, filename):
tar_file = tarfile.open(filename, 'r|gz')
tar_file.extractall(path)
image_file = tar_file.getnames()[0]
tar_file.close()
return os.path.join(path, image_file)
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Proxy AMI-related calls from cloud controller to objectstore service."""
import binascii
import os
import shutil
import tarfile
import tempfile
from xml.etree import ElementTree
import boto.s3.connection
import eventlet
from nova import crypto
import nova.db.api
from nova import exception
from nova import flags
from nova import image
from nova import log as logging
from nova import utils
from nova.api.ec2 import ec2utils
LOG = logging.getLogger("nova.image.s3")
FLAGS = flags.FLAGS
flags.DEFINE_string('image_decryption_dir', '/tmp',
'parent dir for tempdir used for image decryption')
flags.DEFINE_string('s3_access_key', 'notchecked',
'access key to use for s3 server for images')
flags.DEFINE_string('s3_secret_key', 'notchecked',
'secret key to use for s3 server for images')
class S3ImageService(object):
"""Wraps an existing image service to support s3 based register."""
def __init__(self, service=None, *args, **kwargs):
self.service = service or image.get_default_image_service()
self.service.__init__(*args, **kwargs)
def get_image_uuid(self, context, image_id):
return nova.db.api.s3_image_get(context, image_id)['uuid']
def get_image_id(self, context, image_uuid):
return nova.db.api.s3_image_get_by_uuid(context, image_uuid)['id']
def _create_image_id(self, context, image_uuid):
return nova.db.api.s3_image_create(context, image_uuid)['id']
def _translate_uuids_to_ids(self, context, images):
return [self._translate_uuid_to_id(context, img) for img in images]
def _translate_uuid_to_id(self, context, image):
def _find_or_create(image_uuid):
if image_uuid is None:
return
try:
return self.get_image_id(context, image_uuid)
except exception.NotFound:
return self._create_image_id(context, image_uuid)
image_copy = image.copy()
try:
image_id = image_copy['id']
except KeyError:
pass
else:
image_copy['id'] = _find_or_create(image_id)
for prop in ['kernel_id', 'ramdisk_id']:
try:
image_uuid = image_copy['properties'][prop]
except (KeyError, ValueError):
pass
else:
image_copy['properties'][prop] = _find_or_create(image_uuid)
return image_copy
def create(self, context, metadata, data=None):
"""Create an image.
metadata['properties'] should contain image_location.
"""
image = self._s3_create(context, metadata)
return image
def delete(self, context, image_id):
image_uuid = self.get_image_uuid(context, image_id)
self.service.delete(context, image_uuid)
def update(self, context, image_id, metadata, data=None):
image_uuid = self.get_image_uuid(context, image_id)
image = self.service.update(context, image_uuid, metadata, data)
return self._translate_uuid_to_id(context, image)
def index(self, context):
#NOTE(bcwaldon): sort asc to make sure we assign lower ids
# to older images
images = self.service.index(context, sort_dir='asc')
return self._translate_uuids_to_ids(context, images)
def detail(self, context):
#NOTE(bcwaldon): sort asc to make sure we assign lower ids
# to older images
images = self.service.detail(context, sort_dir='asc')
return self._translate_uuids_to_ids(context, images)
def show(self, context, image_id):
image_uuid = self.get_image_uuid(context, image_id)
image = self.service.show(context, image_uuid)
return self._translate_uuid_to_id(context, image)
def show_by_name(self, context, name):
image = self.service.show_by_name(context, name)
return self._translate_uuid_to_id(context, image)
def get(self, context, image_id):
image_uuid = self.get_image_uuid(context, image_id)
return self.get(self, context, image_uuid)
@staticmethod
def _conn(context):
# NOTE(vish): access and secret keys for s3 server are not
# checked in nova-objectstore
access = FLAGS.s3_access_key
secret = FLAGS.s3_secret_key
calling = boto.s3.connection.OrdinaryCallingFormat()
return boto.s3.connection.S3Connection(aws_access_key_id=access,
aws_secret_access_key=secret,
is_secure=False,
calling_format=calling,
port=FLAGS.s3_port,
host=FLAGS.s3_host)
@staticmethod
def _download_file(bucket, filename, local_dir):
key = bucket.get_key(filename)
local_filename = os.path.join(local_dir, os.path.basename(filename))
key.get_contents_to_filename(local_filename)
return local_filename
def _s3_parse_manifest(self, context, metadata, manifest):
manifest = ElementTree.fromstring(manifest)
image_format = 'ami'
image_type = 'machine'
try:
kernel_id = manifest.find('machine_configuration/kernel_id').text
if kernel_id == 'true':
image_format = 'aki'
image_type = 'kernel'
kernel_id = None
except Exception:
kernel_id = None
try:
ramdisk_id = manifest.find('machine_configuration/ramdisk_id').text
if ramdisk_id == 'true':
image_format = 'ari'
image_type = 'ramdisk'
ramdisk_id = None
except Exception:
ramdisk_id = None
try:
arch = manifest.find('machine_configuration/architecture').text
except Exception:
arch = 'x86_64'
# NOTE(yamahata):
# EC2 ec2-budlne-image --block-device-mapping accepts
# <virtual name>=<device name> where
# virtual name = {ami, root, swap, ephemeral<N>}
# where N is no negative integer
# device name = the device name seen by guest kernel.
# They are converted into
# block_device_mapping/mapping/{virtual, device}
#
# Do NOT confuse this with ec2-register's block device mapping
# argument.
mappings = []
try:
block_device_mapping = manifest.findall('machine_configuration/'
'block_device_mapping/'
'mapping')
for bdm in block_device_mapping:
mappings.append({'virtual': bdm.find('virtual').text,
'device': bdm.find('device').text})
except Exception:
mappings = []
properties = metadata['properties']
properties['project_id'] = context.project_id
properties['architecture'] = arch
def _translate_dependent_image_id(image_key, image_id):
image_id = ec2utils.ec2_id_to_id(image_id)
image_uuid = self.get_image_uuid(context, image_id)
properties['image_id'] = image_uuid
if kernel_id:
_translate_dependent_image_id('kernel_id', kernel_id)
if ramdisk_id:
_translate_dependent_image_id('ramdisk_id', ramdisk_id)
if mappings:
properties['mappings'] = mappings
metadata.update({'disk_format': image_format,
'container_format': image_format,
'status': 'queued',
'is_public': False,
'properties': properties})
metadata['properties']['image_state'] = 'pending'
#TODO(bcwaldon): right now, this removes user-defined ids.
# We need to re-enable this.
image_id = metadata.pop('id', None)
image = self.service.create(context, metadata)
# extract the new uuid and generate an int id to present back to user
image_uuid = image['id']
image['id'] = self._create_image_id(context, image_uuid)
# return image_uuid so the caller can still make use of image_service
return manifest, image, image_uuid
def _s3_create(self, context, metadata):
"""Gets a manifext from s3 and makes an image."""
image_path = tempfile.mkdtemp(dir=FLAGS.image_decryption_dir)
image_location = metadata['properties']['image_location']
bucket_name = image_location.split('/')[0]
manifest_path = image_location[len(bucket_name) + 1:]
bucket = self._conn(context).get_bucket(bucket_name)
key = bucket.get_key(manifest_path)
manifest = key.get_contents_as_string()
manifest, image, image_uuid = self._s3_parse_manifest(context,
metadata,
manifest)
def delayed_create():
"""This handles the fetching and decrypting of the part files."""
log_vars = {'image_location': image_location,
'image_path': image_path}
metadata['properties']['image_state'] = 'downloading'
self.service.update(context, image_uuid, metadata)
try:
parts = []
elements = manifest.find('image').getiterator('filename')
for fn_element in elements:
part = self._download_file(bucket,
fn_element.text,
image_path)
parts.append(part)
# NOTE(vish): this may be suboptimal, should we use cat?
enc_filename = os.path.join(image_path, 'image.encrypted')
with open(enc_filename, 'w') as combined:
for filename in parts:
with open(filename) as part:
shutil.copyfileobj(part, combined)
except Exception:
LOG.exception(_("Failed to download %(image_location)s "
"to %(image_path)s"), log_vars)
metadata['properties']['image_state'] = 'failed_download'
self.service.update(context, image_uuid, metadata)
return
metadata['properties']['image_state'] = 'decrypting'
self.service.update(context, image_uuid, metadata)
try:
hex_key = manifest.find('image/ec2_encrypted_key').text
encrypted_key = binascii.a2b_hex(hex_key)
hex_iv = manifest.find('image/ec2_encrypted_iv').text
encrypted_iv = binascii.a2b_hex(hex_iv)
# FIXME(vish): grab key from common service so this can run on
# any host.
cloud_pk = crypto.key_path(context.project_id)
dec_filename = os.path.join(image_path, 'image.tar.gz')
self._decrypt_image(enc_filename, encrypted_key,
encrypted_iv, cloud_pk,
dec_filename)
except Exception:
LOG.exception(_("Failed to decrypt %(image_location)s "
"to %(image_path)s"), log_vars)
metadata['properties']['image_state'] = 'failed_decrypt'
self.service.update(context, image_uuid, metadata)
return
metadata['properties']['image_state'] = 'untarring'
self.service.update(context, image_uuid, metadata)
try:
unz_filename = self._untarzip_image(image_path, dec_filename)
except Exception:
LOG.exception(_("Failed to untar %(image_location)s "
"to %(image_path)s"), log_vars)
metadata['properties']['image_state'] = 'failed_untar'
self.service.update(context, image_uuid, metadata)
return
metadata['properties']['image_state'] = 'uploading'
self.service.update(context, image_uuid, metadata)
try:
with open(unz_filename) as image_file:
self.service.update(context, image_uuid,
metadata, image_file)
except Exception:
LOG.exception(_("Failed to upload %(image_location)s "
"to %(image_path)s"), log_vars)
metadata['properties']['image_state'] = 'failed_upload'
self.service.update(context, image_uuid, metadata)
return
metadata['properties']['image_state'] = 'available'
metadata['status'] = 'active'
self.service.update(context, image_uuid, metadata)
shutil.rmtree(image_path)
eventlet.spawn_n(delayed_create)
return image
@staticmethod
def _decrypt_image(encrypted_filename, encrypted_key, encrypted_iv,
cloud_private_key, decrypted_filename):
key, err = utils.execute('openssl',
'rsautl',
'-decrypt',
'-inkey', '%s' % cloud_private_key,
process_input=encrypted_key,
check_exit_code=False)
if err:
raise exception.Error(_('Failed to decrypt private key: %s')
% err)
iv, err = utils.execute('openssl',
'rsautl',
'-decrypt',
'-inkey', '%s' % cloud_private_key,
process_input=encrypted_iv,
check_exit_code=False)
if err:
raise exception.Error(_('Failed to decrypt initialization '
'vector: %s') % err)
_out, err = utils.execute('openssl', 'enc',
'-d', '-aes-128-cbc',
'-in', '%s' % (encrypted_filename,),
'-K', '%s' % (key,),
'-iv', '%s' % (iv,),
'-out', '%s' % (decrypted_filename,),
check_exit_code=False)
if err:
raise exception.Error(_('Failed to decrypt image file '
'%(image_file)s: %(err)s') %
{'image_file': encrypted_filename,
'err': err})
@staticmethod
def _test_for_malicious_tarball(path, filename):
"""Raises exception if extracting tarball would escape extract path"""
tar_file = tarfile.open(filename, 'r|gz')
for n in tar_file.getnames():
if not os.path.abspath(os.path.join(path, n)).startswith(path):
tar_file.close()
raise exception.Error(_('Unsafe filenames in image'))
tar_file.close()
@staticmethod
def _untarzip_image(path, filename):
S3ImageService._test_for_malicious_tarball(path, filename)
tar_file = tarfile.open(filename, 'r|gz')
tar_file.extractall(path)
image_file = tar_file.getnames()[0]
tar_file.close()
return os.path.join(path, image_file)
| path_disclosure | {
"code": [
" local_filename = os.path.join(local_dir, filename)"
],
"line_no": [
158
]
} | {
"code": [
" local_filename = os.path.join(local_dir, os.path.basename(filename))",
" @staticmethod",
" def _test_for_malicious_tarball(path, filename):",
" \"\"\"Raises exception if extracting tarball would escape extract path\"\"\"",
" tar_file = tarfile.open(filename, 'r|gz')",
" for n in tar_file.getnames():",
" if not os.path.abspath(os.path.join(path, n)).startswith(path):",
" tar_file.close()",
" tar_file.close()",
" S3ImageService._test_for_malicious_tarball(path, filename)"
],
"line_no": [
158,
390,
391,
392,
393,
394,
395,
396,
398,
402
]
} |
import binascii
import os
import shutil
import tarfile
import tempfile
from xml.etree import ElementTree
import boto.s3.connection
import eventlet
from nova import crypto
import nova.db.api
from nova import exception
from nova import flags
from nova import .image
from nova import log as logging
from nova import utils
from nova.api.ec2 import ec2utils
VAR_0 = logging.getLogger("nova.image.s3")
VAR_1 = flags.FLAGS
flags.DEFINE_string('image_decryption_dir', '/tmp',
'parent dir for tempdir used for VAR_9 decryption')
flags.DEFINE_string('s3_access_key', 'notchecked',
'access VAR_27 to use for s3 server for images')
flags.DEFINE_string('s3_secret_key', 'notchecked',
'secret VAR_27 to use for s3 server for images')
class CLASS_0(object):
def __init__(self, VAR_2=None, *VAR_3, **VAR_4):
self.service = VAR_2 or VAR_9.get_default_image_service()
self.service.__init__(*VAR_3, **VAR_4)
def FUNC_0(self, VAR_5, VAR_6):
return nova.db.api.s3_image_get(VAR_5, VAR_6)['uuid']
def FUNC_1(self, VAR_5, VAR_7):
return nova.db.api.s3_image_get_by_uuid(VAR_5, VAR_7)['id']
def FUNC_2(self, VAR_5, VAR_7):
return nova.db.api.s3_image_create(VAR_5, VAR_7)['id']
def FUNC_3(self, VAR_5, VAR_8):
return [self._translate_uuid_to_id(VAR_5, img) for img in VAR_8]
def FUNC_4(self, VAR_5, VAR_9):
def FUNC_19(VAR_7):
if VAR_7 is None:
return
try:
return self.get_image_id(VAR_5, VAR_7)
except exception.NotFound:
return self._create_image_id(VAR_5, VAR_7)
VAR_23 = VAR_9.copy()
try:
VAR_6 = VAR_23['id']
except KeyError:
pass
else:
VAR_23['id'] = FUNC_19(VAR_6)
for VAR_48 in ['kernel_id', 'ramdisk_id']:
try:
VAR_7 = VAR_23['properties'][VAR_48]
except (KeyError, ValueError):
pass
else:
VAR_23['properties'][VAR_48] = FUNC_19(VAR_7)
return VAR_23
def FUNC_5(self, VAR_5, VAR_10, VAR_11=None):
VAR_9 = self._s3_create(VAR_5, VAR_10)
return VAR_9
def FUNC_6(self, VAR_5, VAR_6):
VAR_7 = self.get_image_uuid(VAR_5, VAR_6)
self.service.delete(VAR_5, VAR_7)
def FUNC_7(self, VAR_5, VAR_6, VAR_10, VAR_11=None):
VAR_7 = self.get_image_uuid(VAR_5, VAR_6)
VAR_9 = self.service.update(VAR_5, VAR_7, VAR_10, VAR_11)
return self._translate_uuid_to_id(VAR_5, VAR_9)
def FUNC_8(self, VAR_5):
images = self.service.index(VAR_5, sort_dir='asc')
return self._translate_uuids_to_ids(VAR_5, VAR_8)
def FUNC_9(self, VAR_5):
images = self.service.detail(VAR_5, sort_dir='asc')
return self._translate_uuids_to_ids(VAR_5, VAR_8)
def FUNC_10(self, VAR_5, VAR_6):
VAR_7 = self.get_image_uuid(VAR_5, VAR_6)
VAR_9 = self.service.show(VAR_5, VAR_7)
return self._translate_uuid_to_id(VAR_5, VAR_9)
def FUNC_11(self, VAR_5, VAR_12):
VAR_9 = self.service.show_by_name(VAR_5, VAR_12)
return self._translate_uuid_to_id(VAR_5, VAR_9)
def FUNC_12(self, VAR_5, VAR_6):
VAR_7 = self.get_image_uuid(VAR_5, VAR_6)
return self.get(self, VAR_5, VAR_7)
@staticmethod
def FUNC_13(VAR_5):
VAR_24 = VAR_1.s3_access_key
VAR_25 = VAR_1.s3_secret_key
VAR_26 = boto.s3.connection.OrdinaryCallingFormat()
return boto.s3.connection.S3Connection(aws_access_key_id=VAR_24,
aws_secret_access_key=VAR_25,
is_secure=False,
calling_format=VAR_26,
port=VAR_1.s3_port,
host=VAR_1.s3_host)
@staticmethod
def FUNC_14(VAR_13, VAR_14, VAR_15):
VAR_27 = VAR_13.get_key(VAR_14)
VAR_28 = os.path.join(VAR_15, VAR_14)
VAR_27.get_contents_to_filename(VAR_28)
return VAR_28
def FUNC_15(self, VAR_5, VAR_10, VAR_16):
manifest = ElementTree.fromstring(VAR_16)
VAR_29 = 'ami'
VAR_30 = 'machine'
try:
VAR_43 = VAR_16.find('machine_configuration/kernel_id').text
if VAR_43 == 'true':
VAR_29 = 'aki'
VAR_30 = 'kernel'
VAR_43 = None
except Exception:
VAR_43 = None
try:
VAR_44 = VAR_16.find('machine_configuration/ramdisk_id').text
if VAR_44 == 'true':
VAR_29 = 'ari'
VAR_30 = 'ramdisk'
VAR_44 = None
except Exception:
VAR_44 = None
try:
VAR_45 = VAR_16.find('machine_configuration/architecture').text
except Exception:
VAR_45 = 'x86_64'
VAR_31 = []
try:
VAR_46 = VAR_16.findall('machine_configuration/'
'block_device_mapping/'
'mapping')
for bdm in VAR_46:
VAR_31.append({'virtual': bdm.find('virtual').text,
'device': bdm.find('device').text})
except Exception:
VAR_31 = []
VAR_32 = VAR_10['properties']
VAR_32['project_id'] = VAR_5.project_id
VAR_32['architecture'] = VAR_45
def FUNC_20(VAR_33, VAR_6):
VAR_6 = ec2utils.ec2_id_to_id(VAR_6)
VAR_7 = self.get_image_uuid(VAR_5, VAR_6)
VAR_32['image_id'] = VAR_7
if VAR_43:
FUNC_20('kernel_id', VAR_43)
if VAR_44:
FUNC_20('ramdisk_id', VAR_44)
if VAR_31:
VAR_32['mappings'] = VAR_31
VAR_10.update({'disk_format': VAR_29,
'container_format': VAR_29,
'status': 'queued',
'is_public': False,
'properties': VAR_32})
VAR_10['properties']['image_state'] = 'pending'
VAR_6 = VAR_10.pop('id', None)
VAR_9 = self.service.create(VAR_5, VAR_10)
VAR_7 = VAR_9['id']
VAR_9['id'] = self._create_image_id(VAR_5, VAR_7)
return VAR_16, VAR_9, VAR_7
def FUNC_16(self, VAR_5, VAR_10):
VAR_34 = tempfile.mkdtemp(dir=VAR_1.image_decryption_dir)
VAR_35 = VAR_10['properties']['image_location']
VAR_36 = VAR_35.split('/')[0]
VAR_37 = VAR_35[len(VAR_36) + 1:]
VAR_13 = self._conn(VAR_5).get_bucket(VAR_36)
VAR_27 = VAR_13.get_key(VAR_37)
VAR_16 = VAR_27.get_contents_as_string()
VAR_16, VAR_9, VAR_7 = self._s3_parse_manifest(VAR_5,
VAR_10,
VAR_16)
def FUNC_21():
VAR_47 = {'image_location': VAR_35,
'image_path': VAR_34}
VAR_10['properties']['image_state'] = 'downloading'
self.service.update(VAR_5, VAR_7, VAR_10)
try:
VAR_49 = []
VAR_50 = VAR_16.find('image').getiterator('filename')
for fn_element in VAR_50:
VAR_57 = self._download_file(VAR_13,
fn_element.text,
VAR_34)
VAR_49.append(VAR_57)
VAR_51 = os.path.join(VAR_34, 'image.encrypted')
with open(VAR_51, 'w') as combined:
for VAR_14 in VAR_49:
with open(VAR_14) as VAR_57:
shutil.copyfileobj(VAR_57, combined)
except Exception:
VAR_0.exception(_("Failed to download %(VAR_35)s "
"to %(VAR_34)s"), VAR_47)
VAR_10['properties']['image_state'] = 'failed_download'
self.service.update(VAR_5, VAR_7, VAR_10)
return
VAR_10['properties']['image_state'] = 'decrypting'
self.service.update(VAR_5, VAR_7, VAR_10)
try:
VAR_52 = VAR_16.find('image/ec2_encrypted_key').text
VAR_18 = binascii.a2b_hex(VAR_52)
VAR_53 = VAR_16.find('image/ec2_encrypted_iv').text
VAR_19 = binascii.a2b_hex(VAR_53)
VAR_54 = crypto.key_path(VAR_5.project_id)
VAR_55 = os.path.join(VAR_34, 'image.tar.gz')
self._decrypt_image(VAR_51, VAR_18,
VAR_19, VAR_54,
VAR_55)
except Exception:
VAR_0.exception(_("Failed to decrypt %(VAR_35)s "
"to %(VAR_34)s"), VAR_47)
VAR_10['properties']['image_state'] = 'failed_decrypt'
self.service.update(VAR_5, VAR_7, VAR_10)
return
VAR_10['properties']['image_state'] = 'untarring'
self.service.update(VAR_5, VAR_7, VAR_10)
try:
VAR_56 = self._untarzip_image(VAR_34, VAR_55)
except Exception:
VAR_0.exception(_("Failed to untar %(VAR_35)s "
"to %(VAR_34)s"), VAR_47)
VAR_10['properties']['image_state'] = 'failed_untar'
self.service.update(VAR_5, VAR_7, VAR_10)
return
VAR_10['properties']['image_state'] = 'uploading'
self.service.update(VAR_5, VAR_7, VAR_10)
try:
with open(VAR_56) as VAR_42:
self.service.update(VAR_5, VAR_7,
VAR_10, VAR_42)
except Exception:
VAR_0.exception(_("Failed to upload %(VAR_35)s "
"to %(VAR_34)s"), VAR_47)
VAR_10['properties']['image_state'] = 'failed_upload'
self.service.update(VAR_5, VAR_7, VAR_10)
return
VAR_10['properties']['image_state'] = 'available'
VAR_10['status'] = 'active'
self.service.update(VAR_5, VAR_7, VAR_10)
shutil.rmtree(VAR_34)
eventlet.spawn_n(FUNC_21)
return VAR_9
@staticmethod
def FUNC_17(VAR_17, VAR_18, VAR_19,
VAR_20, VAR_21):
VAR_27, VAR_38 = utils.execute('openssl',
'rsautl',
'-decrypt',
'-inkey', '%s' % VAR_20,
process_input=VAR_18,
check_exit_code=False)
if VAR_38:
raise exception.Error(_('Failed to decrypt private VAR_27: %s')
% VAR_38)
VAR_39, VAR_38 = utils.execute('openssl',
'rsautl',
'-decrypt',
'-inkey', '%s' % VAR_20,
process_input=VAR_19,
check_exit_code=False)
if VAR_38:
raise exception.Error(_('Failed to decrypt initialization '
'vector: %s') % VAR_38)
VAR_40, VAR_38 = utils.execute('openssl', 'enc',
'-d', '-aes-128-cbc',
'-in', '%s' % (VAR_17,),
'-K', '%s' % (VAR_27,),
'-iv', '%s' % (VAR_39,),
'-out', '%s' % (VAR_21,),
check_exit_code=False)
if VAR_38:
raise exception.Error(_('Failed to decrypt VAR_9 file '
'%(VAR_42)s: %(VAR_38)s') %
{'image_file': VAR_17,
'err': VAR_38})
@staticmethod
def FUNC_18(VAR_22, VAR_14):
VAR_41 = tarfile.open(VAR_14, 'r|gz')
VAR_41.extractall(VAR_22)
VAR_42 = VAR_41.getnames()[0]
VAR_41.close()
return os.path.join(VAR_22, VAR_42)
|
import binascii
import os
import shutil
import tarfile
import tempfile
from xml.etree import ElementTree
import boto.s3.connection
import eventlet
from nova import crypto
import nova.db.api
from nova import exception
from nova import flags
from nova import .image
from nova import log as logging
from nova import utils
from nova.api.ec2 import ec2utils
VAR_0 = logging.getLogger("nova.image.s3")
VAR_1 = flags.FLAGS
flags.DEFINE_string('image_decryption_dir', '/tmp',
'parent dir for tempdir used for VAR_9 decryption')
flags.DEFINE_string('s3_access_key', 'notchecked',
'access VAR_27 to use for s3 server for images')
flags.DEFINE_string('s3_secret_key', 'notchecked',
'secret VAR_27 to use for s3 server for images')
class CLASS_0(object):
def __init__(self, VAR_2=None, *VAR_3, **VAR_4):
self.service = VAR_2 or VAR_9.get_default_image_service()
self.service.__init__(*VAR_3, **VAR_4)
def FUNC_0(self, VAR_5, VAR_6):
return nova.db.api.s3_image_get(VAR_5, VAR_6)['uuid']
def FUNC_1(self, VAR_5, VAR_7):
return nova.db.api.s3_image_get_by_uuid(VAR_5, VAR_7)['id']
def FUNC_2(self, VAR_5, VAR_7):
return nova.db.api.s3_image_create(VAR_5, VAR_7)['id']
def FUNC_3(self, VAR_5, VAR_8):
return [self._translate_uuid_to_id(VAR_5, img) for img in VAR_8]
def FUNC_4(self, VAR_5, VAR_9):
def FUNC_20(VAR_7):
if VAR_7 is None:
return
try:
return self.get_image_id(VAR_5, VAR_7)
except exception.NotFound:
return self._create_image_id(VAR_5, VAR_7)
VAR_23 = VAR_9.copy()
try:
VAR_6 = VAR_23['id']
except KeyError:
pass
else:
VAR_23['id'] = FUNC_20(VAR_6)
for VAR_48 in ['kernel_id', 'ramdisk_id']:
try:
VAR_7 = VAR_23['properties'][VAR_48]
except (KeyError, ValueError):
pass
else:
VAR_23['properties'][VAR_48] = FUNC_20(VAR_7)
return VAR_23
def FUNC_5(self, VAR_5, VAR_10, VAR_11=None):
VAR_9 = self._s3_create(VAR_5, VAR_10)
return VAR_9
def FUNC_6(self, VAR_5, VAR_6):
VAR_7 = self.get_image_uuid(VAR_5, VAR_6)
self.service.delete(VAR_5, VAR_7)
def FUNC_7(self, VAR_5, VAR_6, VAR_10, VAR_11=None):
VAR_7 = self.get_image_uuid(VAR_5, VAR_6)
VAR_9 = self.service.update(VAR_5, VAR_7, VAR_10, VAR_11)
return self._translate_uuid_to_id(VAR_5, VAR_9)
def FUNC_8(self, VAR_5):
images = self.service.index(VAR_5, sort_dir='asc')
return self._translate_uuids_to_ids(VAR_5, VAR_8)
def FUNC_9(self, VAR_5):
images = self.service.detail(VAR_5, sort_dir='asc')
return self._translate_uuids_to_ids(VAR_5, VAR_8)
def FUNC_10(self, VAR_5, VAR_6):
VAR_7 = self.get_image_uuid(VAR_5, VAR_6)
VAR_9 = self.service.show(VAR_5, VAR_7)
return self._translate_uuid_to_id(VAR_5, VAR_9)
def FUNC_11(self, VAR_5, VAR_12):
VAR_9 = self.service.show_by_name(VAR_5, VAR_12)
return self._translate_uuid_to_id(VAR_5, VAR_9)
def FUNC_12(self, VAR_5, VAR_6):
VAR_7 = self.get_image_uuid(VAR_5, VAR_6)
return self.get(self, VAR_5, VAR_7)
@staticmethod
def FUNC_13(VAR_5):
VAR_24 = VAR_1.s3_access_key
VAR_25 = VAR_1.s3_secret_key
VAR_26 = boto.s3.connection.OrdinaryCallingFormat()
return boto.s3.connection.S3Connection(aws_access_key_id=VAR_24,
aws_secret_access_key=VAR_25,
is_secure=False,
calling_format=VAR_26,
port=VAR_1.s3_port,
host=VAR_1.s3_host)
@staticmethod
def FUNC_14(VAR_13, VAR_14, VAR_15):
VAR_27 = VAR_13.get_key(VAR_14)
VAR_28 = os.path.join(VAR_15, os.path.basename(VAR_14))
VAR_27.get_contents_to_filename(VAR_28)
return VAR_28
def FUNC_15(self, VAR_5, VAR_10, VAR_16):
manifest = ElementTree.fromstring(VAR_16)
VAR_29 = 'ami'
VAR_30 = 'machine'
try:
VAR_43 = VAR_16.find('machine_configuration/kernel_id').text
if VAR_43 == 'true':
VAR_29 = 'aki'
VAR_30 = 'kernel'
VAR_43 = None
except Exception:
VAR_43 = None
try:
VAR_44 = VAR_16.find('machine_configuration/ramdisk_id').text
if VAR_44 == 'true':
VAR_29 = 'ari'
VAR_30 = 'ramdisk'
VAR_44 = None
except Exception:
VAR_44 = None
try:
VAR_45 = VAR_16.find('machine_configuration/architecture').text
except Exception:
VAR_45 = 'x86_64'
VAR_31 = []
try:
VAR_46 = VAR_16.findall('machine_configuration/'
'block_device_mapping/'
'mapping')
for bdm in VAR_46:
VAR_31.append({'virtual': bdm.find('virtual').text,
'device': bdm.find('device').text})
except Exception:
VAR_31 = []
VAR_32 = VAR_10['properties']
VAR_32['project_id'] = VAR_5.project_id
VAR_32['architecture'] = VAR_45
def FUNC_21(VAR_33, VAR_6):
VAR_6 = ec2utils.ec2_id_to_id(VAR_6)
VAR_7 = self.get_image_uuid(VAR_5, VAR_6)
VAR_32['image_id'] = VAR_7
if VAR_43:
FUNC_21('kernel_id', VAR_43)
if VAR_44:
FUNC_21('ramdisk_id', VAR_44)
if VAR_31:
VAR_32['mappings'] = VAR_31
VAR_10.update({'disk_format': VAR_29,
'container_format': VAR_29,
'status': 'queued',
'is_public': False,
'properties': VAR_32})
VAR_10['properties']['image_state'] = 'pending'
VAR_6 = VAR_10.pop('id', None)
VAR_9 = self.service.create(VAR_5, VAR_10)
VAR_7 = VAR_9['id']
VAR_9['id'] = self._create_image_id(VAR_5, VAR_7)
return VAR_16, VAR_9, VAR_7
def FUNC_16(self, VAR_5, VAR_10):
VAR_34 = tempfile.mkdtemp(dir=VAR_1.image_decryption_dir)
VAR_35 = VAR_10['properties']['image_location']
VAR_36 = VAR_35.split('/')[0]
VAR_37 = VAR_35[len(VAR_36) + 1:]
VAR_13 = self._conn(VAR_5).get_bucket(VAR_36)
VAR_27 = VAR_13.get_key(VAR_37)
VAR_16 = VAR_27.get_contents_as_string()
VAR_16, VAR_9, VAR_7 = self._s3_parse_manifest(VAR_5,
VAR_10,
VAR_16)
def FUNC_22():
VAR_47 = {'image_location': VAR_35,
'image_path': VAR_34}
VAR_10['properties']['image_state'] = 'downloading'
self.service.update(VAR_5, VAR_7, VAR_10)
try:
VAR_49 = []
VAR_50 = VAR_16.find('image').getiterator('filename')
for fn_element in VAR_50:
VAR_57 = self._download_file(VAR_13,
fn_element.text,
VAR_34)
VAR_49.append(VAR_57)
VAR_51 = os.path.join(VAR_34, 'image.encrypted')
with open(VAR_51, 'w') as combined:
for VAR_14 in VAR_49:
with open(VAR_14) as VAR_57:
shutil.copyfileobj(VAR_57, combined)
except Exception:
VAR_0.exception(_("Failed to download %(VAR_35)s "
"to %(VAR_34)s"), VAR_47)
VAR_10['properties']['image_state'] = 'failed_download'
self.service.update(VAR_5, VAR_7, VAR_10)
return
VAR_10['properties']['image_state'] = 'decrypting'
self.service.update(VAR_5, VAR_7, VAR_10)
try:
VAR_52 = VAR_16.find('image/ec2_encrypted_key').text
VAR_18 = binascii.a2b_hex(VAR_52)
VAR_53 = VAR_16.find('image/ec2_encrypted_iv').text
VAR_19 = binascii.a2b_hex(VAR_53)
VAR_54 = crypto.key_path(VAR_5.project_id)
VAR_55 = os.path.join(VAR_34, 'image.tar.gz')
self._decrypt_image(VAR_51, VAR_18,
VAR_19, VAR_54,
VAR_55)
except Exception:
VAR_0.exception(_("Failed to decrypt %(VAR_35)s "
"to %(VAR_34)s"), VAR_47)
VAR_10['properties']['image_state'] = 'failed_decrypt'
self.service.update(VAR_5, VAR_7, VAR_10)
return
VAR_10['properties']['image_state'] = 'untarring'
self.service.update(VAR_5, VAR_7, VAR_10)
try:
VAR_56 = self._untarzip_image(VAR_34, VAR_55)
except Exception:
VAR_0.exception(_("Failed to untar %(VAR_35)s "
"to %(VAR_34)s"), VAR_47)
VAR_10['properties']['image_state'] = 'failed_untar'
self.service.update(VAR_5, VAR_7, VAR_10)
return
VAR_10['properties']['image_state'] = 'uploading'
self.service.update(VAR_5, VAR_7, VAR_10)
try:
with open(VAR_56) as VAR_42:
self.service.update(VAR_5, VAR_7,
VAR_10, VAR_42)
except Exception:
VAR_0.exception(_("Failed to upload %(VAR_35)s "
"to %(VAR_34)s"), VAR_47)
VAR_10['properties']['image_state'] = 'failed_upload'
self.service.update(VAR_5, VAR_7, VAR_10)
return
VAR_10['properties']['image_state'] = 'available'
VAR_10['status'] = 'active'
self.service.update(VAR_5, VAR_7, VAR_10)
shutil.rmtree(VAR_34)
eventlet.spawn_n(FUNC_22)
return VAR_9
@staticmethod
def FUNC_17(VAR_17, VAR_18, VAR_19,
VAR_20, VAR_21):
VAR_27, VAR_38 = utils.execute('openssl',
'rsautl',
'-decrypt',
'-inkey', '%s' % VAR_20,
process_input=VAR_18,
check_exit_code=False)
if VAR_38:
raise exception.Error(_('Failed to decrypt private VAR_27: %s')
% VAR_38)
VAR_39, VAR_38 = utils.execute('openssl',
'rsautl',
'-decrypt',
'-inkey', '%s' % VAR_20,
process_input=VAR_19,
check_exit_code=False)
if VAR_38:
raise exception.Error(_('Failed to decrypt initialization '
'vector: %s') % VAR_38)
VAR_40, VAR_38 = utils.execute('openssl', 'enc',
'-d', '-aes-128-cbc',
'-in', '%s' % (VAR_17,),
'-K', '%s' % (VAR_27,),
'-iv', '%s' % (VAR_39,),
'-out', '%s' % (VAR_21,),
check_exit_code=False)
if VAR_38:
raise exception.Error(_('Failed to decrypt VAR_9 file '
'%(VAR_42)s: %(VAR_38)s') %
{'image_file': VAR_17,
'err': VAR_38})
@staticmethod
def FUNC_18(VAR_22, VAR_14):
VAR_41 = tarfile.open(VAR_14, 'r|gz')
for n in VAR_41.getnames():
if not os.path.abspath(os.path.join(VAR_22, n)).startswith(VAR_22):
VAR_41.close()
raise exception.Error(_('Unsafe filenames in image'))
VAR_41.close()
@staticmethod
def FUNC_19(VAR_22, VAR_14):
CLASS_0._test_for_malicious_tarball(VAR_22, VAR_14)
VAR_41 = tarfile.open(VAR_14, 'r|gz')
VAR_41.extractall(VAR_22)
VAR_42 = VAR_41.getnames()[0]
VAR_41.close()
return os.path.join(VAR_22, VAR_42)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
20,
27,
30,
39,
40,
49,
50,
53,
57,
60,
63,
66,
69,
78,
80,
87,
95,
97,
100,
102,
106,
110,
115,
117,
118,
121,
123,
124,
127,
132,
136,
140,
143,
144,
154,
161,
166,
175,
184,
189,
190,
191,
192,
193,
194,
195,
196,
197,
198,
199,
200,
211,
215,
220,
223,
226,
229,
236,
237,
238,
240,
242,
243,
246,
247,
249,
252,
254,
261,
265,
272,
281,
282,
288,
295,
298,
304,
305,
306,
308,
319,
322,
331,
344,
348,
350,
352,
354,
376,
389,
397,
19,
52,
99,
100,
101,
102,
103,
251,
267
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
20,
27,
30,
39,
40,
49,
50,
53,
57,
60,
63,
66,
69,
78,
80,
87,
95,
97,
100,
102,
106,
110,
115,
117,
118,
121,
123,
124,
127,
132,
136,
140,
143,
144,
154,
161,
166,
175,
184,
189,
190,
191,
192,
193,
194,
195,
196,
197,
198,
199,
200,
211,
215,
220,
223,
226,
229,
236,
237,
238,
240,
242,
243,
246,
247,
249,
252,
254,
261,
265,
272,
281,
282,
288,
295,
298,
304,
305,
306,
308,
319,
322,
331,
344,
348,
350,
352,
354,
376,
389,
399,
408,
19,
52,
99,
100,
101,
102,
103,
251,
392,
267
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import synapse.api.errors
import synapse.handlers.device
import synapse.storage
from tests import unittest
user1 = "@boris:aaa"
user2 = "@theresa:bbb"
class DeviceTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver("server", http_client=None)
self.handler = hs.get_device_handler()
self.store = hs.get_datastore()
return hs
def prepare(self, reactor, clock, hs):
# These tests assume that it starts 1000 seconds in.
self.reactor.advance(1000)
def test_device_is_created_with_invalid_name(self):
self.get_failure(
self.handler.check_device_registered(
user_id="@boris:foo",
device_id="foo",
initial_device_display_name="a"
* (synapse.handlers.device.MAX_DEVICE_DISPLAY_NAME_LEN + 1),
),
synapse.api.errors.SynapseError,
)
def test_device_is_created_if_doesnt_exist(self):
res = self.get_success(
self.handler.check_device_registered(
user_id="@boris:foo",
device_id="fco",
initial_device_display_name="display name",
)
)
self.assertEqual(res, "fco")
dev = self.get_success(self.handler.store.get_device("@boris:foo", "fco"))
self.assertEqual(dev["display_name"], "display name")
def test_device_is_preserved_if_exists(self):
res1 = self.get_success(
self.handler.check_device_registered(
user_id="@boris:foo",
device_id="fco",
initial_device_display_name="display name",
)
)
self.assertEqual(res1, "fco")
res2 = self.get_success(
self.handler.check_device_registered(
user_id="@boris:foo",
device_id="fco",
initial_device_display_name="new display name",
)
)
self.assertEqual(res2, "fco")
dev = self.get_success(self.handler.store.get_device("@boris:foo", "fco"))
self.assertEqual(dev["display_name"], "display name")
def test_device_id_is_made_up_if_unspecified(self):
device_id = self.get_success(
self.handler.check_device_registered(
user_id="@theresa:foo",
device_id=None,
initial_device_display_name="display",
)
)
dev = self.get_success(self.handler.store.get_device("@theresa:foo", device_id))
self.assertEqual(dev["display_name"], "display")
def test_get_devices_by_user(self):
self._record_users()
res = self.get_success(self.handler.get_devices_by_user(user1))
self.assertEqual(3, len(res))
device_map = {d["device_id"]: d for d in res}
self.assertDictContainsSubset(
{
"user_id": user1,
"device_id": "xyz",
"display_name": "display 0",
"last_seen_ip": None,
"last_seen_ts": None,
},
device_map["xyz"],
)
self.assertDictContainsSubset(
{
"user_id": user1,
"device_id": "fco",
"display_name": "display 1",
"last_seen_ip": "ip1",
"last_seen_ts": 1000000,
},
device_map["fco"],
)
self.assertDictContainsSubset(
{
"user_id": user1,
"device_id": "abc",
"display_name": "display 2",
"last_seen_ip": "ip3",
"last_seen_ts": 3000000,
},
device_map["abc"],
)
def test_get_device(self):
self._record_users()
res = self.get_success(self.handler.get_device(user1, "abc"))
self.assertDictContainsSubset(
{
"user_id": user1,
"device_id": "abc",
"display_name": "display 2",
"last_seen_ip": "ip3",
"last_seen_ts": 3000000,
},
res,
)
def test_delete_device(self):
self._record_users()
# delete the device
self.get_success(self.handler.delete_device(user1, "abc"))
# check the device was deleted
self.get_failure(
self.handler.get_device(user1, "abc"), synapse.api.errors.NotFoundError
)
# we'd like to check the access token was invalidated, but that's a
# bit of a PITA.
def test_update_device(self):
self._record_users()
update = {"display_name": "new display"}
self.get_success(self.handler.update_device(user1, "abc", update))
res = self.get_success(self.handler.get_device(user1, "abc"))
self.assertEqual(res["display_name"], "new display")
def test_update_device_too_long_display_name(self):
"""Update a device with a display name that is invalid (too long)."""
self._record_users()
# Request to update a device display name with a new value that is longer than allowed.
update = {
"display_name": "a"
* (synapse.handlers.device.MAX_DEVICE_DISPLAY_NAME_LEN + 1)
}
self.get_failure(
self.handler.update_device(user1, "abc", update),
synapse.api.errors.SynapseError,
)
# Ensure the display name was not updated.
res = self.get_success(self.handler.get_device(user1, "abc"))
self.assertEqual(res["display_name"], "display 2")
def test_update_unknown_device(self):
update = {"display_name": "new_display"}
self.get_failure(
self.handler.update_device("user_id", "unknown_device_id", update),
synapse.api.errors.NotFoundError,
)
def _record_users(self):
# check this works for both devices which have a recorded client_ip,
# and those which don't.
self._record_user(user1, "xyz", "display 0")
self._record_user(user1, "fco", "display 1", "token1", "ip1")
self._record_user(user1, "abc", "display 2", "token2", "ip2")
self._record_user(user1, "abc", "display 2", "token3", "ip3")
self._record_user(user2, "def", "dispkay", "token4", "ip4")
self.reactor.advance(10000)
def _record_user(
self, user_id, device_id, display_name, access_token=None, ip=None
):
device_id = self.get_success(
self.handler.check_device_registered(
user_id=user_id,
device_id=device_id,
initial_device_display_name=display_name,
)
)
if ip is not None:
self.get_success(
self.store.insert_client_ip(
user_id, access_token, ip, "user_agent", device_id
)
)
self.reactor.advance(1000)
class DehydrationTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver("server", http_client=None)
self.handler = hs.get_device_handler()
self.registration = hs.get_registration_handler()
self.auth = hs.get_auth()
self.store = hs.get_datastore()
return hs
def test_dehydrate_and_rehydrate_device(self):
user_id = "@boris:dehydration"
self.get_success(self.store.register_user(user_id, "foobar"))
# First check if we can store and fetch a dehydrated device
stored_dehydrated_device_id = self.get_success(
self.handler.store_dehydrated_device(
user_id=user_id,
device_data={"device_data": {"foo": "bar"}},
initial_device_display_name="dehydrated device",
)
)
retrieved_device_id, device_data = self.get_success(
self.handler.get_dehydrated_device(user_id=user_id)
)
self.assertEqual(retrieved_device_id, stored_dehydrated_device_id)
self.assertEqual(device_data, {"device_data": {"foo": "bar"}})
# Create a new login for the user and dehydrated the device
device_id, access_token = self.get_success(
self.registration.register_device(
user_id=user_id, device_id=None, initial_display_name="new device",
)
)
# Trying to claim a nonexistent device should throw an error
self.get_failure(
self.handler.rehydrate_device(
user_id=user_id,
access_token=access_token,
device_id="not the right device ID",
),
synapse.api.errors.NotFoundError,
)
# dehydrating the right devices should succeed and change our device ID
# to the dehydrated device's ID
res = self.get_success(
self.handler.rehydrate_device(
user_id=user_id,
access_token=access_token,
device_id=retrieved_device_id,
)
)
self.assertEqual(res, {"success": True})
# make sure that our device ID has changed
user_info = self.get_success(self.auth.get_user_by_access_token(access_token))
self.assertEqual(user_info.device_id, retrieved_device_id)
# make sure the device has the display name that was set from the login
res = self.get_success(self.handler.get_device(user_id, retrieved_device_id))
self.assertEqual(res["display_name"], "new device")
# make sure that the device ID that we were initially assigned no longer exists
self.get_failure(
self.handler.get_device(user_id, device_id),
synapse.api.errors.NotFoundError,
)
# make sure that there's no device available for dehydrating now
ret = self.get_success(self.handler.get_dehydrated_device(user_id=user_id))
self.assertIsNone(ret)
| # -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import synapse.api.errors
import synapse.handlers.device
import synapse.storage
from tests import unittest
user1 = "@boris:aaa"
user2 = "@theresa:bbb"
class DeviceTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver("server", federation_http_client=None)
self.handler = hs.get_device_handler()
self.store = hs.get_datastore()
return hs
def prepare(self, reactor, clock, hs):
# These tests assume that it starts 1000 seconds in.
self.reactor.advance(1000)
def test_device_is_created_with_invalid_name(self):
self.get_failure(
self.handler.check_device_registered(
user_id="@boris:foo",
device_id="foo",
initial_device_display_name="a"
* (synapse.handlers.device.MAX_DEVICE_DISPLAY_NAME_LEN + 1),
),
synapse.api.errors.SynapseError,
)
def test_device_is_created_if_doesnt_exist(self):
res = self.get_success(
self.handler.check_device_registered(
user_id="@boris:foo",
device_id="fco",
initial_device_display_name="display name",
)
)
self.assertEqual(res, "fco")
dev = self.get_success(self.handler.store.get_device("@boris:foo", "fco"))
self.assertEqual(dev["display_name"], "display name")
def test_device_is_preserved_if_exists(self):
res1 = self.get_success(
self.handler.check_device_registered(
user_id="@boris:foo",
device_id="fco",
initial_device_display_name="display name",
)
)
self.assertEqual(res1, "fco")
res2 = self.get_success(
self.handler.check_device_registered(
user_id="@boris:foo",
device_id="fco",
initial_device_display_name="new display name",
)
)
self.assertEqual(res2, "fco")
dev = self.get_success(self.handler.store.get_device("@boris:foo", "fco"))
self.assertEqual(dev["display_name"], "display name")
def test_device_id_is_made_up_if_unspecified(self):
device_id = self.get_success(
self.handler.check_device_registered(
user_id="@theresa:foo",
device_id=None,
initial_device_display_name="display",
)
)
dev = self.get_success(self.handler.store.get_device("@theresa:foo", device_id))
self.assertEqual(dev["display_name"], "display")
def test_get_devices_by_user(self):
self._record_users()
res = self.get_success(self.handler.get_devices_by_user(user1))
self.assertEqual(3, len(res))
device_map = {d["device_id"]: d for d in res}
self.assertDictContainsSubset(
{
"user_id": user1,
"device_id": "xyz",
"display_name": "display 0",
"last_seen_ip": None,
"last_seen_ts": None,
},
device_map["xyz"],
)
self.assertDictContainsSubset(
{
"user_id": user1,
"device_id": "fco",
"display_name": "display 1",
"last_seen_ip": "ip1",
"last_seen_ts": 1000000,
},
device_map["fco"],
)
self.assertDictContainsSubset(
{
"user_id": user1,
"device_id": "abc",
"display_name": "display 2",
"last_seen_ip": "ip3",
"last_seen_ts": 3000000,
},
device_map["abc"],
)
def test_get_device(self):
self._record_users()
res = self.get_success(self.handler.get_device(user1, "abc"))
self.assertDictContainsSubset(
{
"user_id": user1,
"device_id": "abc",
"display_name": "display 2",
"last_seen_ip": "ip3",
"last_seen_ts": 3000000,
},
res,
)
def test_delete_device(self):
self._record_users()
# delete the device
self.get_success(self.handler.delete_device(user1, "abc"))
# check the device was deleted
self.get_failure(
self.handler.get_device(user1, "abc"), synapse.api.errors.NotFoundError
)
# we'd like to check the access token was invalidated, but that's a
# bit of a PITA.
def test_update_device(self):
self._record_users()
update = {"display_name": "new display"}
self.get_success(self.handler.update_device(user1, "abc", update))
res = self.get_success(self.handler.get_device(user1, "abc"))
self.assertEqual(res["display_name"], "new display")
def test_update_device_too_long_display_name(self):
"""Update a device with a display name that is invalid (too long)."""
self._record_users()
# Request to update a device display name with a new value that is longer than allowed.
update = {
"display_name": "a"
* (synapse.handlers.device.MAX_DEVICE_DISPLAY_NAME_LEN + 1)
}
self.get_failure(
self.handler.update_device(user1, "abc", update),
synapse.api.errors.SynapseError,
)
# Ensure the display name was not updated.
res = self.get_success(self.handler.get_device(user1, "abc"))
self.assertEqual(res["display_name"], "display 2")
def test_update_unknown_device(self):
update = {"display_name": "new_display"}
self.get_failure(
self.handler.update_device("user_id", "unknown_device_id", update),
synapse.api.errors.NotFoundError,
)
def _record_users(self):
# check this works for both devices which have a recorded client_ip,
# and those which don't.
self._record_user(user1, "xyz", "display 0")
self._record_user(user1, "fco", "display 1", "token1", "ip1")
self._record_user(user1, "abc", "display 2", "token2", "ip2")
self._record_user(user1, "abc", "display 2", "token3", "ip3")
self._record_user(user2, "def", "dispkay", "token4", "ip4")
self.reactor.advance(10000)
def _record_user(
self, user_id, device_id, display_name, access_token=None, ip=None
):
device_id = self.get_success(
self.handler.check_device_registered(
user_id=user_id,
device_id=device_id,
initial_device_display_name=display_name,
)
)
if ip is not None:
self.get_success(
self.store.insert_client_ip(
user_id, access_token, ip, "user_agent", device_id
)
)
self.reactor.advance(1000)
class DehydrationTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver("server", federation_http_client=None)
self.handler = hs.get_device_handler()
self.registration = hs.get_registration_handler()
self.auth = hs.get_auth()
self.store = hs.get_datastore()
return hs
def test_dehydrate_and_rehydrate_device(self):
user_id = "@boris:dehydration"
self.get_success(self.store.register_user(user_id, "foobar"))
# First check if we can store and fetch a dehydrated device
stored_dehydrated_device_id = self.get_success(
self.handler.store_dehydrated_device(
user_id=user_id,
device_data={"device_data": {"foo": "bar"}},
initial_device_display_name="dehydrated device",
)
)
retrieved_device_id, device_data = self.get_success(
self.handler.get_dehydrated_device(user_id=user_id)
)
self.assertEqual(retrieved_device_id, stored_dehydrated_device_id)
self.assertEqual(device_data, {"device_data": {"foo": "bar"}})
# Create a new login for the user and dehydrated the device
device_id, access_token = self.get_success(
self.registration.register_device(
user_id=user_id, device_id=None, initial_display_name="new device",
)
)
# Trying to claim a nonexistent device should throw an error
self.get_failure(
self.handler.rehydrate_device(
user_id=user_id,
access_token=access_token,
device_id="not the right device ID",
),
synapse.api.errors.NotFoundError,
)
# dehydrating the right devices should succeed and change our device ID
# to the dehydrated device's ID
res = self.get_success(
self.handler.rehydrate_device(
user_id=user_id,
access_token=access_token,
device_id=retrieved_device_id,
)
)
self.assertEqual(res, {"success": True})
# make sure that our device ID has changed
user_info = self.get_success(self.auth.get_user_by_access_token(access_token))
self.assertEqual(user_info.device_id, retrieved_device_id)
# make sure the device has the display name that was set from the login
res = self.get_success(self.handler.get_device(user_id, retrieved_device_id))
self.assertEqual(res["display_name"], "new device")
# make sure that the device ID that we were initially assigned no longer exists
self.get_failure(
self.handler.get_device(user_id, device_id),
synapse.api.errors.NotFoundError,
)
# make sure that there's no device available for dehydrating now
ret = self.get_success(self.handler.get_dehydrated_device(user_id=user_id))
self.assertIsNone(ret)
| open_redirect | {
"code": [
" hs = self.setup_test_homeserver(\"server\", http_client=None)",
" hs = self.setup_test_homeserver(\"server\", http_client=None)"
],
"line_no": [
30,
232
]
} | {
"code": [
" hs = self.setup_test_homeserver(\"server\", federation_http_client=None)",
" hs = self.setup_test_homeserver(\"server\", federation_http_client=None)"
],
"line_no": [
30,
232
]
} |
import synapse.api.errors
import synapse.handlers.device
import synapse.storage
from tests import unittest
VAR_0 = "@boris:aaa"
VAR_1 = "@theresa:bbb"
class CLASS_0(unittest.HomeserverTestCase):
def FUNC_0(self, VAR_2, VAR_3):
VAR_4 = self.setup_test_homeserver("server", http_client=None)
self.handler = VAR_4.get_device_handler()
self.store = VAR_4.get_datastore()
return VAR_4
def FUNC_1(self, VAR_2, VAR_3, VAR_4):
self.reactor.advance(1000)
def FUNC_2(self):
self.get_failure(
self.handler.check_device_registered(
VAR_5="@boris:foo",
VAR_6="foo",
initial_device_display_name="a"
* (synapse.handlers.device.MAX_DEVICE_DISPLAY_NAME_LEN + 1),
),
synapse.api.errors.SynapseError,
)
def FUNC_3(self):
VAR_10 = self.get_success(
self.handler.check_device_registered(
VAR_5="@boris:foo",
VAR_6="fco",
initial_device_display_name="display name",
)
)
self.assertEqual(VAR_10, "fco")
VAR_11 = self.get_success(self.handler.store.get_device("@boris:foo", "fco"))
self.assertEqual(VAR_11["display_name"], "display name")
def FUNC_4(self):
VAR_12 = self.get_success(
self.handler.check_device_registered(
VAR_5="@boris:foo",
VAR_6="fco",
initial_device_display_name="display name",
)
)
self.assertEqual(VAR_12, "fco")
VAR_13 = self.get_success(
self.handler.check_device_registered(
VAR_5="@boris:foo",
VAR_6="fco",
initial_device_display_name="new display name",
)
)
self.assertEqual(VAR_13, "fco")
VAR_11 = self.get_success(self.handler.store.get_device("@boris:foo", "fco"))
self.assertEqual(VAR_11["display_name"], "display name")
def FUNC_5(self):
VAR_6 = self.get_success(
self.handler.check_device_registered(
VAR_5="@theresa:foo",
VAR_6=None,
initial_device_display_name="display",
)
)
VAR_11 = self.get_success(self.handler.store.get_device("@theresa:foo", VAR_6))
self.assertEqual(VAR_11["display_name"], "display")
def FUNC_6(self):
self._record_users()
VAR_10 = self.get_success(self.handler.get_devices_by_user(VAR_0))
self.assertEqual(3, len(VAR_10))
VAR_14 = {d["device_id"]: d for d in VAR_10}
self.assertDictContainsSubset(
{
"user_id": VAR_0,
"device_id": "xyz",
"display_name": "display 0",
"last_seen_ip": None,
"last_seen_ts": None,
},
VAR_14["xyz"],
)
self.assertDictContainsSubset(
{
"user_id": VAR_0,
"device_id": "fco",
"display_name": "display 1",
"last_seen_ip": "ip1",
"last_seen_ts": 1000000,
},
VAR_14["fco"],
)
self.assertDictContainsSubset(
{
"user_id": VAR_0,
"device_id": "abc",
"display_name": "display 2",
"last_seen_ip": "ip3",
"last_seen_ts": 3000000,
},
VAR_14["abc"],
)
def FUNC_7(self):
self._record_users()
VAR_10 = self.get_success(self.handler.get_device(VAR_0, "abc"))
self.assertDictContainsSubset(
{
"user_id": VAR_0,
"device_id": "abc",
"display_name": "display 2",
"last_seen_ip": "ip3",
"last_seen_ts": 3000000,
},
VAR_10,
)
def FUNC_8(self):
self._record_users()
self.get_success(self.handler.delete_device(VAR_0, "abc"))
self.get_failure(
self.handler.get_device(VAR_0, "abc"), synapse.api.errors.NotFoundError
)
def FUNC_9(self):
self._record_users()
VAR_15 = {"display_name": "new display"}
self.get_success(self.handler.update_device(VAR_0, "abc", VAR_15))
VAR_10 = self.get_success(self.handler.get_device(VAR_0, "abc"))
self.assertEqual(VAR_10["display_name"], "new display")
def FUNC_10(self):
self._record_users()
VAR_15 = {
"display_name": "a"
* (synapse.handlers.device.MAX_DEVICE_DISPLAY_NAME_LEN + 1)
}
self.get_failure(
self.handler.update_device(VAR_0, "abc", VAR_15),
synapse.api.errors.SynapseError,
)
VAR_10 = self.get_success(self.handler.get_device(VAR_0, "abc"))
self.assertEqual(VAR_10["display_name"], "display 2")
def FUNC_11(self):
VAR_15 = {"display_name": "new_display"}
self.get_failure(
self.handler.update_device("user_id", "unknown_device_id", VAR_15),
synapse.api.errors.NotFoundError,
)
def FUNC_12(self):
self._record_user(VAR_0, "xyz", "display 0")
self._record_user(VAR_0, "fco", "display 1", "token1", "ip1")
self._record_user(VAR_0, "abc", "display 2", "token2", "ip2")
self._record_user(VAR_0, "abc", "display 2", "token3", "ip3")
self._record_user(VAR_1, "def", "dispkay", "token4", "ip4")
self.reactor.advance(10000)
def FUNC_13(
self, VAR_5, VAR_6, VAR_7, VAR_8=None, VAR_9=None
):
VAR_6 = self.get_success(
self.handler.check_device_registered(
VAR_5=user_id,
VAR_6=device_id,
initial_device_display_name=VAR_7,
)
)
if VAR_9 is not None:
self.get_success(
self.store.insert_client_ip(
VAR_5, VAR_8, VAR_9, "user_agent", VAR_6
)
)
self.reactor.advance(1000)
class CLASS_1(unittest.HomeserverTestCase):
def FUNC_0(self, VAR_2, VAR_3):
VAR_4 = self.setup_test_homeserver("server", http_client=None)
self.handler = VAR_4.get_device_handler()
self.registration = VAR_4.get_registration_handler()
self.auth = VAR_4.get_auth()
self.store = VAR_4.get_datastore()
return VAR_4
def FUNC_14(self):
VAR_5 = "@boris:dehydration"
self.get_success(self.store.register_user(VAR_5, "foobar"))
VAR_16 = self.get_success(
self.handler.store_dehydrated_device(
VAR_5=user_id,
VAR_18={"device_data": {"foo": "bar"}},
initial_device_display_name="dehydrated device",
)
)
VAR_17, VAR_18 = self.get_success(
self.handler.get_dehydrated_device(VAR_5=user_id)
)
self.assertEqual(VAR_17, VAR_16)
self.assertEqual(VAR_18, {"device_data": {"foo": "bar"}})
VAR_6, VAR_8 = self.get_success(
self.registration.register_device(
VAR_5=user_id, VAR_6=None, initial_display_name="new device",
)
)
self.get_failure(
self.handler.rehydrate_device(
VAR_5=user_id,
VAR_8=access_token,
VAR_6="not the right device ID",
),
synapse.api.errors.NotFoundError,
)
VAR_10 = self.get_success(
self.handler.rehydrate_device(
VAR_5=user_id,
VAR_8=access_token,
VAR_6=VAR_17,
)
)
self.assertEqual(VAR_10, {"success": True})
VAR_19 = self.get_success(self.auth.get_user_by_access_token(VAR_8))
self.assertEqual(VAR_19.device_id, VAR_17)
VAR_10 = self.get_success(self.handler.get_device(VAR_5, VAR_17))
self.assertEqual(VAR_10["display_name"], "new device")
self.get_failure(
self.handler.get_device(VAR_5, VAR_6),
synapse.api.errors.NotFoundError,
)
VAR_20 = self.get_success(self.handler.get_dehydrated_device(VAR_5=user_id))
self.assertIsNone(VAR_20)
|
import synapse.api.errors
import synapse.handlers.device
import synapse.storage
from tests import unittest
VAR_0 = "@boris:aaa"
VAR_1 = "@theresa:bbb"
class CLASS_0(unittest.HomeserverTestCase):
def FUNC_0(self, VAR_2, VAR_3):
VAR_4 = self.setup_test_homeserver("server", federation_http_client=None)
self.handler = VAR_4.get_device_handler()
self.store = VAR_4.get_datastore()
return VAR_4
def FUNC_1(self, VAR_2, VAR_3, VAR_4):
self.reactor.advance(1000)
def FUNC_2(self):
self.get_failure(
self.handler.check_device_registered(
VAR_5="@boris:foo",
VAR_6="foo",
initial_device_display_name="a"
* (synapse.handlers.device.MAX_DEVICE_DISPLAY_NAME_LEN + 1),
),
synapse.api.errors.SynapseError,
)
def FUNC_3(self):
VAR_10 = self.get_success(
self.handler.check_device_registered(
VAR_5="@boris:foo",
VAR_6="fco",
initial_device_display_name="display name",
)
)
self.assertEqual(VAR_10, "fco")
VAR_11 = self.get_success(self.handler.store.get_device("@boris:foo", "fco"))
self.assertEqual(VAR_11["display_name"], "display name")
def FUNC_4(self):
VAR_12 = self.get_success(
self.handler.check_device_registered(
VAR_5="@boris:foo",
VAR_6="fco",
initial_device_display_name="display name",
)
)
self.assertEqual(VAR_12, "fco")
VAR_13 = self.get_success(
self.handler.check_device_registered(
VAR_5="@boris:foo",
VAR_6="fco",
initial_device_display_name="new display name",
)
)
self.assertEqual(VAR_13, "fco")
VAR_11 = self.get_success(self.handler.store.get_device("@boris:foo", "fco"))
self.assertEqual(VAR_11["display_name"], "display name")
def FUNC_5(self):
VAR_6 = self.get_success(
self.handler.check_device_registered(
VAR_5="@theresa:foo",
VAR_6=None,
initial_device_display_name="display",
)
)
VAR_11 = self.get_success(self.handler.store.get_device("@theresa:foo", VAR_6))
self.assertEqual(VAR_11["display_name"], "display")
def FUNC_6(self):
self._record_users()
VAR_10 = self.get_success(self.handler.get_devices_by_user(VAR_0))
self.assertEqual(3, len(VAR_10))
VAR_14 = {d["device_id"]: d for d in VAR_10}
self.assertDictContainsSubset(
{
"user_id": VAR_0,
"device_id": "xyz",
"display_name": "display 0",
"last_seen_ip": None,
"last_seen_ts": None,
},
VAR_14["xyz"],
)
self.assertDictContainsSubset(
{
"user_id": VAR_0,
"device_id": "fco",
"display_name": "display 1",
"last_seen_ip": "ip1",
"last_seen_ts": 1000000,
},
VAR_14["fco"],
)
self.assertDictContainsSubset(
{
"user_id": VAR_0,
"device_id": "abc",
"display_name": "display 2",
"last_seen_ip": "ip3",
"last_seen_ts": 3000000,
},
VAR_14["abc"],
)
def FUNC_7(self):
self._record_users()
VAR_10 = self.get_success(self.handler.get_device(VAR_0, "abc"))
self.assertDictContainsSubset(
{
"user_id": VAR_0,
"device_id": "abc",
"display_name": "display 2",
"last_seen_ip": "ip3",
"last_seen_ts": 3000000,
},
VAR_10,
)
def FUNC_8(self):
self._record_users()
self.get_success(self.handler.delete_device(VAR_0, "abc"))
self.get_failure(
self.handler.get_device(VAR_0, "abc"), synapse.api.errors.NotFoundError
)
def FUNC_9(self):
self._record_users()
VAR_15 = {"display_name": "new display"}
self.get_success(self.handler.update_device(VAR_0, "abc", VAR_15))
VAR_10 = self.get_success(self.handler.get_device(VAR_0, "abc"))
self.assertEqual(VAR_10["display_name"], "new display")
def FUNC_10(self):
self._record_users()
VAR_15 = {
"display_name": "a"
* (synapse.handlers.device.MAX_DEVICE_DISPLAY_NAME_LEN + 1)
}
self.get_failure(
self.handler.update_device(VAR_0, "abc", VAR_15),
synapse.api.errors.SynapseError,
)
VAR_10 = self.get_success(self.handler.get_device(VAR_0, "abc"))
self.assertEqual(VAR_10["display_name"], "display 2")
def FUNC_11(self):
VAR_15 = {"display_name": "new_display"}
self.get_failure(
self.handler.update_device("user_id", "unknown_device_id", VAR_15),
synapse.api.errors.NotFoundError,
)
def FUNC_12(self):
self._record_user(VAR_0, "xyz", "display 0")
self._record_user(VAR_0, "fco", "display 1", "token1", "ip1")
self._record_user(VAR_0, "abc", "display 2", "token2", "ip2")
self._record_user(VAR_0, "abc", "display 2", "token3", "ip3")
self._record_user(VAR_1, "def", "dispkay", "token4", "ip4")
self.reactor.advance(10000)
def FUNC_13(
self, VAR_5, VAR_6, VAR_7, VAR_8=None, VAR_9=None
):
VAR_6 = self.get_success(
self.handler.check_device_registered(
VAR_5=user_id,
VAR_6=device_id,
initial_device_display_name=VAR_7,
)
)
if VAR_9 is not None:
self.get_success(
self.store.insert_client_ip(
VAR_5, VAR_8, VAR_9, "user_agent", VAR_6
)
)
self.reactor.advance(1000)
class CLASS_1(unittest.HomeserverTestCase):
def FUNC_0(self, VAR_2, VAR_3):
VAR_4 = self.setup_test_homeserver("server", federation_http_client=None)
self.handler = VAR_4.get_device_handler()
self.registration = VAR_4.get_registration_handler()
self.auth = VAR_4.get_auth()
self.store = VAR_4.get_datastore()
return VAR_4
def FUNC_14(self):
VAR_5 = "@boris:dehydration"
self.get_success(self.store.register_user(VAR_5, "foobar"))
VAR_16 = self.get_success(
self.handler.store_dehydrated_device(
VAR_5=user_id,
VAR_18={"device_data": {"foo": "bar"}},
initial_device_display_name="dehydrated device",
)
)
VAR_17, VAR_18 = self.get_success(
self.handler.get_dehydrated_device(VAR_5=user_id)
)
self.assertEqual(VAR_17, VAR_16)
self.assertEqual(VAR_18, {"device_data": {"foo": "bar"}})
VAR_6, VAR_8 = self.get_success(
self.registration.register_device(
VAR_5=user_id, VAR_6=None, initial_display_name="new device",
)
)
self.get_failure(
self.handler.rehydrate_device(
VAR_5=user_id,
VAR_8=access_token,
VAR_6="not the right device ID",
),
synapse.api.errors.NotFoundError,
)
VAR_10 = self.get_success(
self.handler.rehydrate_device(
VAR_5=user_id,
VAR_8=access_token,
VAR_6=VAR_17,
)
)
self.assertEqual(VAR_10, {"success": True})
VAR_19 = self.get_success(self.auth.get_user_by_access_token(VAR_8))
self.assertEqual(VAR_19.device_id, VAR_17)
VAR_10 = self.get_success(self.handler.get_device(VAR_5, VAR_17))
self.assertEqual(VAR_10["display_name"], "new device")
self.get_failure(
self.handler.get_device(VAR_5, VAR_6),
synapse.api.errors.NotFoundError,
)
VAR_20 = self.get_success(self.handler.get_dehydrated_device(VAR_5=user_id))
self.assertIsNone(VAR_20)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
21,
23,
26,
27,
34,
36,
38,
49,
59,
62,
72,
81,
84,
93,
96,
99,
101,
134,
137,
149,
152,
153,
155,
156,
160,
161,
162,
163,
166,
169,
172,
176,
177,
186,
187,
190,
197,
199,
200,
205,
207,
209,
220,
228,
229,
238,
241,
243,
244,
252,
256,
259,
260,
266,
267,
276,
277,
278,
286,
288,
289,
291,
293,
294,
296,
298,
299,
304,
305,
307,
309,
174
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
21,
23,
26,
27,
34,
36,
38,
49,
59,
62,
72,
81,
84,
93,
96,
99,
101,
134,
137,
149,
152,
153,
155,
156,
160,
161,
162,
163,
166,
169,
172,
176,
177,
186,
187,
190,
197,
199,
200,
205,
207,
209,
220,
228,
229,
238,
241,
243,
244,
252,
256,
259,
260,
266,
267,
276,
277,
278,
286,
288,
289,
291,
293,
294,
296,
298,
299,
304,
305,
307,
309,
174
] |
2CWE-601
| import logging
from aiohttp import web
import os
logger = logging.getLogger(__package__)
def setup_middlewares(app):
error_middleware = error_pages({404: handle_404,
500: handle_500})
app.middlewares.append(error_middleware)
app.middlewares.append(cache_control_middleware)
# Cache-Control middleware
CACHE_MAX_AGE = int(os.getenv("CACHE_MAX_AGE", "30"))
NO_CACHE_ENDPOINTS = ['/v1/', '/v1/__version__', '/v1/__heartbeat__', '/v1/__lbheartbeat__']
async def cache_control_middleware(app, handler):
async def middleware_handler(request):
response = await handler(request)
cache_control_value = "public; max-age={}".format(CACHE_MAX_AGE)
if request.path in NO_CACHE_ENDPOINTS or CACHE_MAX_AGE <= 0:
cache_control_value = "no-cache"
response.headers.setdefault("Cache-Control", cache_control_value)
return response
return middleware_handler
# Error page middlewares
def error_pages(overrides):
async def middleware(app, handler):
async def middleware_handler(request):
try:
response = await handler(request)
override = overrides.get(response.status)
if override is None:
return response
else:
return await override(request, response)
except web.HTTPException as ex:
override = overrides.get(ex.status)
if override is None:
return await handle_any(request, ex)
else:
return await override(request, ex)
except Exception as ex:
return await handle_500(request, error=ex)
return middleware_handler
return middleware
async def handle_any(request, response):
return web.json_response({
"status": response.status,
"message": response.reason
}, status=response.status)
async def handle_404(request, response):
if 'json' not in response.headers['Content-Type']:
if request.path.endswith('/'):
return web.HTTPFound(request.path.rstrip('/'))
return web.json_response({
"status": 404,
"message": "Page '{}' not found".format(request.path)
}, status=404)
return response
async def handle_500(request, response=None, error=None):
logger.exception(error)
return web.json_response({
"status": 503,
"message": "Service currently unavailable"
}, status=503)
| import logging
from aiohttp import web
import os
logger = logging.getLogger(__package__)
def setup_middlewares(app):
error_middleware = error_pages({404: handle_404,
500: handle_500})
app.middlewares.append(error_middleware)
app.middlewares.append(cache_control_middleware)
# Cache-Control middleware
CACHE_MAX_AGE = int(os.getenv("CACHE_MAX_AGE", "30"))
NO_CACHE_ENDPOINTS = ['/v1/', '/v1/__version__', '/v1/__heartbeat__', '/v1/__lbheartbeat__']
async def cache_control_middleware(app, handler):
async def middleware_handler(request):
response = await handler(request)
cache_control_value = "public; max-age={}".format(CACHE_MAX_AGE)
if request.path in NO_CACHE_ENDPOINTS or CACHE_MAX_AGE <= 0:
cache_control_value = "no-cache"
response.headers.setdefault("Cache-Control", cache_control_value)
return response
return middleware_handler
# Error page middlewares
def error_pages(overrides):
async def middleware(app, handler):
async def middleware_handler(request):
try:
response = await handler(request)
override = overrides.get(response.status)
if override is None:
return response
else:
return await override(request, response)
except web.HTTPException as ex:
override = overrides.get(ex.status)
if override is None:
return await handle_any(request, ex)
else:
return await override(request, ex)
except Exception as ex:
return await handle_500(request, error=ex)
return middleware_handler
return middleware
async def handle_any(request, response):
return web.json_response({
"status": response.status,
"message": response.reason
}, status=response.status)
async def handle_404(request, response):
if 'json' not in response.headers['Content-Type']:
if request.path.endswith('/'):
return web.HTTPFound('/' + request.path.strip('/'))
return web.json_response({
"status": 404,
"message": "Page '{}' not found".format(request.path)
}, status=404)
return response
async def handle_500(request, response=None, error=None):
logger.exception(error)
return web.json_response({
"status": 503,
"message": "Service currently unavailable"
}, status=503)
| open_redirect | {
"code": [
" return web.HTTPFound(request.path.rstrip('/'))"
],
"line_no": [
64
]
} | {
"code": [
" return web.HTTPFound('/' + request.path.strip('/'))"
],
"line_no": [
64
]
} | import logging
from aiohttp import web
import os
VAR_0 = logging.getLogger(__package__)
def FUNC_0(VAR_1):
VAR_9 = FUNC_2({404: FUNC_4,
500: FUNC_5})
VAR_1.middlewares.append(VAR_9)
VAR_1.middlewares.append(FUNC_1)
VAR_2 = int(os.getenv("CACHE_MAX_AGE", "30"))
VAR_3 = ['/v1/', '/v1/__version__', '/v1/__heartbeat__', '/v1/__lbheartbeat__']
async def FUNC_1(VAR_1, VAR_4):
async def FUNC_6(VAR_6):
VAR_7 = await VAR_4(VAR_6)
VAR_10 = "public; max-age={}".format(VAR_2)
if VAR_6.path in VAR_3 or VAR_2 <= 0:
VAR_10 = "no-cache"
VAR_7.headers.setdefault("Cache-Control", VAR_10)
return VAR_7
return FUNC_6
def FUNC_2(VAR_5):
async def FUNC_7(VAR_1, VAR_4):
async def FUNC_6(VAR_6):
try:
VAR_7 = await VAR_4(VAR_6)
VAR_11 = VAR_5.get(VAR_7.status)
if VAR_11 is None:
return VAR_7
else:
return await VAR_11(VAR_6, VAR_7)
except web.HTTPException as ex:
VAR_11 = VAR_5.get(ex.status)
if VAR_11 is None:
return await FUNC_3(VAR_6, ex)
else:
return await VAR_11(VAR_6, ex)
except Exception as ex:
return await FUNC_5(VAR_6, VAR_8=ex)
return FUNC_6
return FUNC_7
async def FUNC_3(VAR_6, VAR_7):
return web.json_response({
"status": VAR_7.status,
"message": VAR_7.reason
}, status=VAR_7.status)
async def FUNC_4(VAR_6, VAR_7):
if 'json' not in VAR_7.headers['Content-Type']:
if VAR_6.path.endswith('/'):
return web.HTTPFound(VAR_6.path.rstrip('/'))
return web.json_response({
"status": 404,
"message": "Page '{}' not found".format(VAR_6.path)
}, status=404)
return VAR_7
async def FUNC_5(VAR_6, VAR_7=None, VAR_8=None):
VAR_0.exception(VAR_8)
return web.json_response({
"status": 503,
"message": "Service currently unavailable"
}, status=503)
| import logging
from aiohttp import web
import os
VAR_0 = logging.getLogger(__package__)
def FUNC_0(VAR_1):
VAR_9 = FUNC_2({404: FUNC_4,
500: FUNC_5})
VAR_1.middlewares.append(VAR_9)
VAR_1.middlewares.append(FUNC_1)
VAR_2 = int(os.getenv("CACHE_MAX_AGE", "30"))
VAR_3 = ['/v1/', '/v1/__version__', '/v1/__heartbeat__', '/v1/__lbheartbeat__']
async def FUNC_1(VAR_1, VAR_4):
async def FUNC_6(VAR_6):
VAR_7 = await VAR_4(VAR_6)
VAR_10 = "public; max-age={}".format(VAR_2)
if VAR_6.path in VAR_3 or VAR_2 <= 0:
VAR_10 = "no-cache"
VAR_7.headers.setdefault("Cache-Control", VAR_10)
return VAR_7
return FUNC_6
def FUNC_2(VAR_5):
async def FUNC_7(VAR_1, VAR_4):
async def FUNC_6(VAR_6):
try:
VAR_7 = await VAR_4(VAR_6)
VAR_11 = VAR_5.get(VAR_7.status)
if VAR_11 is None:
return VAR_7
else:
return await VAR_11(VAR_6, VAR_7)
except web.HTTPException as ex:
VAR_11 = VAR_5.get(ex.status)
if VAR_11 is None:
return await FUNC_3(VAR_6, ex)
else:
return await VAR_11(VAR_6, ex)
except Exception as ex:
return await FUNC_5(VAR_6, VAR_8=ex)
return FUNC_6
return FUNC_7
async def FUNC_3(VAR_6, VAR_7):
return web.json_response({
"status": VAR_7.status,
"message": VAR_7.reason
}, status=VAR_7.status)
async def FUNC_4(VAR_6, VAR_7):
if 'json' not in VAR_7.headers['Content-Type']:
if VAR_6.path.endswith('/'):
return web.HTTPFound('/' + VAR_6.path.strip('/'))
return web.json_response({
"status": 404,
"message": "Page '{}' not found".format(VAR_6.path)
}, status=404)
return VAR_7
async def FUNC_5(VAR_6, VAR_7=None, VAR_8=None):
VAR_0.exception(VAR_8)
return web.json_response({
"status": 503,
"message": "Service currently unavailable"
}, status=503)
| [
4,
6,
7,
13,
14,
15,
18,
19,
29,
30,
31,
52,
53,
59,
60,
70,
71,
78
] | [
4,
6,
7,
13,
14,
15,
18,
19,
29,
30,
31,
52,
53,
59,
60,
70,
71,
78
] |
1CWE-79
| from __future__ import absolute_import, division, unicode_literals
from six import text_type
import re
from ..constants import voidElements, booleanAttributes, spaceCharacters
from ..constants import rcdataElements, entities, xmlEntities
from .. import utils
from xml.sax.saxutils import escape
spaceCharacters = "".join(spaceCharacters)
quoteAttributeSpec = re.compile("[" + spaceCharacters + "\"'=<>`]")
try:
from codecs import register_error, xmlcharrefreplace_errors
except ImportError:
unicode_encode_errors = "strict"
else:
unicode_encode_errors = "htmlentityreplace"
encode_entity_map = {}
is_ucs4 = len("\U0010FFFF") == 1
for k, v in list(entities.items()):
# skip multi-character entities
if ((is_ucs4 and len(v) > 1) or
(not is_ucs4 and len(v) > 2)):
continue
if v != "&":
if len(v) == 2:
v = utils.surrogatePairToCodepoint(v)
else:
v = ord(v)
if v not in encode_entity_map or k.islower():
# prefer < over < and similarly for &, >, etc.
encode_entity_map[v] = k
def htmlentityreplace_errors(exc):
if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
res = []
codepoints = []
skip = False
for i, c in enumerate(exc.object[exc.start:exc.end]):
if skip:
skip = False
continue
index = i + exc.start
if utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]):
codepoint = utils.surrogatePairToCodepoint(exc.object[index:index + 2])
skip = True
else:
codepoint = ord(c)
codepoints.append(codepoint)
for cp in codepoints:
e = encode_entity_map.get(cp)
if e:
res.append("&")
res.append(e)
if not e.endswith(";"):
res.append(";")
else:
res.append("&#x%s;" % (hex(cp)[2:]))
return ("".join(res), exc.end)
else:
return xmlcharrefreplace_errors(exc)
register_error(unicode_encode_errors, htmlentityreplace_errors)
del register_error
class HTMLSerializer(object):
# attribute quoting options
quote_attr_values = False
quote_char = '"'
use_best_quote_char = True
# tag syntax options
omit_optional_tags = True
minimize_boolean_attributes = True
use_trailing_solidus = False
space_before_trailing_solidus = True
# escaping options
escape_lt_in_attrs = False
escape_rcdata = False
resolve_entities = True
# miscellaneous options
alphabetical_attributes = False
inject_meta_charset = True
strip_whitespace = False
sanitize = False
options = ("quote_attr_values", "quote_char", "use_best_quote_char",
"omit_optional_tags", "minimize_boolean_attributes",
"use_trailing_solidus", "space_before_trailing_solidus",
"escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
"alphabetical_attributes", "inject_meta_charset",
"strip_whitespace", "sanitize")
def __init__(self, **kwargs):
"""Initialize HTMLSerializer.
Keyword options (default given first unless specified) include:
inject_meta_charset=True|False
Whether it insert a meta element to define the character set of the
document.
quote_attr_values=True|False
Whether to quote attribute values that don't require quoting
per HTML5 parsing rules.
quote_char=u'"'|u"'"
Use given quote character for attribute quoting. Default is to
use double quote unless attribute value contains a double quote,
in which case single quotes are used instead.
escape_lt_in_attrs=False|True
Whether to escape < in attribute values.
escape_rcdata=False|True
Whether to escape characters that need to be escaped within normal
elements within rcdata elements such as style.
resolve_entities=True|False
Whether to resolve named character entities that appear in the
source tree. The XML predefined entities < > & " '
are unaffected by this setting.
strip_whitespace=False|True
Whether to remove semantically meaningless whitespace. (This
compresses all whitespace to a single space except within pre.)
minimize_boolean_attributes=True|False
Shortens boolean attributes to give just the attribute value,
for example <input disabled="disabled"> becomes <input disabled>.
use_trailing_solidus=False|True
Includes a close-tag slash at the end of the start tag of void
elements (empty elements whose end tag is forbidden). E.g. <hr/>.
space_before_trailing_solidus=True|False
Places a space immediately before the closing slash in a tag
using a trailing solidus. E.g. <hr />. Requires use_trailing_solidus.
sanitize=False|True
Strip all unsafe or unknown constructs from output.
See `html5lib user documentation`_
omit_optional_tags=True|False
Omit start/end tags that are optional.
alphabetical_attributes=False|True
Reorder attributes to be in alphabetical order.
.. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation
"""
if 'quote_char' in kwargs:
self.use_best_quote_char = False
for attr in self.options:
setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
self.errors = []
self.strict = False
def encode(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, unicode_encode_errors)
else:
return string
def encodeStrict(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, "strict")
else:
return string
def serialize(self, treewalker, encoding=None):
self.encoding = encoding
in_cdata = False
self.errors = []
if encoding and self.inject_meta_charset:
from ..filters.inject_meta_charset import Filter
treewalker = Filter(treewalker, encoding)
# WhitespaceFilter should be used before OptionalTagFilter
# for maximum efficiently of this latter filter
if self.strip_whitespace:
from ..filters.whitespace import Filter
treewalker = Filter(treewalker)
if self.sanitize:
from ..filters.sanitizer import Filter
treewalker = Filter(treewalker)
if self.omit_optional_tags:
from ..filters.optionaltags import Filter
treewalker = Filter(treewalker)
# Alphabetical attributes must be last, as other filters
# could add attributes and alter the order
if self.alphabetical_attributes:
from ..filters.alphabeticalattributes import Filter
treewalker = Filter(treewalker)
for token in treewalker:
type = token["type"]
if type == "Doctype":
doctype = "<!DOCTYPE %s" % token["name"]
if token["publicId"]:
doctype += ' PUBLIC "%s"' % token["publicId"]
elif token["systemId"]:
doctype += " SYSTEM"
if token["systemId"]:
if token["systemId"].find('"') >= 0:
if token["systemId"].find("'") >= 0:
self.serializeError("System identifer contains both single and double quote characters")
quote_char = "'"
else:
quote_char = '"'
doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)
doctype += ">"
yield self.encodeStrict(doctype)
elif type in ("Characters", "SpaceCharacters"):
if type == "SpaceCharacters" or in_cdata:
if in_cdata and token["data"].find("</") >= 0:
self.serializeError("Unexpected </ in CDATA")
yield self.encode(token["data"])
else:
yield self.encode(escape(token["data"]))
elif type in ("StartTag", "EmptyTag"):
name = token["name"]
yield self.encodeStrict("<%s" % name)
if name in rcdataElements and not self.escape_rcdata:
in_cdata = True
elif in_cdata:
self.serializeError("Unexpected child element of a CDATA element")
for (attr_namespace, attr_name), attr_value in token["data"].items():
# TODO: Add namespace support here
k = attr_name
v = attr_value
yield self.encodeStrict(' ')
yield self.encodeStrict(k)
if not self.minimize_boolean_attributes or \
(k not in booleanAttributes.get(name, tuple()) and
k not in booleanAttributes.get("", tuple())):
yield self.encodeStrict("=")
if self.quote_attr_values:
quote_attr = True
else:
quote_attr = len(v) == 0 or quoteAttributeSpec.search(v)
v = v.replace("&", "&")
if self.escape_lt_in_attrs:
v = v.replace("<", "<")
if quote_attr:
quote_char = self.quote_char
if self.use_best_quote_char:
if "'" in v and '"' not in v:
quote_char = '"'
elif '"' in v and "'" not in v:
quote_char = "'"
if quote_char == "'":
v = v.replace("'", "'")
else:
v = v.replace('"', """)
yield self.encodeStrict(quote_char)
yield self.encode(v)
yield self.encodeStrict(quote_char)
else:
yield self.encode(v)
if name in voidElements and self.use_trailing_solidus:
if self.space_before_trailing_solidus:
yield self.encodeStrict(" /")
else:
yield self.encodeStrict("/")
yield self.encode(">")
elif type == "EndTag":
name = token["name"]
if name in rcdataElements:
in_cdata = False
elif in_cdata:
self.serializeError("Unexpected child element of a CDATA element")
yield self.encodeStrict("</%s>" % name)
elif type == "Comment":
data = token["data"]
if data.find("--") >= 0:
self.serializeError("Comment contains --")
yield self.encodeStrict("<!--%s-->" % token["data"])
elif type == "Entity":
name = token["name"]
key = name + ";"
if key not in entities:
self.serializeError("Entity %s not recognized" % name)
if self.resolve_entities and key not in xmlEntities:
data = entities[key]
else:
data = "&%s;" % name
yield self.encodeStrict(data)
else:
self.serializeError(token["data"])
def render(self, treewalker, encoding=None):
if encoding:
return b"".join(list(self.serialize(treewalker, encoding)))
else:
return "".join(list(self.serialize(treewalker)))
def serializeError(self, data="XXX ERROR MESSAGE NEEDED"):
# XXX The idea is to make data mandatory.
self.errors.append(data)
if self.strict:
raise SerializeError
def SerializeError(Exception):
"""Error in serialized tree"""
pass
| from __future__ import absolute_import, division, unicode_literals
from six import text_type
import re
from ..constants import voidElements, booleanAttributes, spaceCharacters
from ..constants import rcdataElements, entities, xmlEntities
from .. import utils
from xml.sax.saxutils import escape
spaceCharacters = "".join(spaceCharacters)
quoteAttributeSpecChars = spaceCharacters + "\"'=<>`"
quoteAttributeSpec = re.compile("[" + quoteAttributeSpecChars + "]")
quoteAttributeLegacy = re.compile("[" + quoteAttributeSpecChars +
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n"
"\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15"
"\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
"\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000"
"\u2001\u2002\u2003\u2004\u2005\u2006\u2007"
"\u2008\u2009\u200a\u2028\u2029\u202f\u205f"
"\u3000]")
try:
from codecs import register_error, xmlcharrefreplace_errors
except ImportError:
unicode_encode_errors = "strict"
else:
unicode_encode_errors = "htmlentityreplace"
encode_entity_map = {}
is_ucs4 = len("\U0010FFFF") == 1
for k, v in list(entities.items()):
# skip multi-character entities
if ((is_ucs4 and len(v) > 1) or
(not is_ucs4 and len(v) > 2)):
continue
if v != "&":
if len(v) == 2:
v = utils.surrogatePairToCodepoint(v)
else:
v = ord(v)
if v not in encode_entity_map or k.islower():
# prefer < over < and similarly for &, >, etc.
encode_entity_map[v] = k
def htmlentityreplace_errors(exc):
if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
res = []
codepoints = []
skip = False
for i, c in enumerate(exc.object[exc.start:exc.end]):
if skip:
skip = False
continue
index = i + exc.start
if utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]):
codepoint = utils.surrogatePairToCodepoint(exc.object[index:index + 2])
skip = True
else:
codepoint = ord(c)
codepoints.append(codepoint)
for cp in codepoints:
e = encode_entity_map.get(cp)
if e:
res.append("&")
res.append(e)
if not e.endswith(";"):
res.append(";")
else:
res.append("&#x%s;" % (hex(cp)[2:]))
return ("".join(res), exc.end)
else:
return xmlcharrefreplace_errors(exc)
register_error(unicode_encode_errors, htmlentityreplace_errors)
del register_error
class HTMLSerializer(object):
# attribute quoting options
quote_attr_values = "legacy" # be secure by default
quote_char = '"'
use_best_quote_char = True
# tag syntax options
omit_optional_tags = True
minimize_boolean_attributes = True
use_trailing_solidus = False
space_before_trailing_solidus = True
# escaping options
escape_lt_in_attrs = False
escape_rcdata = False
resolve_entities = True
# miscellaneous options
alphabetical_attributes = False
inject_meta_charset = True
strip_whitespace = False
sanitize = False
options = ("quote_attr_values", "quote_char", "use_best_quote_char",
"omit_optional_tags", "minimize_boolean_attributes",
"use_trailing_solidus", "space_before_trailing_solidus",
"escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
"alphabetical_attributes", "inject_meta_charset",
"strip_whitespace", "sanitize")
def __init__(self, **kwargs):
"""Initialize HTMLSerializer.
Keyword options (default given first unless specified) include:
inject_meta_charset=True|False
Whether it insert a meta element to define the character set of the
document.
quote_attr_values="legacy"|"spec"|"always"
Whether to quote attribute values that don't require quoting
per legacy browser behaviour, when required by the standard, or always.
quote_char=u'"'|u"'"
Use given quote character for attribute quoting. Default is to
use double quote unless attribute value contains a double quote,
in which case single quotes are used instead.
escape_lt_in_attrs=False|True
Whether to escape < in attribute values.
escape_rcdata=False|True
Whether to escape characters that need to be escaped within normal
elements within rcdata elements such as style.
resolve_entities=True|False
Whether to resolve named character entities that appear in the
source tree. The XML predefined entities < > & " '
are unaffected by this setting.
strip_whitespace=False|True
Whether to remove semantically meaningless whitespace. (This
compresses all whitespace to a single space except within pre.)
minimize_boolean_attributes=True|False
Shortens boolean attributes to give just the attribute value,
for example <input disabled="disabled"> becomes <input disabled>.
use_trailing_solidus=False|True
Includes a close-tag slash at the end of the start tag of void
elements (empty elements whose end tag is forbidden). E.g. <hr/>.
space_before_trailing_solidus=True|False
Places a space immediately before the closing slash in a tag
using a trailing solidus. E.g. <hr />. Requires use_trailing_solidus.
sanitize=False|True
Strip all unsafe or unknown constructs from output.
See `html5lib user documentation`_
omit_optional_tags=True|False
Omit start/end tags that are optional.
alphabetical_attributes=False|True
Reorder attributes to be in alphabetical order.
.. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation
"""
if 'quote_char' in kwargs:
self.use_best_quote_char = False
for attr in self.options:
setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
self.errors = []
self.strict = False
def encode(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, unicode_encode_errors)
else:
return string
def encodeStrict(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, "strict")
else:
return string
def serialize(self, treewalker, encoding=None):
self.encoding = encoding
in_cdata = False
self.errors = []
if encoding and self.inject_meta_charset:
from ..filters.inject_meta_charset import Filter
treewalker = Filter(treewalker, encoding)
# WhitespaceFilter should be used before OptionalTagFilter
# for maximum efficiently of this latter filter
if self.strip_whitespace:
from ..filters.whitespace import Filter
treewalker = Filter(treewalker)
if self.sanitize:
from ..filters.sanitizer import Filter
treewalker = Filter(treewalker)
if self.omit_optional_tags:
from ..filters.optionaltags import Filter
treewalker = Filter(treewalker)
# Alphabetical attributes must be last, as other filters
# could add attributes and alter the order
if self.alphabetical_attributes:
from ..filters.alphabeticalattributes import Filter
treewalker = Filter(treewalker)
for token in treewalker:
type = token["type"]
if type == "Doctype":
doctype = "<!DOCTYPE %s" % token["name"]
if token["publicId"]:
doctype += ' PUBLIC "%s"' % token["publicId"]
elif token["systemId"]:
doctype += " SYSTEM"
if token["systemId"]:
if token["systemId"].find('"') >= 0:
if token["systemId"].find("'") >= 0:
self.serializeError("System identifer contains both single and double quote characters")
quote_char = "'"
else:
quote_char = '"'
doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)
doctype += ">"
yield self.encodeStrict(doctype)
elif type in ("Characters", "SpaceCharacters"):
if type == "SpaceCharacters" or in_cdata:
if in_cdata and token["data"].find("</") >= 0:
self.serializeError("Unexpected </ in CDATA")
yield self.encode(token["data"])
else:
yield self.encode(escape(token["data"]))
elif type in ("StartTag", "EmptyTag"):
name = token["name"]
yield self.encodeStrict("<%s" % name)
if name in rcdataElements and not self.escape_rcdata:
in_cdata = True
elif in_cdata:
self.serializeError("Unexpected child element of a CDATA element")
for (attr_namespace, attr_name), attr_value in token["data"].items():
# TODO: Add namespace support here
k = attr_name
v = attr_value
yield self.encodeStrict(' ')
yield self.encodeStrict(k)
if not self.minimize_boolean_attributes or \
(k not in booleanAttributes.get(name, tuple()) and
k not in booleanAttributes.get("", tuple())):
yield self.encodeStrict("=")
if self.quote_attr_values == "always" or len(v) == 0:
quote_attr = True
elif self.quote_attr_values == "spec":
quote_attr = quoteAttributeSpec.search(v) is not None
elif self.quote_attr_values == "legacy":
quote_attr = quoteAttributeLegacy.search(v) is not None
else:
raise ValueError("quote_attr_values must be one of: "
"'always', 'spec', or 'legacy'")
v = v.replace("&", "&")
if self.escape_lt_in_attrs:
v = v.replace("<", "<")
if quote_attr:
quote_char = self.quote_char
if self.use_best_quote_char:
if "'" in v and '"' not in v:
quote_char = '"'
elif '"' in v and "'" not in v:
quote_char = "'"
if quote_char == "'":
v = v.replace("'", "'")
else:
v = v.replace('"', """)
yield self.encodeStrict(quote_char)
yield self.encode(v)
yield self.encodeStrict(quote_char)
else:
yield self.encode(v)
if name in voidElements and self.use_trailing_solidus:
if self.space_before_trailing_solidus:
yield self.encodeStrict(" /")
else:
yield self.encodeStrict("/")
yield self.encode(">")
elif type == "EndTag":
name = token["name"]
if name in rcdataElements:
in_cdata = False
elif in_cdata:
self.serializeError("Unexpected child element of a CDATA element")
yield self.encodeStrict("</%s>" % name)
elif type == "Comment":
data = token["data"]
if data.find("--") >= 0:
self.serializeError("Comment contains --")
yield self.encodeStrict("<!--%s-->" % token["data"])
elif type == "Entity":
name = token["name"]
key = name + ";"
if key not in entities:
self.serializeError("Entity %s not recognized" % name)
if self.resolve_entities and key not in xmlEntities:
data = entities[key]
else:
data = "&%s;" % name
yield self.encodeStrict(data)
else:
self.serializeError(token["data"])
def render(self, treewalker, encoding=None):
if encoding:
return b"".join(list(self.serialize(treewalker, encoding)))
else:
return "".join(list(self.serialize(treewalker)))
def serializeError(self, data="XXX ERROR MESSAGE NEEDED"):
# XXX The idea is to make data mandatory.
self.errors.append(data)
if self.strict:
raise SerializeError
def SerializeError(Exception):
"""Error in serialized tree"""
pass
| xss | {
"code": [
"quoteAttributeSpec = re.compile(\"[\" + spaceCharacters + \"\\\"'=<>`]\")",
" quote_attr_values = False",
" per HTML5 parsing rules.",
" if self.quote_attr_values:",
" quote_attr = len(v) == 0 or quoteAttributeSpec.search(v)"
],
"line_no": [
13,
75,
113,
242,
245
]
} | {
"code": [
"quoteAttributeSpecChars = spaceCharacters + \"\\\"'=<>`\"",
"quoteAttributeLegacy = re.compile(\"[\" + quoteAttributeSpecChars +",
" \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\"",
" \"\\x0b\\x0c\\r\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\"",
" \"\\x16\\x17\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\"",
" \"\\x20\\x2f\\x60\\xa0\\u1680\\u180e\\u180f\\u2000\"",
" \"\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\"",
" \"\\u3000]\")",
" per legacy browser behaviour, when required by the standard, or always.",
" if self.quote_attr_values == \"always\" or len(v) == 0:",
" elif self.quote_attr_values == \"spec\":",
" quote_attr = quoteAttributeSpec.search(v) is not None",
" elif self.quote_attr_values == \"legacy\":",
" quote_attr = quoteAttributeLegacy.search(v) is not None",
" raise ValueError(\"quote_attr_values must be one of: \"",
" \"'always', 'spec', or 'legacy'\")"
],
"line_no": [
13,
15,
16,
17,
18,
19,
20,
22,
122,
251,
253,
254,
255,
256,
258,
259
]
} | from __future__ import absolute_import, division, unicode_literals
from six import text_type
import re
from ..constants import .voidElements, booleanAttributes, VAR_0
from ..constants import rcdataElements, entities, xmlEntities
from .. import utils
from xml.sax.saxutils import .escape
VAR_0 = "".join(VAR_0)
VAR_1 = re.compile("[" + VAR_0 + "\"'=<>`]")
try:
from codecs import register_error, xmlcharrefreplace_errors
except ImportError:
VAR_3 = "strict"
else:
VAR_3 = "htmlentityreplace"
VAR_4 = {}
VAR_5 = len("\U0010FFFF") == 1
for VAR_38, VAR_32 in list(entities.items()):
if ((VAR_5 and len(VAR_32) > 1) or
(not VAR_5 and len(VAR_32) > 2)):
continue
if VAR_32 != "&":
if len(VAR_32) == 2:
VAR_32 = utils.surrogatePairToCodepoint(VAR_32)
else:
VAR_32 = ord(VAR_32)
if VAR_32 not in VAR_4 or VAR_38.islower():
VAR_4[VAR_32] = VAR_38
def FUNC_1(VAR_6):
if isinstance(VAR_6, (UnicodeEncodeError, UnicodeTranslateError)):
VAR_28 = []
VAR_29 = []
VAR_30 = False
for i, c in enumerate(VAR_6.object[VAR_6.start:VAR_6.end]):
if VAR_30:
skip = False
continue
VAR_33 = i + VAR_6.start
if utils.isSurrogatePair(VAR_6.object[VAR_33:min([VAR_6.end, VAR_33 + 2])]):
VAR_36 = utils.surrogatePairToCodepoint(VAR_6.object[VAR_33:index + 2])
VAR_30 = True
else:
VAR_36 = ord(c)
VAR_29.append(VAR_36)
for cp in VAR_29:
VAR_34 = VAR_4.get(cp)
if VAR_34:
VAR_28.append("&")
VAR_28.append(VAR_34)
if not VAR_34.endswith(";"):
VAR_28.append(";")
else:
VAR_28.append("&#x%s;" % (hex(cp)[2:]))
return ("".join(VAR_28), VAR_6.end)
else:
return xmlcharrefreplace_errors(VAR_6)
register_error(VAR_3, FUNC_1)
del register_error
class CLASS_0(object):
VAR_7 = False
VAR_8 = '"'
VAR_9 = True
VAR_10 = True
VAR_11 = True
VAR_12 = False
VAR_13 = True
VAR_14 = False
VAR_15 = False
VAR_16 = True
VAR_17 = False
VAR_18 = True
VAR_19 = False
VAR_20 = False
VAR_21 = ("quote_attr_values", "quote_char", "use_best_quote_char",
"omit_optional_tags", "minimize_boolean_attributes",
"use_trailing_solidus", "space_before_trailing_solidus",
"escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
"alphabetical_attributes", "inject_meta_charset",
"strip_whitespace", "sanitize")
def __init__(self, **VAR_22):
if 'quote_char' in VAR_22:
self.use_best_quote_char = False
for attr in self.options:
setattr(self, attr, VAR_22.get(attr, getattr(self, attr)))
self.errors = []
self.strict = False
def FUNC_2(self, VAR_23):
assert(isinstance(VAR_23, text_type))
if self.encoding:
return VAR_23.encode(self.encoding, VAR_3)
else:
return VAR_23
def FUNC_3(self, VAR_23):
assert(isinstance(VAR_23, text_type))
if self.encoding:
return VAR_23.encode(self.encoding, "strict")
else:
return VAR_23
def FUNC_4(self, VAR_24, VAR_25=None):
self.encoding = VAR_25
VAR_27 = False
self.errors = []
if VAR_25 and self.inject_meta_charset:
from ..filters.inject_meta_charset import Filter
VAR_24 = Filter(VAR_24, VAR_25)
if self.strip_whitespace:
from ..filters.whitespace import Filter
VAR_24 = Filter(VAR_24)
if self.sanitize:
from ..filters.sanitizer import Filter
VAR_24 = Filter(VAR_24)
if self.omit_optional_tags:
from ..filters.optionaltags import Filter
VAR_24 = Filter(VAR_24)
if self.alphabetical_attributes:
from ..filters.alphabeticalattributes import Filter
VAR_24 = Filter(VAR_24)
for token in VAR_24:
VAR_31 = token["type"]
if VAR_31 == "Doctype":
VAR_35 = "<!DOCTYPE %s" % token["name"]
if token["publicId"]:
VAR_35 += ' PUBLIC "%s"' % token["publicId"]
elif token["systemId"]:
VAR_35 += " SYSTEM"
if token["systemId"]:
if token["systemId"].find('"') >= 0:
if token["systemId"].find("'") >= 0:
self.serializeError("System identifer contains both single and double quote characters")
VAR_8 = "'"
else:
VAR_8 = '"'
VAR_35 += " %s%s%s" % (VAR_8, token["systemId"], VAR_8)
VAR_35 += ">"
yield self.encodeStrict(VAR_35)
elif VAR_31 in ("Characters", "SpaceCharacters"):
if VAR_31 == "SpaceCharacters" or VAR_27:
if VAR_27 and token["data"].find("</") >= 0:
self.serializeError("Unexpected </ in CDATA")
yield self.encode(token["data"])
else:
yield self.encode(escape(token["data"]))
elif VAR_31 in ("StartTag", "EmptyTag"):
VAR_37 = token["name"]
yield self.encodeStrict("<%s" % VAR_37)
if VAR_37 in rcdataElements and not self.escape_rcdata:
VAR_27 = True
elif VAR_27:
self.serializeError("Unexpected child element of a CDATA element")
for (attr_namespace, attr_name), attr_value in token["data"].items():
VAR_38 = attr_name
VAR_32 = attr_value
yield self.encodeStrict(' ')
yield self.encodeStrict(VAR_38)
if not self.minimize_boolean_attributes or \
(VAR_38 not in booleanAttributes.get(VAR_37, tuple()) and
VAR_38 not in booleanAttributes.get("", tuple())):
yield self.encodeStrict("=")
if self.quote_attr_values:
VAR_39 = True
else:
VAR_39 = len(VAR_32) == 0 or VAR_1.search(VAR_32)
VAR_32 = VAR_32.replace("&", "&")
if self.escape_lt_in_attrs:
VAR_32 = VAR_32.replace("<", "<")
if VAR_39:
VAR_8 = self.quote_char
if self.use_best_quote_char:
if "'" in VAR_32 and '"' not in VAR_32:
VAR_8 = '"'
elif '"' in VAR_32 and "'" not in VAR_32:
VAR_8 = "'"
if VAR_8 == "'":
VAR_32 = VAR_32.replace("'", "'")
else:
VAR_32 = VAR_32.replace('"', """)
yield self.encodeStrict(VAR_8)
yield self.encode(VAR_32)
yield self.encodeStrict(VAR_8)
else:
yield self.encode(VAR_32)
if VAR_37 in voidElements and self.use_trailing_solidus:
if self.space_before_trailing_solidus:
yield self.encodeStrict(" /")
else:
yield self.encodeStrict("/")
yield self.encode(">")
elif VAR_31 == "EndTag":
VAR_37 = token["name"]
if VAR_37 in rcdataElements:
VAR_27 = False
elif VAR_27:
self.serializeError("Unexpected child element of a CDATA element")
yield self.encodeStrict("</%s>" % VAR_37)
elif VAR_31 == "Comment":
VAR_26 = token["data"]
if VAR_26.find("--") >= 0:
self.serializeError("Comment contains --")
yield self.encodeStrict("<!--%s-->" % token["data"])
elif VAR_31 == "Entity":
VAR_37 = token["name"]
VAR_40 = VAR_37 + ";"
if VAR_40 not in entities:
self.serializeError("Entity %s not recognized" % VAR_37)
if self.resolve_entities and VAR_40 not in xmlEntities:
VAR_26 = entities[VAR_40]
else:
VAR_26 = "&%s;" % VAR_37
yield self.encodeStrict(VAR_26)
else:
self.serializeError(token["data"])
def FUNC_5(self, VAR_24, VAR_25=None):
if VAR_25:
return b"".join(list(self.serialize(VAR_24, VAR_25)))
else:
return "".join(list(self.serialize(VAR_24)))
def FUNC_6(self, VAR_26="XXX ERROR MESSAGE NEEDED"):
self.errors.append(VAR_26)
if self.strict:
raise FUNC_0
def FUNC_0(VAR_2):
pass
| from __future__ import absolute_import, division, unicode_literals
from six import text_type
import re
from ..constants import .voidElements, booleanAttributes, VAR_0
from ..constants import rcdataElements, entities, xmlEntities
from .. import utils
from xml.sax.saxutils import .escape
VAR_0 = "".join(VAR_0)
VAR_1 = VAR_0 + "\"'=<>`"
VAR_2 = re.compile("[" + VAR_1 + "]")
VAR_3 = re.compile("[" + VAR_1 +
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n"
"\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15"
"\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
"\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000"
"\u2001\u2002\u2003\u2004\u2005\u2006\u2007"
"\u2008\u2009\u200a\u2028\u2029\u202f\u205f"
"\u3000]")
try:
from codecs import register_error, xmlcharrefreplace_errors
except ImportError:
VAR_5 = "strict"
else:
VAR_5 = "htmlentityreplace"
VAR_6 = {}
VAR_7 = len("\U0010FFFF") == 1
for VAR_40, VAR_34 in list(entities.items()):
if ((VAR_7 and len(VAR_34) > 1) or
(not VAR_7 and len(VAR_34) > 2)):
continue
if VAR_34 != "&":
if len(VAR_34) == 2:
VAR_34 = utils.surrogatePairToCodepoint(VAR_34)
else:
VAR_34 = ord(VAR_34)
if VAR_34 not in VAR_6 or VAR_40.islower():
VAR_6[VAR_34] = VAR_40
def FUNC_1(VAR_8):
if isinstance(VAR_8, (UnicodeEncodeError, UnicodeTranslateError)):
VAR_30 = []
VAR_31 = []
VAR_32 = False
for i, c in enumerate(VAR_8.object[VAR_8.start:VAR_8.end]):
if VAR_32:
skip = False
continue
VAR_35 = i + VAR_8.start
if utils.isSurrogatePair(VAR_8.object[VAR_35:min([VAR_8.end, VAR_35 + 2])]):
VAR_38 = utils.surrogatePairToCodepoint(VAR_8.object[VAR_35:index + 2])
VAR_32 = True
else:
VAR_38 = ord(c)
VAR_31.append(VAR_38)
for cp in VAR_31:
VAR_36 = VAR_6.get(cp)
if VAR_36:
VAR_30.append("&")
VAR_30.append(VAR_36)
if not VAR_36.endswith(";"):
VAR_30.append(";")
else:
VAR_30.append("&#x%s;" % (hex(cp)[2:]))
return ("".join(VAR_30), VAR_8.end)
else:
return xmlcharrefreplace_errors(VAR_8)
register_error(VAR_5, FUNC_1)
del register_error
class CLASS_0(object):
VAR_9 = "legacy" # be secure by default
VAR_10 = '"'
VAR_11 = True
VAR_12 = True
VAR_13 = True
VAR_14 = False
VAR_15 = True
VAR_16 = False
VAR_17 = False
VAR_18 = True
VAR_19 = False
VAR_20 = True
VAR_21 = False
VAR_22 = False
VAR_23 = ("quote_attr_values", "quote_char", "use_best_quote_char",
"omit_optional_tags", "minimize_boolean_attributes",
"use_trailing_solidus", "space_before_trailing_solidus",
"escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
"alphabetical_attributes", "inject_meta_charset",
"strip_whitespace", "sanitize")
def __init__(self, **VAR_24):
if 'quote_char' in VAR_24:
self.use_best_quote_char = False
for attr in self.options:
setattr(self, attr, VAR_24.get(attr, getattr(self, attr)))
self.errors = []
self.strict = False
def FUNC_2(self, VAR_25):
assert(isinstance(VAR_25, text_type))
if self.encoding:
return VAR_25.encode(self.encoding, VAR_5)
else:
return VAR_25
def FUNC_3(self, VAR_25):
assert(isinstance(VAR_25, text_type))
if self.encoding:
return VAR_25.encode(self.encoding, "strict")
else:
return VAR_25
def FUNC_4(self, VAR_26, VAR_27=None):
self.encoding = VAR_27
VAR_29 = False
self.errors = []
if VAR_27 and self.inject_meta_charset:
from ..filters.inject_meta_charset import Filter
VAR_26 = Filter(VAR_26, VAR_27)
if self.strip_whitespace:
from ..filters.whitespace import Filter
VAR_26 = Filter(VAR_26)
if self.sanitize:
from ..filters.sanitizer import Filter
VAR_26 = Filter(VAR_26)
if self.omit_optional_tags:
from ..filters.optionaltags import Filter
VAR_26 = Filter(VAR_26)
if self.alphabetical_attributes:
from ..filters.alphabeticalattributes import Filter
VAR_26 = Filter(VAR_26)
for token in VAR_26:
VAR_33 = token["type"]
if VAR_33 == "Doctype":
VAR_37 = "<!DOCTYPE %s" % token["name"]
if token["publicId"]:
VAR_37 += ' PUBLIC "%s"' % token["publicId"]
elif token["systemId"]:
VAR_37 += " SYSTEM"
if token["systemId"]:
if token["systemId"].find('"') >= 0:
if token["systemId"].find("'") >= 0:
self.serializeError("System identifer contains both single and double quote characters")
VAR_10 = "'"
else:
VAR_10 = '"'
VAR_37 += " %s%s%s" % (VAR_10, token["systemId"], VAR_10)
VAR_37 += ">"
yield self.encodeStrict(VAR_37)
elif VAR_33 in ("Characters", "SpaceCharacters"):
if VAR_33 == "SpaceCharacters" or VAR_29:
if VAR_29 and token["data"].find("</") >= 0:
self.serializeError("Unexpected </ in CDATA")
yield self.encode(token["data"])
else:
yield self.encode(escape(token["data"]))
elif VAR_33 in ("StartTag", "EmptyTag"):
VAR_39 = token["name"]
yield self.encodeStrict("<%s" % VAR_39)
if VAR_39 in rcdataElements and not self.escape_rcdata:
VAR_29 = True
elif VAR_29:
self.serializeError("Unexpected child element of a CDATA element")
for (attr_namespace, attr_name), attr_value in token["data"].items():
VAR_40 = attr_name
VAR_34 = attr_value
yield self.encodeStrict(' ')
yield self.encodeStrict(VAR_40)
if not self.minimize_boolean_attributes or \
(VAR_40 not in booleanAttributes.get(VAR_39, tuple()) and
VAR_40 not in booleanAttributes.get("", tuple())):
yield self.encodeStrict("=")
if self.quote_attr_values == "always" or len(VAR_34) == 0:
VAR_41 = True
elif self.quote_attr_values == "spec":
VAR_41 = VAR_2.search(VAR_34) is not None
elif self.quote_attr_values == "legacy":
VAR_41 = VAR_3.search(VAR_34) is not None
else:
raise ValueError("quote_attr_values must be one of: "
"'always', 'spec', or 'legacy'")
VAR_34 = v.replace("&", "&")
if self.escape_lt_in_attrs:
VAR_34 = v.replace("<", "<")
if VAR_41:
VAR_10 = self.quote_char
if self.use_best_quote_char:
if "'" in VAR_34 and '"' not in VAR_34:
VAR_10 = '"'
elif '"' in VAR_34 and "'" not in VAR_34:
VAR_10 = "'"
if VAR_10 == "'":
VAR_34 = v.replace("'", "'")
else:
VAR_34 = v.replace('"', """)
yield self.encodeStrict(VAR_10)
yield self.encode(VAR_34)
yield self.encodeStrict(VAR_10)
else:
yield self.encode(VAR_34)
if VAR_39 in voidElements and self.use_trailing_solidus:
if self.space_before_trailing_solidus:
yield self.encodeStrict(" /")
else:
yield self.encodeStrict("/")
yield self.encode(">")
elif VAR_33 == "EndTag":
VAR_39 = token["name"]
if VAR_39 in rcdataElements:
VAR_29 = False
elif VAR_29:
self.serializeError("Unexpected child element of a CDATA element")
yield self.encodeStrict("</%s>" % VAR_39)
elif VAR_33 == "Comment":
VAR_28 = token["data"]
if VAR_28.find("--") >= 0:
self.serializeError("Comment contains --")
yield self.encodeStrict("<!--%s-->" % token["data"])
elif VAR_33 == "Entity":
VAR_39 = token["name"]
VAR_42 = VAR_39 + ";"
if VAR_42 not in entities:
self.serializeError("Entity %s not recognized" % VAR_39)
if self.resolve_entities and VAR_42 not in xmlEntities:
VAR_28 = entities[VAR_42]
else:
VAR_28 = "&%s;" % VAR_39
yield self.encodeStrict(VAR_28)
else:
self.serializeError(token["data"])
def FUNC_5(self, VAR_26, VAR_27=None):
if VAR_27:
return b"".join(list(self.serialize(VAR_26, VAR_27)))
else:
return "".join(list(self.serialize(VAR_26)))
def FUNC_6(self, VAR_28="XXX ERROR MESSAGE NEEDED"):
self.errors.append(VAR_28)
if self.strict:
raise FUNC_0
def FUNC_0(VAR_4):
pass
| [
3,
5,
10,
12,
14,
21,
25,
35,
37,
66,
68,
70,
71,
73,
74,
78,
79,
84,
85,
89,
90,
95,
102,
105,
107,
146,
155,
162,
169,
174,
178,
179,
189,
190,
194,
199,
212,
215,
223,
232,
236,
271,
279,
285,
296,
299,
305,
307,
311,
312,
316,
314,
104,
105,
106,
107,
108,
109,
110,
111,
112,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148
] | [
3,
5,
10,
12,
23,
30,
34,
44,
46,
75,
77,
79,
80,
82,
83,
87,
88,
93,
94,
98,
99,
104,
111,
114,
116,
155,
164,
171,
178,
183,
187,
188,
198,
199,
203,
208,
221,
224,
232,
241,
245,
285,
293,
299,
310,
313,
319,
321,
325,
326,
330,
328,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157
] |
1CWE-79
| #
# djblets_js.py -- JavaScript-related template tags
#
# Copyright (c) 2007-2009 Christian Hammond
# Copyright (c) 2007-2009 David Trowbridge
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
import json
from django import template
from django.core.serializers import serialize
from django.db.models.query import QuerySet
from django.utils import six
from django.utils.safestring import mark_safe
from djblets.util.serializers import DjbletsJSONEncoder
register = template.Library()
@register.simple_tag
def form_dialog_fields(form):
"""
Translates a Django Form object into a JavaScript list of fields.
The resulting list of fields can be used to represent the form
dynamically.
"""
s = ''
for field in form:
s += "{ name: '%s', " % field.name
if field.is_hidden:
s += "hidden: true, "
else:
s += "label: '%s', " % field.label_tag(field.label + ":")
if field.field.required:
s += "required: true, "
if field.field.help_text:
s += "help_text: '%s', " % field.field.help_text
s += "widget: '%s' }," % six.text_type(field)
# Chop off the last ','
return "[ %s ]" % s[:-1]
@register.filter
def json_dumps(value, indent=None):
if isinstance(value, QuerySet):
result = serialize('json', value, indent=indent)
else:
result = json.dumps(value, indent=indent, cls=DjbletsJSONEncoder)
return mark_safe(result)
@register.filter
def json_dumps_items(d, append=''):
"""Dumps a list of keys/values from a dictionary, without braces.
This works very much like ``json_dumps``, but doesn't output the
surrounding braces. This allows it to be used within a JavaScript
object definition alongside other custom keys.
If the dictionary is not empty, and ``append`` is passed, it will be
appended onto the results. This is most useful when you want to append
a comma after all the dictionary items, in order to provide further
keys in the template.
"""
if not d:
return ''
return mark_safe(json_dumps(d)[1:-1] + append)
| #
# djblets_js.py -- JavaScript-related template tags
#
# Copyright (c) 2007-2009 Christian Hammond
# Copyright (c) 2007-2009 David Trowbridge
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
import json
from django import template
from django.core.serializers import serialize
from django.db.models.query import QuerySet
from django.utils import six
from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
from djblets.util.serializers import DjbletsJSONEncoder
register = template.Library()
_safe_js_escapes = {
ord('&'): '\\u0026',
ord('<'): '\\u003C',
ord('>'): '\\u003E',
}
@register.simple_tag
def form_dialog_fields(form):
"""
Translates a Django Form object into a JavaScript list of fields.
The resulting list of fields can be used to represent the form
dynamically.
"""
s = ''
for field in form:
s += "{ name: '%s', " % field.name
if field.is_hidden:
s += "hidden: true, "
else:
s += "label: '%s', " % field.label_tag(field.label + ":")
if field.field.required:
s += "required: true, "
if field.field.help_text:
s += "help_text: '%s', " % field.field.help_text
s += "widget: '%s' }," % six.text_type(field)
# Chop off the last ','
return "[ %s ]" % s[:-1]
@register.filter
def json_dumps(value, indent=None):
if isinstance(value, QuerySet):
result = serialize('json', value, indent=indent)
else:
result = json.dumps(value, indent=indent, cls=DjbletsJSONEncoder)
return mark_safe(force_text(result).translate(_safe_js_escapes))
@register.filter
def json_dumps_items(d, append=''):
"""Dumps a list of keys/values from a dictionary, without braces.
This works very much like ``json_dumps``, but doesn't output the
surrounding braces. This allows it to be used within a JavaScript
object definition alongside other custom keys.
If the dictionary is not empty, and ``append`` is passed, it will be
appended onto the results. This is most useful when you want to append
a comma after all the dictionary items, in order to provide further
keys in the template.
"""
if not d:
return ''
return mark_safe(json_dumps(d)[1:-1] + append)
| xss | {
"code": [
" return mark_safe(result)"
],
"line_no": [
78
]
} | {
"code": [
"from django.utils.encoding import force_text",
"_safe_js_escapes = {",
" ord('&'): '\\\\u0026',",
" ord('>'): '\\\\u003E',",
" return mark_safe(force_text(result).translate(_safe_js_escapes))"
],
"line_no": [
34,
42,
43,
45,
85
]
} |
from __future__ import unicode_literals
import json
from django import template
from django.core.serializers import .serialize
from django.db.models.query import QuerySet
from django.utils import .six
from django.utils.safestring import mark_safe
from djblets.util.serializers import DjbletsJSONEncoder
VAR_0 = template.Library()
@VAR_0.simple_tag
def FUNC_0(VAR_1):
VAR_6 = ''
for field in VAR_1:
VAR_6 += "{ name: '%s', " % field.name
if field.is_hidden:
VAR_6 += "hidden: true, "
else:
VAR_6 += "label: '%s', " % field.label_tag(field.label + ":")
if field.field.required:
VAR_6 += "required: true, "
if field.field.help_text:
VAR_6 += "help_text: '%s', " % field.field.help_text
VAR_6 += "widget: '%s' }," % six.text_type(field)
return "[ %VAR_6 ]" % VAR_6[:-1]
@VAR_0.filter
def FUNC_1(VAR_2, VAR_3=None):
if isinstance(VAR_2, QuerySet):
VAR_7 = serialize('json', VAR_2, VAR_3=indent)
else:
VAR_7 = json.dumps(VAR_2, VAR_3=indent, cls=DjbletsJSONEncoder)
return mark_safe(VAR_7)
@VAR_0.filter
def FUNC_2(VAR_4, VAR_5=''):
if not VAR_4:
return ''
return mark_safe(FUNC_1(VAR_4)[1:-1] + VAR_5)
|
from __future__ import unicode_literals
import json
from django import template
from django.core.serializers import .serialize
from django.db.models.query import QuerySet
from django.utils import .six
from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
from djblets.util.serializers import DjbletsJSONEncoder
VAR_0 = template.Library()
VAR_1 = {
ord('&'): '\\u0026',
ord('<'): '\\u003C',
ord('>'): '\\u003E',
}
@VAR_0.simple_tag
def FUNC_0(VAR_2):
VAR_7 = ''
for field in VAR_2:
VAR_7 += "{ name: '%s', " % field.name
if field.is_hidden:
VAR_7 += "hidden: true, "
else:
VAR_7 += "label: '%s', " % field.label_tag(field.label + ":")
if field.field.required:
VAR_7 += "required: true, "
if field.field.help_text:
VAR_7 += "help_text: '%s', " % field.field.help_text
VAR_7 += "widget: '%s' }," % six.text_type(field)
return "[ %VAR_7 ]" % VAR_7[:-1]
@VAR_0.filter
def FUNC_1(VAR_3, VAR_4=None):
if isinstance(VAR_3, QuerySet):
VAR_8 = serialize('json', VAR_3, VAR_4=indent)
else:
VAR_8 = json.dumps(VAR_3, VAR_4=indent, cls=DjbletsJSONEncoder)
return mark_safe(force_text(VAR_8).translate(VAR_1))
@VAR_0.filter
def FUNC_2(VAR_5, VAR_6=''):
if not VAR_5:
return ''
return mark_safe(FUNC_1(VAR_5)[1:-1] + VAR_6)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
27,
29,
35,
37,
38,
40,
41,
50,
53,
58,
61,
64,
66,
67,
69,
70,
77,
79,
80,
84,
88,
96,
98,
44,
45,
46,
47,
48,
83,
84,
85,
86,
87,
88,
89,
90,
91,
92,
93
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
27,
29,
36,
38,
39,
41,
47,
48,
57,
60,
65,
68,
71,
73,
74,
76,
77,
84,
86,
87,
91,
95,
103,
105,
51,
52,
53,
54,
55,
90,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django.conf.urls import url
from shuup.xtheme.views.command import command_dispatch
from shuup.xtheme.views.editor import EditorView
from shuup.xtheme.views.extra import extra_view_dispatch
from shuup.xtheme.views.plugins import (
get_category_products_highlight,
get_product_cross_sell_highlight,
get_product_highlight,
get_prouduct_selections_highlight,
)
urlpatterns = [
url(r"^xtheme/editor/$", EditorView.as_view(), name="xtheme_editor"),
url(r"^xtheme/(?P<view>.+)/*$", extra_view_dispatch, name="xtheme_extra_view"),
url(r"^xtheme/$", command_dispatch, name="xtheme"),
url(
r"^xtheme-prod-hl/(?P<plugin_type>.*)/(?P<cutoff_days>\d+)/(?P<count>\d+)/(?P<cache_timeout>\d+)/$",
get_product_highlight,
name="xtheme-product-highlight",
),
url(
r"""
^xtheme-prod-cross-sell-hl/
(?P<product_id>.*)/(?P<relation_type>.*)/(?P<use_parents>\d+)/
(?P<count>\d+)/(?P<cache_timeout>\d+)/$
""".strip(),
get_product_cross_sell_highlight,
name="xtheme-product-cross-sells-highlight",
),
url(
r"^xtheme-cat-products-hl/(?P<category_id>\d+)/(?P<count>\d+)/(?P<cache_timeout>\d+)/$",
get_category_products_highlight,
name="xtheme-category-products-highlight",
),
url(
r"^xtheme-prod-selections-hl/(?P<product_ids>.*)/(?P<cache_timeout>\d+)/$",
get_prouduct_selections_highlight,
name="xtheme-product-selections-highlight",
),
]
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django.conf.urls import url
from shuup.xtheme.views.command import command_dispatch
from shuup.xtheme.views.editor import EditorView
from shuup.xtheme.views.extra import extra_view_dispatch
from shuup.xtheme.views.plugins import (
get_category_products_highlight,
get_product_cross_sell_highlight,
get_product_highlight,
get_prouduct_selections_highlight,
)
urlpatterns = [
url(r"^xtheme/editor/$", EditorView.as_view(), name="xtheme_editor"),
url(r"^xtheme/(?P<view>.+)/?$", extra_view_dispatch, name="xtheme_extra_view"),
url(r"^xtheme/$", command_dispatch, name="xtheme"),
url(
r"^xtheme-prod-hl/(?P<plugin_type>.*)/(?P<cutoff_days>\d+)/(?P<count>\d+)/(?P<cache_timeout>\d+)/$",
get_product_highlight,
name="xtheme-product-highlight",
),
url(
r"""
^xtheme-prod-cross-sell-hl/
(?P<product_id>.*)/(?P<relation_type>.*)/(?P<use_parents>\d+)/
(?P<count>\d+)/(?P<cache_timeout>\d+)/$
""".strip(),
get_product_cross_sell_highlight,
name="xtheme-product-cross-sells-highlight",
),
url(
r"^xtheme-cat-products-hl/(?P<category_id>\d+)/(?P<count>\d+)/(?P<cache_timeout>\d+)/$",
get_category_products_highlight,
name="xtheme-category-products-highlight",
),
url(
r"^xtheme-prod-selections-hl/(?P<product_ids>.*)/(?P<cache_timeout>\d+)/$",
get_prouduct_selections_highlight,
name="xtheme-product-selections-highlight",
),
]
| xss | {
"code": [
" url(r\"^xtheme/(?P<view>.+)/*$\", extra_view_dispatch, name=\"xtheme_extra_view\"),"
],
"line_no": [
22
]
} | {
"code": [
" url(r\"^xtheme/(?P<view>.+)/?$\", extra_view_dispatch, name=\"xtheme_extra_view\"),"
],
"line_no": [
22
]
} |
from django.conf.urls import url
from shuup.xtheme.views.command import command_dispatch
from shuup.xtheme.views.editor import EditorView
from shuup.xtheme.views.extra import extra_view_dispatch
from shuup.xtheme.views.plugins import (
get_category_products_highlight,
get_product_cross_sell_highlight,
get_product_highlight,
get_prouduct_selections_highlight,
)
VAR_0 = [
url(r"^xtheme/editor/$", EditorView.as_view(), name="xtheme_editor"),
url(r"^xtheme/(?P<view>.+)/*$", extra_view_dispatch, name="xtheme_extra_view"),
url(r"^xtheme/$", command_dispatch, name="xtheme"),
url(
r"^xtheme-prod-hl/(?P<plugin_type>.*)/(?P<cutoff_days>\d+)/(?P<count>\d+)/(?P<cache_timeout>\d+)/$",
get_product_highlight,
name="xtheme-product-highlight",
),
url(
r"""
^xtheme-prod-cross-sell-hl/
(?P<product_id>.*)/(?P<relation_type>.*)/(?P<use_parents>\d+)/
(?P<count>\d+)/(?P<cache_timeout>\d+)/$
""".strip(),
get_product_cross_sell_highlight,
name="xtheme-product-cross-sells-highlight",
),
url(
r"^xtheme-cat-products-hl/(?P<category_id>\d+)/(?P<count>\d+)/(?P<cache_timeout>\d+)/$",
get_category_products_highlight,
name="xtheme-category-products-highlight",
),
url(
r"^xtheme-prod-selections-hl/(?P<product_ids>.*)/(?P<cache_timeout>\d+)/$",
get_prouduct_selections_highlight,
name="xtheme-product-selections-highlight",
),
]
|
from django.conf.urls import url
from shuup.xtheme.views.command import command_dispatch
from shuup.xtheme.views.editor import EditorView
from shuup.xtheme.views.extra import extra_view_dispatch
from shuup.xtheme.views.plugins import (
get_category_products_highlight,
get_product_cross_sell_highlight,
get_product_highlight,
get_prouduct_selections_highlight,
)
VAR_0 = [
url(r"^xtheme/editor/$", EditorView.as_view(), name="xtheme_editor"),
url(r"^xtheme/(?P<view>.+)/?$", extra_view_dispatch, name="xtheme_extra_view"),
url(r"^xtheme/$", command_dispatch, name="xtheme"),
url(
r"^xtheme-prod-hl/(?P<plugin_type>.*)/(?P<cutoff_days>\d+)/(?P<count>\d+)/(?P<cache_timeout>\d+)/$",
get_product_highlight,
name="xtheme-product-highlight",
),
url(
r"""
^xtheme-prod-cross-sell-hl/
(?P<product_id>.*)/(?P<relation_type>.*)/(?P<use_parents>\d+)/
(?P<count>\d+)/(?P<cache_timeout>\d+)/$
""".strip(),
get_product_cross_sell_highlight,
name="xtheme-product-cross-sells-highlight",
),
url(
r"^xtheme-cat-products-hl/(?P<category_id>\d+)/(?P<count>\d+)/(?P<cache_timeout>\d+)/$",
get_category_products_highlight,
name="xtheme-category-products-highlight",
),
url(
r"^xtheme-prod-selections-hl/(?P<product_ids>.*)/(?P<cache_timeout>\d+)/$",
get_prouduct_selections_highlight,
name="xtheme-product-selections-highlight",
),
]
| [
1,
2,
3,
4,
5,
6,
7,
9,
19,
49
] | [
1,
2,
3,
4,
5,
6,
7,
9,
19,
49
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
import jsonschema
from twisted.internet import defer
from synapse.api.constants import EventContentFields
from synapse.api.errors import SynapseError
from synapse.api.filtering import Filter
from synapse.events import make_event_from_dict
from tests import unittest
from tests.utils import DeferredMockCallable, MockHttpResource, setup_test_homeserver
user_localpart = "test_user"
def MockEvent(**kwargs):
if "event_id" not in kwargs:
kwargs["event_id"] = "fake_event_id"
if "type" not in kwargs:
kwargs["type"] = "fake_type"
return make_event_from_dict(kwargs)
class FilteringTestCase(unittest.TestCase):
@defer.inlineCallbacks
def setUp(self):
self.mock_federation_resource = MockHttpResource()
self.mock_http_client = Mock(spec=[])
self.mock_http_client.put_json = DeferredMockCallable()
hs = yield setup_test_homeserver(
self.addCleanup, http_client=self.mock_http_client, keyring=Mock(),
)
self.filtering = hs.get_filtering()
self.datastore = hs.get_datastore()
def test_errors_on_invalid_filters(self):
invalid_filters = [
{"boom": {}},
{"account_data": "Hello World"},
{"event_fields": [r"\\foo"]},
{"room": {"timeline": {"limit": 0}, "state": {"not_bars": ["*"]}}},
{"event_format": "other"},
{"room": {"not_rooms": ["#foo:pik-test"]}},
{"presence": {"senders": ["@bar;pik.test.com"]}},
]
for filter in invalid_filters:
with self.assertRaises(SynapseError) as check_filter_error:
self.filtering.check_valid_filter(filter)
self.assertIsInstance(check_filter_error.exception, SynapseError)
def test_valid_filters(self):
valid_filters = [
{
"room": {
"timeline": {"limit": 20},
"state": {"not_types": ["m.room.member"]},
"ephemeral": {"limit": 0, "not_types": ["*"]},
"include_leave": False,
"rooms": ["!dee:pik-test"],
"not_rooms": ["!gee:pik-test"],
"account_data": {"limit": 0, "types": ["*"]},
}
},
{
"room": {
"state": {
"types": ["m.room.*"],
"not_rooms": ["!726s6s6q:example.com"],
},
"timeline": {
"limit": 10,
"types": ["m.room.message"],
"not_rooms": ["!726s6s6q:example.com"],
"not_senders": ["@spam:example.com"],
"org.matrix.labels": ["#fun"],
"org.matrix.not_labels": ["#work"],
},
"ephemeral": {
"types": ["m.receipt", "m.typing"],
"not_rooms": ["!726s6s6q:example.com"],
"not_senders": ["@spam:example.com"],
},
},
"presence": {
"types": ["m.presence"],
"not_senders": ["@alice:example.com"],
},
"event_format": "client",
"event_fields": ["type", "content", "sender"],
},
# a single backslash should be permitted (though it is debatable whether
# it should be permitted before anything other than `.`, and what that
# actually means)
#
# (note that event_fields is implemented in
# synapse.events.utils.serialize_event, and so whether this actually works
# is tested elsewhere. We just want to check that it is allowed through the
# filter validation)
{"event_fields": [r"foo\.bar"]},
]
for filter in valid_filters:
try:
self.filtering.check_valid_filter(filter)
except jsonschema.ValidationError as e:
self.fail(e)
def test_limits_are_applied(self):
# TODO
pass
def test_definition_types_works_with_literals(self):
definition = {"types": ["m.room.message", "org.matrix.foo.bar"]}
event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertTrue(Filter(definition).check(event))
def test_definition_types_works_with_wildcards(self):
definition = {"types": ["m.*", "org.matrix.foo.bar"]}
event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertTrue(Filter(definition).check(event))
def test_definition_types_works_with_unknowns(self):
definition = {"types": ["m.room.message", "org.matrix.foo.bar"]}
event = MockEvent(
sender="@foo:bar",
type="now.for.something.completely.different",
room_id="!foo:bar",
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_types_works_with_literals(self):
definition = {"not_types": ["m.room.message", "org.matrix.foo.bar"]}
event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertFalse(Filter(definition).check(event))
def test_definition_not_types_works_with_wildcards(self):
definition = {"not_types": ["m.room.message", "org.matrix.*"]}
event = MockEvent(
sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar"
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_types_works_with_unknowns(self):
definition = {"not_types": ["m.*", "org.*"]}
event = MockEvent(sender="@foo:bar", type="com.nom.nom.nom", room_id="!foo:bar")
self.assertTrue(Filter(definition).check(event))
def test_definition_not_types_takes_priority_over_types(self):
definition = {
"not_types": ["m.*", "org.*"],
"types": ["m.room.message", "m.room.topic"],
}
event = MockEvent(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar")
self.assertFalse(Filter(definition).check(event))
def test_definition_senders_works_with_literals(self):
definition = {"senders": ["@flibble:wibble"]}
event = MockEvent(
sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertTrue(Filter(definition).check(event))
def test_definition_senders_works_with_unknowns(self):
definition = {"senders": ["@flibble:wibble"]}
event = MockEvent(
sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_senders_works_with_literals(self):
definition = {"not_senders": ["@flibble:wibble"]}
event = MockEvent(
sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_senders_works_with_unknowns(self):
definition = {"not_senders": ["@flibble:wibble"]}
event = MockEvent(
sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertTrue(Filter(definition).check(event))
def test_definition_not_senders_takes_priority_over_senders(self):
definition = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets", "@misspiggy:muppets"],
}
event = MockEvent(
sender="@misspiggy:muppets", type="m.room.topic", room_id="!foo:bar"
)
self.assertFalse(Filter(definition).check(event))
def test_definition_rooms_works_with_literals(self):
definition = {"rooms": ["!secretbase:unknown"]}
event = MockEvent(
sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown"
)
self.assertTrue(Filter(definition).check(event))
def test_definition_rooms_works_with_unknowns(self):
definition = {"rooms": ["!secretbase:unknown"]}
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_rooms_works_with_literals(self):
definition = {"not_rooms": ["!anothersecretbase:unknown"]}
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_rooms_works_with_unknowns(self):
definition = {"not_rooms": ["!secretbase:unknown"]}
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertTrue(Filter(definition).check(event))
def test_definition_not_rooms_takes_priority_over_rooms(self):
definition = {
"not_rooms": ["!secretbase:unknown"],
"rooms": ["!secretbase:unknown"],
}
event = MockEvent(
sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown"
)
self.assertFalse(Filter(definition).check(event))
def test_definition_combined_event(self):
definition = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
event = MockEvent(
sender="@kermit:muppets", # yup
type="m.room.message", # yup
room_id="!stage:unknown", # yup
)
self.assertTrue(Filter(definition).check(event))
def test_definition_combined_event_bad_sender(self):
definition = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
event = MockEvent(
sender="@misspiggy:muppets", # nope
type="m.room.message", # yup
room_id="!stage:unknown", # yup
)
self.assertFalse(Filter(definition).check(event))
def test_definition_combined_event_bad_room(self):
definition = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
event = MockEvent(
sender="@kermit:muppets", # yup
type="m.room.message", # yup
room_id="!piggyshouse:muppets", # nope
)
self.assertFalse(Filter(definition).check(event))
def test_definition_combined_event_bad_type(self):
definition = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
event = MockEvent(
sender="@kermit:muppets", # yup
type="muppets.misspiggy.kisses", # nope
room_id="!stage:unknown", # yup
)
self.assertFalse(Filter(definition).check(event))
def test_filter_labels(self):
definition = {"org.matrix.labels": ["#fun"]}
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#fun"]},
)
self.assertTrue(Filter(definition).check(event))
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#notfun"]},
)
self.assertFalse(Filter(definition).check(event))
def test_filter_not_labels(self):
definition = {"org.matrix.not_labels": ["#fun"]}
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#fun"]},
)
self.assertFalse(Filter(definition).check(event))
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#notfun"]},
)
self.assertTrue(Filter(definition).check(event))
@defer.inlineCallbacks
def test_filter_presence_match(self):
user_filter_json = {"presence": {"types": ["m.*"]}}
filter_id = yield defer.ensureDeferred(
self.datastore.add_user_filter(
user_localpart=user_localpart, user_filter=user_filter_json
)
)
event = MockEvent(sender="@foo:bar", type="m.profile")
events = [event]
user_filter = yield defer.ensureDeferred(
self.filtering.get_user_filter(
user_localpart=user_localpart, filter_id=filter_id
)
)
results = user_filter.filter_presence(events=events)
self.assertEquals(events, results)
@defer.inlineCallbacks
def test_filter_presence_no_match(self):
user_filter_json = {"presence": {"types": ["m.*"]}}
filter_id = yield defer.ensureDeferred(
self.datastore.add_user_filter(
user_localpart=user_localpart + "2", user_filter=user_filter_json
)
)
event = MockEvent(
event_id="$asdasd:localhost",
sender="@foo:bar",
type="custom.avatar.3d.crazy",
)
events = [event]
user_filter = yield defer.ensureDeferred(
self.filtering.get_user_filter(
user_localpart=user_localpart + "2", filter_id=filter_id
)
)
results = user_filter.filter_presence(events=events)
self.assertEquals([], results)
@defer.inlineCallbacks
def test_filter_room_state_match(self):
user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
filter_id = yield defer.ensureDeferred(
self.datastore.add_user_filter(
user_localpart=user_localpart, user_filter=user_filter_json
)
)
event = MockEvent(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar")
events = [event]
user_filter = yield defer.ensureDeferred(
self.filtering.get_user_filter(
user_localpart=user_localpart, filter_id=filter_id
)
)
results = user_filter.filter_room_state(events=events)
self.assertEquals(events, results)
@defer.inlineCallbacks
def test_filter_room_state_no_match(self):
user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
filter_id = yield defer.ensureDeferred(
self.datastore.add_user_filter(
user_localpart=user_localpart, user_filter=user_filter_json
)
)
event = MockEvent(
sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar"
)
events = [event]
user_filter = yield defer.ensureDeferred(
self.filtering.get_user_filter(
user_localpart=user_localpart, filter_id=filter_id
)
)
results = user_filter.filter_room_state(events)
self.assertEquals([], results)
def test_filter_rooms(self):
definition = {
"rooms": ["!allowed:example.com", "!excluded:example.com"],
"not_rooms": ["!excluded:example.com"],
}
room_ids = [
"!allowed:example.com", # Allowed because in rooms and not in not_rooms.
"!excluded:example.com", # Disallowed because in not_rooms.
"!not_included:example.com", # Disallowed because not in rooms.
]
filtered_room_ids = list(Filter(definition).filter_rooms(room_ids))
self.assertEquals(filtered_room_ids, ["!allowed:example.com"])
@defer.inlineCallbacks
def test_add_filter(self):
user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
filter_id = yield defer.ensureDeferred(
self.filtering.add_user_filter(
user_localpart=user_localpart, user_filter=user_filter_json
)
)
self.assertEquals(filter_id, 0)
self.assertEquals(
user_filter_json,
(
yield defer.ensureDeferred(
self.datastore.get_user_filter(
user_localpart=user_localpart, filter_id=0
)
)
),
)
@defer.inlineCallbacks
def test_get_filter(self):
user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
filter_id = yield defer.ensureDeferred(
self.datastore.add_user_filter(
user_localpart=user_localpart, user_filter=user_filter_json
)
)
filter = yield defer.ensureDeferred(
self.filtering.get_user_filter(
user_localpart=user_localpart, filter_id=filter_id
)
)
self.assertEquals(filter.get_filter_json(), user_filter_json)
self.assertRegexpMatches(repr(filter), r"<FilterCollection \{.*\}>")
| # -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
import jsonschema
from twisted.internet import defer
from synapse.api.constants import EventContentFields
from synapse.api.errors import SynapseError
from synapse.api.filtering import Filter
from synapse.events import make_event_from_dict
from tests import unittest
from tests.utils import DeferredMockCallable, MockHttpResource, setup_test_homeserver
user_localpart = "test_user"
def MockEvent(**kwargs):
if "event_id" not in kwargs:
kwargs["event_id"] = "fake_event_id"
if "type" not in kwargs:
kwargs["type"] = "fake_type"
return make_event_from_dict(kwargs)
class FilteringTestCase(unittest.TestCase):
@defer.inlineCallbacks
def setUp(self):
self.mock_federation_resource = MockHttpResource()
self.mock_http_client = Mock(spec=[])
self.mock_http_client.put_json = DeferredMockCallable()
hs = yield setup_test_homeserver(
self.addCleanup,
federation_http_client=self.mock_http_client,
keyring=Mock(),
)
self.filtering = hs.get_filtering()
self.datastore = hs.get_datastore()
def test_errors_on_invalid_filters(self):
invalid_filters = [
{"boom": {}},
{"account_data": "Hello World"},
{"event_fields": [r"\\foo"]},
{"room": {"timeline": {"limit": 0}, "state": {"not_bars": ["*"]}}},
{"event_format": "other"},
{"room": {"not_rooms": ["#foo:pik-test"]}},
{"presence": {"senders": ["@bar;pik.test.com"]}},
]
for filter in invalid_filters:
with self.assertRaises(SynapseError) as check_filter_error:
self.filtering.check_valid_filter(filter)
self.assertIsInstance(check_filter_error.exception, SynapseError)
def test_valid_filters(self):
valid_filters = [
{
"room": {
"timeline": {"limit": 20},
"state": {"not_types": ["m.room.member"]},
"ephemeral": {"limit": 0, "not_types": ["*"]},
"include_leave": False,
"rooms": ["!dee:pik-test"],
"not_rooms": ["!gee:pik-test"],
"account_data": {"limit": 0, "types": ["*"]},
}
},
{
"room": {
"state": {
"types": ["m.room.*"],
"not_rooms": ["!726s6s6q:example.com"],
},
"timeline": {
"limit": 10,
"types": ["m.room.message"],
"not_rooms": ["!726s6s6q:example.com"],
"not_senders": ["@spam:example.com"],
"org.matrix.labels": ["#fun"],
"org.matrix.not_labels": ["#work"],
},
"ephemeral": {
"types": ["m.receipt", "m.typing"],
"not_rooms": ["!726s6s6q:example.com"],
"not_senders": ["@spam:example.com"],
},
},
"presence": {
"types": ["m.presence"],
"not_senders": ["@alice:example.com"],
},
"event_format": "client",
"event_fields": ["type", "content", "sender"],
},
# a single backslash should be permitted (though it is debatable whether
# it should be permitted before anything other than `.`, and what that
# actually means)
#
# (note that event_fields is implemented in
# synapse.events.utils.serialize_event, and so whether this actually works
# is tested elsewhere. We just want to check that it is allowed through the
# filter validation)
{"event_fields": [r"foo\.bar"]},
]
for filter in valid_filters:
try:
self.filtering.check_valid_filter(filter)
except jsonschema.ValidationError as e:
self.fail(e)
def test_limits_are_applied(self):
# TODO
pass
def test_definition_types_works_with_literals(self):
definition = {"types": ["m.room.message", "org.matrix.foo.bar"]}
event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertTrue(Filter(definition).check(event))
def test_definition_types_works_with_wildcards(self):
definition = {"types": ["m.*", "org.matrix.foo.bar"]}
event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertTrue(Filter(definition).check(event))
def test_definition_types_works_with_unknowns(self):
definition = {"types": ["m.room.message", "org.matrix.foo.bar"]}
event = MockEvent(
sender="@foo:bar",
type="now.for.something.completely.different",
room_id="!foo:bar",
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_types_works_with_literals(self):
definition = {"not_types": ["m.room.message", "org.matrix.foo.bar"]}
event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertFalse(Filter(definition).check(event))
def test_definition_not_types_works_with_wildcards(self):
definition = {"not_types": ["m.room.message", "org.matrix.*"]}
event = MockEvent(
sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar"
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_types_works_with_unknowns(self):
definition = {"not_types": ["m.*", "org.*"]}
event = MockEvent(sender="@foo:bar", type="com.nom.nom.nom", room_id="!foo:bar")
self.assertTrue(Filter(definition).check(event))
def test_definition_not_types_takes_priority_over_types(self):
definition = {
"not_types": ["m.*", "org.*"],
"types": ["m.room.message", "m.room.topic"],
}
event = MockEvent(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar")
self.assertFalse(Filter(definition).check(event))
def test_definition_senders_works_with_literals(self):
definition = {"senders": ["@flibble:wibble"]}
event = MockEvent(
sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertTrue(Filter(definition).check(event))
def test_definition_senders_works_with_unknowns(self):
definition = {"senders": ["@flibble:wibble"]}
event = MockEvent(
sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_senders_works_with_literals(self):
definition = {"not_senders": ["@flibble:wibble"]}
event = MockEvent(
sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_senders_works_with_unknowns(self):
definition = {"not_senders": ["@flibble:wibble"]}
event = MockEvent(
sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertTrue(Filter(definition).check(event))
def test_definition_not_senders_takes_priority_over_senders(self):
definition = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets", "@misspiggy:muppets"],
}
event = MockEvent(
sender="@misspiggy:muppets", type="m.room.topic", room_id="!foo:bar"
)
self.assertFalse(Filter(definition).check(event))
def test_definition_rooms_works_with_literals(self):
definition = {"rooms": ["!secretbase:unknown"]}
event = MockEvent(
sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown"
)
self.assertTrue(Filter(definition).check(event))
def test_definition_rooms_works_with_unknowns(self):
definition = {"rooms": ["!secretbase:unknown"]}
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_rooms_works_with_literals(self):
definition = {"not_rooms": ["!anothersecretbase:unknown"]}
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertFalse(Filter(definition).check(event))
def test_definition_not_rooms_works_with_unknowns(self):
definition = {"not_rooms": ["!secretbase:unknown"]}
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertTrue(Filter(definition).check(event))
def test_definition_not_rooms_takes_priority_over_rooms(self):
definition = {
"not_rooms": ["!secretbase:unknown"],
"rooms": ["!secretbase:unknown"],
}
event = MockEvent(
sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown"
)
self.assertFalse(Filter(definition).check(event))
def test_definition_combined_event(self):
definition = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
event = MockEvent(
sender="@kermit:muppets", # yup
type="m.room.message", # yup
room_id="!stage:unknown", # yup
)
self.assertTrue(Filter(definition).check(event))
def test_definition_combined_event_bad_sender(self):
definition = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
event = MockEvent(
sender="@misspiggy:muppets", # nope
type="m.room.message", # yup
room_id="!stage:unknown", # yup
)
self.assertFalse(Filter(definition).check(event))
def test_definition_combined_event_bad_room(self):
definition = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
event = MockEvent(
sender="@kermit:muppets", # yup
type="m.room.message", # yup
room_id="!piggyshouse:muppets", # nope
)
self.assertFalse(Filter(definition).check(event))
def test_definition_combined_event_bad_type(self):
definition = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
event = MockEvent(
sender="@kermit:muppets", # yup
type="muppets.misspiggy.kisses", # nope
room_id="!stage:unknown", # yup
)
self.assertFalse(Filter(definition).check(event))
def test_filter_labels(self):
definition = {"org.matrix.labels": ["#fun"]}
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#fun"]},
)
self.assertTrue(Filter(definition).check(event))
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#notfun"]},
)
self.assertFalse(Filter(definition).check(event))
def test_filter_not_labels(self):
definition = {"org.matrix.not_labels": ["#fun"]}
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#fun"]},
)
self.assertFalse(Filter(definition).check(event))
event = MockEvent(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#notfun"]},
)
self.assertTrue(Filter(definition).check(event))
@defer.inlineCallbacks
def test_filter_presence_match(self):
user_filter_json = {"presence": {"types": ["m.*"]}}
filter_id = yield defer.ensureDeferred(
self.datastore.add_user_filter(
user_localpart=user_localpart, user_filter=user_filter_json
)
)
event = MockEvent(sender="@foo:bar", type="m.profile")
events = [event]
user_filter = yield defer.ensureDeferred(
self.filtering.get_user_filter(
user_localpart=user_localpart, filter_id=filter_id
)
)
results = user_filter.filter_presence(events=events)
self.assertEquals(events, results)
@defer.inlineCallbacks
def test_filter_presence_no_match(self):
user_filter_json = {"presence": {"types": ["m.*"]}}
filter_id = yield defer.ensureDeferred(
self.datastore.add_user_filter(
user_localpart=user_localpart + "2", user_filter=user_filter_json
)
)
event = MockEvent(
event_id="$asdasd:localhost",
sender="@foo:bar",
type="custom.avatar.3d.crazy",
)
events = [event]
user_filter = yield defer.ensureDeferred(
self.filtering.get_user_filter(
user_localpart=user_localpart + "2", filter_id=filter_id
)
)
results = user_filter.filter_presence(events=events)
self.assertEquals([], results)
@defer.inlineCallbacks
def test_filter_room_state_match(self):
user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
filter_id = yield defer.ensureDeferred(
self.datastore.add_user_filter(
user_localpart=user_localpart, user_filter=user_filter_json
)
)
event = MockEvent(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar")
events = [event]
user_filter = yield defer.ensureDeferred(
self.filtering.get_user_filter(
user_localpart=user_localpart, filter_id=filter_id
)
)
results = user_filter.filter_room_state(events=events)
self.assertEquals(events, results)
@defer.inlineCallbacks
def test_filter_room_state_no_match(self):
user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
filter_id = yield defer.ensureDeferred(
self.datastore.add_user_filter(
user_localpart=user_localpart, user_filter=user_filter_json
)
)
event = MockEvent(
sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar"
)
events = [event]
user_filter = yield defer.ensureDeferred(
self.filtering.get_user_filter(
user_localpart=user_localpart, filter_id=filter_id
)
)
results = user_filter.filter_room_state(events)
self.assertEquals([], results)
def test_filter_rooms(self):
definition = {
"rooms": ["!allowed:example.com", "!excluded:example.com"],
"not_rooms": ["!excluded:example.com"],
}
room_ids = [
"!allowed:example.com", # Allowed because in rooms and not in not_rooms.
"!excluded:example.com", # Disallowed because in not_rooms.
"!not_included:example.com", # Disallowed because not in rooms.
]
filtered_room_ids = list(Filter(definition).filter_rooms(room_ids))
self.assertEquals(filtered_room_ids, ["!allowed:example.com"])
@defer.inlineCallbacks
def test_add_filter(self):
user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
filter_id = yield defer.ensureDeferred(
self.filtering.add_user_filter(
user_localpart=user_localpart, user_filter=user_filter_json
)
)
self.assertEquals(filter_id, 0)
self.assertEquals(
user_filter_json,
(
yield defer.ensureDeferred(
self.datastore.get_user_filter(
user_localpart=user_localpart, filter_id=0
)
)
),
)
@defer.inlineCallbacks
def test_get_filter(self):
user_filter_json = {"room": {"state": {"types": ["m.*"]}}}
filter_id = yield defer.ensureDeferred(
self.datastore.add_user_filter(
user_localpart=user_localpart, user_filter=user_filter_json
)
)
filter = yield defer.ensureDeferred(
self.filtering.get_user_filter(
user_localpart=user_localpart, filter_id=filter_id
)
)
self.assertEquals(filter.get_filter_json(), user_filter_json)
self.assertRegexpMatches(repr(filter), r"<FilterCollection \{.*\}>")
| open_redirect | {
"code": [
" self.addCleanup, http_client=self.mock_http_client, keyring=Mock(),"
],
"line_no": [
53
]
} | {
"code": [
" self.addCleanup,",
" federation_http_client=self.mock_http_client,"
],
"line_no": [
53,
54
]
} |
from mock import Mock
import jsonschema
from twisted.internet import defer
from synapse.api.constants import EventContentFields
from synapse.api.errors import SynapseError
from synapse.api.filtering import Filter
from synapse.events import make_event_from_dict
from tests import unittest
from tests.utils import DeferredMockCallable, MockHttpResource, setup_test_homeserver
VAR_0 = "test_user"
def FUNC_0(**VAR_1):
if "event_id" not in VAR_1:
kwargs["event_id"] = "fake_event_id"
if "type" not in VAR_1:
kwargs["type"] = "fake_type"
return make_event_from_dict(VAR_1)
class CLASS_0(unittest.TestCase):
@defer.inlineCallbacks
def FUNC_1(self):
self.mock_federation_resource = MockHttpResource()
self.mock_http_client = Mock(spec=[])
self.mock_http_client.put_json = DeferredMockCallable()
VAR_2 = yield setup_test_homeserver(
self.addCleanup, http_client=self.mock_http_client, keyring=Mock(),
)
self.filtering = VAR_2.get_filtering()
self.datastore = VAR_2.get_datastore()
def FUNC_2(self):
VAR_3 = [
{"boom": {}},
{"account_data": "Hello World"},
{"event_fields": [r"\\foo"]},
{"room": {"timeline": {"limit": 0}, "state": {"not_bars": ["*"]}}},
{"event_format": "other"},
{"room": {"not_rooms": ["#foo:pik-test"]}},
{"presence": {"senders": ["@bar;pik.test.com"]}},
]
for VAR_14 in VAR_3:
with self.assertRaises(SynapseError) as check_filter_error:
self.filtering.check_valid_filter(VAR_14)
self.assertIsInstance(check_filter_error.exception, SynapseError)
def FUNC_3(self):
VAR_4 = [
{
"room": {
"timeline": {"limit": 20},
"state": {"not_types": ["m.room.member"]},
"ephemeral": {"limit": 0, "not_types": ["*"]},
"include_leave": False,
"rooms": ["!dee:pik-test"],
"not_rooms": ["!gee:pik-test"],
"account_data": {"limit": 0, "types": ["*"]},
}
},
{
"room": {
"state": {
"types": ["m.room.*"],
"not_rooms": ["!726s6s6q:example.com"],
},
"timeline": {
"limit": 10,
"types": ["m.room.message"],
"not_rooms": ["!726s6s6q:example.com"],
"not_senders": ["@spam:example.com"],
"org.matrix.labels": ["#fun"],
"org.matrix.not_labels": ["#work"],
},
"ephemeral": {
"types": ["m.receipt", "m.typing"],
"not_rooms": ["!726s6s6q:example.com"],
"not_senders": ["@spam:example.com"],
},
},
"presence": {
"types": ["m.presence"],
"not_senders": ["@alice:example.com"],
},
"event_format": "client",
"event_fields": ["type", "content", "sender"],
},
{"event_fields": [r"foo\.bar"]},
]
for VAR_14 in VAR_4:
try:
self.filtering.check_valid_filter(VAR_14)
except jsonschema.ValidationError as e:
self.fail(e)
def FUNC_4(self):
pass
def FUNC_5(self):
VAR_5 = {"types": ["m.room.message", "org.matrix.foo.bar"]}
VAR_6 = FUNC_0(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_6(self):
VAR_5 = {"types": ["m.*", "org.matrix.foo.bar"]}
VAR_6 = FUNC_0(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_7(self):
VAR_5 = {"types": ["m.room.message", "org.matrix.foo.bar"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="now.for.something.completely.different",
room_id="!foo:bar",
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_8(self):
VAR_5 = {"not_types": ["m.room.message", "org.matrix.foo.bar"]}
VAR_6 = FUNC_0(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_9(self):
VAR_5 = {"not_types": ["m.room.message", "org.matrix.*"]}
VAR_6 = FUNC_0(
sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar"
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_10(self):
VAR_5 = {"not_types": ["m.*", "org.*"]}
VAR_6 = FUNC_0(sender="@foo:bar", type="com.nom.nom.nom", room_id="!foo:bar")
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_11(self):
VAR_5 = {
"not_types": ["m.*", "org.*"],
"types": ["m.room.message", "m.room.topic"],
}
VAR_6 = FUNC_0(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar")
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_12(self):
VAR_5 = {"senders": ["@flibble:wibble"]}
VAR_6 = FUNC_0(
sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_13(self):
VAR_5 = {"senders": ["@flibble:wibble"]}
VAR_6 = FUNC_0(
sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_14(self):
VAR_5 = {"not_senders": ["@flibble:wibble"]}
VAR_6 = FUNC_0(
sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_15(self):
VAR_5 = {"not_senders": ["@flibble:wibble"]}
VAR_6 = FUNC_0(
sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_16(self):
VAR_5 = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets", "@misspiggy:muppets"],
}
VAR_6 = FUNC_0(
sender="@misspiggy:muppets", type="m.room.topic", room_id="!foo:bar"
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_17(self):
VAR_5 = {"rooms": ["!secretbase:unknown"]}
VAR_6 = FUNC_0(
sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown"
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_18(self):
VAR_5 = {"rooms": ["!secretbase:unknown"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_19(self):
VAR_5 = {"not_rooms": ["!anothersecretbase:unknown"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_20(self):
VAR_5 = {"not_rooms": ["!secretbase:unknown"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_21(self):
VAR_5 = {
"not_rooms": ["!secretbase:unknown"],
"rooms": ["!secretbase:unknown"],
}
VAR_6 = FUNC_0(
sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown"
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_22(self):
VAR_5 = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
VAR_6 = FUNC_0(
sender="@kermit:muppets", # yup
type="m.room.message", # yup
room_id="!stage:unknown", # yup
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_23(self):
VAR_5 = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
VAR_6 = FUNC_0(
sender="@misspiggy:muppets", # nope
type="m.room.message", # yup
room_id="!stage:unknown", # yup
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_24(self):
VAR_5 = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
VAR_6 = FUNC_0(
sender="@kermit:muppets", # yup
type="m.room.message", # yup
room_id="!piggyshouse:muppets", # nope
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_25(self):
VAR_5 = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
VAR_6 = FUNC_0(
sender="@kermit:muppets", # yup
type="muppets.misspiggy.kisses", # nope
room_id="!stage:unknown", # yup
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_26(self):
VAR_5 = {"org.matrix.labels": ["#fun"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#fun"]},
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
event = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#notfun"]},
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_27(self):
VAR_5 = {"org.matrix.not_labels": ["#fun"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#fun"]},
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
event = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#notfun"]},
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
@defer.inlineCallbacks
def FUNC_28(self):
VAR_7 = {"presence": {"types": ["m.*"]}}
VAR_8 = yield defer.ensureDeferred(
self.datastore.add_user_filter(
VAR_0=user_localpart, VAR_10=VAR_7
)
)
VAR_6 = FUNC_0(sender="@foo:bar", type="m.profile")
VAR_9 = [VAR_6]
VAR_10 = yield defer.ensureDeferred(
self.filtering.get_user_filter(
VAR_0=user_localpart, VAR_8=filter_id
)
)
VAR_11 = VAR_10.filter_presence(VAR_9=events)
self.assertEquals(VAR_9, VAR_11)
@defer.inlineCallbacks
def FUNC_29(self):
VAR_7 = {"presence": {"types": ["m.*"]}}
VAR_8 = yield defer.ensureDeferred(
self.datastore.add_user_filter(
VAR_0=user_localpart + "2", VAR_10=VAR_7
)
)
VAR_6 = FUNC_0(
event_id="$asdasd:localhost",
sender="@foo:bar",
type="custom.avatar.3d.crazy",
)
VAR_9 = [VAR_6]
VAR_10 = yield defer.ensureDeferred(
self.filtering.get_user_filter(
VAR_0=user_localpart + "2", VAR_8=filter_id
)
)
VAR_11 = VAR_10.filter_presence(VAR_9=events)
self.assertEquals([], VAR_11)
@defer.inlineCallbacks
def FUNC_30(self):
VAR_7 = {"room": {"state": {"types": ["m.*"]}}}
VAR_8 = yield defer.ensureDeferred(
self.datastore.add_user_filter(
VAR_0=user_localpart, VAR_10=VAR_7
)
)
VAR_6 = FUNC_0(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar")
VAR_9 = [VAR_6]
VAR_10 = yield defer.ensureDeferred(
self.filtering.get_user_filter(
VAR_0=user_localpart, VAR_8=filter_id
)
)
VAR_11 = VAR_10.filter_room_state(VAR_9=events)
self.assertEquals(VAR_9, VAR_11)
@defer.inlineCallbacks
def FUNC_31(self):
VAR_7 = {"room": {"state": {"types": ["m.*"]}}}
VAR_8 = yield defer.ensureDeferred(
self.datastore.add_user_filter(
VAR_0=user_localpart, VAR_10=VAR_7
)
)
VAR_6 = FUNC_0(
sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar"
)
VAR_9 = [VAR_6]
VAR_10 = yield defer.ensureDeferred(
self.filtering.get_user_filter(
VAR_0=user_localpart, VAR_8=filter_id
)
)
VAR_11 = VAR_10.filter_room_state(VAR_9)
self.assertEquals([], VAR_11)
def FUNC_32(self):
VAR_5 = {
"rooms": ["!allowed:example.com", "!excluded:example.com"],
"not_rooms": ["!excluded:example.com"],
}
VAR_12 = [
"!allowed:example.com", # Allowed because in rooms and not in not_rooms.
"!excluded:example.com", # Disallowed because in not_rooms.
"!not_included:example.com", # Disallowed because not in rooms.
]
VAR_13 = list(Filter(VAR_5).filter_rooms(VAR_12))
self.assertEquals(VAR_13, ["!allowed:example.com"])
@defer.inlineCallbacks
def FUNC_33(self):
VAR_7 = {"room": {"state": {"types": ["m.*"]}}}
VAR_8 = yield defer.ensureDeferred(
self.filtering.add_user_filter(
VAR_0=user_localpart, VAR_10=VAR_7
)
)
self.assertEquals(VAR_8, 0)
self.assertEquals(
VAR_7,
(
yield defer.ensureDeferred(
self.datastore.get_user_filter(
VAR_0=user_localpart, VAR_8=0
)
)
),
)
@defer.inlineCallbacks
def FUNC_34(self):
VAR_7 = {"room": {"state": {"types": ["m.*"]}}}
VAR_8 = yield defer.ensureDeferred(
self.datastore.add_user_filter(
VAR_0=user_localpart, VAR_10=VAR_7
)
)
VAR_14 = yield defer.ensureDeferred(
self.filtering.get_user_filter(
VAR_0=user_localpart, VAR_8=filter_id
)
)
self.assertEquals(VAR_14.get_filter_json(), VAR_7)
self.assertRegexpMatches(repr(VAR_14), r"<FilterCollection \{.*\}>")
|
from mock import Mock
import jsonschema
from twisted.internet import defer
from synapse.api.constants import EventContentFields
from synapse.api.errors import SynapseError
from synapse.api.filtering import Filter
from synapse.events import make_event_from_dict
from tests import unittest
from tests.utils import DeferredMockCallable, MockHttpResource, setup_test_homeserver
VAR_0 = "test_user"
def FUNC_0(**VAR_1):
if "event_id" not in VAR_1:
kwargs["event_id"] = "fake_event_id"
if "type" not in VAR_1:
kwargs["type"] = "fake_type"
return make_event_from_dict(VAR_1)
class CLASS_0(unittest.TestCase):
@defer.inlineCallbacks
def FUNC_1(self):
self.mock_federation_resource = MockHttpResource()
self.mock_http_client = Mock(spec=[])
self.mock_http_client.put_json = DeferredMockCallable()
VAR_2 = yield setup_test_homeserver(
self.addCleanup,
federation_http_client=self.mock_http_client,
keyring=Mock(),
)
self.filtering = VAR_2.get_filtering()
self.datastore = VAR_2.get_datastore()
def FUNC_2(self):
VAR_3 = [
{"boom": {}},
{"account_data": "Hello World"},
{"event_fields": [r"\\foo"]},
{"room": {"timeline": {"limit": 0}, "state": {"not_bars": ["*"]}}},
{"event_format": "other"},
{"room": {"not_rooms": ["#foo:pik-test"]}},
{"presence": {"senders": ["@bar;pik.test.com"]}},
]
for VAR_14 in VAR_3:
with self.assertRaises(SynapseError) as check_filter_error:
self.filtering.check_valid_filter(VAR_14)
self.assertIsInstance(check_filter_error.exception, SynapseError)
def FUNC_3(self):
VAR_4 = [
{
"room": {
"timeline": {"limit": 20},
"state": {"not_types": ["m.room.member"]},
"ephemeral": {"limit": 0, "not_types": ["*"]},
"include_leave": False,
"rooms": ["!dee:pik-test"],
"not_rooms": ["!gee:pik-test"],
"account_data": {"limit": 0, "types": ["*"]},
}
},
{
"room": {
"state": {
"types": ["m.room.*"],
"not_rooms": ["!726s6s6q:example.com"],
},
"timeline": {
"limit": 10,
"types": ["m.room.message"],
"not_rooms": ["!726s6s6q:example.com"],
"not_senders": ["@spam:example.com"],
"org.matrix.labels": ["#fun"],
"org.matrix.not_labels": ["#work"],
},
"ephemeral": {
"types": ["m.receipt", "m.typing"],
"not_rooms": ["!726s6s6q:example.com"],
"not_senders": ["@spam:example.com"],
},
},
"presence": {
"types": ["m.presence"],
"not_senders": ["@alice:example.com"],
},
"event_format": "client",
"event_fields": ["type", "content", "sender"],
},
{"event_fields": [r"foo\.bar"]},
]
for VAR_14 in VAR_4:
try:
self.filtering.check_valid_filter(VAR_14)
except jsonschema.ValidationError as e:
self.fail(e)
def FUNC_4(self):
pass
def FUNC_5(self):
VAR_5 = {"types": ["m.room.message", "org.matrix.foo.bar"]}
VAR_6 = FUNC_0(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_6(self):
VAR_5 = {"types": ["m.*", "org.matrix.foo.bar"]}
VAR_6 = FUNC_0(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_7(self):
VAR_5 = {"types": ["m.room.message", "org.matrix.foo.bar"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="now.for.something.completely.different",
room_id="!foo:bar",
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_8(self):
VAR_5 = {"not_types": ["m.room.message", "org.matrix.foo.bar"]}
VAR_6 = FUNC_0(sender="@foo:bar", type="m.room.message", room_id="!foo:bar")
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_9(self):
VAR_5 = {"not_types": ["m.room.message", "org.matrix.*"]}
VAR_6 = FUNC_0(
sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar"
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_10(self):
VAR_5 = {"not_types": ["m.*", "org.*"]}
VAR_6 = FUNC_0(sender="@foo:bar", type="com.nom.nom.nom", room_id="!foo:bar")
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_11(self):
VAR_5 = {
"not_types": ["m.*", "org.*"],
"types": ["m.room.message", "m.room.topic"],
}
VAR_6 = FUNC_0(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar")
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_12(self):
VAR_5 = {"senders": ["@flibble:wibble"]}
VAR_6 = FUNC_0(
sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_13(self):
VAR_5 = {"senders": ["@flibble:wibble"]}
VAR_6 = FUNC_0(
sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_14(self):
VAR_5 = {"not_senders": ["@flibble:wibble"]}
VAR_6 = FUNC_0(
sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_15(self):
VAR_5 = {"not_senders": ["@flibble:wibble"]}
VAR_6 = FUNC_0(
sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar"
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_16(self):
VAR_5 = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets", "@misspiggy:muppets"],
}
VAR_6 = FUNC_0(
sender="@misspiggy:muppets", type="m.room.topic", room_id="!foo:bar"
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_17(self):
VAR_5 = {"rooms": ["!secretbase:unknown"]}
VAR_6 = FUNC_0(
sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown"
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_18(self):
VAR_5 = {"rooms": ["!secretbase:unknown"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_19(self):
VAR_5 = {"not_rooms": ["!anothersecretbase:unknown"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_20(self):
VAR_5 = {"not_rooms": ["!secretbase:unknown"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!anothersecretbase:unknown",
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_21(self):
VAR_5 = {
"not_rooms": ["!secretbase:unknown"],
"rooms": ["!secretbase:unknown"],
}
VAR_6 = FUNC_0(
sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown"
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_22(self):
VAR_5 = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
VAR_6 = FUNC_0(
sender="@kermit:muppets", # yup
type="m.room.message", # yup
room_id="!stage:unknown", # yup
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
def FUNC_23(self):
VAR_5 = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
VAR_6 = FUNC_0(
sender="@misspiggy:muppets", # nope
type="m.room.message", # yup
room_id="!stage:unknown", # yup
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_24(self):
VAR_5 = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
VAR_6 = FUNC_0(
sender="@kermit:muppets", # yup
type="m.room.message", # yup
room_id="!piggyshouse:muppets", # nope
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_25(self):
VAR_5 = {
"not_senders": ["@misspiggy:muppets"],
"senders": ["@kermit:muppets"],
"rooms": ["!stage:unknown"],
"not_rooms": ["!piggyshouse:muppets"],
"types": ["m.room.message", "muppets.kermit.*"],
"not_types": ["muppets.misspiggy.*"],
}
VAR_6 = FUNC_0(
sender="@kermit:muppets", # yup
type="muppets.misspiggy.kisses", # nope
room_id="!stage:unknown", # yup
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_26(self):
VAR_5 = {"org.matrix.labels": ["#fun"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#fun"]},
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
event = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#notfun"]},
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
def FUNC_27(self):
VAR_5 = {"org.matrix.not_labels": ["#fun"]}
VAR_6 = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#fun"]},
)
self.assertFalse(Filter(VAR_5).check(VAR_6))
event = FUNC_0(
sender="@foo:bar",
type="m.room.message",
room_id="!secretbase:unknown",
content={EventContentFields.LABELS: ["#notfun"]},
)
self.assertTrue(Filter(VAR_5).check(VAR_6))
@defer.inlineCallbacks
def FUNC_28(self):
VAR_7 = {"presence": {"types": ["m.*"]}}
VAR_8 = yield defer.ensureDeferred(
self.datastore.add_user_filter(
VAR_0=user_localpart, VAR_10=VAR_7
)
)
VAR_6 = FUNC_0(sender="@foo:bar", type="m.profile")
VAR_9 = [VAR_6]
VAR_10 = yield defer.ensureDeferred(
self.filtering.get_user_filter(
VAR_0=user_localpart, VAR_8=filter_id
)
)
VAR_11 = VAR_10.filter_presence(VAR_9=events)
self.assertEquals(VAR_9, VAR_11)
@defer.inlineCallbacks
def FUNC_29(self):
VAR_7 = {"presence": {"types": ["m.*"]}}
VAR_8 = yield defer.ensureDeferred(
self.datastore.add_user_filter(
VAR_0=user_localpart + "2", VAR_10=VAR_7
)
)
VAR_6 = FUNC_0(
event_id="$asdasd:localhost",
sender="@foo:bar",
type="custom.avatar.3d.crazy",
)
VAR_9 = [VAR_6]
VAR_10 = yield defer.ensureDeferred(
self.filtering.get_user_filter(
VAR_0=user_localpart + "2", VAR_8=filter_id
)
)
VAR_11 = VAR_10.filter_presence(VAR_9=events)
self.assertEquals([], VAR_11)
@defer.inlineCallbacks
def FUNC_30(self):
VAR_7 = {"room": {"state": {"types": ["m.*"]}}}
VAR_8 = yield defer.ensureDeferred(
self.datastore.add_user_filter(
VAR_0=user_localpart, VAR_10=VAR_7
)
)
VAR_6 = FUNC_0(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar")
VAR_9 = [VAR_6]
VAR_10 = yield defer.ensureDeferred(
self.filtering.get_user_filter(
VAR_0=user_localpart, VAR_8=filter_id
)
)
VAR_11 = VAR_10.filter_room_state(VAR_9=events)
self.assertEquals(VAR_9, VAR_11)
@defer.inlineCallbacks
def FUNC_31(self):
VAR_7 = {"room": {"state": {"types": ["m.*"]}}}
VAR_8 = yield defer.ensureDeferred(
self.datastore.add_user_filter(
VAR_0=user_localpart, VAR_10=VAR_7
)
)
VAR_6 = FUNC_0(
sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar"
)
VAR_9 = [VAR_6]
VAR_10 = yield defer.ensureDeferred(
self.filtering.get_user_filter(
VAR_0=user_localpart, VAR_8=filter_id
)
)
VAR_11 = VAR_10.filter_room_state(VAR_9)
self.assertEquals([], VAR_11)
def FUNC_32(self):
VAR_5 = {
"rooms": ["!allowed:example.com", "!excluded:example.com"],
"not_rooms": ["!excluded:example.com"],
}
VAR_12 = [
"!allowed:example.com", # Allowed because in rooms and not in not_rooms.
"!excluded:example.com", # Disallowed because in not_rooms.
"!not_included:example.com", # Disallowed because not in rooms.
]
VAR_13 = list(Filter(VAR_5).filter_rooms(VAR_12))
self.assertEquals(VAR_13, ["!allowed:example.com"])
@defer.inlineCallbacks
def FUNC_33(self):
VAR_7 = {"room": {"state": {"types": ["m.*"]}}}
VAR_8 = yield defer.ensureDeferred(
self.filtering.add_user_filter(
VAR_0=user_localpart, VAR_10=VAR_7
)
)
self.assertEquals(VAR_8, 0)
self.assertEquals(
VAR_7,
(
yield defer.ensureDeferred(
self.datastore.get_user_filter(
VAR_0=user_localpart, VAR_8=0
)
)
),
)
@defer.inlineCallbacks
def FUNC_34(self):
VAR_7 = {"room": {"state": {"types": ["m.*"]}}}
VAR_8 = yield defer.ensureDeferred(
self.datastore.add_user_filter(
VAR_0=user_localpart, VAR_10=VAR_7
)
)
VAR_14 = yield defer.ensureDeferred(
self.filtering.get_user_filter(
VAR_0=user_localpart, VAR_8=filter_id
)
)
self.assertEquals(VAR_14.get_filter_json(), VAR_7)
self.assertRegexpMatches(repr(VAR_14), r"<FilterCollection \{.*\}>")
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
20,
22,
24,
29,
32,
34,
35,
42,
43,
48,
51,
55,
57,
59,
74,
115,
116,
117,
118,
119,
120,
121,
122,
130,
132,
134,
138,
140,
145,
154,
159,
166,
171,
179,
186,
193,
200,
207,
217,
224,
233,
242,
251,
261,
277,
293,
309,
325,
334,
336,
343,
345,
354,
356,
363,
365,
376,
382,
385,
389,
401,
407,
410,
421,
427,
430,
443,
449,
452,
458,
464,
466,
468,
472,
478,
490,
494,
500,
506,
508,
510
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
20,
22,
24,
29,
32,
34,
35,
42,
43,
48,
51,
57,
59,
61,
76,
117,
118,
119,
120,
121,
122,
123,
124,
132,
134,
136,
140,
142,
147,
156,
161,
168,
173,
181,
188,
195,
202,
209,
219,
226,
235,
244,
253,
263,
279,
295,
311,
327,
336,
338,
345,
347,
356,
358,
365,
367,
378,
384,
387,
391,
403,
409,
412,
423,
429,
432,
445,
451,
454,
460,
466,
468,
470,
474,
480,
492,
496,
502,
508,
510,
512
] |
1CWE-79
| # coding: utf-8
"""
mistune
~~~~~~~
The fastest markdown parser in pure Python with renderer feature.
:copyright: (c) 2014 - 2017 by Hsiaoming Yang.
"""
import re
import inspect
__version__ = '0.8'
__author__ = 'Hsiaoming Yang <me@lepture.com>'
__all__ = [
'BlockGrammar', 'BlockLexer',
'InlineGrammar', 'InlineLexer',
'Renderer', 'Markdown',
'markdown', 'escape',
]
_key_pattern = re.compile(r'\s+')
_nonalpha_pattern = re.compile(r'\W')
_escape_pattern = re.compile(r'&(?!#?\w+;)')
_newline_pattern = re.compile(r'\r\n|\r')
_block_quote_leading_pattern = re.compile(r'^ *> ?', flags=re.M)
_block_code_leading_pattern = re.compile(r'^ {4}', re.M)
_inline_tags = [
'a', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn', 'abbr', 'data',
'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i', 'b', 'u', 'mark',
'ruby', 'rt', 'rp', 'bdi', 'bdo', 'span', 'br', 'wbr', 'ins', 'del',
'img', 'font',
]
_pre_tags = ['pre', 'script', 'style']
_valid_end = r'(?!:/|[^\w\s@]*@)\b'
_valid_attr = r'''\s*[a-zA-Z\-](?:\=(?:"[^"]*"|'[^']*'|[^\s'">]+))?'''
_block_tag = r'(?!(?:%s)\b)\w+%s' % ('|'.join(_inline_tags), _valid_end)
_scheme_blacklist = ('javascript:', 'vbscript:')
def _pure_pattern(regex):
pattern = regex.pattern
if pattern.startswith('^'):
pattern = pattern[1:]
return pattern
def _keyify(key):
return _key_pattern.sub(' ', key.lower())
def escape(text, quote=False, smart_amp=True):
    """Replace special characters "&", "<" and ">" with HTML-safe sequences.

    :param quote: if set to True, " and ' will be escaped as well.
    :param smart_amp: if set to False, every & is escaped; otherwise
        text that already looks like an entity (``&amp;``, ``&#39;``)
        is left untouched.
    """
    if smart_amp:
        # Same pattern as module-level _escape_pattern: an '&' that does
        # not start an entity reference.
        text = re.sub(r'&(?!#?\w+;)', '&amp;', text)
    else:
        text = text.replace('&', '&amp;')
    text = text.replace('<', '&lt;').replace('>', '&gt;')
    if quote:
        text = text.replace('"', '&quot;').replace("'", '&#39;')
    return text
def escape_link(url):
    """Remove dangerous URL schemes like javascript: and escape afterwards."""
    # Strip NUL/SUB control chars and surrounding whitespace that browsers
    # ignore when parsing a scheme (defeats "\x00javascript:"-style smuggling).
    lower_url = url.lower().strip('\x00\x1a \n\r\t')
    for scheme in _scheme_blacklist:
        # Remove every character that is not alphanumeric, '/' or ':' before
        # comparing, so obfuscations like "java\tscript:" still hit the
        # blacklist. NOTE(review): blacklist covers javascript:/vbscript:
        # only; data: URIs pass through — confirm that is intended.
        if re.sub(r'[^A-Za-z0-9\/:]+', '', lower_url).startswith(scheme):
            return ''
    # Escape the ORIGINAL url (not the lowercased copy) for attribute use.
    return escape(url, quote=True, smart_amp=False)
def preprocessing(text, tab=4):
    """Normalize raw input before block parsing.

    Converts CRLF/CR line endings to LF, expands tabs to *tab* spaces,
    turns U+2424 (symbol-for-newline) into a real newline, and blanks
    out lines consisting only of spaces.
    """
    normalized = re.sub(r'\r\n|\r', '\n', text)
    normalized = normalized.expandtabs(tab).replace('\u2424', '\n')
    # Lines that contain nothing but spaces become truly empty lines.
    return re.sub(r'^ +$', '', normalized, flags=re.M)
class BlockGrammar(object):
    """Grammars for block level tokens.

    Each attribute is a compiled regex that BlockLexer matches against the
    start of the remaining text; the group numbers are relied upon by the
    corresponding ``parse_*`` methods, so patterns must not be reordered.
    """
    # Link reference definitions: [key]: <link> "optional title"
    def_links = re.compile(
        r'^ *\[([^^\]]+)\]: *'  # [key]:
        r'<?([^\s>]+)>?'  # <link> or link
        r'(?: +["(]([^\n]+)[")])? *(?:\n+|$)'
    )
    # Footnote definitions: [^key]: body (continuation lines indented)
    def_footnotes = re.compile(
        r'^\[\^([^\]]+)\]: *('
        r'[^\n]*(?:\n+|$)'  # [^key]:
        r'(?: {1,}[^\n]*(?:\n+|$))*'
        r')'
    )
    newline = re.compile(r'^\n+')
    # Indented code: one or more lines starting with four spaces.
    block_code = re.compile(r'^( {4}[^\n]+\n*)+')
    # Fenced code: ``` or ~~~ with optional language tag.
    fences = re.compile(
        r'^ *(`{3,}|~{3,}) *(\S+)? *\n'  # ```lang
        r'([\s\S]+?)\s*'
        r'\1 *(?:\n+|$)'  # ```
    )
    hrule = re.compile(r'^ {0,3}[-*_](?: *[-*_]){2,} *(?:\n+|$)')
    # ATX heading: # to ###### with optional trailing hashes.
    heading = re.compile(r'^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)')
    # Setext heading: text underlined with = or -.
    lheading = re.compile(r'^([^\n]+)\n *(=|-)+ *(?:\n+|$)')
    block_quote = re.compile(r'^( *>[^\n]+(\n[^\n]+)*\n*)+')
    # A whole list; ends at an hrule, a definition, or a blank-line gap
    # followed by unindented non-list content.
    list_block = re.compile(
        r'^( *)([*+-]|\d+\.) [\s\S]+?'
        r'(?:'
        r'\n+(?=\1?(?:[-*_] *){3,}(?:\n+|$))'  # hrule
        r'|\n+(?=%s)'  # def links
        r'|\n+(?=%s)'  # def footnotes
        r'|\n{2,}'
        r'(?! )'
        r'(?!\1(?:[*+-]|\d+\.) )\n*'
        r'|'
        r'\s*$)' % (
            _pure_pattern(def_links),
            _pure_pattern(def_footnotes),
        )
    )
    # One list item, including its continuation lines (multiline mode).
    list_item = re.compile(
        r'^(( *)(?:[*+-]|\d+\.) [^\n]*'
        r'(?:\n(?!\2(?:[*+-]|\d+\.) )[^\n]*)*)',
        flags=re.M
    )
    list_bullet = re.compile(r'^ *(?:[*+-]|\d+\.) +')
    # A paragraph runs until any other block construct would match; the
    # .replace() calls renumber backrefs because the embedded patterns
    # gain an extra enclosing group here.
    paragraph = re.compile(
        r'^((?:[^\n]+\n?(?!'
        r'%s|%s|%s|%s|%s|%s|%s|%s|%s'
        r'))+)\n*' % (
            _pure_pattern(fences).replace(r'\1', r'\2'),
            _pure_pattern(list_block).replace(r'\1', r'\3'),
            _pure_pattern(hrule),
            _pure_pattern(heading),
            _pure_pattern(lheading),
            _pure_pattern(block_quote),
            _pure_pattern(def_links),
            _pure_pattern(def_footnotes),
            '<' + _block_tag,
        )
    )
    # Raw HTML block: comment, paired tag, or self-closing/open tag.
    block_html = re.compile(
        r'^ *(?:%s|%s|%s) *(?:\n{2,}|\s*$)' % (
            r'<!--[\s\S]*?-->',
            r'<(%s)((?:%s)*?)>([\s\S]*?)<\/\1>' % (_block_tag, _valid_attr),
            r'<%s(?:%s)*?\s*\/?>' % (_block_tag, _valid_attr),
        )
    )
    # Table with leading pipes.
    table = re.compile(
        r'^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*'
    )
    # Table without leading pipes.
    nptable = re.compile(
        r'^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*'
    )
    # Fallback: consume one plain line.
    text = re.compile(r'^[^\n]+')
class BlockLexer(object):
"""Block level lexer for block grammars."""
grammar_class = BlockGrammar
default_rules = [
'newline', 'hrule', 'block_code', 'fences', 'heading',
'nptable', 'lheading', 'block_quote',
'list_block', 'block_html', 'def_links',
'def_footnotes', 'table', 'paragraph', 'text'
]
list_rules = (
'newline', 'block_code', 'fences', 'lheading', 'hrule',
'block_quote', 'list_block', 'block_html', 'text',
)
footnote_rules = (
'newline', 'block_code', 'fences', 'heading',
'nptable', 'lheading', 'hrule', 'block_quote',
'list_block', 'block_html', 'table', 'paragraph', 'text'
)
def __init__(self, rules=None, **kwargs):
self.tokens = []
self.def_links = {}
self.def_footnotes = {}
if not rules:
rules = self.grammar_class()
self.rules = rules
def __call__(self, text, rules=None):
return self.parse(text, rules)
    def parse(self, text, rules=None):
        """Tokenize *text*, appending tokens to ``self.tokens``.

        :param text: markdown source for this (sub)block.
        :param rules: ordered list of grammar attribute names to try;
            defaults to ``self.default_rules``. Order defines precedence.
        :returns: the token list (shared, so nested calls accumulate).
        """
        text = text.rstrip('\n')
        if not rules:
            rules = self.default_rules

        def manipulate(text):
            # First rule whose regex matches at the start of `text` wins;
            # its parse_<key> handler appends tokens as a side effect.
            for key in rules:
                rule = getattr(self.rules, key)
                m = rule.match(text)
                if not m:
                    continue
                getattr(self, 'parse_%s' % key)(m)
                return m
            return False  # pragma: no cover

        while text:
            m = manipulate(text)
            if m is not False:
                # Consume exactly the span the winning rule matched.
                text = text[len(m.group(0)):]
                continue
            if text:  # pragma: no cover
                raise RuntimeError('Infinite loop at: %s' % text)
        return self.tokens
def parse_newline(self, m):
length = len(m.group(0))
if length > 1:
self.tokens.append({'type': 'newline'})
def parse_block_code(self, m):
# clean leading whitespace
code = _block_code_leading_pattern.sub('', m.group(0))
self.tokens.append({
'type': 'code',
'lang': None,
'text': code,
})
def parse_fences(self, m):
self.tokens.append({
'type': 'code',
'lang': m.group(2),
'text': m.group(3),
})
def parse_heading(self, m):
self.tokens.append({
'type': 'heading',
'level': len(m.group(1)),
'text': m.group(2),
})
def parse_lheading(self, m):
"""Parse setext heading."""
self.tokens.append({
'type': 'heading',
'level': 1 if m.group(2) == '=' else 2,
'text': m.group(1),
})
def parse_hrule(self, m):
self.tokens.append({'type': 'hrule'})
def parse_list_block(self, m):
bull = m.group(2)
self.tokens.append({
'type': 'list_start',
'ordered': '.' in bull,
})
cap = m.group(0)
self._process_list_item(cap, bull)
self.tokens.append({'type': 'list_end'})
    def _process_list_item(self, cap, bull):
        """Split a matched list block into items and tokenize each one.

        Emits a (loose_item_start|list_item_start) ... list_item_end token
        pair per item, recursing into ``self.parse`` for the item body.

        :param cap: full text of the list block (match group 0).
        :param bull: the bullet that opened the list, e.g. ``*`` or ``1.``
            (unused here; kept for signature compatibility).
        """
        cap = self.rules.list_item.findall(cap)
        _next = False
        length = len(cap)
        for i in range(length):
            item = cap[i][0]
            # remove the bullet
            space = len(item)
            item = self.rules.list_bullet.sub('', item)
            # outdent continuation lines by the width the bullet occupied
            if '\n ' in item:
                space = space - len(item)
                pattern = re.compile(r'^ {1,%d}' % space, flags=re.M)
                item = pattern.sub('', item)
            # determine whether item is loose (blank-line separated) or not;
            # a trailing newline on the previous item also makes this loose
            loose = _next
            if not loose and re.search(r'\n\n(?!\s*$)', item):
                loose = True
            rest = len(item)
            if i != length - 1 and rest:
                _next = item[rest-1] == '\n'
                if not loose:
                    loose = _next
            if loose:
                t = 'loose_item_start'
            else:
                t = 'list_item_start'
            self.tokens.append({'type': t})
            # recurse with the restricted list_rules rule set
            self.parse(item, self.list_rules)
            self.tokens.append({'type': 'list_item_end'})
def parse_block_quote(self, m):
self.tokens.append({'type': 'block_quote_start'})
# clean leading >
cap = _block_quote_leading_pattern.sub('', m.group(0))
self.parse(cap)
self.tokens.append({'type': 'block_quote_end'})
def parse_def_links(self, m):
key = _keyify(m.group(1))
self.def_links[key] = {
'link': m.group(2),
'title': m.group(3),
}
def parse_def_footnotes(self, m):
key = _keyify(m.group(1))
if key in self.def_footnotes:
# footnote is already defined
return
self.def_footnotes[key] = 0
self.tokens.append({
'type': 'footnote_start',
'key': key,
})
text = m.group(2)
if '\n' in text:
lines = text.split('\n')
whitespace = None
for line in lines[1:]:
space = len(line) - len(line.lstrip())
if space and (not whitespace or space < whitespace):
whitespace = space
newlines = [lines[0]]
for line in lines[1:]:
newlines.append(line[whitespace:])
text = '\n'.join(newlines)
self.parse(text, self.footnote_rules)
self.tokens.append({
'type': 'footnote_end',
'key': key,
})
def parse_table(self, m):
item = self._process_table(m)
cells = re.sub(r'(?: *\| *)?\n$', '', m.group(3))
cells = cells.split('\n')
for i, v in enumerate(cells):
v = re.sub(r'^ *\| *| *\| *$', '', v)
cells[i] = re.split(r' *\| *', v)
item['cells'] = cells
self.tokens.append(item)
def parse_nptable(self, m):
item = self._process_table(m)
cells = re.sub(r'\n$', '', m.group(3))
cells = cells.split('\n')
for i, v in enumerate(cells):
cells[i] = re.split(r' *\| *', v)
item['cells'] = cells
self.tokens.append(item)
def _process_table(self, m):
header = re.sub(r'^ *| *\| *$', '', m.group(1))
header = re.split(r' *\| *', header)
align = re.sub(r' *|\| *$', '', m.group(2))
align = re.split(r' *\| *', align)
for i, v in enumerate(align):
if re.search(r'^ *-+: *$', v):
align[i] = 'right'
elif re.search(r'^ *:-+: *$', v):
align[i] = 'center'
elif re.search(r'^ *:-+ *$', v):
align[i] = 'left'
else:
align[i] = None
item = {
'type': 'table',
'header': header,
'align': align,
}
return item
def parse_block_html(self, m):
tag = m.group(1)
if not tag:
text = m.group(0)
self.tokens.append({
'type': 'close_html',
'text': text
})
else:
attr = m.group(2)
text = m.group(3)
self.tokens.append({
'type': 'open_html',
'tag': tag,
'extra': attr,
'text': text
})
def parse_paragraph(self, m):
text = m.group(1).rstrip('\n')
self.tokens.append({'type': 'paragraph', 'text': text})
def parse_text(self, m):
text = m.group(0)
self.tokens.append({'type': 'text', 'text': text})
class InlineGrammar(object):
    """Grammars for inline level tokens.

    Each attribute is a compiled regex matched at the start of the
    remaining inline text by InlineLexer; group numbers are relied upon
    by the matching ``output_*`` methods.
    """
    # Backslash escape of a markdown punctuation character.
    escape = re.compile(r'^\\([\\`*{}\[\]()#+\-.!_>~|])')  # \* \+ \! ....
    # Raw inline HTML: comment, paired tag, or self-closing/open tag.
    inline_html = re.compile(
        r'^(?:%s|%s|%s)' % (
            r'<!--[\s\S]*?-->',
            r'<(\w+%s)((?:%s)*?)\s*>([\s\S]*?)<\/\1>' % (_valid_end, _valid_attr),
            r'<\w+%s(?:%s)*?\s*\/?>' % (_valid_end, _valid_attr),
        )
    )
    # <http://...> or <user@host> — group 2 distinguishes email via '@'.
    autolink = re.compile(r'^<([^ >]+(@|:)[^ >]+)>')
    # [text](href "title") and the image form ![alt](src "title").
    link = re.compile(
        r'^!?\[('
        r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
        r')\]\('
        r'''\s*(<)?([\s\S]*?)(?(2)>)(?:\s+['"]([\s\S]*?)['"])?\s*'''
        r'\)'
    )
    # [text][key] reference-style link.
    reflink = re.compile(
        r'^!?\[('
        r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
        r')\]\s*\[([^^\]]*)\]'
    )
    # [key] shortcut reference link.
    nolink = re.compile(r'^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]')
    # Bare http(s) URL in running text.
    url = re.compile(r'''^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])''')
    double_emphasis = re.compile(
        r'^_{2}([\s\S]+?)_{2}(?!_)'  # __word__
        r'|'
        r'^\*{2}([\s\S]+?)\*{2}(?!\*)'  # **word**
    )
    emphasis = re.compile(
        r'^\b_((?:__|[^_])+?)_\b'  # _word_
        r'|'
        r'^\*((?:\*\*|[^\*])+?)\*(?!\*)'  # *word*
    )
    code = re.compile(r'^(`+)\s*([\s\S]*?[^`])\s*\1(?!`)')  # `code`
    # Hard line break: two trailing spaces before the newline.
    linebreak = re.compile(r'^ {2,}\n(?!\s*$)')
    strikethrough = re.compile(r'^~~(?=\S)([\s\S]*?\S)~~')  # ~~word~~
    # Footnote reference [^key].
    footnote = re.compile(r'^\[\^([^\]]+)\]')
    # Fallback: plain text up to the next possible inline marker.
    text = re.compile(r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| {2,}\n|$)')
    def hard_wrap(self):
        """Grammar for hard wrap linebreak. You don't need to add two
        spaces at the end of a line.
        """
        # Mutates this instance only: any single newline becomes a break.
        self.linebreak = re.compile(r'^ *\n(?!\s*$)')
        self.text = re.compile(
            r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| *\n|$)'
        )
class InlineLexer(object):
"""Inline level lexer for inline grammars."""
grammar_class = InlineGrammar
default_rules = [
'escape', 'inline_html', 'autolink', 'url',
'footnote', 'link', 'reflink', 'nolink',
'double_emphasis', 'emphasis', 'code',
'linebreak', 'strikethrough', 'text',
]
inline_html_rules = [
'escape', 'autolink', 'url', 'link', 'reflink',
'nolink', 'double_emphasis', 'emphasis', 'code',
'linebreak', 'strikethrough', 'text',
]
def __init__(self, renderer, rules=None, **kwargs):
self.renderer = renderer
self.links = {}
self.footnotes = {}
self.footnote_index = 0
if not rules:
rules = self.grammar_class()
kwargs.update(self.renderer.options)
if kwargs.get('hard_wrap'):
rules.hard_wrap()
self.rules = rules
self._in_link = False
self._in_footnote = False
self._parse_inline_html = kwargs.get('parse_inline_html')
def __call__(self, text, rules=None):
return self.output(text, rules)
def setup(self, links, footnotes):
self.footnote_index = 0
self.links = links or {}
self.footnotes = footnotes or {}
    def output(self, text, rules=None):
        """Render inline markdown *text* via the first matching rule, repeatedly.

        :param text: inline markdown source.
        :param rules: ordered rule names; defaults to a fresh copy of
            ``self.default_rules`` (copied because it may be mutated below).
        :returns: accumulated renderer output; its type is whatever
            ``renderer.placeholder()`` returns (str by default).
        """
        text = text.rstrip('\n')
        if not rules:
            rules = list(self.default_rules)
        if self._in_footnote and 'footnote' in rules:
            # Footnote references are not processed inside a footnote body.
            rules.remove('footnote')
        output = self.renderer.placeholder()

        def manipulate(text):
            # First rule whose regex matches and whose output_* handler
            # returns non-None wins (None means "not applicable, keep trying").
            for key in rules:
                pattern = getattr(self.rules, key)
                m = pattern.match(text)
                if not m:
                    continue
                self.line_match = m
                out = getattr(self, 'output_%s' % key)(m)
                if out is not None:
                    return m, out
            return False  # pragma: no cover

        while text:
            ret = manipulate(text)
            if ret is not False:
                m, out = ret
                output += out
                # Consume exactly the matched span and continue.
                text = text[len(m.group(0)):]
                continue
            if text:  # pragma: no cover
                raise RuntimeError('Infinite loop at: %s' % text)
        return output
def output_escape(self, m):
text = m.group(1)
return self.renderer.escape(text)
def output_autolink(self, m):
link = m.group(1)
if m.group(2) == '@':
is_email = True
else:
is_email = False
return self.renderer.autolink(link, is_email)
def output_url(self, m):
link = m.group(1)
if self._in_link:
return self.renderer.text(link)
return self.renderer.autolink(link, False)
def output_inline_html(self, m):
tag = m.group(1)
if self._parse_inline_html and tag in _inline_tags:
text = m.group(3)
if tag == 'a':
self._in_link = True
text = self.output(text, rules=self.inline_html_rules)
self._in_link = False
else:
text = self.output(text, rules=self.inline_html_rules)
extra = m.group(2) or ''
html = '<%s%s>%s</%s>' % (tag, extra, text, tag)
else:
html = m.group(0)
return self.renderer.inline_html(html)
def output_footnote(self, m):
key = _keyify(m.group(1))
if key not in self.footnotes:
return None
if self.footnotes[key]:
return None
self.footnote_index += 1
self.footnotes[key] = self.footnote_index
return self.renderer.footnote_ref(key, self.footnote_index)
def output_link(self, m):
return self._process_link(m, m.group(3), m.group(4))
def output_reflink(self, m):
key = _keyify(m.group(2) or m.group(1))
if key not in self.links:
return None
ret = self.links[key]
return self._process_link(m, ret['link'], ret['title'])
def output_nolink(self, m):
key = _keyify(m.group(1))
if key not in self.links:
return None
ret = self.links[key]
return self._process_link(m, ret['link'], ret['title'])
def _process_link(self, m, link, title=None):
line = m.group(0)
text = m.group(1)
if line[0] == '!':
return self.renderer.image(link, title, text)
self._in_link = True
text = self.output(text)
self._in_link = False
return self.renderer.link(link, title, text)
def output_double_emphasis(self, m):
text = m.group(2) or m.group(1)
text = self.output(text)
return self.renderer.double_emphasis(text)
def output_emphasis(self, m):
text = m.group(2) or m.group(1)
text = self.output(text)
return self.renderer.emphasis(text)
def output_code(self, m):
text = m.group(2)
return self.renderer.codespan(text)
def output_linebreak(self, m):
return self.renderer.linebreak()
def output_strikethrough(self, m):
text = self.output(m.group(1))
return self.renderer.strikethrough(text)
def output_text(self, m):
text = m.group(0)
return self.renderer.text(text)
class Renderer(object):
"""The default HTML renderer for rendering Markdown.
"""
def __init__(self, **kwargs):
self.options = kwargs
def placeholder(self):
"""Returns the default, empty output value for the renderer.
All renderer methods use the '+=' operator to append to this value.
Default is a string so rendering HTML can build up a result string with
the rendered Markdown.
Can be overridden by Renderer subclasses to be types like an empty
list, allowing the renderer to create a tree-like structure to
represent the document (which can then be reprocessed later into a
separate format like docx or pdf).
"""
return ''
def block_code(self, code, lang=None):
"""Rendering block level code. ``pre > code``.
:param code: text content of the code block.
:param lang: language of the given code.
"""
code = code.rstrip('\n')
if not lang:
code = escape(code, smart_amp=False)
return '<pre><code>%s\n</code></pre>\n' % code
code = escape(code, quote=True, smart_amp=False)
return '<pre><code class="lang-%s">%s\n</code></pre>\n' % (lang, code)
def block_quote(self, text):
"""Rendering <blockquote> with the given text.
:param text: text content of the blockquote.
"""
return '<blockquote>%s\n</blockquote>\n' % text.rstrip('\n')
def block_html(self, html):
"""Rendering block level pure html content.
:param html: text content of the html snippet.
"""
if self.options.get('skip_style') and \
html.lower().startswith('<style'):
return ''
if self.options.get('escape'):
return escape(html)
return html
def header(self, text, level, raw=None):
"""Rendering header/heading tags like ``<h1>`` ``<h2>``.
:param text: rendered text content for the header.
:param level: a number for the header level, for example: 1.
:param raw: raw text content of the header.
"""
return '<h%d>%s</h%d>\n' % (level, text, level)
def hrule(self):
"""Rendering method for ``<hr>`` tag."""
if self.options.get('use_xhtml'):
return '<hr />\n'
return '<hr>\n'
def list(self, body, ordered=True):
"""Rendering list tags like ``<ul>`` and ``<ol>``.
:param body: body contents of the list.
:param ordered: whether this list is ordered or not.
"""
tag = 'ul'
if ordered:
tag = 'ol'
return '<%s>\n%s</%s>\n' % (tag, body, tag)
def list_item(self, text):
"""Rendering list item snippet. Like ``<li>``."""
return '<li>%s</li>\n' % text
def paragraph(self, text):
"""Rendering paragraph tags. Like ``<p>``."""
return '<p>%s</p>\n' % text.strip(' ')
def table(self, header, body):
"""Rendering table element. Wrap header and body in it.
:param header: header part of the table.
:param body: body part of the table.
"""
return (
'<table>\n<thead>%s</thead>\n'
'<tbody>\n%s</tbody>\n</table>\n'
) % (header, body)
def table_row(self, content):
"""Rendering a table row. Like ``<tr>``.
:param content: content of current table row.
"""
return '<tr>\n%s</tr>\n' % content
def table_cell(self, content, **flags):
"""Rendering a table cell. Like ``<th>`` ``<td>``.
:param content: content of current table cell.
:param header: whether this is header or not.
:param align: align of current table cell.
"""
if flags['header']:
tag = 'th'
else:
tag = 'td'
align = flags['align']
if not align:
return '<%s>%s</%s>\n' % (tag, content, tag)
return '<%s style="text-align:%s">%s</%s>\n' % (
tag, align, content, tag
)
def double_emphasis(self, text):
"""Rendering **strong** text.
:param text: text content for emphasis.
"""
return '<strong>%s</strong>' % text
def emphasis(self, text):
"""Rendering *emphasis* text.
:param text: text content for emphasis.
"""
return '<em>%s</em>' % text
def codespan(self, text):
"""Rendering inline `code` text.
:param text: text content for inline code.
"""
text = escape(text.rstrip(), smart_amp=False)
return '<code>%s</code>' % text
def linebreak(self):
"""Rendering line break like ``<br>``."""
if self.options.get('use_xhtml'):
return '<br />\n'
return '<br>\n'
def strikethrough(self, text):
"""Rendering ~~strikethrough~~ text.
:param text: text content for strikethrough.
"""
return '<del>%s</del>' % text
def text(self, text):
"""Rendering unformatted text.
:param text: text content.
"""
if self.options.get('parse_block_html'):
return text
return escape(text)
def escape(self, text):
"""Rendering escape sequence.
:param text: text content.
"""
return escape(text)
    def autolink(self, link, is_email=False):
        """Rendering a given link or email address.

        :param link: link content or email address.
        :param is_email: whether this is an email or not.
        """
        # escape_link both strips javascript:/vbscript: schemes and
        # HTML-escapes the URL, so the value is safe in href and body here.
        text = link = escape_link(link)
        if is_email:
            link = 'mailto:%s' % link
        return '<a href="%s">%s</a>' % (link, text)
    def link(self, link, title, text):
        """Rendering a given link with content and title.

        :param link: href link for ``<a>`` tag.
        :param title: title content for `title` attribute.
        :param text: text content for description; arrives already
            rendered/escaped by the inline lexer, so it is not escaped here.
        """
        # Scheme-filtered (javascript:/vbscript: -> '') and attribute-escaped.
        link = escape_link(link)
        if not title:
            return '<a href="%s">%s</a>' % (link, text)
        title = escape(title, quote=True)
        return '<a href="%s" title="%s">%s</a>' % (link, title, text)
    def image(self, src, title, text):
        """Rendering a image with title and text.

        :param src: source link of the image.
        :param title: title text of the image.
        :param text: alt text of the image.
        """
        # src is scheme-filtered and escaped; alt/title are quote-escaped
        # because all three land inside attribute values.
        src = escape_link(src)
        text = escape(text, quote=True)
        if title:
            title = escape(title, quote=True)
            html = '<img src="%s" alt="%s" title="%s"' % (src, text, title)
        else:
            html = '<img src="%s" alt="%s"' % (src, text)
        if self.options.get('use_xhtml'):
            return '%s />' % html
        return '%s>' % html
    def inline_html(self, html):
        """Rendering span level pure html content.
        :param html: text content of the html snippet.
        """
        # In escape mode even embedded HTML is entity-encoded.
        if self.options.get('escape'):
            return escape(html)
        return html
def newline(self):
"""Rendering newline element."""
return ''
def footnote_ref(self, key, index):
"""Rendering the ref anchor of a footnote.
:param key: identity key for the footnote.
:param index: the index count of current footnote.
"""
html = (
'<sup class="footnote-ref" id="fnref-%s">'
'<a href="#fn-%s">%d</a></sup>'
) % (escape(key), escape(key), index)
return html
    def footnote_item(self, key, text):
        """Rendering a footnote item.
        :param key: identity key for the footnote.
        :param text: text content of the footnote.
        """
        # Backlink from the footnote body to its in-text reference.
        back = (
            '<a href="#fnref-%s" class="footnote">↩</a>'
        ) % escape(key)
        text = text.rstrip()
        # Tuck the backlink inside the trailing paragraph when there is
        # one; otherwise wrap it in its own paragraph.
        if text.endswith('</p>'):
            text = re.sub(r'<\/p>$', r'%s</p>' % back, text)
        else:
            text = '%s<p>%s</p>' % (text, back)
        html = '<li id="fn-%s">%s</li>\n' % (escape(key), text)
        return html
    def footnotes(self, text):
        """Wrapper for all footnotes.
        :param text: contents of all footnotes.
        """
        # Footnotes are separated from the document body by a horizontal rule.
        html = '<div class="footnotes">\n%s<ol>%s</ol>\n</div>\n'
        return html % (self.hrule(), text)
class Markdown(object):
    """The Markdown parser.

    Drives the block lexer to produce a token stream, then walks the
    (reversed) stream dispatching each token to an ``output_<type>``
    method that calls into the renderer.

    :param renderer: An instance of ``Renderer``.
    :param inline: An inline lexer class or instance.
    :param block: A block lexer class or instance.
    """
    def __init__(self, renderer=None, inline=None, block=None, **kwargs):
        if not renderer:
            renderer = Renderer(**kwargs)
        else:
            # merge renderer options into the lexer configuration
            kwargs.update(renderer.options)
        self.renderer = renderer
        # accept either lexer classes or ready-made instances
        if inline and inspect.isclass(inline):
            inline = inline(renderer, **kwargs)
        if block and inspect.isclass(block):
            block = block(**kwargs)
        if inline:
            self.inline = inline
        else:
            self.inline = InlineLexer(renderer, **kwargs)
        self.block = block or BlockLexer(BlockGrammar())
        self.footnotes = []
        self.tokens = []
        # detect if it should parse text in block html
        self._parse_block_html = kwargs.get('parse_block_html')
    def __call__(self, text):
        return self.parse(text)
    def render(self, text):
        """Render the Markdown text.
        :param text: markdown formatted text content.
        """
        return self.parse(text)
    def parse(self, text):
        # Render the document body first; footnote bodies accumulate in
        # self.footnotes as a side effect of output_footnote().
        out = self.output(preprocessing(text))
        keys = self.block.def_footnotes
        # reset block
        self.block.def_links = {}
        self.block.def_footnotes = {}
        # reset inline
        self.inline.links = {}
        self.inline.footnotes = {}
        if not self.footnotes:
            return out
        # Drop footnotes never referenced (index still 0) and emit the rest
        # in first-reference order; reverse-sorted so pop() yields in order.
        footnotes = filter(lambda o: keys.get(o['key']), self.footnotes)
        self.footnotes = sorted(
            footnotes, key=lambda o: keys.get(o['key']), reverse=True
        )
        body = self.renderer.placeholder()
        while self.footnotes:
            note = self.footnotes.pop()
            body += self.renderer.footnote_item(
                note['key'], note['text']
            )
        out += self.renderer.footnotes(body)
        return out
    def pop(self):
        # Consume the next token (list is reversed, so pop() is O(1)).
        if not self.tokens:
            return None
        self.token = self.tokens.pop()
        return self.token
    def peek(self):
        # Inspect the next token without consuming it.
        if self.tokens:
            return self.tokens[-1]
        return None  # pragma: no cover
    def output(self, text, rules=None):
        self.tokens = self.block(text, rules)
        self.tokens.reverse()
        self.inline.setup(self.block.def_links, self.block.def_footnotes)
        out = self.renderer.placeholder()
        while self.pop():
            out += self.tok()
        return out
    def tok(self):
        # Dispatch the current token to its output_<type> handler.
        t = self.token['type']
        # special case: *_start tokens dispatch without the suffix
        if t.endswith('_start'):
            t = t[:-6]
        return getattr(self, 'output_%s' % t)()
    def tok_text(self):
        # Merge consecutive 'text' tokens before inline rendering.
        # NOTE(review): assumes a non-text token always follows a text run;
        # peek() returning None here would raise TypeError — confirm.
        text = self.token['text']
        while self.peek()['type'] == 'text':
            text += '\n' + self.pop()['text']
        return self.inline(text)
    def output_newline(self):
        return self.renderer.newline()
    def output_hrule(self):
        return self.renderer.hrule()
    def output_heading(self):
        return self.renderer.header(
            self.inline(self.token['text']),
            self.token['level'],
            self.token['text'],
        )
    def output_code(self):
        return self.renderer.block_code(
            self.token['text'], self.token['lang']
        )
    def output_table(self):
        aligns = self.token['align']
        aligns_length = len(aligns)
        cell = self.renderer.placeholder()
        # header part
        header = self.renderer.placeholder()
        for i, value in enumerate(self.token['header']):
            align = aligns[i] if i < aligns_length else None
            flags = {'header': True, 'align': align}
            cell += self.renderer.table_cell(self.inline(value), **flags)
        header += self.renderer.table_row(cell)
        # body part
        body = self.renderer.placeholder()
        for i, row in enumerate(self.token['cells']):
            cell = self.renderer.placeholder()
            for j, value in enumerate(row):
                align = aligns[j] if j < aligns_length else None
                flags = {'header': False, 'align': align}
                cell += self.renderer.table_cell(self.inline(value), **flags)
            body += self.renderer.table_row(cell)
        return self.renderer.table(header, body)
    def output_block_quote(self):
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'block_quote_end':
            body += self.tok()
        return self.renderer.block_quote(body)
    def output_list(self):
        ordered = self.token['ordered']
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_end':
            body += self.tok()
        return self.renderer.list(body, ordered)
    def output_list_item(self):
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_item_end':
            if self.token['type'] == 'text':
                # tight list items render bare text without <p> wrappers
                body += self.tok_text()
            else:
                body += self.tok()
        return self.renderer.list_item(body)
    def output_loose_item(self):
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_item_end':
            body += self.tok()
        return self.renderer.list_item(body)
    def output_footnote(self):
        # Suppress nested footnote references while rendering a footnote body.
        self.inline._in_footnote = True
        body = self.renderer.placeholder()
        key = self.token['key']
        while self.pop()['type'] != 'footnote_end':
            body += self.tok()
        self.footnotes.append({'key': key, 'text': body})
        self.inline._in_footnote = False
        return self.renderer.placeholder()
    def output_close_html(self):
        text = self.token['text']
        return self.renderer.block_html(text)
    def output_open_html(self):
        text = self.token['text']
        tag = self.token['tag']
        if self._parse_block_html and tag not in _pre_tags:
            text = self.inline(text, rules=self.inline.inline_html_rules)
        extra = self.token.get('extra') or ''
        html = '<%s%s>%s</%s>' % (tag, extra, text, tag)
        return self.renderer.block_html(html)
    def output_paragraph(self):
        return self.renderer.paragraph(self.inline(self.token['text']))
    def output_text(self):
        return self.renderer.paragraph(self.tok_text())
def markdown(text, escape=True, **kwargs):
    """Render markdown formatted text to html.
    :param text: markdown formatted text content.
    :param escape: if set to False, all html tags will not be escaped.
    :param use_xhtml: output with xhtml tags.
    :param hard_wrap: if set to True, it will use the GFM line breaks feature.
    :param parse_block_html: parse text only in block level html.
    :param parse_inline_html: parse text only in inline level html.
    """
    # Convenience wrapper: build a throwaway Markdown instance and call it.
    return Markdown(escape=escape, **kwargs)(text)
# coding: utf-8
"""
mistune
~~~~~~~
The fastest markdown parser in pure Python with renderer feature.
:copyright: (c) 2014 - 2017 by Hsiaoming Yang.
"""
import re
import inspect
__version__ = '0.8.1'
__author__ = 'Hsiaoming Yang <me@lepture.com>'
__all__ = [
'BlockGrammar', 'BlockLexer',
'InlineGrammar', 'InlineLexer',
'Renderer', 'Markdown',
'markdown', 'escape',
]
_key_pattern = re.compile(r'\s+')
_nonalpha_pattern = re.compile(r'\W')
_escape_pattern = re.compile(r'&(?!#?\w+;)')
_newline_pattern = re.compile(r'\r\n|\r')
_block_quote_leading_pattern = re.compile(r'^ *> ?', flags=re.M)
_block_code_leading_pattern = re.compile(r'^ {4}', re.M)
_inline_tags = [
'a', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn', 'abbr', 'data',
'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i', 'b', 'u', 'mark',
'ruby', 'rt', 'rp', 'bdi', 'bdo', 'span', 'br', 'wbr', 'ins', 'del',
'img', 'font',
]
_pre_tags = ['pre', 'script', 'style']
_valid_end = r'(?!:/|[^\w\s@]*@)\b'
_valid_attr = r'''\s*[a-zA-Z\-](?:\=(?:"[^"]*"|'[^']*'|[^\s'">]+))?'''
_block_tag = r'(?!(?:%s)\b)\w+%s' % ('|'.join(_inline_tags), _valid_end)
_scheme_blacklist = ('javascript:', 'vbscript:')
def _pure_pattern(regex):
pattern = regex.pattern
if pattern.startswith('^'):
pattern = pattern[1:]
return pattern
def _keyify(key):
    """Normalise a link/footnote key: lowercase, HTML-escape, collapse runs
    of whitespace to single spaces."""
    return _key_pattern.sub(' ', escape(key.lower(), quote=True))
def escape(text, quote=False, smart_amp=True):
    """Replace special characters "&", "<" and ">" to HTML-safe sequences.

    The original cgi.escape will always escape "&", but you can control
    this one for a smart escape amp.

    :param quote: if set to True, " and ' will be escaped.
    :param smart_amp: if set to False, & will always be escaped.
    """
    # BUG FIX: the replacement strings had been HTML-entity-decoded back to
    # the literal characters (making every replace a no-op, and leaving an
    # unterminated string literal on the apostrophe line). Restored the
    # proper entity replacements.
    if smart_amp:
        # only escape '&' when it does not already begin an entity
        text = _escape_pattern.sub('&amp;', text)
    else:
        text = text.replace('&', '&amp;')
    text = text.replace('<', '&lt;')
    text = text.replace('>', '&gt;')
    if quote:
        text = text.replace('"', '&quot;')
        text = text.replace("'", '&#39;')
    return text
def escape_link(url):
    """Remove dangerous URL schemes like javascript: and escape afterwards."""
    # Strip control characters and whitespace that browsers ignore before
    # the scheme, then collapse to alphanumerics/':'/'/' so obfuscated
    # schemes (e.g. 'java\tscript:') still match the blacklist.
    cleaned = url.lower().strip('\x00\x1a \n\r\t')
    collapsed = re.sub(r'[^A-Za-z0-9\/:]+', '', cleaned)
    for scheme in _scheme_blacklist:
        if collapsed.startswith(scheme):
            return ''
    return escape(url, quote=True, smart_amp=False)
def preprocessing(text, tab=4):
    """Normalise line endings to ``\\n``, expand tabs, and blank out
    whitespace-only lines before lexing."""
    text = _newline_pattern.sub('\n', text)
    # U+2424 (SYMBOL FOR NEWLINE) is treated as an actual newline.
    text = text.expandtabs(tab).replace('\u2424', '\n')
    return re.compile(r'^ +$', re.M).sub('', text)
class BlockGrammar(object):
    """Grammars for block level tokens.

    Each attribute is a compiled regex matched against the *start* of the
    remaining text by BlockLexer. Several later patterns embed earlier ones
    via _pure_pattern(), so definition order matters.
    """
    # [key]: <link> "optional title"
    def_links = re.compile(
        r'^ *\[([^^\]]+)\]: *'  # [key]:
        r'<?([^\s>]+)>?'  # <link> or link
        r'(?: +["(]([^\n]+)[")])? *(?:\n+|$)'
    )
    # [^key]: footnote body (continuation lines are indented)
    def_footnotes = re.compile(
        r'^\[\^([^\]]+)\]: *('
        r'[^\n]*(?:\n+|$)'  # [^key]:
        r'(?: {1,}[^\n]*(?:\n+|$))*'
        r')'
    )
    newline = re.compile(r'^\n+')
    # indented (4-space) code block
    block_code = re.compile(r'^( {4}[^\n]+\n*)+')
    # fenced code block: ``` or ~~~ with optional language
    fences = re.compile(
        r'^ *(`{3,}|~{3,}) *(\S+)? *\n'  # ```lang
        r'([\s\S]+?)\s*'
        r'\1 *(?:\n+|$)'  # ```
    )
    hrule = re.compile(r'^ {0,3}[-*_](?: *[-*_]){2,} *(?:\n+|$)')
    # ATX heading: # Heading
    heading = re.compile(r'^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)')
    # setext heading: underlined with = or -
    lheading = re.compile(r'^([^\n]+)\n *(=|-)+ *(?:\n+|$)')
    block_quote = re.compile(r'^( *>[^\n]+(\n[^\n]+)*\n*)+')
    list_block = re.compile(
        r'^( *)([*+-]|\d+\.) [\s\S]+?'
        r'(?:'
        r'\n+(?=\1?(?:[-*_] *){3,}(?:\n+|$))'  # hrule
        r'|\n+(?=%s)'  # def links
        r'|\n+(?=%s)'  # def footnotes
        r'|\n{2,}'
        r'(?! )'
        r'(?!\1(?:[*+-]|\d+\.) )\n*'
        r'|'
        r'\s*$)' % (
            _pure_pattern(def_links),
            _pure_pattern(def_footnotes),
        )
    )
    # one bullet item including its continuation lines
    list_item = re.compile(
        r'^(( *)(?:[*+-]|\d+\.) [^\n]*'
        r'(?:\n(?!\2(?:[*+-]|\d+\.) )[^\n]*)*)',
        flags=re.M
    )
    list_bullet = re.compile(r'^ *(?:[*+-]|\d+\.) +')
    # paragraph: lines not starting any other block construct
    paragraph = re.compile(
        r'^((?:[^\n]+\n?(?!'
        r'%s|%s|%s|%s|%s|%s|%s|%s|%s'
        r'))+)\n*' % (
            _pure_pattern(fences).replace(r'\1', r'\2'),
            _pure_pattern(list_block).replace(r'\1', r'\3'),
            _pure_pattern(hrule),
            _pure_pattern(heading),
            _pure_pattern(lheading),
            _pure_pattern(block_quote),
            _pure_pattern(def_links),
            _pure_pattern(def_footnotes),
            '<' + _block_tag,
        )
    )
    # HTML comment, paired block tag, or self-closing/open tag
    block_html = re.compile(
        r'^ *(?:%s|%s|%s) *(?:\n{2,}|\s*$)' % (
            r'<!--[\s\S]*?-->',
            r'<(%s)((?:%s)*?)>([\s\S]*?)<\/\1>' % (_block_tag, _valid_attr),
            r'<%s(?:%s)*?\s*\/?>' % (_block_tag, _valid_attr),
        )
    )
    # piped table (leading |)
    table = re.compile(
        r'^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*'
    )
    # table without leading pipes
    nptable = re.compile(
        r'^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*'
    )
    text = re.compile(r'^[^\n]+')
class BlockLexer(object):
    """Block level lexer for block grammars.

    Repeatedly tries each named grammar rule against the head of the text;
    the matching rule's parse_<name>() appends tokens to self.tokens.
    """
    grammar_class = BlockGrammar
    # rule order determines match priority
    default_rules = [
        'newline', 'hrule', 'block_code', 'fences', 'heading',
        'nptable', 'lheading', 'block_quote',
        'list_block', 'block_html', 'def_links',
        'def_footnotes', 'table', 'paragraph', 'text'
    ]
    # restricted rule set used when recursing into list items
    list_rules = (
        'newline', 'block_code', 'fences', 'lheading', 'hrule',
        'block_quote', 'list_block', 'block_html', 'text',
    )
    # restricted rule set used when recursing into footnote bodies
    footnote_rules = (
        'newline', 'block_code', 'fences', 'heading',
        'nptable', 'lheading', 'hrule', 'block_quote',
        'list_block', 'block_html', 'table', 'paragraph', 'text'
    )
    def __init__(self, rules=None, **kwargs):
        self.tokens = []
        self.def_links = {}
        self.def_footnotes = {}
        if not rules:
            rules = self.grammar_class()
        self.rules = rules
    def __call__(self, text, rules=None):
        return self.parse(text, rules)
    def parse(self, text, rules=None):
        """Tokenize *text*, consuming it rule by rule from the front."""
        text = text.rstrip('\n')
        if not rules:
            rules = self.default_rules
        def manipulate(text):
            # first rule whose regex matches wins
            for key in rules:
                rule = getattr(self.rules, key)
                m = rule.match(text)
                if not m:
                    continue
                getattr(self, 'parse_%s' % key)(m)
                return m
            return False  # pragma: no cover
        while text:
            m = manipulate(text)
            if m is not False:
                text = text[len(m.group(0)):]
                continue
            if text:  # pragma: no cover
                raise RuntimeError('Infinite loop at: %s' % text)
        return self.tokens
    def parse_newline(self, m):
        # a single newline is insignificant; two or more make a token
        length = len(m.group(0))
        if length > 1:
            self.tokens.append({'type': 'newline'})
    def parse_block_code(self, m):
        # clean leading whitespace
        code = _block_code_leading_pattern.sub('', m.group(0))
        self.tokens.append({
            'type': 'code',
            'lang': None,
            'text': code,
        })
    def parse_fences(self, m):
        self.tokens.append({
            'type': 'code',
            'lang': m.group(2),
            'text': m.group(3),
        })
    def parse_heading(self, m):
        self.tokens.append({
            'type': 'heading',
            'level': len(m.group(1)),
            'text': m.group(2),
        })
    def parse_lheading(self, m):
        """Parse setext heading."""
        self.tokens.append({
            'type': 'heading',
            'level': 1 if m.group(2) == '=' else 2,
            'text': m.group(1),
        })
    def parse_hrule(self, m):
        self.tokens.append({'type': 'hrule'})
    def parse_list_block(self, m):
        bull = m.group(2)
        self.tokens.append({
            'type': 'list_start',
            'ordered': '.' in bull,
        })
        cap = m.group(0)
        self._process_list_item(cap, bull)
        self.tokens.append({'type': 'list_end'})
    def _process_list_item(self, cap, bull):
        # split the list block into individual items and recurse into each
        cap = self.rules.list_item.findall(cap)
        _next = False
        length = len(cap)
        for i in range(length):
            item = cap[i][0]
            # remove the bullet
            space = len(item)
            item = self.rules.list_bullet.sub('', item)
            # outdent
            if '\n ' in item:
                space = space - len(item)
                pattern = re.compile(r'^ {1,%d}' % space, flags=re.M)
                item = pattern.sub('', item)
            # determine whether item is loose or not
            loose = _next
            if not loose and re.search(r'\n\n(?!\s*$)', item):
                loose = True
            rest = len(item)
            if i != length - 1 and rest:
                _next = item[rest-1] == '\n'
            # NOTE(review): upstream mistune nests the next two lines under
            # the preceding `if`; here they run for every item — confirm
            # this dedent is intentional.
            if not loose:
                loose = _next
            if loose:
                t = 'loose_item_start'
            else:
                t = 'list_item_start'
            self.tokens.append({'type': t})
            # recurse
            self.parse(item, self.list_rules)
            self.tokens.append({'type': 'list_item_end'})
    def parse_block_quote(self, m):
        self.tokens.append({'type': 'block_quote_start'})
        # clean leading >
        cap = _block_quote_leading_pattern.sub('', m.group(0))
        self.parse(cap)
        self.tokens.append({'type': 'block_quote_end'})
    def parse_def_links(self, m):
        key = _keyify(m.group(1))
        self.def_links[key] = {
            'link': m.group(2),
            'title': m.group(3),
        }
    def parse_def_footnotes(self, m):
        key = _keyify(m.group(1))
        if key in self.def_footnotes:
            # footnote is already defined
            return
        # 0 marks "defined but not yet referenced"
        self.def_footnotes[key] = 0
        self.tokens.append({
            'type': 'footnote_start',
            'key': key,
        })
        text = m.group(2)
        if '\n' in text:
            # strip the common indentation from continuation lines
            lines = text.split('\n')
            whitespace = None
            for line in lines[1:]:
                space = len(line) - len(line.lstrip())
                if space and (not whitespace or space < whitespace):
                    whitespace = space
            newlines = [lines[0]]
            for line in lines[1:]:
                newlines.append(line[whitespace:])
            text = '\n'.join(newlines)
        self.parse(text, self.footnote_rules)
        self.tokens.append({
            'type': 'footnote_end',
            'key': key,
        })
    def parse_table(self, m):
        item = self._process_table(m)
        # strip trailing pipe/newline, then split rows and cells
        cells = re.sub(r'(?: *\| *)?\n$', '', m.group(3))
        cells = cells.split('\n')
        for i, v in enumerate(cells):
            v = re.sub(r'^ *\| *| *\| *$', '', v)
            cells[i] = re.split(r' *\| *', v)
        item['cells'] = cells
        self.tokens.append(item)
    def parse_nptable(self, m):
        item = self._process_table(m)
        cells = re.sub(r'\n$', '', m.group(3))
        cells = cells.split('\n')
        for i, v in enumerate(cells):
            cells[i] = re.split(r' *\| *', v)
        item['cells'] = cells
        self.tokens.append(item)
    def _process_table(self, m):
        # shared header/alignment handling for table and nptable
        header = re.sub(r'^ *| *\| *$', '', m.group(1))
        header = re.split(r' *\| *', header)
        align = re.sub(r' *|\| *$', '', m.group(2))
        align = re.split(r' *\| *', align)
        for i, v in enumerate(align):
            if re.search(r'^ *-+: *$', v):
                align[i] = 'right'
            elif re.search(r'^ *:-+: *$', v):
                align[i] = 'center'
            elif re.search(r'^ *:-+ *$', v):
                align[i] = 'left'
            else:
                align[i] = None
        item = {
            'type': 'table',
            'header': header,
            'align': align,
        }
        return item
    def parse_block_html(self, m):
        tag = m.group(1)
        if not tag:
            # comment or self-closing/open tag: pass through verbatim
            text = m.group(0)
            self.tokens.append({
                'type': 'close_html',
                'text': text
            })
        else:
            attr = m.group(2)
            text = m.group(3)
            self.tokens.append({
                'type': 'open_html',
                'tag': tag,
                'extra': attr,
                'text': text
            })
    def parse_paragraph(self, m):
        text = m.group(1).rstrip('\n')
        self.tokens.append({'type': 'paragraph', 'text': text})
    def parse_text(self, m):
        text = m.group(0)
        self.tokens.append({'type': 'text', 'text': text})
class InlineGrammar(object):
    """Grammars for inline level tokens.

    Matched by InlineLexer against the head of the remaining span text.
    """
    escape = re.compile(r'^\\([\\`*{}\[\]()#+\-.!_>~|])')  # \* \+ \! ....
    # HTML comment, paired inline tag, or self-closing tag
    inline_html = re.compile(
        r'^(?:%s|%s|%s)' % (
            r'<!--[\s\S]*?-->',
            r'<(\w+%s)((?:%s)*?)\s*>([\s\S]*?)<\/\1>' % (
                _valid_end, _valid_attr),
            r'<\w+%s(?:%s)*?\s*\/?>' % (_valid_end, _valid_attr),
        )
    )
    # <http://...> or <user@host>
    autolink = re.compile(r'^<([^ >]+(@|:)[^ >]+)>')
    # [text](link "title") — group 2 tracks an optional <> around the link
    link = re.compile(
        r'^!?\[('
        r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
        r')\]\('
        r'''\s*(<)?([\s\S]*?)(?(2)>)(?:\s+['"]([\s\S]*?)['"])?\s*'''
        r'\)'
    )
    # [text][key]
    reflink = re.compile(
        r'^!?\[('
        r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
        r')\]\s*\[([^^\]]*)\]'
    )
    # [key] shorthand reference
    nolink = re.compile(r'^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]')
    url = re.compile(r'''^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])''')
    double_emphasis = re.compile(
        r'^_{2}([\s\S]+?)_{2}(?!_)'  # __word__
        r'|'
        r'^\*{2}([\s\S]+?)\*{2}(?!\*)'  # **word**
    )
    emphasis = re.compile(
        r'^\b_((?:__|[^_])+?)_\b'  # _word_
        r'|'
        r'^\*((?:\*\*|[^\*])+?)\*(?!\*)'  # *word*
    )
    code = re.compile(r'^(`+)\s*([\s\S]*?[^`])\s*\1(?!`)')  # `code`
    linebreak = re.compile(r'^ {2,}\n(?!\s*$)')
    strikethrough = re.compile(r'^~~(?=\S)([\s\S]*?\S)~~')  # ~~word~~
    footnote = re.compile(r'^\[\^([^\]]+)\]')
    text = re.compile(r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| {2,}\n|$)')
    def hard_wrap(self):
        """Grammar for hard wrap linebreak. You don't need to add two
        spaces at the end of a line.
        """
        # mutates this grammar instance in place (GFM-style line breaks)
        self.linebreak = re.compile(r'^ *\n(?!\s*$)')
        self.text = re.compile(
            r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| *\n|$)'
        )
class InlineLexer(object):
    """Inline level lexer for inline grammars.

    Consumes span-level text rule by rule, calling the renderer directly
    and concatenating its output.
    """
    grammar_class = InlineGrammar
    default_rules = [
        'escape', 'inline_html', 'autolink', 'url',
        'footnote', 'link', 'reflink', 'nolink',
        'double_emphasis', 'emphasis', 'code',
        'linebreak', 'strikethrough', 'text',
    ]
    # rule set used inside raw inline HTML (no nested html/footnotes)
    inline_html_rules = [
        'escape', 'autolink', 'url', 'link', 'reflink',
        'nolink', 'double_emphasis', 'emphasis', 'code',
        'linebreak', 'strikethrough', 'text',
    ]
    def __init__(self, renderer, rules=None, **kwargs):
        self.renderer = renderer
        self.links = {}
        self.footnotes = {}
        self.footnote_index = 0
        if not rules:
            rules = self.grammar_class()
        kwargs.update(self.renderer.options)
        if kwargs.get('hard_wrap'):
            # switch the grammar to GFM line-break behavior
            rules.hard_wrap()
        self.rules = rules
        # _in_link suppresses autolinking inside link text;
        # _in_footnote suppresses footnote refs inside footnote bodies
        self._in_link = False
        self._in_footnote = False
        self._parse_inline_html = kwargs.get('parse_inline_html')
    def __call__(self, text, rules=None):
        return self.output(text, rules)
    def setup(self, links, footnotes):
        """Install the link/footnote definitions collected by the block lexer."""
        self.footnote_index = 0
        self.links = links or {}
        self.footnotes = footnotes or {}
    def output(self, text, rules=None):
        text = text.rstrip('\n')
        if not rules:
            rules = list(self.default_rules)
        if self._in_footnote and 'footnote' in rules:
            rules.remove('footnote')
        output = self.renderer.placeholder()
        def manipulate(text):
            # first rule that matches AND whose handler returns non-None wins
            for key in rules:
                pattern = getattr(self.rules, key)
                m = pattern.match(text)
                if not m:
                    continue
                self.line_match = m
                out = getattr(self, 'output_%s' % key)(m)
                if out is not None:
                    return m, out
            return False  # pragma: no cover
        while text:
            ret = manipulate(text)
            if ret is not False:
                m, out = ret
                output += out
                text = text[len(m.group(0)):]
                continue
            if text:  # pragma: no cover
                raise RuntimeError('Infinite loop at: %s' % text)
        return output
    def output_escape(self, m):
        text = m.group(1)
        return self.renderer.escape(text)
    def output_autolink(self, m):
        link = m.group(1)
        if m.group(2) == '@':
            is_email = True
        else:
            is_email = False
        return self.renderer.autolink(link, is_email)
    def output_url(self, m):
        link = m.group(1)
        if self._in_link:
            # no links inside link text — render as plain text
            return self.renderer.text(link)
        return self.renderer.autolink(link, False)
    def output_inline_html(self, m):
        tag = m.group(1)
        if self._parse_inline_html and tag in _inline_tags:
            text = m.group(3)
            if tag == 'a':
                self._in_link = True
                text = self.output(text, rules=self.inline_html_rules)
                self._in_link = False
            else:
                text = self.output(text, rules=self.inline_html_rules)
            extra = m.group(2) or ''
            html = '<%s%s>%s</%s>' % (tag, extra, text, tag)
        else:
            html = m.group(0)
        return self.renderer.inline_html(html)
    def output_footnote(self, m):
        key = _keyify(m.group(1))
        if key not in self.footnotes:
            return None
        if self.footnotes[key]:
            # already referenced once; only the first reference renders
            return None
        self.footnote_index += 1
        self.footnotes[key] = self.footnote_index
        return self.renderer.footnote_ref(key, self.footnote_index)
    def output_link(self, m):
        return self._process_link(m, m.group(3), m.group(4))
    def output_reflink(self, m):
        key = _keyify(m.group(2) or m.group(1))
        if key not in self.links:
            # unknown key: return None so a later rule handles the text
            return None
        ret = self.links[key]
        return self._process_link(m, ret['link'], ret['title'])
    def output_nolink(self, m):
        key = _keyify(m.group(1))
        if key not in self.links:
            return None
        ret = self.links[key]
        return self._process_link(m, ret['link'], ret['title'])
    def _process_link(self, m, link, title=None):
        line = m.group(0)
        text = m.group(1)
        # a leading '!' marks an image rather than a link
        if line[0] == '!':
            return self.renderer.image(link, title, text)
        self._in_link = True
        text = self.output(text)
        self._in_link = False
        return self.renderer.link(link, title, text)
    def output_double_emphasis(self, m):
        # group 1 is the __..__ form, group 2 the **..** form
        text = m.group(2) or m.group(1)
        text = self.output(text)
        return self.renderer.double_emphasis(text)
    def output_emphasis(self, m):
        text = m.group(2) or m.group(1)
        text = self.output(text)
        return self.renderer.emphasis(text)
    def output_code(self, m):
        text = m.group(2)
        return self.renderer.codespan(text)
    def output_linebreak(self, m):
        return self.renderer.linebreak()
    def output_strikethrough(self, m):
        text = self.output(m.group(1))
        return self.renderer.strikethrough(text)
    def output_text(self, m):
        text = m.group(0)
        return self.renderer.text(text)
class Renderer(object):
    """The default HTML renderer for rendering Markdown.

    Each method receives already-lexed content and returns an HTML
    fragment; the lexers concatenate the fragments via placeholder().
    """
    def __init__(self, **kwargs):
        # options: escape, use_xhtml, hard_wrap, parse_block_html,
        # parse_inline_html, skip_style, ...
        self.options = kwargs
    def placeholder(self):
        """Returns the default, empty output value for the renderer.
        All renderer methods use the '+=' operator to append to this value.
        Default is a string so rendering HTML can build up a result string with
        the rendered Markdown.
        Can be overridden by Renderer subclasses to be types like an empty
        list, allowing the renderer to create a tree-like structure to
        represent the document (which can then be reprocessed later into a
        separate format like docx or pdf).
        """
        return ''
    def block_code(self, code, lang=None):
        """Rendering block level code. ``pre > code``.
        :param code: text content of the code block.
        :param lang: language of the given code.
        """
        code = code.rstrip('\n')
        if not lang:
            code = escape(code, smart_amp=False)
            return '<pre><code>%s\n</code></pre>\n' % code
        code = escape(code, quote=True, smart_amp=False)
        return '<pre><code class="lang-%s">%s\n</code></pre>\n' % (lang, code)
    def block_quote(self, text):
        """Rendering <blockquote> with the given text.
        :param text: text content of the blockquote.
        """
        return '<blockquote>%s\n</blockquote>\n' % text.rstrip('\n')
    def block_html(self, html):
        """Rendering block level pure html content.
        :param html: text content of the html snippet.
        """
        if self.options.get('skip_style') and \
           html.lower().startswith('<style'):
            return ''
        if self.options.get('escape'):
            return escape(html)
        return html
    def header(self, text, level, raw=None):
        """Rendering header/heading tags like ``<h1>`` ``<h2>``.
        :param text: rendered text content for the header.
        :param level: a number for the header level, for example: 1.
        :param raw: raw text content of the header.
        """
        return '<h%d>%s</h%d>\n' % (level, text, level)
    def hrule(self):
        """Rendering method for ``<hr>`` tag."""
        if self.options.get('use_xhtml'):
            return '<hr />\n'
        return '<hr>\n'
    def list(self, body, ordered=True):
        """Rendering list tags like ``<ul>`` and ``<ol>``.
        :param body: body contents of the list.
        :param ordered: whether this list is ordered or not.
        """
        tag = 'ul'
        if ordered:
            tag = 'ol'
        return '<%s>\n%s</%s>\n' % (tag, body, tag)
    def list_item(self, text):
        """Rendering list item snippet. Like ``<li>``."""
        return '<li>%s</li>\n' % text
    def paragraph(self, text):
        """Rendering paragraph tags. Like ``<p>``."""
        return '<p>%s</p>\n' % text.strip(' ')
    def table(self, header, body):
        """Rendering table element. Wrap header and body in it.
        :param header: header part of the table.
        :param body: body part of the table.
        """
        return (
            '<table>\n<thead>%s</thead>\n'
            '<tbody>\n%s</tbody>\n</table>\n'
        ) % (header, body)
    def table_row(self, content):
        """Rendering a table row. Like ``<tr>``.
        :param content: content of current table row.
        """
        return '<tr>\n%s</tr>\n' % content
    def table_cell(self, content, **flags):
        """Rendering a table cell. Like ``<th>`` ``<td>``.
        :param content: content of current table cell.
        :param header: whether this is header or not.
        :param align: align of current table cell.
        """
        if flags['header']:
            tag = 'th'
        else:
            tag = 'td'
        align = flags['align']
        if not align:
            return '<%s>%s</%s>\n' % (tag, content, tag)
        return '<%s style="text-align:%s">%s</%s>\n' % (
            tag, align, content, tag
        )
    def double_emphasis(self, text):
        """Rendering **strong** text.
        :param text: text content for emphasis.
        """
        return '<strong>%s</strong>' % text
    def emphasis(self, text):
        """Rendering *emphasis* text.
        :param text: text content for emphasis.
        """
        return '<em>%s</em>' % text
    def codespan(self, text):
        """Rendering inline `code` text.
        :param text: text content for inline code.
        """
        text = escape(text.rstrip(), smart_amp=False)
        return '<code>%s</code>' % text
    def linebreak(self):
        """Rendering line break like ``<br>``."""
        if self.options.get('use_xhtml'):
            return '<br />\n'
        return '<br>\n'
    def strikethrough(self, text):
        """Rendering ~~strikethrough~~ text.
        :param text: text content for strikethrough.
        """
        return '<del>%s</del>' % text
    def text(self, text):
        """Rendering unformatted text.
        :param text: text content.
        """
        # raw text passes through untouched when block HTML parsing is on
        if self.options.get('parse_block_html'):
            return text
        return escape(text)
    def escape(self, text):
        """Rendering escape sequence.
        :param text: text content.
        """
        # delegates to the module-level escape() helper
        return escape(text)
    def autolink(self, link, is_email=False):
        """Rendering a given link or email address.
        :param link: link content or email address.
        :param is_email: whether this is an email or not.
        """
        text = link = escape_link(link)
        if is_email:
            link = 'mailto:%s' % link
        return '<a href="%s">%s</a>' % (link, text)
    def link(self, link, title, text):
        """Rendering a given link with content and title.
        :param link: href link for ``<a>`` tag.
        :param title: title content for `title` attribute.
        :param text: text content for description.
        """
        link = escape_link(link)
        if not title:
            return '<a href="%s">%s</a>' % (link, text)
        title = escape(title, quote=True)
        return '<a href="%s" title="%s">%s</a>' % (link, title, text)
    def image(self, src, title, text):
        """Rendering a image with title and text.
        :param src: source link of the image.
        :param title: title text of the image.
        :param text: alt text of the image.
        """
        src = escape_link(src)
        text = escape(text, quote=True)
        if title:
            title = escape(title, quote=True)
            html = '<img src="%s" alt="%s" title="%s"' % (src, text, title)
        else:
            html = '<img src="%s" alt="%s"' % (src, text)
        if self.options.get('use_xhtml'):
            return '%s />' % html
        return '%s>' % html
    def inline_html(self, html):
        """Rendering span level pure html content.
        :param html: text content of the html snippet.
        """
        if self.options.get('escape'):
            return escape(html)
        return html
    def newline(self):
        """Rendering newline element."""
        return ''
    def footnote_ref(self, key, index):
        """Rendering the ref anchor of a footnote.
        :param key: identity key for the footnote.
        :param index: the index count of current footnote.
        """
        html = (
            '<sup class="footnote-ref" id="fnref-%s">'
            '<a href="#fn-%s">%d</a></sup>'
        ) % (escape(key), escape(key), index)
        return html
    def footnote_item(self, key, text):
        """Rendering a footnote item.
        :param key: identity key for the footnote.
        :param text: text content of the footnote.
        """
        # backlink from the footnote body to its in-text reference
        back = (
            '<a href="#fnref-%s" class="footnote">↩</a>'
        ) % escape(key)
        text = text.rstrip()
        if text.endswith('</p>'):
            text = re.sub(r'<\/p>$', r'%s</p>' % back, text)
        else:
            text = '%s<p>%s</p>' % (text, back)
        html = '<li id="fn-%s">%s</li>\n' % (escape(key), text)
        return html
    def footnotes(self, text):
        """Wrapper for all footnotes.
        :param text: contents of all footnotes.
        """
        html = '<div class="footnotes">\n%s<ol>%s</ol>\n</div>\n'
        return html % (self.hrule(), text)
class Markdown(object):
    """The Markdown parser.

    Drives the block lexer to produce a token stream, then walks the
    (reversed) stream dispatching each token to an ``output_<type>``
    method that calls into the renderer.

    :param renderer: An instance of ``Renderer``.
    :param inline: An inline lexer class or instance.
    :param block: A block lexer class or instance.
    """
    def __init__(self, renderer=None, inline=None, block=None, **kwargs):
        if not renderer:
            renderer = Renderer(**kwargs)
        else:
            # merge renderer options into the lexer configuration
            kwargs.update(renderer.options)
        self.renderer = renderer
        # accept either lexer classes or ready-made instances
        if inline and inspect.isclass(inline):
            inline = inline(renderer, **kwargs)
        if block and inspect.isclass(block):
            block = block(**kwargs)
        if inline:
            self.inline = inline
        else:
            self.inline = InlineLexer(renderer, **kwargs)
        self.block = block or BlockLexer(BlockGrammar())
        self.footnotes = []
        self.tokens = []
        # detect if it should parse text in block html
        self._parse_block_html = kwargs.get('parse_block_html')
    def __call__(self, text):
        return self.parse(text)
    def render(self, text):
        """Render the Markdown text.
        :param text: markdown formatted text content.
        """
        return self.parse(text)
    def parse(self, text):
        # Render the document body first; footnote bodies accumulate in
        # self.footnotes as a side effect of output_footnote().
        out = self.output(preprocessing(text))
        keys = self.block.def_footnotes
        # reset block
        self.block.def_links = {}
        self.block.def_footnotes = {}
        # reset inline
        self.inline.links = {}
        self.inline.footnotes = {}
        if not self.footnotes:
            return out
        # Drop footnotes never referenced (index still 0) and emit the rest
        # in first-reference order; reverse-sorted so pop() yields in order.
        footnotes = filter(lambda o: keys.get(o['key']), self.footnotes)
        self.footnotes = sorted(
            footnotes, key=lambda o: keys.get(o['key']), reverse=True
        )
        body = self.renderer.placeholder()
        while self.footnotes:
            note = self.footnotes.pop()
            body += self.renderer.footnote_item(
                note['key'], note['text']
            )
        out += self.renderer.footnotes(body)
        return out
    def pop(self):
        # Consume the next token (list is reversed, so pop() is O(1)).
        if not self.tokens:
            return None
        self.token = self.tokens.pop()
        return self.token
    def peek(self):
        # Inspect the next token without consuming it.
        if self.tokens:
            return self.tokens[-1]
        return None  # pragma: no cover
    def output(self, text, rules=None):
        self.tokens = self.block(text, rules)
        self.tokens.reverse()
        self.inline.setup(self.block.def_links, self.block.def_footnotes)
        out = self.renderer.placeholder()
        while self.pop():
            out += self.tok()
        return out
    def tok(self):
        # Dispatch the current token to its output_<type> handler.
        t = self.token['type']
        # special case: *_start tokens dispatch without the suffix
        if t.endswith('_start'):
            t = t[:-6]
        return getattr(self, 'output_%s' % t)()
    def tok_text(self):
        # Merge consecutive 'text' tokens before inline rendering.
        # NOTE(review): assumes a non-text token always follows a text run;
        # peek() returning None here would raise TypeError — confirm.
        text = self.token['text']
        while self.peek()['type'] == 'text':
            text += '\n' + self.pop()['text']
        return self.inline(text)
    def output_newline(self):
        return self.renderer.newline()
    def output_hrule(self):
        return self.renderer.hrule()
    def output_heading(self):
        return self.renderer.header(
            self.inline(self.token['text']),
            self.token['level'],
            self.token['text'],
        )
    def output_code(self):
        return self.renderer.block_code(
            self.token['text'], self.token['lang']
        )
    def output_table(self):
        aligns = self.token['align']
        aligns_length = len(aligns)
        cell = self.renderer.placeholder()
        # header part
        header = self.renderer.placeholder()
        for i, value in enumerate(self.token['header']):
            align = aligns[i] if i < aligns_length else None
            flags = {'header': True, 'align': align}
            cell += self.renderer.table_cell(self.inline(value), **flags)
        header += self.renderer.table_row(cell)
        # body part
        body = self.renderer.placeholder()
        for i, row in enumerate(self.token['cells']):
            cell = self.renderer.placeholder()
            for j, value in enumerate(row):
                align = aligns[j] if j < aligns_length else None
                flags = {'header': False, 'align': align}
                cell += self.renderer.table_cell(self.inline(value), **flags)
            body += self.renderer.table_row(cell)
        return self.renderer.table(header, body)
    def output_block_quote(self):
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'block_quote_end':
            body += self.tok()
        return self.renderer.block_quote(body)
    def output_list(self):
        ordered = self.token['ordered']
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_end':
            body += self.tok()
        return self.renderer.list(body, ordered)
    def output_list_item(self):
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_item_end':
            if self.token['type'] == 'text':
                # tight list items render bare text without <p> wrappers
                body += self.tok_text()
            else:
                body += self.tok()
        return self.renderer.list_item(body)
    def output_loose_item(self):
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_item_end':
            body += self.tok()
        return self.renderer.list_item(body)
    def output_footnote(self):
        # Suppress nested footnote references while rendering a footnote body.
        self.inline._in_footnote = True
        body = self.renderer.placeholder()
        key = self.token['key']
        while self.pop()['type'] != 'footnote_end':
            body += self.tok()
        self.footnotes.append({'key': key, 'text': body})
        self.inline._in_footnote = False
        return self.renderer.placeholder()
    def output_close_html(self):
        text = self.token['text']
        return self.renderer.block_html(text)
    def output_open_html(self):
        text = self.token['text']
        tag = self.token['tag']
        if self._parse_block_html and tag not in _pre_tags:
            text = self.inline(text, rules=self.inline.inline_html_rules)
        extra = self.token.get('extra') or ''
        html = '<%s%s>%s</%s>' % (tag, extra, text, tag)
        return self.renderer.block_html(html)
    def output_paragraph(self):
        return self.renderer.paragraph(self.inline(self.token['text']))
    def output_text(self):
        return self.renderer.paragraph(self.tok_text())
def markdown(text, escape=True, **kwargs):
"""Render markdown formatted text to html.
:param text: markdown formatted text content.
:param escape: if set to False, all html tags will not be escaped.
:param use_xhtml: output with xhtml tags.
:param hard_wrap: if set to True, it will use the GFM line breaks feature.
:param parse_block_html: parse text only in block level html.
:param parse_inline_html: parse text only in inline level html.
"""
return Markdown(escape=escape, **kwargs)(text)
| xss | {
"code": [
"__version__ = '0.8'",
" return _key_pattern.sub(' ', key.lower())",
" r'<(\\w+%s)((?:%s)*?)\\s*>([\\s\\S]*?)<\\/\\1>' % (_valid_end, _valid_attr),"
],
"line_no": [
14,
51,
448
]
} | {
"code": [
"__version__ = '0.8.1'",
" key = escape(key.lower(), quote=True)",
" r'<(\\w+%s)((?:%s)*?)\\s*>([\\s\\S]*?)<\\/\\1>' % (",
" _valid_end, _valid_attr),"
],
"line_no": [
14,
51,
449,
450
]
} |
import re
import .inspect
__version__ = '0.8'
__author__ = 'Hsiaoming Yang <me@lepture.com>'
__all__ = [
'BlockGrammar', 'BlockLexer',
'InlineGrammar', 'InlineLexer',
'Renderer', 'Markdown',
'markdown', 'escape',
]
VAR_0 = re.compile(r'\s+')
VAR_1 = re.compile(r'\W')
VAR_2 = re.compile(r'&(?!#?\w+;)')
VAR_3 = re.compile(r'\r\n|\r')
VAR_4 = re.compile(r'^ *> ?', VAR_71=re.M)
VAR_5 = re.compile(r'^ {4}', re.M)
VAR_6 = [
'a', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn', 'abbr', 'data',
'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i', 'b', 'u', 'mark',
'ruby', 'rt', 'rp', 'bdi', 'bdo', 'span', 'br', 'wbr', 'ins', 'del',
'img', 'font',
]
VAR_7 = ['pre', 'script', 'style']
VAR_8 = r'(?!:/|[^\w\s@]*@)\b'
VAR_9 = r'''\s*[a-zA-Z\-](?:\=(?:"[^"]*"|'[^']*'|[^\s'">]+))?'''
VAR_10 = r'(?!(?:%s)\b)\w+%s' % ('|'.join(VAR_6), VAR_8)
VAR_11 = ('javascript:', 'vbscript:')
def FUNC_0(VAR_12):
VAR_21 = VAR_12.pattern
if VAR_21.startswith('^'):
VAR_21 = pattern[1:]
return VAR_21
def FUNC_1(VAR_13):
return VAR_0.sub(' ', VAR_13.lower())
def VAR_19(VAR_14, VAR_15=False, VAR_16=True):
if VAR_16:
VAR_14 = VAR_2.sub('&', VAR_14)
else:
VAR_14 = VAR_14.replace('&', '&')
VAR_14 = VAR_14.replace('<', '<')
VAR_14 = VAR_14.replace('>', '>')
if VAR_15:
VAR_14 = VAR_14.replace('"', '"')
VAR_14 = VAR_14.replace("'", ''')
return VAR_14
def FUNC_3(VAR_17):
VAR_22 = VAR_17.lower().strip('\x00\x1a \n\r\t')
for scheme in VAR_11:
if re.sub(r'[^A-Za-z0-9\/:]+', '', VAR_22).startswith(scheme):
return ''
return VAR_19(VAR_17, VAR_15=True, VAR_16=False)
def FUNC_4(VAR_14, VAR_18=4):
VAR_14 = VAR_3.sub('\n', VAR_14)
VAR_14 = VAR_14.expandtabs(VAR_18)
VAR_14 = VAR_14.replace('\u2424', '\n')
VAR_21 = re.compile(r'^ +$', re.M)
return VAR_21.sub('', VAR_14)
class CLASS_0(object):
VAR_23 = re.compile(
r'^ *\[([^^\]]+)\]: *' # [VAR_13]:
r'<?([^\s>]+)>?' # <VAR_49> or VAR_49
r'(?: +["(]([^\n]+)[")])? *(?:\n+|$)'
)
VAR_24 = re.compile(
r'^\[\^([^\]]+)\]: *('
r'[^\n]*(?:\n+|$)' # [^VAR_13]:
r'(?: {1,}[^\n]*(?:\n+|$))*'
r')'
)
VAR_25 = re.compile(r'^\n+')
VAR_26 = re.compile(r'^( {4}[^\n]+\n*)+')
VAR_27 = re.compile(
r'^ *(`{3,}|~{3,}) *(\S+)? *\n' # ```VAR_63
r'([\s\S]+?)\s*'
r'\1 *(?:\n+|$)' # ```
)
VAR_28 = re.compile(r'^ {0,3}[-*_](?: *[-*_]){2,} *(?:\n+|$)')
VAR_29 = re.compile(r'^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)')
VAR_30 = re.compile(r'^([^\n]+)\n *(=|-)+ *(?:\n+|$)')
VAR_31 = re.compile(r'^( *>[^\n]+(\n[^\n]+)*\n*)+')
VAR_32 = re.compile(
r'^( *)([*+-]|\d+\.) [\s\S]+?'
r'(?:'
r'\n+(?=\1?(?:[-*_] *){3,}(?:\n+|$))' # VAR_28
r'|\n+(?=%s)' # def VAR_60
r'|\n+(?=%s)' # def VAR_61
r'|\n{2,}'
r'(?! )'
r'(?!\1(?:[*+-]|\d+\.) )\n*'
r'|'
r'\s*$)' % (
FUNC_0(VAR_23),
FUNC_0(VAR_24),
)
)
VAR_33 = re.compile(
r'^(( *)(?:[*+-]|\d+\.) [^\n]*'
r'(?:\n(?!\2(?:[*+-]|\d+\.) )[^\n]*)*)',
VAR_71=re.M
)
VAR_34 = re.compile(r'^ *(?:[*+-]|\d+\.) +')
VAR_35 = re.compile(
r'^((?:[^\n]+\n?(?!'
r'%s|%s|%s|%s|%s|%s|%s|%s|%s'
r'))+)\n*' % (
FUNC_0(VAR_27).replace(r'\1', r'\2'),
FUNC_0(VAR_32).replace(r'\1', r'\3'),
FUNC_0(VAR_28),
FUNC_0(VAR_29),
FUNC_0(VAR_30),
FUNC_0(VAR_31),
FUNC_0(VAR_23),
FUNC_0(VAR_24),
'<' + VAR_10,
)
)
VAR_36 = re.compile(
r'^ *(?:%s|%s|%s) *(?:\n{2,}|\s*$)' % (
r'<!--[\s\S]*?-->',
r'<(%s)((?:%s)*?)>([\s\S]*?)<\/\1>' % (VAR_10, VAR_9),
r'<%s(?:%s)*?\s*\/?>' % (VAR_10, VAR_9),
)
)
VAR_37 = re.compile(
r'^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*'
)
VAR_38 = re.compile(
r'^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*'
)
VAR_14 = re.compile(r'^[^\n]+')
class CLASS_1(object):
VAR_39 = CLASS_0
VAR_40 = [
'newline', 'hrule', 'block_code', 'fences', 'heading',
'nptable', 'lheading', 'block_quote',
'list_block', 'block_html', 'def_links',
'def_footnotes', 'table', 'paragraph', 'text'
]
VAR_41 = (
'newline', 'block_code', 'fences', 'lheading', 'hrule',
'block_quote', 'list_block', 'block_html', 'text',
)
VAR_42 = (
'newline', 'block_code', 'fences', 'heading',
'nptable', 'lheading', 'hrule', 'block_quote',
'list_block', 'block_html', 'table', 'paragraph', 'text'
)
def __init__(self, VAR_43=None, **VAR_20):
self.tokens = []
self.def_links = {}
self.def_footnotes = {}
if not VAR_43:
rules = self.grammar_class()
self.rules = VAR_43
def __call__(self, VAR_14, VAR_43=None):
return self.parse(VAR_14, VAR_43)
def FUNC_6(self, VAR_14, VAR_43=None):
VAR_14 = VAR_14.rstrip('\n')
if not VAR_43:
rules = self.default_rules
def FUNC_84(VAR_14):
for VAR_13 in VAR_43:
VAR_104 = getattr(self.rules, VAR_13)
VAR_44 = VAR_104.match(VAR_14)
if not VAR_44:
continue
getattr(self, 'parse_%s' % VAR_13)(VAR_44)
return VAR_44
return False # pragma: no cover
while VAR_14:
VAR_44 = FUNC_84(VAR_14)
if VAR_44 is not False:
VAR_14 = FUNC_59[len(VAR_44.group(0)):]
continue
if VAR_14: # pragma: no cover
raise RuntimeError('Infinite loop at: %s' % VAR_14)
return self.tokens
def FUNC_7(self, VAR_44):
VAR_77 = len(VAR_44.group(0))
if VAR_77 > 1:
self.tokens.append({'type': 'newline'})
def FUNC_8(self, VAR_44):
VAR_54 = VAR_5.sub('', VAR_44.group(0))
self.tokens.append({
'type': 'code',
'lang': None,
'text': VAR_54,
})
def FUNC_9(self, VAR_44):
self.tokens.append({
'type': 'code',
'lang': VAR_44.group(2),
'text': VAR_44.group(3),
})
def FUNC_10(self, VAR_44):
self.tokens.append({
'type': 'heading',
'level': len(VAR_44.group(1)),
'text': VAR_44.group(2),
})
def FUNC_11(self, VAR_44):
self.tokens.append({
'type': 'heading',
'level': 1 if VAR_44.group(2) == '=' else 2,
'text': VAR_44.group(1),
})
def FUNC_12(self, VAR_44):
self.tokens.append({'type': 'hrule'})
def FUNC_13(self, VAR_44):
VAR_46 = VAR_44.group(2)
self.tokens.append({
'type': 'list_start',
'ordered': '.' in VAR_46,
})
VAR_45 = VAR_44.group(0)
self._process_list_item(VAR_45, VAR_46)
self.tokens.append({'type': 'list_end'})
def FUNC_14(self, VAR_45, VAR_46):
VAR_45 = self.rules.list_item.findall(VAR_45)
VAR_78 = False
VAR_77 = len(VAR_45)
for VAR_101 in range(VAR_77):
VAR_79 = VAR_45[VAR_101][0]
VAR_94 = len(VAR_79)
VAR_79 = self.rules.list_bullet.sub('', VAR_79)
if '\n ' in VAR_79:
VAR_94 = VAR_94 - len(VAR_79)
VAR_21 = re.compile(r'^ {1,%d}' % VAR_94, VAR_71=re.M)
VAR_79 = VAR_21.sub('', VAR_79)
VAR_95 = VAR_78
if not VAR_95 and re.search(r'\n\n(?!\s*$)', VAR_79):
VAR_95 = True
VAR_96 = len(VAR_79)
if VAR_101 != VAR_77 - 1 and VAR_96:
VAR_78 = VAR_79[VAR_96-1] == '\n'
if not VAR_95:
VAR_95 = VAR_78
if VAR_95:
VAR_89 = 'loose_item_start'
else:
VAR_89 = 'list_item_start'
self.tokens.append({'type': VAR_89})
self.parse(VAR_79, self.list_rules)
self.tokens.append({'type': 'list_item_end'})
def FUNC_15(self, VAR_44):
self.tokens.append({'type': 'block_quote_start'})
VAR_45 = VAR_4.sub('', VAR_44.group(0))
self.parse(VAR_45)
self.tokens.append({'type': 'block_quote_end'})
def FUNC_16(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
self.def_links[VAR_13] = {
'link': VAR_44.group(2),
'title': VAR_44.group(3),
}
def FUNC_17(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 in self.def_footnotes:
return
self.def_footnotes[VAR_13] = 0
self.tokens.append({
'type': 'footnote_start',
'key': VAR_13,
})
VAR_14 = VAR_44.group(2)
if '\n' in VAR_14:
VAR_97 = VAR_14.split('\n')
VAR_98 = None
for VAR_85 in VAR_97[1:]:
VAR_94 = len(VAR_85) - len(VAR_85.lstrip())
if VAR_94 and (not VAR_98 or VAR_94 < VAR_98):
VAR_98 = VAR_94
VAR_99 = [VAR_97[0]]
for VAR_85 in VAR_97[1:]:
VAR_99.append(VAR_85[VAR_98:])
VAR_14 = '\n'.join(VAR_99)
self.parse(VAR_14, self.footnote_rules)
self.tokens.append({
'type': 'footnote_end',
'key': VAR_13,
})
def FUNC_18(self, VAR_44):
VAR_79 = self._process_table(VAR_44)
VAR_80 = re.sub(r'(?: *\| *)?\n$', '', VAR_44.group(3))
VAR_80 = cells.split('\n')
for VAR_101, VAR_100 in enumerate(VAR_80):
VAR_100 = re.sub(r'^ *\| *| *\| *$', '', VAR_100)
VAR_80[VAR_101] = re.split(r' *\| *', VAR_100)
VAR_79['cells'] = VAR_80
self.tokens.append(VAR_79)
def FUNC_19(self, VAR_44):
VAR_79 = self._process_table(VAR_44)
VAR_80 = re.sub(r'\n$', '', VAR_44.group(3))
VAR_80 = cells.split('\n')
for VAR_101, VAR_100 in enumerate(VAR_80):
cells[VAR_101] = re.split(r' *\| *', VAR_100)
VAR_79['cells'] = VAR_80
self.tokens.append(VAR_79)
def FUNC_20(self, VAR_44):
VAR_69 = re.sub(r'^ *| *\| *$', '', VAR_44.group(1))
VAR_69 = re.split(r' *\| *', VAR_69)
VAR_81 = re.sub(r' *|\| *$', '', VAR_44.group(2))
VAR_81 = re.split(r' *\| *', VAR_81)
for VAR_101, VAR_100 in enumerate(VAR_81):
if re.search(r'^ *-+: *$', VAR_100):
VAR_81[VAR_101] = 'right'
elif re.search(r'^ *:-+: *$', VAR_100):
VAR_81[VAR_101] = 'center'
elif re.search(r'^ *:-+ *$', VAR_100):
VAR_81[VAR_101] = 'left'
else:
VAR_81[VAR_101] = None
VAR_79 = {
'type': 'table',
'header': VAR_69,
'align': VAR_81,
}
return VAR_79
def FUNC_21(self, VAR_44):
VAR_82 = VAR_44.group(1)
if not VAR_82:
VAR_14 = VAR_44.group(0)
self.tokens.append({
'type': 'close_html',
'text': VAR_14
})
else:
VAR_102 = VAR_44.group(2)
VAR_14 = VAR_44.group(3)
self.tokens.append({
'type': 'open_html',
'tag': VAR_82,
'extra': VAR_102,
'text': VAR_14
})
def FUNC_22(self, VAR_44):
VAR_14 = VAR_44.group(1).rstrip('\n')
self.tokens.append({'type': 'paragraph', 'text': VAR_14})
def FUNC_23(self, VAR_44):
VAR_14 = VAR_44.group(0)
self.tokens.append({'type': 'text', 'text': VAR_14})
class CLASS_2(object):
VAR_19 = re.compile(r'^\\([\\`*{}\[\]()#+\-.!_>~|])') # \* \+ \! ....
VAR_47 = re.compile(
r'^(?:%s|%s|%s)' % (
r'<!--[\s\S]*?-->',
r'<(\w+%s)((?:%s)*?)\s*>([\s\S]*?)<\/\1>' % (VAR_8, VAR_9),
r'<\w+%s(?:%s)*?\s*\/?>' % (VAR_8, VAR_9),
)
)
VAR_48 = re.compile(r'^<([^ >]+(@|:)[^ >]+)>')
VAR_49 = re.compile(
r'^!?\[('
r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
r')\]\('
r'''\s*(<)?([\s\S]*?)(?(2)>)(?:\s+['"]([\s\S]*?)['"])?\s*'''
r'\)'
)
VAR_50 = re.compile(
r'^!?\[('
r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
r')\]\s*\[([^^\]]*)\]'
)
VAR_51 = re.compile(r'^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]')
VAR_17 = re.compile(r'''^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])''')
VAR_52 = re.compile(
r'^_{2}([\s\S]+?)_{2}(?!_)' # __word__
r'|'
r'^\*{2}([\s\S]+?)\*{2}(?!\*)' # **word**
)
VAR_53 = re.compile(
r'^\b_((?:__|[^_])+?)_\b' # _word_
r'|'
r'^\*((?:\*\*|[^\*])+?)\*(?!\*)' # *word*
)
VAR_54 = re.compile(r'^(`+)\s*([\s\S]*?[^`])\s*\1(?!`)') # `VAR_54`
VAR_55 = re.compile(r'^ {2,}\n(?!\s*$)')
VAR_56 = re.compile(r'^~~(?=\S)([\s\S]*?\S)~~') # ~~word~~
VAR_57 = re.compile(r'^\[\^([^\]]+)\]')
VAR_14 = re.compile(r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| {2,}\n|$)')
def FUNC_24(self):
self.linebreak = re.compile(r'^ *\n(?!\s*$)')
self.text = re.compile(
r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| *\n|$)'
)
class CLASS_3(object):
VAR_39 = CLASS_2
VAR_40 = [
'escape', 'inline_html', 'autolink', 'url',
'footnote', 'link', 'reflink', 'nolink',
'double_emphasis', 'emphasis', 'code',
'linebreak', 'strikethrough', 'text',
]
VAR_58 = [
'escape', 'autolink', 'url', 'link', 'reflink',
'nolink', 'double_emphasis', 'emphasis', 'code',
'linebreak', 'strikethrough', 'text',
]
def __init__(self, VAR_59, VAR_43=None, **VAR_20):
self.renderer = VAR_59
self.links = {}
self.footnotes = {}
self.footnote_index = 0
if not VAR_43:
rules = self.grammar_class()
VAR_20.update(self.renderer.options)
if VAR_20.get('hard_wrap'):
VAR_43.hard_wrap()
self.rules = VAR_43
self._in_link = False
self._in_footnote = False
self._parse_inline_html = VAR_20.get('parse_inline_html')
def __call__(self, VAR_14, VAR_43=None):
return self.output(VAR_14, VAR_43)
def FUNC_25(self, VAR_60, VAR_61):
self.footnote_index = 0
self.links = VAR_60 or {}
self.footnotes = VAR_61 or {}
def VAR_83(self, VAR_14, VAR_43=None):
VAR_14 = VAR_14.rstrip('\n')
if not VAR_43:
rules = FUNC_48(self.default_rules)
if self._in_footnote and 'footnote' in VAR_43:
rules.remove('footnote')
VAR_83 = self.renderer.placeholder()
def FUNC_84(VAR_14):
for VAR_13 in VAR_43:
VAR_21 = getattr(self.rules, VAR_13)
VAR_44 = VAR_21.match(VAR_14)
if not VAR_44:
continue
self.line_match = VAR_44
VAR_87 = getattr(self, 'output_%s' % VAR_13)(VAR_44)
if VAR_87 is not None:
return VAR_44, VAR_87
return False # pragma: no cover
while VAR_14:
VAR_84 = FUNC_84(VAR_14)
if VAR_84 is not False:
VAR_44, VAR_87 = VAR_84
VAR_83 += VAR_87
VAR_14 = FUNC_59[len(VAR_44.group(0)):]
continue
if VAR_14: # pragma: no cover
raise RuntimeError('Infinite loop at: %s' % VAR_14)
return VAR_83
def FUNC_27(self, VAR_44):
VAR_14 = VAR_44.group(1)
return self.renderer.escape(VAR_14)
def FUNC_28(self, VAR_44):
VAR_49 = VAR_44.group(1)
if VAR_44.group(2) == '@':
VAR_72 = True
else:
VAR_72 = False
return self.renderer.autolink(VAR_49, VAR_72)
def FUNC_29(self, VAR_44):
VAR_49 = VAR_44.group(1)
if self._in_link:
return self.renderer.text(VAR_49)
return self.renderer.autolink(VAR_49, False)
def FUNC_30(self, VAR_44):
VAR_82 = VAR_44.group(1)
if self._parse_inline_html and VAR_82 in VAR_6:
VAR_14 = VAR_44.group(3)
if VAR_82 == 'a':
self._in_link = True
VAR_14 = self.output(VAR_14, VAR_43=self.inline_html_rules)
self._in_link = False
else:
VAR_14 = self.output(VAR_14, VAR_43=self.inline_html_rules)
VAR_93 = VAR_44.group(2) or ''
VAR_64 = '<%s%s>%s</%s>' % (VAR_82, VAR_93, VAR_14, VAR_82)
else:
VAR_64 = VAR_44.group(0)
return self.renderer.inline_html(VAR_64)
def FUNC_31(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 not in self.footnotes:
return None
if self.footnotes[VAR_13]:
return None
self.footnote_index += 1
self.footnotes[VAR_13] = self.footnote_index
return self.renderer.footnote_ref(VAR_13, self.footnote_index)
def FUNC_32(self, VAR_44):
return self._process_link(VAR_44, VAR_44.group(3), VAR_44.group(4))
def FUNC_33(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(2) or VAR_44.group(1))
if VAR_13 not in self.links:
return None
VAR_84 = self.links[VAR_13]
return self._process_link(VAR_44, VAR_84['link'], VAR_84['title'])
def FUNC_34(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 not in self.links:
return None
VAR_84 = self.links[VAR_13]
return self._process_link(VAR_44, VAR_84['link'], VAR_84['title'])
def FUNC_35(self, VAR_44, VAR_49, VAR_62=None):
VAR_85 = VAR_44.group(0)
VAR_14 = VAR_44.group(1)
if VAR_85[0] == '!':
return self.renderer.image(VAR_49, VAR_62, VAR_14)
self._in_link = True
VAR_14 = self.output(VAR_14)
self._in_link = False
return self.renderer.link(VAR_49, VAR_62, VAR_14)
def FUNC_36(self, VAR_44):
VAR_14 = VAR_44.group(2) or VAR_44.group(1)
VAR_14 = self.output(VAR_14)
return self.renderer.double_emphasis(VAR_14)
def FUNC_37(self, VAR_44):
VAR_14 = VAR_44.group(2) or VAR_44.group(1)
VAR_14 = self.output(VAR_14)
return self.renderer.emphasis(VAR_14)
def FUNC_38(self, VAR_44):
VAR_14 = VAR_44.group(2)
return self.renderer.codespan(VAR_14)
def FUNC_39(self, VAR_44):
return self.renderer.linebreak()
def FUNC_40(self, VAR_44):
VAR_14 = self.output(VAR_44.group(1))
return self.renderer.strikethrough(VAR_14)
def FUNC_41(self, VAR_44):
VAR_14 = VAR_44.group(0)
return self.renderer.text(VAR_14)
class CLASS_4(object):
def __init__(self, **VAR_20):
self.options = VAR_20
def FUNC_42(self):
return ''
def VAR_26(self, VAR_54, VAR_63=None):
VAR_54 = code.rstrip('\n')
if not VAR_63:
VAR_54 = VAR_19(VAR_54, VAR_16=False)
return '<pre><VAR_54>%s\n</VAR_54></pre>\n' % VAR_54
VAR_54 = VAR_19(VAR_54, VAR_15=True, VAR_16=False)
return '<pre><VAR_54 class="lang-%s">%s\n</VAR_54></pre>\n' % (VAR_63, VAR_54)
def VAR_31(self, VAR_14):
return '<blockquote>%s\n</blockquote>\n' % VAR_14.rstrip('\n')
def VAR_36(self, VAR_64):
if self.options.get('skip_style') and \
VAR_64.lower().startswith('<style'):
return ''
if self.options.get('escape'):
return VAR_19(VAR_64)
return VAR_64
def VAR_69(self, VAR_14, VAR_65, VAR_66=None):
return '<h%d>%s</h%d>\n' % (VAR_65, VAR_14, VAR_65)
def VAR_28(self):
if self.options.get('use_xhtml'):
return '<hr />\n'
return '<hr>\n'
def FUNC_48(self, VAR_67, VAR_68=True):
VAR_82 = 'ul'
if VAR_68:
VAR_82 = 'ol'
return '<%s>\n%s</%s>\n' % (VAR_82, VAR_67, VAR_82)
def VAR_33(self, VAR_14):
return '<li>%s</li>\n' % VAR_14
def VAR_35(self, VAR_14):
return '<p>%s</p>\n' % VAR_14.strip(' ')
def VAR_37(self, VAR_69, VAR_67):
return (
'<VAR_37>\n<thead>%s</thead>\n'
'<tbody>\n%s</tbody>\n</VAR_37>\n'
) % (VAR_69, VAR_67)
def FUNC_52(self, VAR_70):
return '<tr>\n%s</tr>\n' % VAR_70
def FUNC_53(self, VAR_70, **VAR_71):
if VAR_71['header']:
VAR_82 = 'th'
else:
VAR_82 = 'td'
VAR_81 = VAR_71['align']
if not VAR_81:
return '<%s>%s</%s>\n' % (VAR_82, VAR_70, VAR_82)
return '<%s style="text-VAR_81:%s">%s</%s>\n' % (
VAR_82, VAR_81, VAR_70, VAR_82
)
def VAR_52(self, VAR_14):
return '<strong>%s</strong>' % VAR_14
def VAR_53(self, VAR_14):
return '<em>%s</em>' % VAR_14
def FUNC_56(self, VAR_14):
VAR_14 = VAR_19(VAR_14.rstrip(), VAR_16=False)
return '<VAR_54>%s</VAR_54>' % VAR_14
def VAR_55(self):
if self.options.get('use_xhtml'):
return '<br />\n'
return '<br>\n'
def VAR_56(self, VAR_14):
return '<del>%s</del>' % VAR_14
def VAR_14(self, VAR_14):
if self.options.get('parse_block_html'):
return VAR_14
return VAR_19(VAR_14)
def VAR_19(self, VAR_14):
return VAR_19(VAR_14)
def VAR_48(self, VAR_49, VAR_72=False):
VAR_14 = VAR_49 = FUNC_3(VAR_49)
if VAR_72:
VAR_49 = 'mailto:%s' % VAR_49
return '<a href="%s">%s</a>' % (VAR_49, VAR_14)
def VAR_49(self, VAR_49, VAR_62, VAR_14):
VAR_49 = FUNC_3(VAR_49)
if not VAR_62:
return '<a href="%s">%s</a>' % (VAR_49, VAR_14)
VAR_62 = VAR_19(VAR_62, VAR_15=True)
return '<a href="%s" VAR_62="%s">%s</a>' % (VAR_49, VAR_62, VAR_14)
def FUNC_62(self, VAR_73, VAR_62, VAR_14):
VAR_73 = FUNC_3(VAR_73)
VAR_14 = VAR_19(VAR_14, VAR_15=True)
if VAR_62:
VAR_62 = VAR_19(VAR_62, VAR_15=True)
VAR_64 = '<img VAR_73="%s" alt="%s" VAR_62="%s"' % (VAR_73, VAR_14, VAR_62)
else:
VAR_64 = '<img VAR_73="%s" alt="%s"' % (VAR_73, VAR_14)
if self.options.get('use_xhtml'):
return '%s />' % VAR_64
return '%s>' % VAR_64
def VAR_47(self, VAR_64):
if self.options.get('escape'):
return VAR_19(VAR_64)
return VAR_64
def VAR_25(self):
return ''
def FUNC_65(self, VAR_13, VAR_74):
VAR_64 = (
'<sup class="footnote-ref" id="fnref-%s">'
'<a href="#fn-%s">%d</a></sup>'
) % (VAR_19(VAR_13), VAR_19(VAR_13), VAR_74)
return VAR_64
def FUNC_66(self, VAR_13, VAR_14):
VAR_86 = (
'<a href="#fnref-%s" class="footnote">↩</a>'
) % VAR_19(VAR_13)
VAR_14 = VAR_14.rstrip()
if VAR_14.endswith('</p>'):
VAR_14 = re.sub(r'<\/p>$', r'%s</p>' % VAR_86, VAR_14)
else:
VAR_14 = '%s<p>%s</p>' % (VAR_14, VAR_86)
VAR_64 = '<li id="fn-%s">%s</li>\n' % (VAR_19(VAR_13), VAR_14)
return VAR_64
def VAR_61(self, VAR_14):
VAR_64 = '<div class="footnotes">\n%s<ol>%s</ol>\n</div>\n'
return VAR_64 % (self.hrule(), VAR_14)
class CLASS_5(object):
def __init__(self, VAR_59=None, VAR_75=None, VAR_76=None, **VAR_20):
if not VAR_59:
VAR_59 = CLASS_4(**VAR_20)
else:
VAR_20.update(VAR_59.options)
self.renderer = VAR_59
if VAR_75 and inspect.isclass(VAR_75):
VAR_75 = VAR_75(VAR_59, **VAR_20)
if VAR_76 and inspect.isclass(VAR_76):
VAR_76 = VAR_76(**VAR_20)
if VAR_75:
self.inline = VAR_75
else:
self.inline = CLASS_3(VAR_59, **VAR_20)
self.block = VAR_76 or CLASS_1(CLASS_0())
self.footnotes = []
self.tokens = []
self._parse_block_html = VAR_20.get('parse_block_html')
def __call__(self, VAR_14):
return self.parse(VAR_14)
def FUNC_68(self, VAR_14):
return self.parse(VAR_14)
def FUNC_6(self, VAR_14):
VAR_87 = self.output(FUNC_4(VAR_14))
VAR_88 = self.block.def_footnotes
self.block.def_links = {}
self.block.def_footnotes = {}
self.inline.links = {}
self.inline.footnotes = {}
if not self.footnotes:
return VAR_87
VAR_61 = filter(lambda o: VAR_88.get(o['key']), self.footnotes)
self.footnotes = sorted(
VAR_61, VAR_13=lambda o: VAR_88.get(o['key']), reverse=True
)
VAR_67 = self.renderer.placeholder()
while self.footnotes:
VAR_103 = self.footnotes.pop()
VAR_67 += self.renderer.footnote_item(
VAR_103['key'], VAR_103['text']
)
VAR_87 += self.renderer.footnotes(VAR_67)
return VAR_87
def FUNC_69(self):
if not self.tokens:
return None
self.token = self.tokens.pop()
return self.token
def FUNC_70(self):
if self.tokens:
return self.tokens[-1]
return None # pragma: no cover
def VAR_83(self, VAR_14, VAR_43=None):
self.tokens = self.block(VAR_14, VAR_43)
self.tokens.reverse()
self.inline.setup(self.block.def_links, self.block.def_footnotes)
VAR_87 = self.renderer.placeholder()
while self.pop():
VAR_87 += self.tok()
return VAR_87
def FUNC_71(self):
VAR_89 = self.token['type']
if VAR_89.endswith('_start'):
VAR_89 = t[:-6]
return getattr(self, 'output_%s' % VAR_89)()
def FUNC_72(self):
VAR_14 = self.token['text']
while self.peek()['type'] == 'text':
VAR_14 += '\n' + self.pop()['text']
return self.inline(VAR_14)
def FUNC_73(self):
return self.renderer.newline()
def FUNC_74(self):
return self.renderer.hrule()
def FUNC_75(self):
return self.renderer.header(
self.inline(self.token['text']),
self.token['level'],
self.token['text'],
)
def FUNC_38(self):
return self.renderer.block_code(
self.token['text'], self.token['lang']
)
def FUNC_76(self):
VAR_90 = self.token['align']
VAR_91 = len(VAR_90)
VAR_92 = self.renderer.placeholder()
VAR_69 = self.renderer.placeholder()
for VAR_101, value in enumerate(self.token['header']):
VAR_81 = VAR_90[VAR_101] if VAR_101 < VAR_91 else None
VAR_71 = {'header': True, 'align': VAR_81}
VAR_92 += self.renderer.table_cell(self.inline(value), **VAR_71)
VAR_69 += self.renderer.table_row(VAR_92)
VAR_67 = self.renderer.placeholder()
for VAR_101, row in enumerate(self.token['cells']):
VAR_92 = self.renderer.placeholder()
for j, value in enumerate(row):
VAR_81 = VAR_90[j] if j < VAR_91 else None
VAR_71 = {'header': False, 'align': VAR_81}
VAR_92 += self.renderer.table_cell(self.inline(value), **VAR_71)
VAR_67 += self.renderer.table_row(VAR_92)
return self.renderer.table(VAR_69, VAR_67)
def FUNC_77(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'block_quote_end':
VAR_67 += self.tok()
return self.renderer.block_quote(VAR_67)
def FUNC_78(self):
VAR_68 = self.token['ordered']
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_end':
VAR_67 += self.tok()
return self.renderer.list(VAR_67, VAR_68)
def FUNC_79(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_item_end':
if self.token['type'] == 'text':
VAR_67 += self.tok_text()
else:
VAR_67 += self.tok()
return self.renderer.list_item(VAR_67)
def FUNC_80(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_item_end':
VAR_67 += self.tok()
return self.renderer.list_item(VAR_67)
def FUNC_31(self):
self.inline._in_footnote = True
VAR_67 = self.renderer.placeholder()
VAR_13 = self.token['key']
while self.pop()['type'] != 'footnote_end':
VAR_67 += self.tok()
self.footnotes.append({'key': VAR_13, 'text': VAR_67})
self.inline._in_footnote = False
return self.renderer.placeholder()
def FUNC_81(self):
VAR_14 = self.token['text']
return self.renderer.block_html(VAR_14)
def FUNC_82(self):
VAR_14 = self.token['text']
VAR_82 = self.token['tag']
if self._parse_block_html and VAR_82 not in VAR_7:
VAR_14 = self.inline(VAR_14, VAR_43=self.inline.inline_html_rules)
VAR_93 = self.token.get('extra') or ''
VAR_64 = '<%s%s>%s</%s>' % (VAR_82, VAR_93, VAR_14, VAR_82)
return self.renderer.block_html(VAR_64)
def FUNC_83(self):
return self.renderer.paragraph(self.inline(self.token['text']))
def FUNC_41(self):
return self.renderer.paragraph(self.tok_text())
def FUNC_5(VAR_14, VAR_19=True, **VAR_20):
return CLASS_5(VAR_19=FUNC_2, **VAR_20)(VAR_14)
|
import re
import .inspect
__version__ = '0.8.1'
__author__ = 'Hsiaoming Yang <me@lepture.com>'
__all__ = [
'BlockGrammar', 'BlockLexer',
'InlineGrammar', 'InlineLexer',
'Renderer', 'Markdown',
'markdown', 'escape',
]
VAR_0 = re.compile(r'\s+')
VAR_1 = re.compile(r'\W')
VAR_2 = re.compile(r'&(?!#?\w+;)')
VAR_3 = re.compile(r'\r\n|\r')
VAR_4 = re.compile(r'^ *> ?', VAR_71=re.M)
VAR_5 = re.compile(r'^ {4}', re.M)
VAR_6 = [
'a', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn', 'abbr', 'data',
'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i', 'b', 'u', 'mark',
'ruby', 'rt', 'rp', 'bdi', 'bdo', 'span', 'br', 'wbr', 'ins', 'del',
'img', 'font',
]
VAR_7 = ['pre', 'script', 'style']
VAR_8 = r'(?!:/|[^\w\s@]*@)\b'
VAR_9 = r'''\s*[a-zA-Z\-](?:\=(?:"[^"]*"|'[^']*'|[^\s'">]+))?'''
VAR_10 = r'(?!(?:%s)\b)\w+%s' % ('|'.join(VAR_6), VAR_8)
VAR_11 = ('javascript:', 'vbscript:')
def FUNC_0(VAR_12):
VAR_21 = VAR_12.pattern
if VAR_21.startswith('^'):
VAR_21 = pattern[1:]
return VAR_21
def FUNC_1(VAR_13):
VAR_13 = VAR_19(VAR_13.lower(), VAR_15=True)
return VAR_0.sub(' ', VAR_13)
def VAR_19(VAR_14, VAR_15=False, VAR_16=True):
if VAR_16:
VAR_14 = VAR_2.sub('&', VAR_14)
else:
VAR_14 = VAR_14.replace('&', '&')
VAR_14 = VAR_14.replace('<', '<')
VAR_14 = VAR_14.replace('>', '>')
if VAR_15:
VAR_14 = VAR_14.replace('"', '"')
VAR_14 = VAR_14.replace("'", ''')
return VAR_14
def FUNC_3(VAR_17):
VAR_22 = VAR_17.lower().strip('\x00\x1a \n\r\t')
for scheme in VAR_11:
if re.sub(r'[^A-Za-z0-9\/:]+', '', VAR_22).startswith(scheme):
return ''
return VAR_19(VAR_17, VAR_15=True, VAR_16=False)
def FUNC_4(VAR_14, VAR_18=4):
VAR_14 = VAR_3.sub('\n', VAR_14)
VAR_14 = VAR_14.expandtabs(VAR_18)
VAR_14 = VAR_14.replace('\u2424', '\n')
VAR_21 = re.compile(r'^ +$', re.M)
return VAR_21.sub('', VAR_14)
class CLASS_0(object):
VAR_23 = re.compile(
r'^ *\[([^^\]]+)\]: *' # [VAR_13]:
r'<?([^\s>]+)>?' # <VAR_49> or VAR_49
r'(?: +["(]([^\n]+)[")])? *(?:\n+|$)'
)
VAR_24 = re.compile(
r'^\[\^([^\]]+)\]: *('
r'[^\n]*(?:\n+|$)' # [^VAR_13]:
r'(?: {1,}[^\n]*(?:\n+|$))*'
r')'
)
VAR_25 = re.compile(r'^\n+')
VAR_26 = re.compile(r'^( {4}[^\n]+\n*)+')
VAR_27 = re.compile(
r'^ *(`{3,}|~{3,}) *(\S+)? *\n' # ```VAR_63
r'([\s\S]+?)\s*'
r'\1 *(?:\n+|$)' # ```
)
VAR_28 = re.compile(r'^ {0,3}[-*_](?: *[-*_]){2,} *(?:\n+|$)')
VAR_29 = re.compile(r'^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)')
VAR_30 = re.compile(r'^([^\n]+)\n *(=|-)+ *(?:\n+|$)')
VAR_31 = re.compile(r'^( *>[^\n]+(\n[^\n]+)*\n*)+')
VAR_32 = re.compile(
r'^( *)([*+-]|\d+\.) [\s\S]+?'
r'(?:'
r'\n+(?=\1?(?:[-*_] *){3,}(?:\n+|$))' # VAR_28
r'|\n+(?=%s)' # def VAR_60
r'|\n+(?=%s)' # def VAR_61
r'|\n{2,}'
r'(?! )'
r'(?!\1(?:[*+-]|\d+\.) )\n*'
r'|'
r'\s*$)' % (
FUNC_0(VAR_23),
FUNC_0(VAR_24),
)
)
VAR_33 = re.compile(
r'^(( *)(?:[*+-]|\d+\.) [^\n]*'
r'(?:\n(?!\2(?:[*+-]|\d+\.) )[^\n]*)*)',
VAR_71=re.M
)
VAR_34 = re.compile(r'^ *(?:[*+-]|\d+\.) +')
VAR_35 = re.compile(
r'^((?:[^\n]+\n?(?!'
r'%s|%s|%s|%s|%s|%s|%s|%s|%s'
r'))+)\n*' % (
FUNC_0(VAR_27).replace(r'\1', r'\2'),
FUNC_0(VAR_32).replace(r'\1', r'\3'),
FUNC_0(VAR_28),
FUNC_0(VAR_29),
FUNC_0(VAR_30),
FUNC_0(VAR_31),
FUNC_0(VAR_23),
FUNC_0(VAR_24),
'<' + VAR_10,
)
)
VAR_36 = re.compile(
r'^ *(?:%s|%s|%s) *(?:\n{2,}|\s*$)' % (
r'<!--[\s\S]*?-->',
r'<(%s)((?:%s)*?)>([\s\S]*?)<\/\1>' % (VAR_10, VAR_9),
r'<%s(?:%s)*?\s*\/?>' % (VAR_10, VAR_9),
)
)
VAR_37 = re.compile(
r'^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*'
)
VAR_38 = re.compile(
r'^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*'
)
VAR_14 = re.compile(r'^[^\n]+')
class CLASS_1(object):
VAR_39 = CLASS_0
VAR_40 = [
'newline', 'hrule', 'block_code', 'fences', 'heading',
'nptable', 'lheading', 'block_quote',
'list_block', 'block_html', 'def_links',
'def_footnotes', 'table', 'paragraph', 'text'
]
VAR_41 = (
'newline', 'block_code', 'fences', 'lheading', 'hrule',
'block_quote', 'list_block', 'block_html', 'text',
)
VAR_42 = (
'newline', 'block_code', 'fences', 'heading',
'nptable', 'lheading', 'hrule', 'block_quote',
'list_block', 'block_html', 'table', 'paragraph', 'text'
)
def __init__(self, VAR_43=None, **VAR_20):
self.tokens = []
self.def_links = {}
self.def_footnotes = {}
if not VAR_43:
rules = self.grammar_class()
self.rules = VAR_43
def __call__(self, VAR_14, VAR_43=None):
return self.parse(VAR_14, VAR_43)
def FUNC_6(self, VAR_14, VAR_43=None):
VAR_14 = VAR_14.rstrip('\n')
if not VAR_43:
rules = self.default_rules
def FUNC_84(VAR_14):
for VAR_13 in VAR_43:
VAR_104 = getattr(self.rules, VAR_13)
VAR_44 = VAR_104.match(VAR_14)
if not VAR_44:
continue
getattr(self, 'parse_%s' % VAR_13)(VAR_44)
return VAR_44
return False # pragma: no cover
while VAR_14:
VAR_44 = FUNC_84(VAR_14)
if VAR_44 is not False:
VAR_14 = FUNC_59[len(VAR_44.group(0)):]
continue
if VAR_14: # pragma: no cover
raise RuntimeError('Infinite loop at: %s' % VAR_14)
return self.tokens
def FUNC_7(self, VAR_44):
VAR_77 = len(VAR_44.group(0))
if VAR_77 > 1:
self.tokens.append({'type': 'newline'})
def FUNC_8(self, VAR_44):
VAR_54 = VAR_5.sub('', VAR_44.group(0))
self.tokens.append({
'type': 'code',
'lang': None,
'text': VAR_54,
})
def FUNC_9(self, VAR_44):
self.tokens.append({
'type': 'code',
'lang': VAR_44.group(2),
'text': VAR_44.group(3),
})
def FUNC_10(self, VAR_44):
self.tokens.append({
'type': 'heading',
'level': len(VAR_44.group(1)),
'text': VAR_44.group(2),
})
def FUNC_11(self, VAR_44):
self.tokens.append({
'type': 'heading',
'level': 1 if VAR_44.group(2) == '=' else 2,
'text': VAR_44.group(1),
})
def FUNC_12(self, VAR_44):
self.tokens.append({'type': 'hrule'})
def FUNC_13(self, VAR_44):
VAR_46 = VAR_44.group(2)
self.tokens.append({
'type': 'list_start',
'ordered': '.' in VAR_46,
})
VAR_45 = VAR_44.group(0)
self._process_list_item(VAR_45, VAR_46)
self.tokens.append({'type': 'list_end'})
def FUNC_14(self, VAR_45, VAR_46):
VAR_45 = self.rules.list_item.findall(VAR_45)
VAR_78 = False
VAR_77 = len(VAR_45)
for VAR_101 in range(VAR_77):
VAR_79 = VAR_45[VAR_101][0]
VAR_94 = len(VAR_79)
VAR_79 = self.rules.list_bullet.sub('', VAR_79)
if '\n ' in VAR_79:
VAR_94 = VAR_94 - len(VAR_79)
VAR_21 = re.compile(r'^ {1,%d}' % VAR_94, VAR_71=re.M)
VAR_79 = VAR_21.sub('', VAR_79)
VAR_95 = VAR_78
if not VAR_95 and re.search(r'\n\n(?!\s*$)', VAR_79):
VAR_95 = True
VAR_96 = len(VAR_79)
if VAR_101 != VAR_77 - 1 and VAR_96:
VAR_78 = VAR_79[VAR_96-1] == '\n'
if not VAR_95:
VAR_95 = VAR_78
if VAR_95:
VAR_89 = 'loose_item_start'
else:
VAR_89 = 'list_item_start'
self.tokens.append({'type': VAR_89})
self.parse(VAR_79, self.list_rules)
self.tokens.append({'type': 'list_item_end'})
def FUNC_15(self, VAR_44):
self.tokens.append({'type': 'block_quote_start'})
VAR_45 = VAR_4.sub('', VAR_44.group(0))
self.parse(VAR_45)
self.tokens.append({'type': 'block_quote_end'})
def FUNC_16(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
self.def_links[VAR_13] = {
'link': VAR_44.group(2),
'title': VAR_44.group(3),
}
def FUNC_17(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 in self.def_footnotes:
return
self.def_footnotes[VAR_13] = 0
self.tokens.append({
'type': 'footnote_start',
'key': VAR_13,
})
VAR_14 = VAR_44.group(2)
if '\n' in VAR_14:
VAR_97 = VAR_14.split('\n')
VAR_98 = None
for VAR_85 in VAR_97[1:]:
VAR_94 = len(VAR_85) - len(VAR_85.lstrip())
if VAR_94 and (not VAR_98 or VAR_94 < VAR_98):
VAR_98 = VAR_94
VAR_99 = [VAR_97[0]]
for VAR_85 in VAR_97[1:]:
VAR_99.append(VAR_85[VAR_98:])
VAR_14 = '\n'.join(VAR_99)
self.parse(VAR_14, self.footnote_rules)
self.tokens.append({
'type': 'footnote_end',
'key': VAR_13,
})
def FUNC_18(self, VAR_44):
VAR_79 = self._process_table(VAR_44)
VAR_80 = re.sub(r'(?: *\| *)?\n$', '', VAR_44.group(3))
VAR_80 = cells.split('\n')
for VAR_101, VAR_100 in enumerate(VAR_80):
VAR_100 = re.sub(r'^ *\| *| *\| *$', '', VAR_100)
VAR_80[VAR_101] = re.split(r' *\| *', VAR_100)
VAR_79['cells'] = VAR_80
self.tokens.append(VAR_79)
def FUNC_19(self, VAR_44):
VAR_79 = self._process_table(VAR_44)
VAR_80 = re.sub(r'\n$', '', VAR_44.group(3))
VAR_80 = cells.split('\n')
for VAR_101, VAR_100 in enumerate(VAR_80):
cells[VAR_101] = re.split(r' *\| *', VAR_100)
VAR_79['cells'] = VAR_80
self.tokens.append(VAR_79)
def FUNC_20(self, VAR_44):
VAR_69 = re.sub(r'^ *| *\| *$', '', VAR_44.group(1))
VAR_69 = re.split(r' *\| *', VAR_69)
VAR_81 = re.sub(r' *|\| *$', '', VAR_44.group(2))
VAR_81 = re.split(r' *\| *', VAR_81)
for VAR_101, VAR_100 in enumerate(VAR_81):
if re.search(r'^ *-+: *$', VAR_100):
VAR_81[VAR_101] = 'right'
elif re.search(r'^ *:-+: *$', VAR_100):
VAR_81[VAR_101] = 'center'
elif re.search(r'^ *:-+ *$', VAR_100):
VAR_81[VAR_101] = 'left'
else:
VAR_81[VAR_101] = None
VAR_79 = {
'type': 'table',
'header': VAR_69,
'align': VAR_81,
}
return VAR_79
def FUNC_21(self, VAR_44):
VAR_82 = VAR_44.group(1)
if not VAR_82:
VAR_14 = VAR_44.group(0)
self.tokens.append({
'type': 'close_html',
'text': VAR_14
})
else:
VAR_102 = VAR_44.group(2)
VAR_14 = VAR_44.group(3)
self.tokens.append({
'type': 'open_html',
'tag': VAR_82,
'extra': VAR_102,
'text': VAR_14
})
def FUNC_22(self, VAR_44):
VAR_14 = VAR_44.group(1).rstrip('\n')
self.tokens.append({'type': 'paragraph', 'text': VAR_14})
def FUNC_23(self, VAR_44):
VAR_14 = VAR_44.group(0)
self.tokens.append({'type': 'text', 'text': VAR_14})
class CLASS_2(object):
VAR_19 = re.compile(r'^\\([\\`*{}\[\]()#+\-.!_>~|])') # \* \+ \! ....
VAR_47 = re.compile(
r'^(?:%s|%s|%s)' % (
r'<!--[\s\S]*?-->',
r'<(\w+%s)((?:%s)*?)\s*>([\s\S]*?)<\/\1>' % (
VAR_8, VAR_9),
r'<\w+%s(?:%s)*?\s*\/?>' % (VAR_8, VAR_9),
)
)
VAR_48 = re.compile(r'^<([^ >]+(@|:)[^ >]+)>')
VAR_49 = re.compile(
r'^!?\[('
r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
r')\]\('
r'''\s*(<)?([\s\S]*?)(?(2)>)(?:\s+['"]([\s\S]*?)['"])?\s*'''
r'\)'
)
VAR_50 = re.compile(
r'^!?\[('
r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
r')\]\s*\[([^^\]]*)\]'
)
VAR_51 = re.compile(r'^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]')
VAR_17 = re.compile(r'''^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])''')
VAR_52 = re.compile(
r'^_{2}([\s\S]+?)_{2}(?!_)' # __word__
r'|'
r'^\*{2}([\s\S]+?)\*{2}(?!\*)' # **word**
)
VAR_53 = re.compile(
r'^\b_((?:__|[^_])+?)_\b' # _word_
r'|'
r'^\*((?:\*\*|[^\*])+?)\*(?!\*)' # *word*
)
VAR_54 = re.compile(r'^(`+)\s*([\s\S]*?[^`])\s*\1(?!`)') # `VAR_54`
VAR_55 = re.compile(r'^ {2,}\n(?!\s*$)')
VAR_56 = re.compile(r'^~~(?=\S)([\s\S]*?\S)~~') # ~~word~~
VAR_57 = re.compile(r'^\[\^([^\]]+)\]')
VAR_14 = re.compile(r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| {2,}\n|$)')
def FUNC_24(self):
self.linebreak = re.compile(r'^ *\n(?!\s*$)')
self.text = re.compile(
r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| *\n|$)'
)
class CLASS_3(object):
VAR_39 = CLASS_2
VAR_40 = [
'escape', 'inline_html', 'autolink', 'url',
'footnote', 'link', 'reflink', 'nolink',
'double_emphasis', 'emphasis', 'code',
'linebreak', 'strikethrough', 'text',
]
VAR_58 = [
'escape', 'autolink', 'url', 'link', 'reflink',
'nolink', 'double_emphasis', 'emphasis', 'code',
'linebreak', 'strikethrough', 'text',
]
def __init__(self, VAR_59, VAR_43=None, **VAR_20):
self.renderer = VAR_59
self.links = {}
self.footnotes = {}
self.footnote_index = 0
if not VAR_43:
rules = self.grammar_class()
VAR_20.update(self.renderer.options)
if VAR_20.get('hard_wrap'):
VAR_43.hard_wrap()
self.rules = VAR_43
self._in_link = False
self._in_footnote = False
self._parse_inline_html = VAR_20.get('parse_inline_html')
def __call__(self, VAR_14, VAR_43=None):
return self.output(VAR_14, VAR_43)
def FUNC_25(self, VAR_60, VAR_61):
self.footnote_index = 0
self.links = VAR_60 or {}
self.footnotes = VAR_61 or {}
def VAR_83(self, VAR_14, VAR_43=None):
VAR_14 = VAR_14.rstrip('\n')
if not VAR_43:
rules = FUNC_48(self.default_rules)
if self._in_footnote and 'footnote' in VAR_43:
rules.remove('footnote')
VAR_83 = self.renderer.placeholder()
def FUNC_84(VAR_14):
for VAR_13 in VAR_43:
VAR_21 = getattr(self.rules, VAR_13)
VAR_44 = VAR_21.match(VAR_14)
if not VAR_44:
continue
self.line_match = VAR_44
VAR_87 = getattr(self, 'output_%s' % VAR_13)(VAR_44)
if VAR_87 is not None:
return VAR_44, VAR_87
return False # pragma: no cover
while VAR_14:
VAR_84 = FUNC_84(VAR_14)
if VAR_84 is not False:
VAR_44, VAR_87 = VAR_84
VAR_83 += VAR_87
VAR_14 = FUNC_59[len(VAR_44.group(0)):]
continue
if VAR_14: # pragma: no cover
raise RuntimeError('Infinite loop at: %s' % VAR_14)
return VAR_83
def FUNC_27(self, VAR_44):
VAR_14 = VAR_44.group(1)
return self.renderer.escape(VAR_14)
def FUNC_28(self, VAR_44):
VAR_49 = VAR_44.group(1)
if VAR_44.group(2) == '@':
VAR_72 = True
else:
VAR_72 = False
return self.renderer.autolink(VAR_49, VAR_72)
def FUNC_29(self, VAR_44):
VAR_49 = VAR_44.group(1)
if self._in_link:
return self.renderer.text(VAR_49)
return self.renderer.autolink(VAR_49, False)
def FUNC_30(self, VAR_44):
VAR_82 = VAR_44.group(1)
if self._parse_inline_html and VAR_82 in VAR_6:
VAR_14 = VAR_44.group(3)
if VAR_82 == 'a':
self._in_link = True
VAR_14 = self.output(VAR_14, VAR_43=self.inline_html_rules)
self._in_link = False
else:
VAR_14 = self.output(VAR_14, VAR_43=self.inline_html_rules)
VAR_93 = VAR_44.group(2) or ''
VAR_64 = '<%s%s>%s</%s>' % (VAR_82, VAR_93, VAR_14, VAR_82)
else:
VAR_64 = VAR_44.group(0)
return self.renderer.inline_html(VAR_64)
def FUNC_31(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 not in self.footnotes:
return None
if self.footnotes[VAR_13]:
return None
self.footnote_index += 1
self.footnotes[VAR_13] = self.footnote_index
return self.renderer.footnote_ref(VAR_13, self.footnote_index)
def FUNC_32(self, VAR_44):
return self._process_link(VAR_44, VAR_44.group(3), VAR_44.group(4))
def FUNC_33(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(2) or VAR_44.group(1))
if VAR_13 not in self.links:
return None
VAR_84 = self.links[VAR_13]
return self._process_link(VAR_44, VAR_84['link'], VAR_84['title'])
def FUNC_34(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 not in self.links:
return None
VAR_84 = self.links[VAR_13]
return self._process_link(VAR_44, VAR_84['link'], VAR_84['title'])
def FUNC_35(self, VAR_44, VAR_49, VAR_62=None):
VAR_85 = VAR_44.group(0)
VAR_14 = VAR_44.group(1)
if VAR_85[0] == '!':
return self.renderer.image(VAR_49, VAR_62, VAR_14)
self._in_link = True
VAR_14 = self.output(VAR_14)
self._in_link = False
return self.renderer.link(VAR_49, VAR_62, VAR_14)
def FUNC_36(self, VAR_44):
VAR_14 = VAR_44.group(2) or VAR_44.group(1)
VAR_14 = self.output(VAR_14)
return self.renderer.double_emphasis(VAR_14)
def FUNC_37(self, VAR_44):
VAR_14 = VAR_44.group(2) or VAR_44.group(1)
VAR_14 = self.output(VAR_14)
return self.renderer.emphasis(VAR_14)
def FUNC_38(self, VAR_44):
VAR_14 = VAR_44.group(2)
return self.renderer.codespan(VAR_14)
def FUNC_39(self, VAR_44):
return self.renderer.linebreak()
def FUNC_40(self, VAR_44):
VAR_14 = self.output(VAR_44.group(1))
return self.renderer.strikethrough(VAR_14)
def FUNC_41(self, VAR_44):
VAR_14 = VAR_44.group(0)
return self.renderer.text(VAR_14)
class CLASS_4(object):
def __init__(self, **VAR_20):
self.options = VAR_20
def FUNC_42(self):
return ''
def VAR_26(self, VAR_54, VAR_63=None):
VAR_54 = code.rstrip('\n')
if not VAR_63:
VAR_54 = VAR_19(VAR_54, VAR_16=False)
return '<pre><VAR_54>%s\n</VAR_54></pre>\n' % VAR_54
VAR_54 = VAR_19(VAR_54, VAR_15=True, VAR_16=False)
return '<pre><VAR_54 class="lang-%s">%s\n</VAR_54></pre>\n' % (VAR_63, VAR_54)
def VAR_31(self, VAR_14):
return '<blockquote>%s\n</blockquote>\n' % VAR_14.rstrip('\n')
def VAR_36(self, VAR_64):
if self.options.get('skip_style') and \
VAR_64.lower().startswith('<style'):
return ''
if self.options.get('escape'):
return VAR_19(VAR_64)
return VAR_64
def VAR_69(self, VAR_14, VAR_65, VAR_66=None):
return '<h%d>%s</h%d>\n' % (VAR_65, VAR_14, VAR_65)
def VAR_28(self):
if self.options.get('use_xhtml'):
return '<hr />\n'
return '<hr>\n'
def FUNC_48(self, VAR_67, VAR_68=True):
VAR_82 = 'ul'
if VAR_68:
VAR_82 = 'ol'
return '<%s>\n%s</%s>\n' % (VAR_82, VAR_67, VAR_82)
def VAR_33(self, VAR_14):
return '<li>%s</li>\n' % VAR_14
def VAR_35(self, VAR_14):
return '<p>%s</p>\n' % VAR_14.strip(' ')
def VAR_37(self, VAR_69, VAR_67):
return (
'<VAR_37>\n<thead>%s</thead>\n'
'<tbody>\n%s</tbody>\n</VAR_37>\n'
) % (VAR_69, VAR_67)
def FUNC_52(self, VAR_70):
return '<tr>\n%s</tr>\n' % VAR_70
def FUNC_53(self, VAR_70, **VAR_71):
if VAR_71['header']:
VAR_82 = 'th'
else:
VAR_82 = 'td'
VAR_81 = VAR_71['align']
if not VAR_81:
return '<%s>%s</%s>\n' % (VAR_82, VAR_70, VAR_82)
return '<%s style="text-VAR_81:%s">%s</%s>\n' % (
VAR_82, VAR_81, VAR_70, VAR_82
)
def VAR_52(self, VAR_14):
return '<strong>%s</strong>' % VAR_14
def VAR_53(self, VAR_14):
return '<em>%s</em>' % VAR_14
def FUNC_56(self, VAR_14):
VAR_14 = VAR_19(VAR_14.rstrip(), VAR_16=False)
return '<VAR_54>%s</VAR_54>' % VAR_14
def VAR_55(self):
if self.options.get('use_xhtml'):
return '<br />\n'
return '<br>\n'
def VAR_56(self, VAR_14):
return '<del>%s</del>' % VAR_14
def VAR_14(self, VAR_14):
if self.options.get('parse_block_html'):
return VAR_14
return VAR_19(VAR_14)
def VAR_19(self, VAR_14):
return VAR_19(VAR_14)
def VAR_48(self, VAR_49, VAR_72=False):
VAR_14 = VAR_49 = FUNC_3(VAR_49)
if VAR_72:
VAR_49 = 'mailto:%s' % VAR_49
return '<a href="%s">%s</a>' % (VAR_49, VAR_14)
def VAR_49(self, VAR_49, VAR_62, VAR_14):
VAR_49 = FUNC_3(VAR_49)
if not VAR_62:
return '<a href="%s">%s</a>' % (VAR_49, VAR_14)
VAR_62 = VAR_19(VAR_62, VAR_15=True)
return '<a href="%s" VAR_62="%s">%s</a>' % (VAR_49, VAR_62, VAR_14)
def FUNC_62(self, VAR_73, VAR_62, VAR_14):
VAR_73 = FUNC_3(VAR_73)
VAR_14 = VAR_19(VAR_14, VAR_15=True)
if VAR_62:
VAR_62 = VAR_19(VAR_62, VAR_15=True)
VAR_64 = '<img VAR_73="%s" alt="%s" VAR_62="%s"' % (VAR_73, VAR_14, VAR_62)
else:
VAR_64 = '<img VAR_73="%s" alt="%s"' % (VAR_73, VAR_14)
if self.options.get('use_xhtml'):
return '%s />' % VAR_64
return '%s>' % VAR_64
def VAR_47(self, VAR_64):
if self.options.get('escape'):
return VAR_19(VAR_64)
return VAR_64
def VAR_25(self):
return ''
def FUNC_65(self, VAR_13, VAR_74):
VAR_64 = (
'<sup class="footnote-ref" id="fnref-%s">'
'<a href="#fn-%s">%d</a></sup>'
) % (VAR_19(VAR_13), VAR_19(VAR_13), VAR_74)
return VAR_64
def FUNC_66(self, VAR_13, VAR_14):
VAR_86 = (
'<a href="#fnref-%s" class="footnote">↩</a>'
) % VAR_19(VAR_13)
VAR_14 = VAR_14.rstrip()
if VAR_14.endswith('</p>'):
VAR_14 = re.sub(r'<\/p>$', r'%s</p>' % VAR_86, VAR_14)
else:
VAR_14 = '%s<p>%s</p>' % (VAR_14, VAR_86)
VAR_64 = '<li id="fn-%s">%s</li>\n' % (VAR_19(VAR_13), VAR_14)
return VAR_64
def VAR_61(self, VAR_14):
VAR_64 = '<div class="footnotes">\n%s<ol>%s</ol>\n</div>\n'
return VAR_64 % (self.hrule(), VAR_14)
class CLASS_5(object):
def __init__(self, VAR_59=None, VAR_75=None, VAR_76=None, **VAR_20):
if not VAR_59:
VAR_59 = CLASS_4(**VAR_20)
else:
VAR_20.update(VAR_59.options)
self.renderer = VAR_59
if VAR_75 and inspect.isclass(VAR_75):
VAR_75 = VAR_75(VAR_59, **VAR_20)
if VAR_76 and inspect.isclass(VAR_76):
VAR_76 = VAR_76(**VAR_20)
if VAR_75:
self.inline = VAR_75
else:
self.inline = CLASS_3(VAR_59, **VAR_20)
self.block = VAR_76 or CLASS_1(CLASS_0())
self.footnotes = []
self.tokens = []
self._parse_block_html = VAR_20.get('parse_block_html')
def __call__(self, VAR_14):
return self.parse(VAR_14)
def FUNC_68(self, VAR_14):
return self.parse(VAR_14)
def FUNC_6(self, VAR_14):
VAR_87 = self.output(FUNC_4(VAR_14))
VAR_88 = self.block.def_footnotes
self.block.def_links = {}
self.block.def_footnotes = {}
self.inline.links = {}
self.inline.footnotes = {}
if not self.footnotes:
return VAR_87
VAR_61 = filter(lambda o: VAR_88.get(o['key']), self.footnotes)
self.footnotes = sorted(
VAR_61, VAR_13=lambda o: VAR_88.get(o['key']), reverse=True
)
VAR_67 = self.renderer.placeholder()
while self.footnotes:
VAR_103 = self.footnotes.pop()
VAR_67 += self.renderer.footnote_item(
VAR_103['key'], VAR_103['text']
)
VAR_87 += self.renderer.footnotes(VAR_67)
return VAR_87
def FUNC_69(self):
if not self.tokens:
return None
self.token = self.tokens.pop()
return self.token
def FUNC_70(self):
if self.tokens:
return self.tokens[-1]
return None # pragma: no cover
def VAR_83(self, VAR_14, VAR_43=None):
self.tokens = self.block(VAR_14, VAR_43)
self.tokens.reverse()
self.inline.setup(self.block.def_links, self.block.def_footnotes)
VAR_87 = self.renderer.placeholder()
while self.pop():
VAR_87 += self.tok()
return VAR_87
def FUNC_71(self):
VAR_89 = self.token['type']
if VAR_89.endswith('_start'):
VAR_89 = t[:-6]
return getattr(self, 'output_%s' % VAR_89)()
def FUNC_72(self):
VAR_14 = self.token['text']
while self.peek()['type'] == 'text':
VAR_14 += '\n' + self.pop()['text']
return self.inline(VAR_14)
def FUNC_73(self):
return self.renderer.newline()
def FUNC_74(self):
return self.renderer.hrule()
def FUNC_75(self):
return self.renderer.header(
self.inline(self.token['text']),
self.token['level'],
self.token['text'],
)
def FUNC_38(self):
return self.renderer.block_code(
self.token['text'], self.token['lang']
)
def FUNC_76(self):
VAR_90 = self.token['align']
VAR_91 = len(VAR_90)
VAR_92 = self.renderer.placeholder()
VAR_69 = self.renderer.placeholder()
for VAR_101, value in enumerate(self.token['header']):
VAR_81 = VAR_90[VAR_101] if VAR_101 < VAR_91 else None
VAR_71 = {'header': True, 'align': VAR_81}
VAR_92 += self.renderer.table_cell(self.inline(value), **VAR_71)
VAR_69 += self.renderer.table_row(VAR_92)
VAR_67 = self.renderer.placeholder()
for VAR_101, row in enumerate(self.token['cells']):
VAR_92 = self.renderer.placeholder()
for j, value in enumerate(row):
VAR_81 = VAR_90[j] if j < VAR_91 else None
VAR_71 = {'header': False, 'align': VAR_81}
VAR_92 += self.renderer.table_cell(self.inline(value), **VAR_71)
VAR_67 += self.renderer.table_row(VAR_92)
return self.renderer.table(VAR_69, VAR_67)
def FUNC_77(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'block_quote_end':
VAR_67 += self.tok()
return self.renderer.block_quote(VAR_67)
def FUNC_78(self):
VAR_68 = self.token['ordered']
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_end':
VAR_67 += self.tok()
return self.renderer.list(VAR_67, VAR_68)
def FUNC_79(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_item_end':
if self.token['type'] == 'text':
VAR_67 += self.tok_text()
else:
VAR_67 += self.tok()
return self.renderer.list_item(VAR_67)
def FUNC_80(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_item_end':
VAR_67 += self.tok()
return self.renderer.list_item(VAR_67)
def FUNC_31(self):
self.inline._in_footnote = True
VAR_67 = self.renderer.placeholder()
VAR_13 = self.token['key']
while self.pop()['type'] != 'footnote_end':
VAR_67 += self.tok()
self.footnotes.append({'key': VAR_13, 'text': VAR_67})
self.inline._in_footnote = False
return self.renderer.placeholder()
def FUNC_81(self):
VAR_14 = self.token['text']
return self.renderer.block_html(VAR_14)
def FUNC_82(self):
VAR_14 = self.token['text']
VAR_82 = self.token['tag']
if self._parse_block_html and VAR_82 not in VAR_7:
VAR_14 = self.inline(VAR_14, VAR_43=self.inline.inline_html_rules)
VAR_93 = self.token.get('extra') or ''
VAR_64 = '<%s%s>%s</%s>' % (VAR_82, VAR_93, VAR_14, VAR_82)
return self.renderer.block_html(VAR_64)
def FUNC_83(self):
return self.renderer.paragraph(self.inline(self.token['text']))
def FUNC_41(self):
return self.renderer.paragraph(self.tok_text())
def FUNC_5(VAR_14, VAR_19=True, **VAR_20):
return CLASS_5(VAR_19=FUNC_2, **VAR_20)(VAR_14)
| [
1,
5,
7,
10,
13,
22,
23,
41,
42,
48,
49,
52,
53,
56,
59,
73,
74,
78,
83,
84,
91,
92,
95,
107,
169,
170,
174,
181,
186,
192,
197,
200,
202,
205,
208,
211,
221,
230,
235,
237,
244,
251,
258,
266,
269,
279,
282,
285,
288,
289,
292,
293,
298,
299,
303,
309,
314,
316,
319,
322,
326,
333,
337,
339,
341,
346,
348,
360,
362,
367,
370,
376,
379,
382,
387,
390,
396,
406,
413,
431,
435,
439,
440,
443,
482,
491,
492,
496,
508,
514,
517,
521,
523,
527,
530,
535,
540,
543,
545,
557,
567,
569,
573,
581,
587,
603,
613,
616,
623,
630,
636,
641,
646,
651,
655,
658,
662,
666,
667,
671,
674,
677,
681,
688,
691,
701,
704,
708,
711,
720,
723,
729,
735,
738,
746,
750,
754,
757,
765,
768,
772,
775,
790,
793,
797,
800,
804,
807,
812,
818,
821,
825,
828,
834,
837,
841,
844,
852,
855,
865,
868,
883,
886,
892,
896,
899,
908,
911,
925,
928,
933,
934,
937,
947,
949,
954,
959,
963,
964,
966,
969,
972,
976,
979,
981,
982,
985,
986,
989,
992,
997,
1004,
1007,
1013,
1018,
1022,
1024,
1029,
1032,
1033,
1036,
1038,
1044,
1047,
1050,
1057,
1062,
1067,
1068,
1074,
1076,
1077,
1086,
1088,
1094,
1101,
1109,
1111,
1117,
1127,
1131,
1140,
1143,
1146,
1147,
1150,
1159,
2,
3,
4,
5,
6,
7,
8,
9,
55,
56,
57,
58,
59,
60,
61,
62,
76,
94,
172,
442,
494,
669,
670,
936,
937,
938,
939,
940,
941,
1149,
1150,
1151,
1152,
1153,
1154,
1155,
1156,
1157,
260,
484,
485,
486,
676,
677,
678,
679,
680,
681,
682,
683,
684,
685,
686,
690,
691,
692,
693,
694,
703,
704,
705,
706,
710,
711,
712,
713,
722,
723,
724,
725,
726,
727,
731,
737,
738,
739,
740,
741,
748,
752,
756,
757,
758,
759,
760,
767,
768,
769,
770,
774,
775,
776,
777,
778,
779,
792,
793,
794,
795,
799,
800,
801,
802,
806,
807,
808,
809,
814,
820,
821,
822,
823,
827,
828,
829,
830,
836,
837,
838,
839,
843,
844,
845,
846,
847,
854,
855,
856,
857,
858,
859,
867,
868,
869,
870,
871,
872,
885,
886,
887,
888,
894,
898,
899,
900,
901,
902,
910,
911,
912,
913,
914,
927,
928,
929,
930,
971,
972,
973,
974
] | [
1,
5,
7,
10,
13,
22,
23,
41,
42,
48,
49,
53,
54,
57,
60,
74,
75,
79,
84,
85,
92,
93,
96,
108,
170,
171,
175,
182,
187,
193,
198,
201,
203,
206,
209,
212,
222,
231,
236,
238,
245,
252,
259,
267,
270,
280,
283,
286,
289,
290,
293,
294,
299,
300,
304,
310,
315,
317,
320,
323,
327,
334,
338,
340,
342,
347,
349,
361,
363,
368,
371,
377,
380,
383,
388,
391,
397,
407,
414,
432,
436,
440,
441,
444,
484,
493,
494,
498,
510,
516,
519,
523,
525,
529,
532,
537,
542,
545,
547,
559,
569,
571,
575,
583,
589,
605,
615,
618,
625,
632,
638,
643,
648,
653,
657,
660,
664,
668,
669,
673,
676,
679,
683,
690,
693,
703,
706,
710,
713,
722,
725,
731,
737,
740,
748,
752,
756,
759,
767,
770,
774,
777,
792,
795,
799,
802,
806,
809,
814,
820,
823,
827,
830,
836,
839,
843,
846,
854,
857,
867,
870,
885,
888,
894,
898,
901,
910,
913,
927,
930,
935,
936,
939,
949,
951,
956,
961,
965,
966,
968,
971,
974,
978,
981,
983,
984,
987,
988,
991,
994,
999,
1006,
1009,
1015,
1020,
1024,
1026,
1031,
1034,
1035,
1038,
1040,
1046,
1049,
1052,
1059,
1064,
1069,
1070,
1076,
1078,
1079,
1088,
1090,
1096,
1103,
1111,
1113,
1119,
1129,
1133,
1142,
1145,
1148,
1149,
1152,
1161,
2,
3,
4,
5,
6,
7,
8,
9,
56,
57,
58,
59,
60,
61,
62,
63,
77,
95,
173,
443,
496,
671,
672,
938,
939,
940,
941,
942,
943,
1151,
1152,
1153,
1154,
1155,
1156,
1157,
1158,
1159,
261,
486,
487,
488,
678,
679,
680,
681,
682,
683,
684,
685,
686,
687,
688,
692,
693,
694,
695,
696,
705,
706,
707,
708,
712,
713,
714,
715,
724,
725,
726,
727,
728,
729,
733,
739,
740,
741,
742,
743,
750,
754,
758,
759,
760,
761,
762,
769,
770,
771,
772,
776,
777,
778,
779,
780,
781,
794,
795,
796,
797,
801,
802,
803,
804,
808,
809,
810,
811,
816,
822,
823,
824,
825,
829,
830,
831,
832,
838,
839,
840,
841,
845,
846,
847,
848,
849,
856,
857,
858,
859,
860,
861,
869,
870,
871,
872,
873,
874,
887,
888,
889,
890,
896,
900,
901,
902,
903,
904,
912,
913,
914,
915,
916,
929,
930,
931,
932,
973,
974,
975,
976
] |
1CWE-79
| #
# Copyright © 2012–2022 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
import copy
import json
import re
from datetime import date, datetime, timedelta
from typing import Dict, List
from crispy_forms.bootstrap import InlineCheckboxes, InlineRadios, Tab, TabHolder
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Div, Field, Fieldset, Layout
from django import forms
from django.conf import settings
from django.core.exceptions import NON_FIELD_ERRORS, PermissionDenied, ValidationError
from django.core.validators import FileExtensionValidator
from django.db.models import Q
from django.forms import model_to_dict
from django.forms.models import ModelChoiceIterator
from django.forms.utils import from_current_timezone
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils import timezone
from django.utils.http import urlencode
from django.utils.safestring import mark_safe
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
from translation_finder import DiscoveryResult, discover
from weblate.auth.models import Group, User
from weblate.checks.flags import Flags
from weblate.checks.models import CHECKS
from weblate.checks.utils import highlight_string
from weblate.formats.models import EXPORTERS, FILE_FORMATS
from weblate.glossary.forms import GlossaryAddMixin
from weblate.lang.data import BASIC_LANGUAGES
from weblate.lang.models import Language
from weblate.machinery import MACHINE_TRANSLATION_SERVICES
from weblate.trans.defines import COMPONENT_NAME_LENGTH, REPO_LENGTH
from weblate.trans.filter import FILTERS, get_filter_choice
from weblate.trans.models import (
Announcement,
Change,
Component,
Label,
Project,
ProjectToken,
Unit,
)
from weblate.trans.specialchars import RTL_CHARS_DATA, get_special_chars
from weblate.trans.util import check_upload_method_permissions, is_repo_link
from weblate.trans.validators import validate_check_flags
from weblate.utils.antispam import is_spam
from weblate.utils.errors import report_error
from weblate.utils.forms import (
ColorWidget,
ContextDiv,
EmailField,
SearchField,
SortedSelect,
SortedSelectMultiple,
UsernameField,
)
from weblate.utils.hash import checksum_to_hash, hash_to_checksum
from weblate.utils.search import parse_query
from weblate.utils.state import (
STATE_APPROVED,
STATE_CHOICES,
STATE_EMPTY,
STATE_FUZZY,
STATE_READONLY,
STATE_TRANSLATED,
)
from weblate.utils.validators import validate_file_extension
from weblate.vcs.models import VCS_REGISTRY
BUTTON_TEMPLATE = """
<button class="btn btn-default {0}" title="{1}" {2}>{3}</button>
"""
RADIO_TEMPLATE = """
<label class="btn btn-default {0}" title="{1}">
<input type="radio" name="{2}" value="{3}" {4}/>
{5}
</label>
"""
GROUP_TEMPLATE = """
<div class="btn-group btn-group-xs" {0}>{1}</div>
"""
TOOLBAR_TEMPLATE = """
<div class="btn-toolbar pull-right flip editor-toolbar">{0}</div>
"""
class MarkdownTextarea(forms.Textarea):
def __init__(self, **kwargs):
kwargs["attrs"] = {
"dir": "auto",
"class": "markdown-editor highlight-editor",
"data-mode": "markdown",
}
super().__init__(**kwargs)
class WeblateDateInput(forms.DateInput):
def __init__(self, datepicker=True, **kwargs):
attrs = {"type": "date"}
if datepicker:
attrs["data-provide"] = "datepicker"
attrs["data-date-format"] = "yyyy-mm-dd"
super().__init__(attrs=attrs, format="%Y-%m-%d", **kwargs)
class WeblateDateField(forms.DateField):
def __init__(self, datepicker=True, **kwargs):
if "widget" not in kwargs:
kwargs["widget"] = WeblateDateInput(datepicker=datepicker)
super().__init__(**kwargs)
def to_python(self, value):
"""Produce timezone aware datetime with 00:00:00 as time."""
value = super().to_python(value)
if isinstance(value, date):
return from_current_timezone(
datetime(value.year, value.month, value.day, 0, 0, 0)
)
return value
class ChecksumField(forms.CharField):
"""Field for handling checksum IDs for translation."""
def __init__(self, *args, **kwargs):
kwargs["widget"] = forms.HiddenInput
super().__init__(*args, **kwargs)
def clean(self, value):
super().clean(value)
if not value:
return None
try:
return checksum_to_hash(value)
except ValueError:
raise ValidationError(_("Invalid checksum specified!"))
class UserField(forms.CharField):
def clean(self, value):
if not value:
return None
try:
return User.objects.get(Q(username=value) | Q(email=value))
except User.DoesNotExist:
raise ValidationError(_("Could not find any such user."))
except User.MultipleObjectsReturned:
raise ValidationError(_("More possible users were found."))
class QueryField(forms.CharField):
def __init__(self, **kwargs):
if "label" not in kwargs:
kwargs["label"] = _("Query")
if "required" not in kwargs:
kwargs["required"] = False
super().__init__(**kwargs)
def clean(self, value):
if not value:
if self.required:
raise ValidationError(_("Missing query string."))
return ""
try:
parse_query(value)
return value
except Exception as error:
report_error()
raise ValidationError(_("Could not parse query string: {}").format(error))
class FlagField(forms.CharField):
default_validators = [validate_check_flags]
class PluralTextarea(forms.Textarea):
"""Text-area extension which possibly handles plurals."""
def __init__(self, *args, **kwargs):
self.profile = None
super().__init__(*args, **kwargs)
def get_rtl_toolbar(self, fieldname):
groups = []
# Special chars
chars = []
for name, char, value in RTL_CHARS_DATA:
chars.append(
BUTTON_TEMPLATE.format(
"specialchar",
name,
'data-value="{}"'.format(
value.encode("ascii", "xmlcharrefreplace").decode("ascii")
),
char,
)
)
groups.append(GROUP_TEMPLATE.format("", "\n".join(chars)))
return TOOLBAR_TEMPLATE.format("\n".join(groups))
def get_rtl_toggle(self, language, fieldname):
if language.direction != "rtl":
return ""
# RTL/LTR switch
rtl_name = f"rtl-{fieldname}"
rtl_switch = [
RADIO_TEMPLATE.format(
"direction-toggle active",
gettext("Toggle text direction"),
rtl_name,
"rtl",
'checked="checked"',
"RTL",
),
RADIO_TEMPLATE.format(
"direction-toggle",
gettext("Toggle text direction"),
rtl_name,
"ltr",
"",
"LTR",
),
]
groups = [GROUP_TEMPLATE.format('data-toggle="buttons"', "\n".join(rtl_switch))]
return mark_safe(TOOLBAR_TEMPLATE.format("\n".join(groups)))
def get_toolbar(self, language, fieldname, unit, idx, source):
"""Return toolbar HTML code."""
profile = self.profile
groups = []
# Special chars
chars = [
BUTTON_TEMPLATE.format(
"specialchar",
name,
'data-value="{}"'.format(
value.encode("ascii", "xmlcharrefreplace").decode("ascii")
),
char,
)
for name, char, value in get_special_chars(
language, profile.special_chars, unit.source
)
]
groups.append(GROUP_TEMPLATE.format("", "\n".join(chars)))
result = TOOLBAR_TEMPLATE.format("\n".join(groups))
if language.direction == "rtl":
result = self.get_rtl_toolbar(fieldname) + result
return mark_safe(result)
def render(self, name, value, attrs=None, renderer=None, **kwargs):
"""Render all textareas with correct plural labels."""
unit = value
values = unit.get_target_plurals()
lang = unit.translation.language
plural = unit.translation.plural
tabindex = self.attrs["tabindex"]
placeables = [hl[2] for hl in highlight_string(unit.source_string, unit)]
# Need to add extra class
attrs["class"] = "translation-editor form-control highlight-editor"
attrs["tabindex"] = tabindex
attrs["lang"] = lang.code
attrs["dir"] = lang.direction
attrs["rows"] = 3
attrs["data-max"] = unit.get_max_length()
attrs["data-mode"] = unit.edit_mode
attrs["data-placeables"] = "|".join(re.escape(pl) for pl in placeables if pl)
if unit.readonly:
attrs["readonly"] = 1
# Okay we have more strings
ret = []
plurals = unit.get_source_plurals()
base_id = f"id_{unit.checksum}"
for idx, val in enumerate(values):
# Generate ID
fieldname = f"{name}_{idx}"
fieldid = f"{base_id}_{idx}"
attrs["id"] = fieldid
attrs["tabindex"] = tabindex + idx
if idx and len(plurals) > 1:
source = plurals[1]
else:
source = plurals[0]
# Render textare
textarea = super().render(fieldname, val, attrs, renderer, **kwargs)
# Label for plural
label = str(unit.translation.language)
if len(values) != 1:
label = f"{label}, {plural.get_plural_label(idx)}"
ret.append(
render_to_string(
"snippets/editor.html",
{
"toolbar": self.get_toolbar(lang, fieldid, unit, idx, source),
"fieldid": fieldid,
"label": mark_safe(label),
"textarea": textarea,
"max_length": attrs["data-max"],
"length": len(val),
"source_length": len(source),
"rtl_toggle": self.get_rtl_toggle(lang, fieldid),
},
)
)
# Show plural formula for more strings
if len(values) > 1:
ret.append(
render_to_string(
"snippets/plural-formula.html",
{"plural": plural, "user": self.profile.user},
)
)
# Join output
return mark_safe("".join(ret))
def value_from_datadict(self, data, files, name):
"""Return processed plurals as a list."""
ret = []
for idx in range(0, 10):
fieldname = f"{name}_{idx:d}"
if fieldname not in data:
break
ret.append(data.get(fieldname, ""))
return [r.replace("\r", "") for r in ret]
class PluralField(forms.CharField):
"""Renderer for the plural field.
The only difference from CharField is that it does not force value to be string.
"""
def __init__(self, max_length=None, min_length=None, **kwargs):
kwargs["label"] = ""
super().__init__(widget=PluralTextarea, **kwargs)
def to_python(self, value):
"""Return list or string as returned by PluralTextarea."""
return value
def clean(self, value):
value = super().clean(value)
if not value or (self.required and not any(value)):
raise ValidationError(_("Missing translated string!"))
return value
class FilterField(forms.ChoiceField):
def __init__(self, *args, **kwargs):
kwargs["label"] = _("Search filter")
if "required" not in kwargs:
kwargs["required"] = False
kwargs["choices"] = get_filter_choice()
kwargs["error_messages"] = {
"invalid_choice": _("Please choose a valid filter type.")
}
super().__init__(*args, **kwargs)
def to_python(self, value):
if value == "untranslated":
return "todo"
return super().to_python(value)
class ChecksumForm(forms.Form):
"""Form for handling checksum IDs for translation."""
checksum = ChecksumField(required=True)
def __init__(self, unit_set, *args, **kwargs):
self.unit_set = unit_set
super().__init__(*args, **kwargs)
def clean_checksum(self):
"""Validate whether checksum is valid and fetches unit for it."""
if "checksum" not in self.cleaned_data:
return
unit_set = self.unit_set
try:
self.cleaned_data["unit"] = unit_set.filter(
id_hash=self.cleaned_data["checksum"]
)[0]
except (Unit.DoesNotExist, IndexError):
raise ValidationError(
_("The string you wanted to translate is no longer available.")
)
class UnitForm(forms.Form):
def __init__(self, unit: Unit, *args, **kwargs):
self.unit = unit
super().__init__(*args, **kwargs)
class FuzzyField(forms.BooleanField):
    """Checkbox toggling the "Needs editing" state of a unit."""
    # NOTE(review): presumably makes templates render the help text as an
    # icon rather than inline text — confirm against the form templates.
    help_as_icon = True
    def __init__(self, *args, **kwargs):
        kwargs["label"] = _("Needs editing")
        kwargs["help_text"] = _(
            'Strings are usually marked as "Needs editing" after the source '
            "string is updated, or when marked as such manually."
        )
        super().__init__(*args, **kwargs)
        # CSS hook used by the JavaScript handling of the fuzzy toggle.
        self.widget.attrs["class"] = "fuzzy_checkbox"
class TranslationForm(UnitForm):
    """Form used for translation of single string."""
    # Hashes used to detect concurrent edits (see clean()).
    contentsum = ChecksumField(required=True)
    translationsum = ChecksumField(required=True)
    target = PluralField(required=False)
    fuzzy = FuzzyField(required=False)
    review = forms.ChoiceField(
        label=_("Review state"),
        choices=[
            (STATE_FUZZY, _("Needs editing")),
            (STATE_TRANSLATED, _("Waiting for review")),
            (STATE_APPROVED, _("Approved")),
        ],
        required=False,
        widget=forms.RadioSelect,
    )
    explanation = forms.CharField(
        widget=MarkdownTextarea,
        label=_("Explanation"),
        help_text=_(
            "Additional explanation to clarify meaning or usage of the string."
        ),
        max_length=1000,
        required=False,
    )
    def __init__(self, user, unit: Unit, *args, **kwargs):
        """Prefill initial data from the unit and build the form layout."""
        if unit is not None:
            kwargs["initial"] = {
                "checksum": unit.checksum,
                "contentsum": hash_to_checksum(unit.content_hash),
                "translationsum": hash_to_checksum(unit.get_target_hash()),
                "target": unit,
                "fuzzy": unit.fuzzy,
                "review": unit.state,
                "explanation": unit.explanation,
            }
            # Per-unit id prefix so several forms can share one page.
            kwargs["auto_id"] = f"id_{unit.checksum}_%s"
        tabindex = kwargs.pop("tabindex", 100)
        super().__init__(unit, *args, **kwargs)
        if unit.readonly:
            # Read-only units can not be edited at all.
            for field in ["target", "fuzzy", "review"]:
                self.fields[field].widget.attrs["readonly"] = 1
            self.fields["review"].choices = [
                (STATE_READONLY, _("Read only")),
            ]
        self.user = user
        self.fields["target"].widget.attrs["tabindex"] = tabindex
        self.fields["target"].widget.profile = user.profile
        self.fields["review"].widget.attrs["class"] = "review_radio"
        # Avoid failing validation on untranslated string
        if args:
            self.fields["review"].choices.append((STATE_EMPTY, ""))
        self.helper = FormHelper()
        self.helper.form_method = "post"
        self.helper.form_tag = False
        self.helper.disable_csrf = True
        self.helper.layout = Layout(
            Field("target"),
            Field("fuzzy"),
            Field("contentsum"),
            Field("translationsum"),
            InlineRadios("review"),
            Field("explanation"),
        )
        # Reviewers get the review radio, everyone else the fuzzy checkbox.
        if unit and user.has_perm("unit.review", unit.translation):
            self.fields["fuzzy"].widget = forms.HiddenInput()
        else:
            self.fields["review"].widget = forms.HiddenInput()
        if not unit.translation.component.is_glossary:
            self.fields["explanation"].widget = forms.HiddenInput()
    def clean(self):
        super().clean()
        # Check required fields
        required = {"target", "contentsum", "translationsum"}
        if not required.issubset(self.cleaned_data):
            return
        unit = self.unit
        # Detect concurrent edits via the content/translation hashes.
        if self.cleaned_data["contentsum"] != unit.content_hash:
            raise ValidationError(
                _(
                    "Source string has been changed meanwhile. "
                    "Please check your changes."
                )
            )
        if self.cleaned_data["translationsum"] != unit.get_target_hash():
            raise ValidationError(
                _(
                    "Translation of the string has been changed meanwhile. "
                    "Please check your changes."
                )
            )
        max_length = unit.get_max_length()
        for text in self.cleaned_data["target"]:
            if len(text) > max_length:
                raise ValidationError(_("Translation text too long!"))
        # Derive the resulting unit state from review/fuzzy input.
        if self.user.has_perm(
            "unit.review", unit.translation
        ) and self.cleaned_data.get("review"):
            self.cleaned_data["state"] = int(self.cleaned_data["review"])
        elif self.cleaned_data["fuzzy"]:
            self.cleaned_data["state"] = STATE_FUZZY
        else:
            self.cleaned_data["state"] = STATE_TRANSLATED
class ZenTranslationForm(TranslationForm):
    """Translation form used in the zen editing mode."""
    checksum = ChecksumField(required=True)
    def __init__(self, user, unit, *args, **kwargs):
        super().__init__(user, unit, *args, **kwargs)
        # Zen forms are submitted individually, so each one needs its own
        # form tag, action URL and CSRF token.
        self.helper.form_action = reverse(
            "save_zen", kwargs=unit.translation.get_reverse_url_kwargs()
        )
        self.helper.form_tag = True
        self.helper.disable_csrf = False
        self.helper.layout.append(Field("checksum"))
class DownloadForm(forms.Form):
    """Form for downloading translations filtered by a search query."""
    q = QueryField()
    format = forms.ChoiceField(
        label=_("File format"),
        choices=[(x.name, x.verbose) for x in EXPORTERS.values()],
        initial="po",
        required=True,
        widget=forms.RadioSelect,
    )
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            SearchField("q"),
            InlineRadios("format"),
        )
class SimpleUploadForm(forms.Form):
    """Base form for uploading a file."""
    file = forms.FileField(label=_("File"), validators=[validate_file_extension])
    method = forms.ChoiceField(
        label=_("File upload mode"),
        choices=(
            ("translate", _("Add as translation")),
            ("approve", _("Add as approved translation")),
            ("suggest", _("Add as suggestion")),
            ("fuzzy", _("Add as translation needing edit")),
            ("replace", _("Replace existing translation file")),
            ("source", _("Update source strings")),
            ("add", _("Add new strings")),
        ),
        widget=forms.RadioSelect,
        required=True,
    )
    fuzzy = forms.ChoiceField(
        label=_("Processing of strings needing edit"),
        choices=(
            ("", _("Do not import")),
            ("process", _("Import as string needing edit")),
            ("approve", _("Import as translated")),
        ),
        required=False,
    )
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
    @staticmethod
    def get_field_doc(field):
        # Documentation anchor shown next to each field.
        return ("user/files", f"upload-{field.name}")
    def remove_translation_choice(self, value):
        """Remove add as translation choice."""
        choices = self.fields["method"].choices
        self.fields["method"].choices = [
            choice for choice in choices if choice[0] != value
        ]
class UploadForm(SimpleUploadForm):
    """Upload form with the option to overwrite current messages."""
    conflicts = forms.ChoiceField(
        label=_("Conflict handling"),
        help_text=_(
            "Whether to overwrite existing translations if the string is "
            "already translated."
        ),
        choices=(
            ("", _("Update only untranslated strings")),
            ("replace-translated", _("Update translated strings")),
            ("replace-approved", _("Update translated and approved strings")),
        ),
        required=False,
        initial="replace-translated",
    )
class ExtraUploadForm(UploadForm):
    """Advanced upload form for users who can override authorship."""
    author_name = forms.CharField(label=_("Author name"))
    author_email = EmailField(label=_("Author e-mail"))
def get_upload_form(user, translation, *args, **kwargs):
    """Return correct upload form based on user permissions.

    Picks the richest form class the user may use, removes upload methods
    the user lacks permission for, and hides review-only conflict handling
    on projects without a review workflow.
    """
    if user.has_perm("upload.authorship", translation):
        form = ExtraUploadForm
        kwargs["initial"] = {"author_name": user.full_name, "author_email": user.email}
    elif user.has_perm("upload.overwrite", translation):
        form = UploadForm
    else:
        form = SimpleUploadForm
    result = form(*args, **kwargs)
    # Drop upload methods the user is not allowed to use.
    for method in [x[0] for x in result.fields["method"].choices]:
        if not check_upload_method_permissions(user, translation, method):
            result.remove_translation_choice(method)
    # Remove approved choice for non review projects. The actual choice
    # value is "replace-approved" (see UploadForm.conflicts); filtering on
    # a bare "approved" would never match and the option would leak through.
    if not user.has_perm("unit.review", translation) and form is not SimpleUploadForm:
        result.fields["conflicts"].choices = [
            choice
            for choice in result.fields["conflicts"].choices
            if choice[0] != "replace-approved"
        ]
    return result
class SearchForm(forms.Form):
    """Text searching form."""
    # pylint: disable=invalid-name
    q = QueryField()
    sort_by = forms.CharField(required=False, widget=forms.HiddenInput)
    checksum = ChecksumField(required=False)
    offset = forms.IntegerField(min_value=-1, required=False, widget=forms.HiddenInput)
    # Extra kwargs for rendering the offset field; overridden in subclasses.
    offset_kwargs = {}
    def __init__(self, user, language=None, show_builder=True, **kwargs):
        """Set up the search form layout and the query-builder context."""
        self.user = user
        self.language = language
        super().__init__(**kwargs)
        self.helper = FormHelper(self)
        self.helper.disable_csrf = True
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Div(
                Field("offset", **self.offset_kwargs),
                SearchField("q"),
                Field("sort_by", template="snippets/sort-field.html"),
                css_class="btn-toolbar",
                role="toolbar",
            ),
            ContextDiv(
                template="snippets/query-builder.html",
                context={
                    "user": self.user,
                    "month_ago": timezone.now() - timedelta(days=31),
                    "show_builder": show_builder,
                    "language": self.language,
                },
            ),
            Field("checksum"),
        )
    def get_name(self):
        """Return verbose name for a search."""
        return FILTERS.get_search_name(self.cleaned_data.get("q", ""))
    def get_search_query(self):
        """Return the cleaned search query string."""
        return self.cleaned_data["q"]
    def clean_offset(self):
        # Default to the first page when no offset was submitted.
        if self.cleaned_data.get("offset") is None:
            self.cleaned_data["offset"] = 1
        return self.cleaned_data["offset"]
    def items(self):
        """Return cleaned form data as (name, value) pairs for URL encoding."""
        items = []
        # Skip checksum and offset as these change
        ignored = {"offset", "checksum"}
        for param in sorted(self.cleaned_data):
            value = self.cleaned_data[param]
            # We don't care about empty values or ignored
            if value is None or param in ignored:
                continue
            if isinstance(value, bool):
                # Only store true values
                if value:
                    items.append((param, "1"))
            elif isinstance(value, int):
                # Avoid storing 0 values
                if value > 0:
                    items.append((param, str(value)))
            elif isinstance(value, datetime):
                # Convert date to string
                items.append((param, value.date().isoformat()))
            elif isinstance(value, list):
                for val in value:
                    items.append((param, val))
            elif isinstance(value, User):
                items.append((param, value.username))
            else:
                # It should be string here
                if value:
                    items.append((param, value))
        return items
    def urlencode(self):
        """Serialize the form state into a query string."""
        return urlencode(self.items())
    def reset_offset(self):
        """Reset offset to avoid using form as default for new search."""
        data = copy.copy(self.data)
        data["offset"] = "1"
        data["checksum"] = ""
        self.data = data
        return self
class PositionSearchForm(SearchForm):
    """Search form with a visible position (offset) field."""
    offset = forms.IntegerField(min_value=-1, required=False)
    offset_kwargs = {"template": "snippets/position-field.html"}
class MergeForm(UnitForm):
    """Simple form for merging translation of two units."""
    # Primary key of the unit to merge from.
    merge = forms.IntegerField()
    def clean(self):
        super().clean()
        if "merge" not in self.cleaned_data:
            return None
        try:
            unit = self.unit
            translation = unit.translation
            project = translation.component.project
            # The merged unit must live in the same project and language.
            self.cleaned_data["merge_unit"] = merge_unit = Unit.objects.get(
                pk=self.cleaned_data["merge"],
                translation__component__project=project,
                translation__language=translation.language,
            )
            # Compare in Python to ensure case sensitiveness on MySQL
            if not translation.is_source and unit.source != merge_unit.source:
                raise ValidationError(_("Could not find merged string."))
        except Unit.DoesNotExist:
            raise ValidationError(_("Could not find merged string."))
        return self.cleaned_data
class RevertForm(UnitForm):
    """Form for reverting edits."""
    # Primary key of the change to revert.
    revert = forms.IntegerField()
    def clean(self):
        super().clean()
        if "revert" not in self.cleaned_data:
            return None
        try:
            # The change must belong to the unit this form is bound to.
            self.cleaned_data["revert_change"] = Change.objects.get(
                pk=self.cleaned_data["revert"], unit=self.unit
            )
        except Change.DoesNotExist:
            raise ValidationError(_("Could not find reverted change."))
        return self.cleaned_data
class AutoForm(forms.Form):
    """Automatic translation form."""
    mode = forms.ChoiceField(
        label=_("Automatic translation mode"),
        choices=[
            ("suggest", _("Add as suggestion")),
            ("translate", _("Add as translation")),
            ("fuzzy", _("Add as needing edit")),
        ],
        initial="suggest",
    )
    filter_type = FilterField(
        required=True,
        initial="todo",
        help_text=_(
            "Please note that translating all strings will "
            "discard all existing translations."
        ),
    )
    auto_source = forms.ChoiceField(
        label=_("Automatic translation source"),
        choices=[
            ("others", _("Other translation components")),
            ("mt", _("Machine translation")),
        ],
        initial="others",
    )
    component = forms.ChoiceField(
        label=_("Components"),
        required=False,
        help_text=_(
            "Turn on contribution to shared translation memory for the project to "
            "get access to additional components."
        ),
        initial="",
    )
    engines = forms.MultipleChoiceField(
        label=_("Machine translation engines"), choices=[], required=False
    )
    threshold = forms.IntegerField(
        label=_("Score threshold"), initial=80, min_value=1, max_value=100
    )
    def __init__(self, obj, *args, **kwargs):
        """Generate choices for other component in same project."""
        super().__init__(*args, **kwargs)
        self.obj = obj
        # Add components from other projects with enabled shared TM
        self.components = obj.project.component_set.filter(
            source_language=obj.source_language
        ) | Component.objects.filter(
            source_language_id=obj.source_language_id,
            project__contribute_shared_tm=True,
        ).exclude(
            project=obj.project
        )
        # Fetching is faster than doing count on possibly thousands of components
        if len(self.components.values_list("id")[:30]) == 30:
            # Do not show choices when too many
            self.fields["component"] = forms.CharField(
                required=False,
                label=_("Components"),
                help_text=_(
                    "Enter component to use as source, "
                    "keep blank to use all components in current project."
                ),
            )
        else:
            choices = [
                (s.id, str(s))
                for s in self.components.order_project().prefetch_related("project")
            ]
            self.fields["component"].choices = [
                ("", _("All components in current project"))
            ] + choices
        self.fields["engines"].choices = [
            (key, mt.name) for key, mt in MACHINE_TRANSLATION_SERVICES.items()
        ]
        # Prefer the built-in translation memory when available.
        if "weblate" in MACHINE_TRANSLATION_SERVICES.keys():
            self.fields["engines"].initial = "weblate"
        # Only a subset of search filters makes sense for auto-translation.
        use_types = {"all", "nottranslated", "todo", "fuzzy", "check:inconsistent"}
        self.fields["filter_type"].choices = [
            x for x in self.fields["filter_type"].choices if x[0] in use_types
        ]
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            Field("mode"),
            Field("filter_type"),
            InlineRadios("auto_source", id="select_auto_source"),
            Div("component", css_id="auto_source_others"),
            Div("engines", "threshold", css_id="auto_source_mt"),
        )
    def clean_component(self):
        """Resolve the submitted component (pk, slug or project/slug) to a pk."""
        component = self.cleaned_data["component"]
        if not component:
            return None
        if component.isdigit():
            # Numeric value is used as a primary key directly.
            try:
                result = self.components.get(pk=component)
            except Component.DoesNotExist:
                raise ValidationError(_("Component not found!"))
        else:
            slashes = component.count("/")
            if slashes == 0:
                # Bare slug is looked up in the current project.
                try:
                    result = self.components.get(
                        slug=component, project=self.obj.project
                    )
                except Component.DoesNotExist:
                    raise ValidationError(_("Component not found!"))
            elif slashes == 1:
                # project/component lookup across projects.
                project_slug, component_slug = component.split("/")
                try:
                    result = self.components.get(
                        slug=component_slug, project__slug=project_slug
                    )
                except Component.DoesNotExist:
                    raise ValidationError(_("Component not found!"))
            else:
                raise ValidationError(_("Please provide valid component slug!"))
        return result.pk
class CommentForm(forms.Form):
    """Simple commenting form."""
    scope = forms.ChoiceField(
        label=_("Scope"),
        help_text=_(
            "Is your comment specific to this "
            "translation or generic for all of them?"
        ),
        choices=(
            (
                "report",
                _("Report issue with the source string"),
            ),
            (
                "global",
                _("Source string comment, suggestions for changes to this string"),
            ),
            (
                "translation",
                _("Translation comment, discussions with other translators"),
            ),
        ),
    )
    comment = forms.CharField(
        widget=MarkdownTextarea,
        label=_("New comment"),
        help_text=_("You can use Markdown and mention users by @username."),
        max_length=1000,
    )
    def __init__(self, project, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Remove bug report in case source review is not enabled
        if not project.source_review:
            self.fields["scope"].choices = self.fields["scope"].choices[1:]
class EngageForm(forms.Form):
    """Form to choose language for engagement widgets."""
    lang = forms.ChoiceField(required=False, choices=[("", _("All languages"))])
    component = forms.ChoiceField(required=False, choices=[("", _("All components"))])
    def __init__(self, user, project, *args, **kwargs):
        """Dynamically generate choices for used languages in project."""
        super().__init__(*args, **kwargs)
        self.fields["lang"].choices += project.languages.as_choices()
        # Only list components the user is allowed to access.
        self.fields["component"].choices += (
            project.component_set.filter_access(user)
            .order()
            .values_list("slug", "name")
        )
class NewLanguageOwnerForm(forms.Form):
    """Form for requesting new language."""
    lang = forms.MultipleChoiceField(
        label=_("Languages"), choices=[], widget=forms.SelectMultiple
    )
    def get_lang_objects(self):
        # Offer only languages not yet present on the component.
        return Language.objects.exclude(
            Q(translation__component=self.component) | Q(component=self.component)
        )
    def __init__(self, component, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.component = component
        languages = self.get_lang_objects()
        self.fields["lang"].choices = languages.as_choices()
class NewLanguageForm(NewLanguageOwnerForm):
    """Form for requesting new language."""
    # Regular users may request a single language at a time.
    lang = forms.ChoiceField(label=_("Language"), choices=[], widget=forms.Select)
    def get_lang_objects(self):
        # Restrict the offer to the basic set (or the configured override).
        codes = BASIC_LANGUAGES
        if settings.BASIC_LANGUAGES is not None:
            codes = settings.BASIC_LANGUAGES
        return super().get_lang_objects().filter(code__in=codes)
    def __init__(self, component, *args, **kwargs):
        super().__init__(component, *args, **kwargs)
        self.fields["lang"].choices = [("", _("Please choose"))] + self.fields[
            "lang"
        ].choices
    def clean_lang(self):
        # Compatibility with NewLanguageOwnerForm
        return [self.cleaned_data["lang"]]
def get_new_language_form(request, component):
    """Pick the new-language form class matching the user's permissions."""
    user = request.user
    if not user.has_perm("translation.add", component):
        raise PermissionDenied()
    # Users allowed to add any language get the unrestricted form.
    return (
        NewLanguageOwnerForm
        if user.has_perm("translation.add_more", component)
        else NewLanguageForm
    )
class ContextForm(forms.ModelForm):
    """Form for editing source-string context (explanation, labels, flags)."""
    class Meta:
        model = Unit
        fields = ("explanation", "labels", "extra_flags")
        widgets = {
            "labels": forms.CheckboxSelectMultiple(),
            "explanation": MarkdownTextarea,
        }
    # Documentation anchors per field, used by get_field_doc().
    doc_links = {
        "explanation": ("admin/translating", "additional-explanation"),
        "labels": ("devel/translations", "labels"),
        "extra_flags": ("admin/translating", "additional-flags"),
    }
    def get_field_doc(self, field):
        return self.doc_links[field.name]
    def __init__(self, data=None, instance=None, user=None, **kwargs):
        # Preselect labels attached to this unit or its source unit.
        kwargs["initial"] = {
            "labels": Label.objects.filter(
                Q(unit=instance) | Q(unit__source_unit=instance)
            )
        }
        super().__init__(data=data, instance=instance, **kwargs)
        project = instance.translation.component.project
        self.fields["labels"].queryset = project.label_set.all()
        self.helper = FormHelper(self)
        self.helper.disable_csrf = True
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field("explanation"),
            Field("labels"),
            ContextDiv(
                template="snippets/labels_description.html",
                context={"project": project, "user": user},
            ),
            Field("extra_flags"),
        )
    def save(self, commit=True):
        if commit:
            # same_content avoids triggering content-change processing.
            self.instance.save(same_content=True)
            self._save_m2m()
            return self.instance
        return super().save(commit)
class UserManageForm(forms.Form):
    """Form for adding a user by account name or e-mail."""
    user = UserField(
        label=_("User to add"),
        help_text=_(
            "Please type in an existing Weblate account name or e-mail address."
        ),
    )
class UserBlockForm(forms.Form):
    """Form for blocking a user, optionally for a limited time."""
    user = UserField(
        label=_("User to block"),
        help_text=_(
            "Please type in an existing Weblate account name or e-mail address."
        ),
    )
    # Duration in days; empty means an indefinite block.
    expiry = forms.ChoiceField(
        label=_("Block duration"),
        choices=(
            ("", _("Block user until I unblock them")),
            ("1", _("Block user for one day")),
            ("7", _("Block user for one week")),
            ("30", _("Block user for one month")),
        ),
        required=False,
    )
class ReportsForm(forms.Form):
    """Form for generating contributor reports over a time period."""
    style = forms.ChoiceField(
        label=_("Report format"),
        help_text=_("Choose file format for the report"),
        choices=(
            ("rst", _("reStructuredText")),
            ("json", _("JSON")),
            ("html", _("HTML")),
        ),
    )
    period = forms.ChoiceField(
        label=_("Report period"),
        choices=(
            ("30days", _("Last 30 days")),
            ("this-month", _("This month")),
            ("month", _("Last month")),
            ("this-year", _("This year")),
            ("year", _("Last year")),
            ("", _("As specified")),
        ),
        required=False,
    )
    start_date = WeblateDateField(
        label=_("Starting date"), required=False, datepicker=False
    )
    end_date = WeblateDateField(
        label=_("Ending date"), required=False, datepicker=False
    )
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field("style"),
            Field("period"),
            Div(
                "start_date",
                "end_date",
                css_class="input-group input-daterange",
                data_provide="datepicker",
                data_date_format="yyyy-mm-dd",
            ),
        )
    def clean(self):
        """Resolve the chosen period into concrete start/end timestamps."""
        super().clean()
        # Invalid value, skip rest of the validation
        if "period" not in self.cleaned_data:
            return
        # Handle predefined periods
        if self.cleaned_data["period"] == "30days":
            end = timezone.now()
            start = end - timedelta(days=30)
        elif self.cleaned_data["period"] == "month":
            # Last day of the previous month, then its first day.
            end = timezone.now().replace(day=1) - timedelta(days=1)
            start = end.replace(day=1)
        elif self.cleaned_data["period"] == "this-month":
            # Jump past the end of this month, then back to its last day.
            end = timezone.now().replace(day=1) + timedelta(days=31)
            end = end.replace(day=1) - timedelta(days=1)
            start = end.replace(day=1)
        elif self.cleaned_data["period"] == "year":
            year = timezone.now().year - 1
            end = timezone.make_aware(datetime(year, 12, 31))
            start = timezone.make_aware(datetime(year, 1, 1))
        elif self.cleaned_data["period"] == "this-year":
            year = timezone.now().year
            end = timezone.make_aware(datetime(year, 12, 31))
            start = timezone.make_aware(datetime(year, 1, 1))
        else:
            # Validate custom period
            if not self.cleaned_data.get("start_date"):
                raise ValidationError({"start_date": _("Missing date!")})
            if not self.cleaned_data.get("end_date"):
                raise ValidationError({"end_date": _("Missing date!")})
            start = self.cleaned_data["start_date"]
            end = self.cleaned_data["end_date"]
        # Sanitize timestamps
        self.cleaned_data["start_date"] = start.replace(
            hour=0, minute=0, second=0, microsecond=0
        )
        self.cleaned_data["end_date"] = end.replace(
            hour=23, minute=59, second=59, microsecond=999999
        )
        # Final validation
        if self.cleaned_data["start_date"] > self.cleaned_data["end_date"]:
            msg = _("Starting date has to be before ending date!")
            raise ValidationError({"start_date": msg, "end_date": msg})
class CleanRepoMixin:
    """Validate access to weblate://project/component repository links."""
    def clean_repo(self):
        repo = self.cleaned_data.get("repo")
        # repo[10:] strips the "weblate://" prefix (10 characters); only
        # project/component links need a permission check.
        if not repo or not is_repo_link(repo) or "/" not in repo[10:]:
            return repo
        project, component = repo[10:].split("/", 1)
        try:
            obj = Component.objects.get(
                slug__iexact=component, project__slug__iexact=project
            )
        except Component.DoesNotExist:
            # Nonexistent target is validated elsewhere; pass through.
            return repo
        if not self.request.user.has_perm("component.edit", obj):
            raise ValidationError(
                _("You do not have permission to access this component!")
            )
        return repo
class SettingsBaseForm(CleanRepoMixin, forms.ModelForm):
    """Component base form."""
    class Meta:
        model = Component
        fields = []
    def __init__(self, request, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Request is needed for permission checks in CleanRepoMixin.
        self.request = request
        self.helper = FormHelper()
        self.helper.form_tag = False
class SelectChecksWidget(SortedSelectMultiple):
    """Multi-select widget for quality checks, (de)serialized as JSON."""
    def __init__(self, attrs=None, choices=()):
        # Any passed-in choices are ignored; always offer the full check list.
        choices = CHECKS.get_choices()
        super().__init__(attrs=attrs, choices=choices)
    def value_from_datadict(self, data, files, name):
        """Decode the submitted JSON string back into a Python value."""
        raw = super().value_from_datadict(data, files, name)
        if not isinstance(raw, str):
            return raw
        return json.loads(raw)
    def format_value(self, value):
        """Encode the value as JSON unless it already is a string."""
        formatted = super().format_value(value)
        if not isinstance(formatted, str):
            return json.dumps(formatted)
        return formatted
class SelectChecksField(forms.CharField):
    """Char field that keeps the widget-provided value untouched."""
    def to_python(self, value):
        # No coercion: SelectChecksWidget already returns a decoded list.
        return value
class ComponentDocsMixin:
    """Provide documentation links for component form fields."""
    @staticmethod
    def get_field_doc(field):
        # Each field maps to an anchor in the projects admin documentation.
        return ("admin/projects", "component-{}".format(field.name))
class ProjectDocsMixin:
    """Provide documentation links for project form fields."""
    @staticmethod
    def get_field_doc(field):
        # Each field maps to an anchor in the projects admin documentation.
        return ("admin/projects", "project-{}".format(field.name))
class SpamCheckMixin:
    """Reject form values flagged by the spam-protection service."""
    def spam_check(self, value):
        if not is_spam(value, self.request):
            return
        raise ValidationError(_("This field has been identified as spam!"))
class ComponentAntispamMixin(SpamCheckMixin):
    """Run the spam check on the contributor agreement field."""
    def clean_agreement(self):
        agreement = self.cleaned_data["agreement"]
        self.spam_check(agreement)
        return agreement
class ProjectAntispamMixin(SpamCheckMixin):
    """Run the spam check on the user-provided project text fields."""
    def clean_web(self):
        website = self.cleaned_data["web"]
        self.spam_check(website)
        return website
    def clean_instructions(self):
        instructions = self.cleaned_data["instructions"]
        self.spam_check(instructions)
        return instructions
class ComponentSettingsForm(
    SettingsBaseForm, ComponentDocsMixin, ComponentAntispamMixin
):
    """Component settings form."""
    class Meta:
        model = Component
        fields = (
            "name",
            "report_source_bugs",
            "license",
            "agreement",
            "allow_translation_propagation",
            "enable_suggestions",
            "suggestion_voting",
            "suggestion_autoaccept",
            "priority",
            "check_flags",
            "enforced_checks",
            "commit_message",
            "add_message",
            "delete_message",
            "merge_message",
            "addon_message",
            "vcs",
            "repo",
            "branch",
            "push",
            "push_branch",
            "repoweb",
            "push_on_commit",
            "commit_pending_age",
            "merge_style",
            "file_format",
            "edit_template",
            "new_lang",
            "language_code_style",
            "source_language",
            "new_base",
            "filemask",
            "template",
            "intermediate",
            "language_regex",
            "variant_regex",
            "restricted",
            "auto_lock_error",
            "links",
            "manage_units",
            "is_glossary",
            "glossary_color",
        )
        widgets = {
            "enforced_checks": SelectChecksWidget,
            "source_language": SortedSelect,
        }
        field_classes = {"enforced_checks": SelectChecksField}
    def __init__(self, request, *args, **kwargs):
        """Build the tabbed settings layout and restrict choices by access."""
        super().__init__(request, *args, **kwargs)
        if self.hide_restricted:
            self.fields["restricted"].widget = forms.HiddenInput()
        # Limit share-with links to projects the user manages.
        # NOTE(review): exclusion compares a Project pk against this
        # Component's pk — confirm this is intended (vs. project_id).
        self.fields["links"].queryset = request.user.managed_projects.exclude(
            pk=self.instance.pk
        )
        self.helper.layout = Layout(
            TabHolder(
                Tab(
                    _("Basic"),
                    Fieldset(_("Name"), "name"),
                    Fieldset(_("License"), "license", "agreement"),
                    Fieldset(_("Upstream links"), "report_source_bugs"),
                    Fieldset(
                        _("Listing and access"),
                        "priority",
                        "restricted",
                        "links",
                    ),
                    Fieldset(
                        _("Glossary"),
                        "is_glossary",
                        "glossary_color",
                    ),
                    css_id="basic",
                ),
                Tab(
                    _("Translation"),
                    Fieldset(
                        _("Suggestions"),
                        "enable_suggestions",
                        "suggestion_voting",
                        "suggestion_autoaccept",
                    ),
                    Fieldset(
                        _("Translation settings"),
                        "allow_translation_propagation",
                        "manage_units",
                        "check_flags",
                        "variant_regex",
                        "enforced_checks",
                    ),
                    css_id="translation",
                ),
                Tab(
                    _("Version control"),
                    Fieldset(
                        _("Locations"),
                        Div(template="trans/repo_help.html"),
                        "vcs",
                        "repo",
                        "branch",
                        "push",
                        "push_branch",
                        "repoweb",
                    ),
                    Fieldset(
                        _("Version control settings"),
                        "push_on_commit",
                        "commit_pending_age",
                        "merge_style",
                        "auto_lock_error",
                    ),
                    css_id="vcs",
                ),
                Tab(
                    _("Commit messages"),
                    Fieldset(
                        _("Commit messages"),
                        ContextDiv(
                            template="trans/messages_help.html",
                            context={"user": request.user},
                        ),
                        "commit_message",
                        "add_message",
                        "delete_message",
                        "merge_message",
                        "addon_message",
                    ),
                    css_id="messages",
                ),
                Tab(
                    _("Files"),
                    Fieldset(
                        _("Translation files"),
                        "file_format",
                        "filemask",
                        "language_regex",
                        "source_language",
                    ),
                    Fieldset(
                        _("Monolingual translations"),
                        "template",
                        "edit_template",
                        "intermediate",
                    ),
                    Fieldset(
                        _("Adding new languages"),
                        "new_base",
                        "new_lang",
                        "language_code_style",
                    ),
                    css_id="files",
                ),
                template="layout/pills.html",
            )
        )
        # Only offer commonly used VCS backends, but keep whatever the
        # component currently uses so the form still validates.
        vcses = (
            "git",
            "gerrit",
            "github",
            "gitlab",
            "pagure",
            "local",
            "git-force-push",
        )
        if self.instance.vcs not in vcses:
            vcses = (self.instance.vcs,)
        self.fields["vcs"].choices = [
            c for c in self.fields["vcs"].choices if c[0] in vcses
        ]
    @property
    def hide_restricted(self):
        """Whether the "restricted" toggle should be hidden from this user."""
        user = self.request.user
        if user.is_superuser:
            return False
        if settings.OFFER_HOSTING:
            return True
        # Hide unless the user can edit this component somewhere.
        return not any(
            "component.edit" in permissions
            for permissions, _langs in user.component_permissions[self.instance.pk]
        )
    def clean(self):
        data = self.cleaned_data
        if self.hide_restricted:
            # Keep the stored value when the field was hidden.
            data["restricted"] = self.instance.restricted
class ComponentCreateForm(SettingsBaseForm, ComponentDocsMixin, ComponentAntispamMixin):
    """Component creation form."""
    class Meta:
        model = Component
        fields = [
            "project",
            "name",
            "slug",
            "vcs",
            "repo",
            "branch",
            "push",
            "push_branch",
            "repoweb",
            "file_format",
            "filemask",
            "template",
            "edit_template",
            "intermediate",
            "new_base",
            "license",
            "new_lang",
            "language_code_style",
            "language_regex",
            "source_language",
            "is_glossary",
        ]
        widgets = {"source_language": SortedSelect}
class ComponentNameForm(forms.Form, ComponentDocsMixin, ComponentAntispamMixin):
    """Base wizard form collecting component name, slug and glossary flag."""
    name = forms.CharField(
        label=_("Component name"),
        max_length=COMPONENT_NAME_LENGTH,
        help_text=_("Display name"),
    )
    slug = forms.SlugField(
        label=_("URL slug"),
        max_length=COMPONENT_NAME_LENGTH,
        help_text=_("Name used in URLs and filenames."),
    )
    is_glossary = forms.BooleanField(
        label=_("Use as a glossary"),
        required=False,
    )
    def __init__(self, request, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        # Request is kept for spam checks and permission lookups.
        self.request = request
class ComponentSelectForm(ComponentNameForm):
    """Wizard form creating a component based on an existing one."""
    component = forms.ModelChoiceField(
        queryset=Component.objects.none(),
        label=_("Component"),
        help_text=_("Select existing component to copy configuration from."),
    )
    def __init__(self, request, *args, **kwargs):
        # Model-form style callers may pass an instance; drop it silently.
        kwargs.pop("instance", None)
        # Distinct id prefix keeps several wizard forms apart on one page.
        kwargs.setdefault("auto_id", "id_existing_%s")
        super().__init__(request, *args, **kwargs)
class ComponentBranchForm(ComponentSelectForm):
    """Wizard form creating a component from another branch of an existing one."""
    branch = forms.ChoiceField(label=_("Repository branch"))
    # Mapping of component pk to its available branches, filled in by the
    # creation wizard before validation.
    branch_data: Dict[int, List[str]] = {}
    instance = None
    def __init__(self, *args, **kwargs):
        kwargs["auto_id"] = "id_branch_%s"
        super().__init__(*args, **kwargs)
    def clean_component(self):
        component = self.cleaned_data["component"]
        # Restrict branch choices to the ones known for this component.
        self.fields["branch"].choices = [(x, x) for x in self.branch_data[component.pk]]
        return component
    def clean(self):
        form_fields = ("branch", "slug", "name")
        data = self.cleaned_data
        component = data.get("component")
        if not component or any(field not in data for field in form_fields):
            return
        # Build a new Component copying configuration of the selected one.
        kwargs = model_to_dict(component, exclude=["id", "links"])
        # We need a object, not integer here
        kwargs["source_language"] = component.source_language
        kwargs["project"] = component.project
        for field in form_fields:
            kwargs[field] = data[field]
        self.instance = Component(**kwargs)
        try:
            self.instance.full_clean()
        except ValidationError as error:
            # Can not raise directly as this will contain errors
            # from fields not present here; remap errors of unknown fields
            # onto the form-level (non-field) bucket instead.
            result = {NON_FIELD_ERRORS: []}
            for key, value in error.message_dict.items():
                if key in self.fields:
                    result[key] = value
                else:
                    result[NON_FIELD_ERRORS].extend(value)
            # Bug fix: previously raised ValidationError(error.messages),
            # discarding the remapped per-field errors built above.
            raise ValidationError(result)
class ComponentProjectForm(ComponentNameForm):
    """Wizard form collecting the target project and source language."""
    project = forms.ModelChoiceField(
        queryset=Project.objects.none(), label=_("Project")
    )
    source_language = forms.ModelChoiceField(
        widget=SortedSelect,
        label=_("Source language"),
        help_text=_("Language used for source strings in all components"),
        queryset=Language.objects.all(),
    )
    def __init__(self, request, *args, **kwargs):
        # Model-form style callers may pass an instance; it is not used here.
        if "instance" in kwargs:
            kwargs.pop("instance")
        super().__init__(request, *args, **kwargs)
        # It might be overriden based on preset project
        self.fields["source_language"].initial = Language.objects.default_language
        self.request = request
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.instance = None
    def clean(self):
        """Reject names/slugs colliding with existing components in the project."""
        if "project" not in self.cleaned_data:
            return
        project = self.cleaned_data["project"]
        name = self.cleaned_data.get("name")
        if name and project.component_set.filter(name__iexact=name).exists():
            raise ValidationError(
                {"name": _("Component with the same name already exists.")}
            )
        slug = self.cleaned_data.get("slug")
        if slug and project.component_set.filter(slug__iexact=slug).exists():
            raise ValidationError(
                {"slug": _("Component with the same name already exists.")}
            )
class ComponentScratchCreateForm(ComponentProjectForm):
    """Wizard form creating a component from scratch (no existing files)."""
    file_format = forms.ChoiceField(
        label=_("File format"),
        initial="po-mono",
        # Only formats able to create new translation files are offered.
        choices=FILE_FORMATS.get_choices(
            cond=lambda x: bool(x.new_translation) or hasattr(x, "update_bilingual")
        ),
    )
    def __init__(self, *args, **kwargs):
        kwargs["auto_id"] = "id_scratchcreate_%s"
        super().__init__(*args, **kwargs)
class ComponentZipCreateForm(ComponentProjectForm):
    """Wizard form creating a component from an uploaded ZIP of translations."""
    zipfile = forms.FileField(
        label=_("ZIP file containing translations"),
        validators=[FileExtensionValidator(allowed_extensions=["zip"])],
        widget=forms.FileInput(attrs={"accept": ".zip,application/zip"}),
    )
    field_order = ["zipfile", "project", "name", "slug"]
    def __init__(self, *args, **kwargs):
        kwargs["auto_id"] = "id_zipcreate_%s"
        super().__init__(*args, **kwargs)
class ComponentDocCreateForm(ComponentProjectForm):
    """Wizard form creating a component from a single uploaded document."""
    docfile = forms.FileField(
        label=_("Document to translate"),
        validators=[validate_file_extension],
    )
    field_order = ["docfile", "project", "name", "slug"]
    def __init__(self, *args, **kwargs):
        kwargs["auto_id"] = "id_doccreate_%s"
        super().__init__(*args, **kwargs)
class ComponentInitCreateForm(CleanRepoMixin, ComponentProjectForm):
    """Component creation form.

    This is mostly copy from Component model. Probably should be extracted to standalone
    Repository model...
    """

    # Queryset is replaced at runtime with the user's managed projects.
    project = forms.ModelChoiceField(
        queryset=Project.objects.none(), label=_("Project")
    )
    vcs = forms.ChoiceField(
        label=_("Version control system"),
        help_text=_(
            "Version control system to use to access your "
            "repository with translations."
        ),
        # "local" is excluded here; it is only meaningful for uploads.
        choices=VCS_REGISTRY.get_choices(exclude={"local"}),
        initial=settings.DEFAULT_VCS,
    )
    repo = forms.CharField(
        label=_("Source code repository"),
        max_length=REPO_LENGTH,
        help_text=_(
            "URL of a repository, use weblate://project/component "
            "for sharing with other component."
        ),
    )
    branch = forms.CharField(
        label=_("Repository branch"),
        max_length=REPO_LENGTH,
        help_text=_("Repository branch to translate"),
        required=False,
    )

    def clean_instance(self, data):
        """Validate *data* by building a transient Component instance.

        Reuses the model-level field validation, uniqueness check and repo
        check; the resulting instance is kept on ``self.instance``.
        """
        params = copy.copy(data)
        if "discovery" in params:
            params.pop("discovery")
        instance = Component(**params)
        # These fields are not collected by this form, so skip them.
        instance.clean_fields(exclude=("filemask", "file_format", "license"))
        instance.validate_unique()
        instance.clean_repo()
        self.instance = instance
        # Create linked repos automatically
        repo = instance.suggest_repo_link()
        if repo:
            data["repo"] = repo
            data["branch"] = ""
            # Re-validate with the rewritten weblate:// repo link.
            self.clean_instance(data)

    def clean(self):
        self.clean_instance(self.cleaned_data)
class ComponentDiscoverForm(ComponentInitCreateForm):
    """Second step of component creation: pick discovered translation files.

    Runs translation-finder discovery over the checked-out repository and
    offers each match as a radio choice; results are cached in the session
    so re-posting the form does not re-run discovery.
    """

    discovery = forms.ChoiceField(
        label=_("Choose translation files to import"),
        choices=[("manual", _("Specify configuration manually"))],
        required=True,
        widget=forms.RadioSelect,
    )

    def render_choice(self, value):
        """Render one discovery result as an HTML radio label."""
        context = copy.copy(value)
        try:
            format_cls = FILE_FORMATS[value["file_format"]]
            context["file_format_name"] = format_cls.name
            context["valid"] = True
        except KeyError:
            # Format detected by translation-finder but not supported here.
            context["file_format_name"] = value["file_format"]
            context["valid"] = False
        context["origin"] = value.meta["origin"]
        return render_to_string("trans/discover-choice.html", context)

    def __init__(self, request, *args, **kwargs):
        super().__init__(request, *args, **kwargs)
        # Hide everything except the discovery choice; the other values
        # were already collected in the previous step.
        for field, value in self.fields.items():
            if field == "discovery":
                continue
            value.widget = forms.HiddenInput()
        # Allow all VCS now (to handle zip file upload case)
        self.fields["vcs"].choices = VCS_REGISTRY.get_choices()
        self.discovered = self.perform_discovery(request, kwargs)
        for i, value in enumerate(self.discovered):
            self.fields["discovery"].choices.append((i, self.render_choice(value)))

    def perform_discovery(self, request, kwargs):
        """Return discovery results, reusing the session cache on POST."""
        if "data" in kwargs and "create_discovery" in request.session:
            discovered = []
            for i, data in enumerate(request.session["create_discovery"]):
                item = DiscoveryResult(data)
                item.meta = request.session["create_discovery_meta"][i]
                discovered.append(item)
            return discovered
        try:
            self.clean_instance(kwargs["initial"])
            discovered = self.discover()
            if not discovered:
                # Retry with eager matching when strict discovery found nothing.
                discovered = self.discover(eager=True)
        except ValidationError:
            discovered = []
        # Cache for the subsequent POST round-trip.
        request.session["create_discovery"] = discovered
        request.session["create_discovery_meta"] = [x.meta for x in discovered]
        return discovered

    def discover(self, eager: bool = False):
        return discover(
            self.instance.full_path,
            source_language=self.instance.source_language.code,
            eager=eager,
        )

    def clean(self):
        super().clean()
        discovery = self.cleaned_data.get("discovery")
        if discovery and discovery != "manual":
            # Merge the chosen discovery result into the cleaned data.
            self.cleaned_data.update(self.discovered[int(discovery)])
class ComponentRenameForm(SettingsBaseForm, ComponentDocsMixin):
    """Component rename form; only the URL slug can be changed."""

    class Meta:
        model = Component
        fields = ["slug"]
class ComponentMoveForm(SettingsBaseForm, ComponentDocsMixin):
    """Component move form: relocate a component into another project."""

    class Meta:
        model = Component
        fields = ["project"]

    def __init__(self, request, *args, **kwargs):
        super().__init__(request, *args, **kwargs)
        # Only offer projects the requesting user manages.
        self.fields["project"].queryset = request.user.managed_projects
class ProjectSettingsForm(SettingsBaseForm, ProjectDocsMixin, ProjectAntispamMixin):
    """Project settings form."""

    class Meta:
        model = Project
        fields = (
            "name",
            "web",
            "instructions",
            "set_language_team",
            "use_shared_tm",
            "contribute_shared_tm",
            "enable_hooks",
            "language_aliases",
            "access_control",
            "translation_review",
            "source_review",
        )
        widgets = {
            "access_control": forms.RadioSelect,
            "instructions": MarkdownTextarea,
            "language_aliases": forms.TextInput,
        }

    def clean(self):
        """Validate access-control changes against user permissions and licensing."""
        data = self.cleaned_data
        if settings.OFFER_HOSTING:
            # On hosted Weblate the two shared-TM toggles are tied together.
            data["contribute_shared_tm"] = data["use_shared_tm"]
        if (
            "access_control" not in data
            or data["access_control"] is None
            or data["access_control"] == ""
        ):
            # Field may be disabled/absent; fall back to the current value.
            data["access_control"] = self.instance.access_control
        access = data["access_control"]
        self.changed_access = access != self.instance.access_control
        if self.changed_access and not self.user_can_change_access:
            raise ValidationError(
                {
                    "access_control": _(
                        "You do not have permission to change project access control."
                    )
                }
            )
        if self.changed_access and access in (
            Project.ACCESS_PUBLIC,
            Project.ACCESS_PROTECTED,
        ):
            # Publicly visible projects must have licensed components.
            unlicensed = self.instance.component_set.filter(license="")
            if unlicensed:
                raise ValidationError(
                    {
                        "access_control": _(
                            "You must specify a license for these components "
                            "to make them publicly accessible: %s"
                        )
                        % ", ".join(unlicensed.values_list("name", flat=True))
                    }
                )

    def save(self, commit: bool = True):
        super().save(commit=commit)
        if self.changed_access:
            # Record the access-control change in the project history.
            Change.objects.create(
                project=self.instance,
                action=Change.ACTION_ACCESS_EDIT,
                user=self.user,
                details={"access_control": self.instance.access_control},
            )

    def __init__(self, request, *args, **kwargs):
        super().__init__(request, *args, **kwargs)
        self.user = request.user
        self.user_can_change_access = request.user.has_perm(
            "billing:project.permissions", self.instance
        )
        self.changed_access = False
        self.helper.form_tag = False
        if not self.user_can_change_access:
            # Render the access widget read-only and explain why.
            disabled = {"disabled": True}
            self.fields["access_control"].required = False
            self.fields["access_control"].help_text = _(
                "You do not have permission to change project access control."
            )
        else:
            disabled = {}
        self.helper.layout = Layout(
            TabHolder(
                Tab(
                    _("Basic"),
                    "name",
                    "web",
                    "instructions",
                    css_id="basic",
                ),
                Tab(
                    _("Access"),
                    InlineRadios(
                        "access_control",
                        template="%s/layout/radioselect_access.html",
                        **disabled,
                    ),
                    css_id="access",
                ),
                Tab(
                    _("Workflow"),
                    "set_language_team",
                    "use_shared_tm",
                    "contribute_shared_tm",
                    "enable_hooks",
                    "language_aliases",
                    "translation_review",
                    "source_review",
                    css_id="workflow",
                ),
                Tab(
                    _("Components"),
                    ContextDiv(
                        template="snippets/project-component-settings.html",
                        context={"object": self.instance, "user": request.user},
                    ),
                    css_id="components",
                ),
                template="layout/pills.html",
            )
        )
        if settings.OFFER_HOSTING:
            # Hosted instances hide the contribute toggle (mirrors use_shared_tm
            # in clean()) and do not offer custom ACL.
            self.fields["contribute_shared_tm"].widget = forms.HiddenInput()
            self.fields["use_shared_tm"].help_text = _(
                "Uses and contributes to the pool of shared translations "
                "between projects."
            )
            self.fields["access_control"].choices = [
                choice
                for choice in self.fields["access_control"].choices
                if choice[0] != Project.ACCESS_CUSTOM
            ]
class ProjectRenameForm(SettingsBaseForm, ProjectDocsMixin):
    """Project rename form; only the URL slug can be changed."""

    class Meta:
        model = Project
        fields = ["slug"]
class ProjectCreateForm(SettingsBaseForm, ProjectDocsMixin, ProjectAntispamMixin):
    """Project creation form."""

    # This is fake field with is either hidden or configured
    # in the view
    billing = forms.ModelChoiceField(
        label=_("Billing"),
        queryset=User.objects.none(),
        required=True,
        empty_label=None,
    )

    class Meta:
        model = Project
        fields = ("name", "slug", "web", "instructions")
class ReplaceForm(forms.Form):
    """Search & replace over translation strings."""

    q = QueryField(
        required=False, help_text=_("Optional additional filter on the strings")
    )
    search = forms.CharField(
        label=_("Search string"),
        min_length=1,
        required=True,
        strip=False,
        help_text=_("Case sensitive string to search for and replace."),
    )
    replacement = forms.CharField(
        label=_("Replacement string"), min_length=1, required=True, strip=False
    )

    def __init__(self, *args, **kwargs):
        # Distinct auto_id prefix to avoid ID clashes with other forms.
        kwargs["auto_id"] = "id_replace_%s"
        super().__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            SearchField("q"),
            Field("search"),
            Field("replacement"),
            Div(template="snippets/replace-help.html"),
        )
class ReplaceConfirmForm(forms.Form):
    """Confirmation step of search & replace: pick which units to change."""

    units = forms.ModelMultipleChoiceField(queryset=Unit.objects.none(), required=False)
    confirm = forms.BooleanField(required=True, initial=True, widget=forms.HiddenInput)

    def __init__(self, units, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Limit the selectable units to the matches from the search step.
        self.fields["units"].queryset = units
class MatrixLanguageForm(forms.Form):
    """Language selection for the translation matrix view."""

    lang = forms.MultipleChoiceField(
        label=_("Languages"), choices=[], widget=forms.SelectMultiple
    )

    def __init__(self, component, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Offer only languages translated in this component, excluding
        # the source language (it is always shown in the matrix).
        languages = Language.objects.filter(translation__component=component).exclude(
            pk=component.source_language_id
        )
        self.fields["lang"].choices = languages.as_choices()
class NewUnitBaseForm(forms.Form):
    """Common behavior for the "add new string" forms.

    Subclasses add ``source``/``context``/``target`` fields as appropriate
    for monolingual, bilingual and glossary components.
    """

    variant = forms.ModelChoiceField(
        Unit.objects.none(),
        widget=forms.HiddenInput,
        required=False,
    )

    def __init__(self, translation, user, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.translation = translation
        # Variants may point at any unit of this translation.
        self.fields["variant"].queryset = translation.unit_set.all()
        self.user = user

    def clean(self):
        try:
            data = self.as_kwargs()
        except KeyError:
            # Probably some fields validation has failed
            return
        self.translation.validate_new_unit_data(**data)

    def get_glossary_flags(self):
        # Overridden by glossary mixins to add terminology flags.
        return ""

    def as_kwargs(self):
        """Return cleaned data as kwargs for ``Translation.add_unit``."""
        flags = Flags()
        flags.merge(self.get_glossary_flags())
        variant = self.cleaned_data.get("variant")
        if variant:
            flags.set_value("variant", variant.source)
        return {
            "context": self.cleaned_data.get("context", ""),
            "source": self.cleaned_data["source"],
            "target": self.cleaned_data.get("target"),
            "extra_flags": flags.format(),
            "explanation": self.cleaned_data.get("explanation", ""),
            "auto_context": self.cleaned_data.get("auto_context", False),
        }
class NewMonolingualUnitForm(NewUnitBaseForm):
    """Adding a new string to a monolingual component (key + source text)."""

    context = forms.CharField(
        label=_("Translation key"),
        help_text=_(
            "Key used to identify string in translation file. "
            "File format specific rules might apply."
        ),
        required=True,
    )
    source = PluralField(
        label=_("Source language text"),
        help_text=_(
            "You can edit this later, as with any other string in "
            "the source language."
        ),
        required=True,
    )

    def __init__(self, translation, user, *args, **kwargs):
        super().__init__(translation, user, *args, **kwargs)
        # Tab order: key first, then source text.
        self.fields["context"].widget.attrs["tabindex"] = 99
        self.fields["source"].widget.attrs["tabindex"] = 100
        self.fields["source"].widget.profile = user.profile
        # The plural widget renders from a Unit; give it an empty one.
        self.fields["source"].initial = Unit(translation=translation, id_hash=0)
class NewBilingualSourceUnitForm(NewUnitBaseForm):
    """Adding a new source string to a bilingual component."""

    context = forms.CharField(
        label=_("Context"),
        help_text=_("Optional context to clarify the source strings."),
        required=False,
    )
    auto_context = forms.BooleanField(
        required=False,
        initial=True,
        label=_("Automatically adjust context when same string already exists."),
    )
    source = PluralField(
        label=_("Source string"),
        required=True,
    )

    def __init__(self, translation, user, *args, **kwargs):
        super().__init__(translation, user, *args, **kwargs)
        # Tab order: context first, then source text.
        self.fields["context"].widget.attrs["tabindex"] = 99
        # Components can customize how the context field is labelled.
        self.fields["context"].label = translation.component.context_label
        self.fields["source"].widget.attrs["tabindex"] = 100
        self.fields["source"].widget.profile = user.profile
        # The plural widget renders from a Unit; give it an empty one
        # bound to the source translation.
        self.fields["source"].initial = Unit(
            translation=translation.component.source_translation, id_hash=0
        )
class NewBilingualUnitForm(NewBilingualSourceUnitForm):
    """Adding a new string with its translation to a bilingual component."""

    target = PluralField(
        label=_("Translated string"),
        help_text=_(
            "You can edit this later, as with any other string in the translation."
        ),
        required=True,
    )

    def __init__(self, translation, user, *args, **kwargs):
        super().__init__(translation, user, *args, **kwargs)
        # Target comes after context (99) and source (100) in the tab order.
        self.fields["target"].widget.attrs["tabindex"] = 101
        self.fields["target"].widget.profile = user.profile
        self.fields["target"].initial = Unit(translation=translation, id_hash=0)
class NewBilingualGlossarySourceUnitForm(GlossaryAddMixin, NewBilingualSourceUnitForm):
    """Adding a source string to a glossary.

    New glossary entries are flagged as terminology by default.
    """

    def __init__(self, translation, user, *args, **kwargs):
        # Robustness fix: tolerate the "initial" kwarg being absent, not only
        # being None — the original indexed kwargs["initial"] directly and
        # raised KeyError when the caller omitted it.
        if kwargs.get("initial") is None:
            kwargs["initial"] = {}
        kwargs["initial"]["terminology"] = True
        super().__init__(translation, user, *args, **kwargs)
class NewBilingualGlossaryUnitForm(GlossaryAddMixin, NewBilingualUnitForm):
    """Adding a translated string to a glossary; behavior comes from the mixins."""

    pass
def get_new_unit_form(translation, user, data=None, initial=None):
    """Pick and build the right "add new string" form for *translation*.

    Monolingual components get the key + source form, glossaries get the
    terminology variants, and everything else gets the plain bilingual forms.
    """
    component = translation.component
    if component.has_template():
        form_class = NewMonolingualUnitForm
    elif component.is_glossary:
        if translation.is_source:
            form_class = NewBilingualGlossarySourceUnitForm
        else:
            form_class = NewBilingualGlossaryUnitForm
    elif translation.is_source:
        form_class = NewBilingualSourceUnitForm
    else:
        form_class = NewBilingualUnitForm
    return form_class(translation, user, data=data, initial=initial)
class CachedQueryIterator(ModelChoiceIterator):
    """Choice iterator for cached querysets.

    Assumes the queryset is reused, so it iterates it directly rather than
    issuing separate ``iterator()`` or ``count()`` queries.
    """

    def _offers_empty_label(self):
        # Shared by the protocol methods below.
        return self.field.empty_label is not None

    def __iter__(self):
        if self._offers_empty_label():
            yield ("", self.field.empty_label)
        yield from (self.choice(obj) for obj in self.queryset)

    def __len__(self):
        extra = 1 if self._offers_empty_label() else 0
        return len(self.queryset) + extra

    def __bool__(self):
        return self._offers_empty_label() or bool(self.queryset)
class CachedModelMultipleChoiceField(forms.ModelMultipleChoiceField):
    """Multiple-choice field that avoids re-querying a cached queryset."""

    iterator = CachedQueryIterator

    def _get_queryset(self):
        # Unlike the parent, do not call .all() — keep the cached queryset.
        return self._queryset

    def _set_queryset(self, queryset):
        self._queryset = queryset
        self.widget.choices = self.choices

    queryset = property(_get_queryset, _set_queryset)
class BulkEditForm(forms.Form):
    """Bulk edit of state, flags and labels on all strings matching a query."""

    q = QueryField(required=True)
    state = forms.ChoiceField(
        label=_("State to set"), choices=((-1, _("Do not change")),) + STATE_CHOICES
    )
    add_flags = FlagField(label=_("Translation flags to add"), required=False)
    remove_flags = FlagField(label=_("Translation flags to remove"), required=False)
    add_labels = CachedModelMultipleChoiceField(
        queryset=Label.objects.none(),
        label=_("Labels to add"),
        widget=forms.CheckboxSelectMultiple(),
        required=False,
    )
    remove_labels = CachedModelMultipleChoiceField(
        queryset=Label.objects.none(),
        label=_("Labels to remove"),
        widget=forms.CheckboxSelectMultiple(),
        required=False,
    )

    def __init__(self, user, obj, *args, **kwargs):
        project = kwargs.pop("project")
        kwargs["auto_id"] = "id_bulk_%s"
        super().__init__(*args, **kwargs)
        labels = project.label_set.all()
        if labels:
            self.fields["remove_labels"].queryset = labels
            self.fields["add_labels"].queryset = labels
        # Empty/read-only can never be set manually; approval needs review perm.
        excluded = {STATE_EMPTY, STATE_READONLY}
        if user is not None and not user.has_perm("unit.review", obj):
            excluded.add(STATE_APPROVED)
        # Filter offered states
        self.fields["state"].choices = [
            x for x in self.fields["state"].choices if x[0] not in excluded
        ]
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Div(template="snippets/bulk-help.html"),
            SearchField("q"),
            Field("state"),
            Field("add_flags"),
            Field("remove_flags"),
        )
        # Label widgets are only rendered when the project defines labels.
        if labels:
            self.helper.layout.append(InlineCheckboxes("add_labels"))
            self.helper.layout.append(InlineCheckboxes("remove_labels"))
class ContributorAgreementForm(forms.Form):
    """Accepting a component's contributor agreement before translating."""

    confirm = forms.BooleanField(
        label=_("I accept the contributor agreement"), required=True
    )
    # Redirect target after accepting the agreement.
    next = forms.CharField(required=False, widget=forms.HiddenInput)
class BaseDeleteForm(forms.Form):
    """Base for destructive-removal forms.

    The user has to type the object's full slug to confirm; subclasses set
    ``warning_template`` to render an object-specific warning.
    """

    confirm = forms.CharField(required=True)
    warning_template = ""

    def __init__(self, obj, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.obj = obj
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            ContextDiv(
                template=self.warning_template,
                css_class="form-group",
                context={"object": obj},
            ),
            Field("confirm"),
        )
        self.helper.form_tag = False

    def clean(self):
        # Require an exact match of the full slug as deletion confirmation.
        if self.cleaned_data.get("confirm") != self.obj.full_slug:
            raise ValidationError(
                _("The slug does not match the one marked for deletion!")
            )
class TranslationDeleteForm(BaseDeleteForm):
    """Confirmation form for removing a translation."""

    confirm = forms.CharField(
        label=_("Removal confirmation"),
        help_text=_("Please type in the full slug of the translation to confirm."),
        required=True,
    )
    warning_template = "trans/delete-translation.html"
class ComponentDeleteForm(BaseDeleteForm):
    """Confirmation form for removing a component."""

    confirm = forms.CharField(
        label=_("Removal confirmation"),
        help_text=_("Please type in the full slug of the component to confirm."),
        required=True,
    )
    warning_template = "trans/delete-component.html"
class ProjectDeleteForm(BaseDeleteForm):
    """Confirmation form for removing a project."""

    confirm = forms.CharField(
        label=_("Removal confirmation"),
        help_text=_("Please type in the slug of the project to confirm."),
        required=True,
    )
    warning_template = "trans/delete-project.html"
class ProjectLanguageDeleteForm(BaseDeleteForm):
    """Confirmation form for removing one language from a whole project."""

    confirm = forms.CharField(
        label=_("Removal confirmation"),
        help_text=_("Please type in the slug of the project and language to confirm."),
        required=True,
    )
    warning_template = "trans/delete-project-language.html"
class AnnouncementForm(forms.ModelForm):
    """Posting an announcement shown on project/component/translation pages."""

    class Meta:
        model = Announcement
        fields = ["message", "category", "expiry", "notify"]
        widgets = {
            "expiry": WeblateDateInput(),
            "message": MarkdownTextarea,
        }
class ChangesForm(forms.Form):
    """Filtering the history/changes listing."""

    project = forms.ChoiceField(label=_("Project"), choices=[("", "")], required=False)
    lang = forms.ChoiceField(label=_("Language"), choices=[("", "")], required=False)
    action = forms.MultipleChoiceField(
        label=_("Action"),
        required=False,
        widget=SortedSelectMultiple,
        choices=Change.ACTION_CHOICES,
    )
    user = UsernameField(label=_("Author username"), required=False, help_text=None)
    start_date = WeblateDateField(
        label=_("Starting date"), required=False, datepicker=False
    )
    end_date = WeblateDateField(
        label=_("Ending date"), required=False, datepicker=False
    )

    def __init__(self, request, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Populate the choice lists; the empty ("", "") choice means "any".
        self.fields["lang"].choices += Language.objects.have_translation().as_choices()
        self.fields["project"].choices += [
            (project.slug, project.name) for project in request.user.allowed_projects
        ]
class LabelForm(forms.ModelForm):
    """Creating or editing a string label within a project."""

    class Meta:
        model = Label
        fields = ("name", "color")
        widgets = {"color": ColorWidget()}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
class ProjectTokenDeleteForm(forms.Form):
    """Revoking an API token belonging to a project."""

    token = forms.ModelChoiceField(
        ProjectToken.objects.none(),
        widget=forms.HiddenInput,
        required=True,
    )

    def __init__(self, project, *args, **kwargs):
        self.project = project
        super().__init__(*args, **kwargs)
        # Only this project's tokens can be chosen for deletion.
        self.fields["token"].queryset = project.projecttoken_set.all()
class ProjectTokenCreateForm(forms.ModelForm):
    """Creating an API token scoped to a project."""

    class Meta:
        model = ProjectToken
        fields = ["name", "expires", "project"]
        widgets = {
            "expires": WeblateDateInput(),
            # Project is fixed by the view, not chosen by the user.
            "project": forms.HiddenInput,
        }

    def __init__(self, project, *args, **kwargs):
        self.project = project
        kwargs["initial"] = {"project": project}
        super().__init__(*args, **kwargs)

    def clean_project(self):
        # Guard against the hidden project field being tampered with.
        if self.project != self.cleaned_data["project"]:
            raise ValidationError("Invalid project!")
        return self.cleaned_data["project"]

    def clean_expires(self):
        expires = self.cleaned_data["expires"]
        # Normalize the expiry to the very end of the chosen day.
        expires = expires.replace(hour=23, minute=59, second=59, microsecond=999999)
        if expires < timezone.now():
            raise forms.ValidationError(gettext("Expiry cannot be in the past!"))
        return expires
class ProjectGroupDeleteForm(forms.Form):
    """Removing an access-control team defined on a project."""

    group = forms.ModelChoiceField(
        Group.objects.none(),
        widget=forms.HiddenInput,
        required=True,
    )

    def __init__(self, project, *args, **kwargs):
        self.project = project
        super().__init__(*args, **kwargs)
        # Only teams defined on this project can be chosen.
        self.fields["group"].queryset = project.defined_groups.all()
class ProjectUserGroupForm(UserManageForm):
    """Assigning a user to the teams defined on a project."""

    groups = forms.ModelMultipleChoiceField(
        Group.objects.none(),
        widget=forms.CheckboxSelectMultiple,
        label=_("Teams"),
        required=False,
    )

    def __init__(self, project, *args, **kwargs):
        self.project = project
        super().__init__(*args, **kwargs)
        # The user is fixed by the view; only team membership is edited.
        self.fields["user"].widget = forms.HiddenInput()
        self.fields["groups"].queryset = project.defined_groups.all()
| #
# Copyright © 2012–2022 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
import copy
import json
import re
from datetime import date, datetime, timedelta
from typing import Dict, List
from crispy_forms.bootstrap import InlineCheckboxes, InlineRadios, Tab, TabHolder
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Div, Field, Fieldset, Layout
from django import forms
from django.conf import settings
from django.core.exceptions import NON_FIELD_ERRORS, PermissionDenied, ValidationError
from django.core.validators import FileExtensionValidator
from django.db.models import Q
from django.forms import model_to_dict
from django.forms.models import ModelChoiceIterator
from django.forms.utils import from_current_timezone
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils import timezone
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.safestring import mark_safe
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
from translation_finder import DiscoveryResult, discover
from weblate.auth.models import Group, User
from weblate.checks.flags import Flags
from weblate.checks.models import CHECKS
from weblate.checks.utils import highlight_string
from weblate.formats.models import EXPORTERS, FILE_FORMATS
from weblate.glossary.forms import GlossaryAddMixin
from weblate.lang.data import BASIC_LANGUAGES
from weblate.lang.models import Language
from weblate.machinery import MACHINE_TRANSLATION_SERVICES
from weblate.trans.defines import COMPONENT_NAME_LENGTH, REPO_LENGTH
from weblate.trans.filter import FILTERS, get_filter_choice
from weblate.trans.models import (
Announcement,
Change,
Component,
Label,
Project,
ProjectToken,
Unit,
)
from weblate.trans.specialchars import RTL_CHARS_DATA, get_special_chars
from weblate.trans.util import check_upload_method_permissions, is_repo_link
from weblate.trans.validators import validate_check_flags
from weblate.utils.antispam import is_spam
from weblate.utils.errors import report_error
from weblate.utils.forms import (
ColorWidget,
ContextDiv,
EmailField,
SearchField,
SortedSelect,
SortedSelectMultiple,
UsernameField,
)
from weblate.utils.hash import checksum_to_hash, hash_to_checksum
from weblate.utils.search import parse_query
from weblate.utils.state import (
STATE_APPROVED,
STATE_CHOICES,
STATE_EMPTY,
STATE_FUZZY,
STATE_READONLY,
STATE_TRANSLATED,
)
from weblate.utils.validators import validate_file_extension
from weblate.vcs.models import VCS_REGISTRY
# HTML fragments used to assemble the editor toolbars rendered by
# PluralTextarea (special-character buttons, RTL/LTR toggle, groupings).
# Placeholders are filled via str.format().
BUTTON_TEMPLATE = """
<button class="btn btn-default {0}" title="{1}" {2}>{3}</button>
"""
RADIO_TEMPLATE = """
<label class="btn btn-default {0}" title="{1}">
<input type="radio" name="{2}" value="{3}" {4}/>
{5}
</label>
"""
GROUP_TEMPLATE = """
<div class="btn-group btn-group-xs" {0}>{1}</div>
"""
TOOLBAR_TEMPLATE = """
<div class="btn-toolbar pull-right flip editor-toolbar">{0}</div>
"""
class MarkdownTextarea(forms.Textarea):
    """Textarea widget wired up for the frontend Markdown editor."""

    def __init__(self, **kwargs):
        # Attributes the JavaScript Markdown editor expects on the element.
        editor_attrs = {
            "dir": "auto",
            "class": "markdown-editor highlight-editor",
            "data-mode": "markdown",
        }
        kwargs["attrs"] = editor_attrs
        super().__init__(**kwargs)
class WeblateDateInput(forms.DateInput):
    """Date input rendered as ``<input type="date">``, optionally with a datepicker."""

    def __init__(self, datepicker=True, **kwargs):
        field_attrs = {"type": "date"}
        if datepicker:
            # Hook up the bootstrap-datepicker widget on the frontend.
            field_attrs.update(
                {
                    "data-provide": "datepicker",
                    "data-date-format": "yyyy-mm-dd",
                }
            )
        super().__init__(attrs=field_attrs, format="%Y-%m-%d", **kwargs)
class WeblateDateField(forms.DateField):
    """Date field using WeblateDateInput and producing aware datetimes."""

    def __init__(self, datepicker=True, **kwargs):
        if "widget" not in kwargs:
            kwargs["widget"] = WeblateDateInput(datepicker=datepicker)
        super().__init__(**kwargs)

    def to_python(self, value):
        """Produce timezone aware datetime with 00:00:00 as time."""
        value = super().to_python(value)
        if isinstance(value, date):
            return from_current_timezone(
                datetime(value.year, value.month, value.day, 0, 0, 0)
            )
        return value
class ChecksumField(forms.CharField):
    """Hidden field carrying a checksum ID of a translation unit.

    ``clean`` converts the textual checksum into the internal hash value,
    returning ``None`` for an empty submission.
    """

    def __init__(self, *args, **kwargs):
        kwargs["widget"] = forms.HiddenInput
        super().__init__(*args, **kwargs)

    def clean(self, value):
        # CharField validation runs first for its side effects.
        super().clean(value)
        if value:
            try:
                return checksum_to_hash(value)
            except ValueError:
                raise ValidationError(_("Invalid checksum specified!"))
        return None
class UserField(forms.CharField):
    """Field resolving a username or e-mail address to a User object."""

    def clean(self, value):
        if not value:
            return None
        try:
            # Accept either the username or the e-mail address.
            return User.objects.get(Q(username=value) | Q(email=value))
        except User.DoesNotExist:
            raise ValidationError(_("Could not find any such user."))
        except User.MultipleObjectsReturned:
            raise ValidationError(_("More possible users were found."))
class QueryField(forms.CharField):
    """Field holding a Weblate search query; validated by parsing it."""

    def __init__(self, **kwargs):
        if "label" not in kwargs:
            kwargs["label"] = _("Query")
        if "required" not in kwargs:
            kwargs["required"] = False
        super().__init__(**kwargs)

    def clean(self, value):
        if not value:
            if self.required:
                raise ValidationError(_("Missing query string."))
            return ""
        try:
            # Parse only to validate syntax; the raw string is what is stored.
            parse_query(value)
            return value
        except Exception as error:
            report_error()
            raise ValidationError(_("Could not parse query string: {}").format(error))
class FlagField(forms.CharField):
    """CharField validating Weblate translation-flags syntax."""

    default_validators = [validate_check_flags]
class PluralTextarea(forms.Textarea):
    """Text-area extension which possibly handles plurals.

    Renders one textarea per plural form of the target language, each with
    an editor toolbar (special characters, RTL/LTR toggle), and reassembles
    the submitted values back into a list in ``value_from_datadict``.
    """

    def __init__(self, *args, **kwargs):
        # User profile supplying special-character preferences; the owning
        # form assigns this after construction.
        self.profile = None
        super().__init__(*args, **kwargs)

    def get_rtl_toolbar(self, fieldname):
        """Return toolbar HTML with RTL special-character buttons."""
        groups = []
        # Special chars
        chars = []
        for name, char, value in RTL_CHARS_DATA:
            chars.append(
                BUTTON_TEMPLATE.format(
                    "specialchar",
                    name,
                    'data-value="{}"'.format(
                        value.encode("ascii", "xmlcharrefreplace").decode("ascii")
                    ),
                    char,
                )
            )
        groups.append(GROUP_TEMPLATE.format("", "\n".join(chars)))
        return TOOLBAR_TEMPLATE.format("\n".join(groups))

    def get_rtl_toggle(self, language, fieldname):
        """Return the RTL/LTR toggle HTML, or "" for LTR languages."""
        if language.direction != "rtl":
            return ""
        # RTL/LTR switch
        rtl_name = f"rtl-{fieldname}"
        rtl_switch = [
            RADIO_TEMPLATE.format(
                "direction-toggle active",
                gettext("Toggle text direction"),
                rtl_name,
                "rtl",
                'checked="checked"',
                "RTL",
            ),
            RADIO_TEMPLATE.format(
                "direction-toggle",
                gettext("Toggle text direction"),
                rtl_name,
                "ltr",
                "",
                "LTR",
            ),
        ]
        groups = [GROUP_TEMPLATE.format('data-toggle="buttons"', "\n".join(rtl_switch))]
        return mark_safe(TOOLBAR_TEMPLATE.format("\n".join(groups)))

    def get_toolbar(self, language, fieldname, unit, idx, source):
        """Return toolbar HTML code."""
        profile = self.profile
        groups = []
        # Special chars
        chars = [
            BUTTON_TEMPLATE.format(
                "specialchar",
                name,
                'data-value="{}"'.format(
                    value.encode("ascii", "xmlcharrefreplace").decode("ascii")
                ),
                char,
            )
            for name, char, value in get_special_chars(
                language, profile.special_chars, unit.source
            )
        ]
        groups.append(GROUP_TEMPLATE.format("", "\n".join(chars)))
        result = TOOLBAR_TEMPLATE.format("\n".join(groups))
        if language.direction == "rtl":
            result = self.get_rtl_toolbar(fieldname) + result
        return mark_safe(result)

    def render(self, name, value, attrs=None, renderer=None, **kwargs):
        """Render all textareas with correct plural labels."""
        # The "value" passed in is actually a Unit instance (see PluralField).
        unit = value
        values = unit.get_target_plurals()
        lang = unit.translation.language
        plural = unit.translation.plural
        tabindex = self.attrs["tabindex"]
        placeables = [hl[2] for hl in highlight_string(unit.source_string, unit)]
        # Need to add extra class
        attrs["class"] = "translation-editor form-control highlight-editor"
        attrs["tabindex"] = tabindex
        attrs["lang"] = lang.code
        attrs["dir"] = lang.direction
        attrs["rows"] = 3
        attrs["data-max"] = unit.get_max_length()
        attrs["data-mode"] = unit.edit_mode
        attrs["data-placeables"] = "|".join(re.escape(pl) for pl in placeables if pl)
        if unit.readonly:
            attrs["readonly"] = 1
        # Okay we have more strings
        ret = []
        plurals = unit.get_source_plurals()
        base_id = f"id_{unit.checksum}"
        for idx, val in enumerate(values):
            # Generate ID
            fieldname = f"{name}_{idx}"
            fieldid = f"{base_id}_{idx}"
            attrs["id"] = fieldid
            attrs["tabindex"] = tabindex + idx
            # Pair plural forms beyond the first with the source plural.
            if idx and len(plurals) > 1:
                source = plurals[1]
            else:
                source = plurals[0]
            # Render textare
            textarea = super().render(fieldname, val, attrs, renderer, **kwargs)
            # Label for plural
            label = escape(unit.translation.language)
            if len(values) != 1:
                label = f"{label}, {plural.get_plural_label(idx)}"
            ret.append(
                render_to_string(
                    "snippets/editor.html",
                    {
                        "toolbar": self.get_toolbar(lang, fieldid, unit, idx, source),
                        "fieldid": fieldid,
                        "label": mark_safe(label),
                        "textarea": textarea,
                        "max_length": attrs["data-max"],
                        "length": len(val),
                        "source_length": len(source),
                        "rtl_toggle": self.get_rtl_toggle(lang, fieldid),
                    },
                )
            )
        # Show plural formula for more strings
        if len(values) > 1:
            ret.append(
                render_to_string(
                    "snippets/plural-formula.html",
                    {"plural": plural, "user": self.profile.user},
                )
            )
        # Join output
        return mark_safe("".join(ret))

    def value_from_datadict(self, data, files, name):
        """Return processed plurals as a list."""
        ret = []
        # Up to 10 plural forms; stop at the first missing index.
        for idx in range(0, 10):
            fieldname = f"{name}_{idx:d}"
            if fieldname not in data:
                break
            ret.append(data.get(fieldname, ""))
        # Normalize line endings submitted by browsers.
        return [r.replace("\r", "") for r in ret]
class PluralField(forms.CharField):
    """Renderer for the plural field.

    The only difference from CharField is that it does not force value to be string.
    """

    def __init__(self, max_length=None, min_length=None, **kwargs):
        # max_length/min_length are accepted for signature compatibility but
        # intentionally not forwarded — length limits are enforced elsewhere.
        kwargs["label"] = ""
        super().__init__(widget=PluralTextarea, **kwargs)

    def to_python(self, value):
        """Return list or string as returned by PluralTextarea."""
        return value

    def clean(self, value):
        value = super().clean(value)
        # Require at least one non-empty plural form when the field is required.
        if not value or (self.required and not any(value)):
            raise ValidationError(_("Missing translated string!"))
        return value
class FilterField(forms.ChoiceField):
    """Choice field offering the predefined string-list search filters."""

    def __init__(self, *args, **kwargs):
        kwargs["label"] = _("Search filter")
        if "required" not in kwargs:
            kwargs["required"] = False
        kwargs["choices"] = get_filter_choice()
        kwargs["error_messages"] = {
            "invalid_choice": _("Please choose a valid filter type.")
        }
        super().__init__(*args, **kwargs)

    def to_python(self, value):
        # Accept the legacy "untranslated" alias for the "todo" filter.
        if value == "untranslated":
            return "todo"
        return super().to_python(value)
class ChecksumForm(forms.Form):
    """Form for handling checksum IDs for translation."""

    checksum = ChecksumField(required=True)

    def __init__(self, unit_set, *args, **kwargs):
        # Queryset the checksum is resolved against in clean_checksum().
        self.unit_set = unit_set
        super().__init__(*args, **kwargs)

    def clean_checksum(self):
        """Validate whether checksum is valid and fetches unit for it."""
        if "checksum" not in self.cleaned_data:
            return
        unit_set = self.unit_set
        try:
            # Store the resolved unit for the view to use.
            self.cleaned_data["unit"] = unit_set.filter(
                id_hash=self.cleaned_data["checksum"]
            )[0]
        except (Unit.DoesNotExist, IndexError):
            raise ValidationError(
                _("The string you wanted to translate is no longer available.")
            )
class UnitForm(forms.Form):
    """Base form bound to a single translation unit."""

    def __init__(self, unit: Unit, *args, **kwargs):
        self.unit = unit
        super().__init__(*args, **kwargs)
class FuzzyField(forms.BooleanField):
    """Checkbox toggling the "Needs editing" (fuzzy) state of a unit."""

    # Rendered with the help text shown as an icon tooltip.
    help_as_icon = True

    def __init__(self, *args, **kwargs):
        kwargs["label"] = _("Needs editing")
        kwargs["help_text"] = _(
            'Strings are usually marked as "Needs editing" after the source '
            "string is updated, or when marked as such manually."
        )
        super().__init__(*args, **kwargs)
        self.widget.attrs["class"] = "fuzzy_checkbox"
class TranslationForm(UnitForm):
    """Form used for translation of single string."""

    # Hash of the source string at render time; detects concurrent
    # source-string changes in clean().
    contentsum = ChecksumField(required=True)
    # Hash of the translation at render time; detects concurrent edits.
    translationsum = ChecksumField(required=True)
    target = PluralField(required=False)
    fuzzy = FuzzyField(required=False)
    review = forms.ChoiceField(
        label=_("Review state"),
        choices=[
            (STATE_FUZZY, _("Needs editing")),
            (STATE_TRANSLATED, _("Waiting for review")),
            (STATE_APPROVED, _("Approved")),
        ],
        required=False,
        widget=forms.RadioSelect,
    )
    explanation = forms.CharField(
        widget=MarkdownTextarea,
        label=_("Explanation"),
        help_text=_(
            "Additional explanation to clarify meaning or usage of the string."
        ),
        max_length=1000,
        required=False,
    )

    def __init__(self, user, unit: Unit, *args, **kwargs):
        if unit is not None:
            kwargs["initial"] = {
                "checksum": unit.checksum,
                "contentsum": hash_to_checksum(unit.content_hash),
                "translationsum": hash_to_checksum(unit.get_target_hash()),
                "target": unit,
                "fuzzy": unit.fuzzy,
                "review": unit.state,
                "explanation": unit.explanation,
            }
            # Namespace widget IDs by checksum so several unit forms can
            # coexist on one page.
            kwargs["auto_id"] = f"id_{unit.checksum}_%s"
        tabindex = kwargs.pop("tabindex", 100)
        super().__init__(unit, *args, **kwargs)
        if unit.readonly:
            for field in ["target", "fuzzy", "review"]:
                self.fields[field].widget.attrs["readonly"] = 1
            self.fields["review"].choices = [
                (STATE_READONLY, _("Read only")),
            ]
        self.user = user
        self.fields["target"].widget.attrs["tabindex"] = tabindex
        self.fields["target"].widget.profile = user.profile
        self.fields["review"].widget.attrs["class"] = "review_radio"
        # Avoid failing validation on untranslated string
        if args:
            self.fields["review"].choices.append((STATE_EMPTY, ""))
        self.helper = FormHelper()
        self.helper.form_method = "post"
        self.helper.form_tag = False
        self.helper.disable_csrf = True
        self.helper.layout = Layout(
            Field("target"),
            Field("fuzzy"),
            Field("contentsum"),
            Field("translationsum"),
            InlineRadios("review"),
            Field("explanation"),
        )
        # Reviewers see the review radio; everyone else only gets the
        # fuzzy checkbox.
        if unit and user.has_perm("unit.review", unit.translation):
            self.fields["fuzzy"].widget = forms.HiddenInput()
        else:
            self.fields["review"].widget = forms.HiddenInput()
        # Explanation editing is limited to glossary components.
        if not unit.translation.component.is_glossary:
            self.fields["explanation"].widget = forms.HiddenInput()

    def clean(self):
        super().clean()
        # Check required fields
        required = {"target", "contentsum", "translationsum"}
        if not required.issubset(self.cleaned_data):
            return
        unit = self.unit
        # Reject the edit if the source or translation changed since the
        # form was rendered (optimistic concurrency check).
        if self.cleaned_data["contentsum"] != unit.content_hash:
            raise ValidationError(
                _(
                    "Source string has been changed meanwhile. "
                    "Please check your changes."
                )
            )
        if self.cleaned_data["translationsum"] != unit.get_target_hash():
            raise ValidationError(
                _(
                    "Translation of the string has been changed meanwhile. "
                    "Please check your changes."
                )
            )
        max_length = unit.get_max_length()
        for text in self.cleaned_data["target"]:
            if len(text) > max_length:
                raise ValidationError(_("Translation text too long!"))
        # Derive the resulting unit state from review/fuzzy inputs.
        if self.user.has_perm(
            "unit.review", unit.translation
        ) and self.cleaned_data.get("review"):
            self.cleaned_data["state"] = int(self.cleaned_data["review"])
        elif self.cleaned_data["fuzzy"]:
            self.cleaned_data["state"] = STATE_FUZZY
        else:
            self.cleaned_data["state"] = STATE_TRANSLATED
class ZenTranslationForm(TranslationForm):
    """Translation form variant for the zen-mode editor."""

    checksum = ChecksumField(required=True)

    def __init__(self, user, unit, *args, **kwargs):
        super().__init__(user, unit, *args, **kwargs)
        # Zen forms post directly to the save_zen view and render their
        # own form tag with CSRF enabled.
        self.helper.form_action = reverse(
            "save_zen", kwargs=unit.translation.get_reverse_url_kwargs()
        )
        self.helper.form_tag = True
        self.helper.disable_csrf = False
        self.helper.layout.append(Field("checksum"))
class DownloadForm(forms.Form):
    """Form for downloading translations filtered by a search query."""

    q = QueryField()
    format = forms.ChoiceField(
        label=_("File format"),
        choices=[(x.name, x.verbose) for x in EXPORTERS.values()],
        initial="po",
        required=True,
        widget=forms.RadioSelect,
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            SearchField("q"),
            InlineRadios("format"),
        )
class SimpleUploadForm(forms.Form):
    """Base form for uploading a file."""

    file = forms.FileField(label=_("File"), validators=[validate_file_extension])
    method = forms.ChoiceField(
        label=_("File upload mode"),
        choices=(
            ("translate", _("Add as translation")),
            ("approve", _("Add as approved translation")),
            ("suggest", _("Add as suggestion")),
            ("fuzzy", _("Add as translation needing edit")),
            ("replace", _("Replace existing translation file")),
            ("source", _("Update source strings")),
            ("add", _("Add new strings")),
        ),
        widget=forms.RadioSelect,
        required=True,
    )
    fuzzy = forms.ChoiceField(
        label=_("Processing of strings needing edit"),
        choices=(
            ("", _("Do not import")),
            ("process", _("Import as string needing edit")),
            ("approve", _("Import as translated")),
        ),
        required=False,
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_tag = False

    @staticmethod
    def get_field_doc(field):
        # Documentation anchor for each upload field.
        return ("user/files", f"upload-{field.name}")

    def remove_translation_choice(self, value):
        """Remove add as translation choice."""
        choices = self.fields["method"].choices
        self.fields["method"].choices = [
            choice for choice in choices if choice[0] != value
        ]
class UploadForm(SimpleUploadForm):
    """Upload form with the option to overwrite current messages."""

    conflicts = forms.ChoiceField(
        label=_("Conflict handling"),
        help_text=_(
            "Whether to overwrite existing translations if the string is "
            "already translated."
        ),
        choices=(
            ("", _("Update only untranslated strings")),
            ("replace-translated", _("Update translated strings")),
            ("replace-approved", _("Update translated and approved strings")),
        ),
        required=False,
        initial="replace-translated",
    )
class ExtraUploadForm(UploadForm):
    """Advanced upload form for users who can override authorship."""

    author_name = forms.CharField(label=_("Author name"))
    author_email = EmailField(label=_("Author e-mail"))
def get_upload_form(user, translation, *args, **kwargs):
    """Return correct upload form based on user permissions.

    Picks the richest form class the user may use, then strips upload
    methods and conflict-handling choices they lack permission for.
    """
    if user.has_perm("upload.authorship", translation):
        form = ExtraUploadForm
        kwargs["initial"] = {"author_name": user.full_name, "author_email": user.email}
    elif user.has_perm("upload.overwrite", translation):
        form = UploadForm
    else:
        form = SimpleUploadForm
    result = form(*args, **kwargs)
    for method in [x[0] for x in result.fields["method"].choices]:
        if not check_upload_method_permissions(user, translation, method):
            result.remove_translation_choice(method)
    # Remove approved choice for non review projects; SimpleUploadForm
    # has no "conflicts" field, hence the identity check.
    if not user.has_perm("unit.review", translation) and form is not SimpleUploadForm:
        result.fields["conflicts"].choices = [
            choice
            for choice in result.fields["conflicts"].choices
            # The choice value is "replace-approved" (see UploadForm);
            # the previous comparison against "approved" never matched.
            if choice[0] != "replace-approved"
        ]
    return result
class SearchForm(forms.Form):
    """Text searching form."""

    # pylint: disable=invalid-name
    q = QueryField()
    sort_by = forms.CharField(required=False, widget=forms.HiddenInput)
    checksum = ChecksumField(required=False)
    offset = forms.IntegerField(min_value=-1, required=False, widget=forms.HiddenInput)
    # Extra kwargs applied to the offset field in the layout; overridden
    # by PositionSearchForm.
    offset_kwargs = {}

    def __init__(self, user, language=None, show_builder=True, **kwargs):
        """Generate choices for other component in same project."""
        self.user = user
        self.language = language
        super().__init__(**kwargs)
        self.helper = FormHelper(self)
        self.helper.disable_csrf = True
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Div(
                Field("offset", **self.offset_kwargs),
                SearchField("q"),
                Field("sort_by", template="snippets/sort-field.html"),
                css_class="btn-toolbar",
                role="toolbar",
            ),
            ContextDiv(
                template="snippets/query-builder.html",
                context={
                    "user": self.user,
                    "month_ago": timezone.now() - timedelta(days=31),
                    "show_builder": show_builder,
                    "language": self.language,
                },
            ),
            Field("checksum"),
        )

    def get_name(self):
        """Return verbose name for a search."""
        return FILTERS.get_search_name(self.cleaned_data.get("q", ""))

    def get_search_query(self):
        return self.cleaned_data["q"]

    def clean_offset(self):
        # Default to the first page when no offset was supplied.
        if self.cleaned_data.get("offset") is None:
            self.cleaned_data["offset"] = 1
        return self.cleaned_data["offset"]

    def items(self):
        """Serialize cleaned data into (name, value) string pairs for URLs."""
        items = []
        # Skip checksum and offset as these change
        ignored = {"offset", "checksum"}
        for param in sorted(self.cleaned_data):
            value = self.cleaned_data[param]
            # We don't care about empty values or ignored
            if value is None or param in ignored:
                continue
            if isinstance(value, bool):
                # Only store true values
                if value:
                    items.append((param, "1"))
            elif isinstance(value, int):
                # Avoid storing 0 values
                if value > 0:
                    items.append((param, str(value)))
            elif isinstance(value, datetime):
                # Convert date to string
                items.append((param, value.date().isoformat()))
            elif isinstance(value, list):
                for val in value:
                    items.append((param, val))
            elif isinstance(value, User):
                items.append((param, value.username))
            else:
                # It should be string here
                if value:
                    items.append((param, value))
        return items

    def urlencode(self):
        return urlencode(self.items())

    def reset_offset(self):
        """Reset offset to avoid using form as default for new search."""
        data = copy.copy(self.data)
        data["offset"] = "1"
        data["checksum"] = ""
        self.data = data
        return self
class PositionSearchForm(SearchForm):
    """Search form with a visible position (offset) field."""

    offset = forms.IntegerField(min_value=-1, required=False)
    offset_kwargs = {"template": "snippets/position-field.html"}
class MergeForm(UnitForm):
    """Simple form for merging translation of two units."""

    # Primary key of the unit to merge from.
    merge = forms.IntegerField()

    def clean(self):
        super().clean()
        if "merge" not in self.cleaned_data:
            return None
        try:
            unit = self.unit
            translation = unit.translation
            project = translation.component.project
            # Restrict merge source to the same project and language.
            self.cleaned_data["merge_unit"] = merge_unit = Unit.objects.get(
                pk=self.cleaned_data["merge"],
                translation__component__project=project,
                translation__language=translation.language,
            )
            # Compare in Python to ensure case sensitiveness on MySQL
            if not translation.is_source and unit.source != merge_unit.source:
                raise ValidationError(_("Could not find merged string."))
        except Unit.DoesNotExist:
            raise ValidationError(_("Could not find merged string."))
        return self.cleaned_data
class RevertForm(UnitForm):
    """Form for reverting edits."""

    # Primary key of the Change to revert; must belong to this unit.
    revert = forms.IntegerField()

    def clean(self):
        super().clean()
        if "revert" not in self.cleaned_data:
            return None
        try:
            self.cleaned_data["revert_change"] = Change.objects.get(
                pk=self.cleaned_data["revert"], unit=self.unit
            )
        except Change.DoesNotExist:
            raise ValidationError(_("Could not find reverted change."))
        return self.cleaned_data
class AutoForm(forms.Form):
    """Automatic translation form."""

    mode = forms.ChoiceField(
        label=_("Automatic translation mode"),
        choices=[
            ("suggest", _("Add as suggestion")),
            ("translate", _("Add as translation")),
            ("fuzzy", _("Add as needing edit")),
        ],
        initial="suggest",
    )
    filter_type = FilterField(
        required=True,
        initial="todo",
        help_text=_(
            "Please note that translating all strings will "
            "discard all existing translations."
        ),
    )
    auto_source = forms.ChoiceField(
        label=_("Automatic translation source"),
        choices=[
            ("others", _("Other translation components")),
            ("mt", _("Machine translation")),
        ],
        initial="others",
    )
    component = forms.ChoiceField(
        label=_("Components"),
        required=False,
        help_text=_(
            "Turn on contribution to shared translation memory for the project to "
            "get access to additional components."
        ),
        initial="",
    )
    engines = forms.MultipleChoiceField(
        label=_("Machine translation engines"), choices=[], required=False
    )
    threshold = forms.IntegerField(
        label=_("Score threshold"), initial=80, min_value=1, max_value=100
    )

    def __init__(self, obj, *args, **kwargs):
        """Generate choices for other component in same project."""
        super().__init__(*args, **kwargs)
        self.obj = obj
        # Add components from other projects with enabled shared TM
        self.components = obj.project.component_set.filter(
            source_language=obj.source_language
        ) | Component.objects.filter(
            source_language_id=obj.source_language_id,
            project__contribute_shared_tm=True,
        ).exclude(
            project=obj.project
        )
        # Fetching is faster than doing count on possibly thousands of components
        if len(self.components.values_list("id")[:30]) == 30:
            # Do not show choices when too many
            self.fields["component"] = forms.CharField(
                required=False,
                label=_("Components"),
                help_text=_(
                    "Enter component to use as source, "
                    "keep blank to use all components in current project."
                ),
            )
        else:
            choices = [
                (s.id, str(s))
                for s in self.components.order_project().prefetch_related("project")
            ]
            self.fields["component"].choices = [
                ("", _("All components in current project"))
            ] + choices
        self.fields["engines"].choices = [
            (key, mt.name) for key, mt in MACHINE_TRANSLATION_SERVICES.items()
        ]
        if "weblate" in MACHINE_TRANSLATION_SERVICES.keys():
            self.fields["engines"].initial = "weblate"
        # Limit filter choices to those meaningful for auto-translation.
        use_types = {"all", "nottranslated", "todo", "fuzzy", "check:inconsistent"}
        self.fields["filter_type"].choices = [
            x for x in self.fields["filter_type"].choices if x[0] in use_types
        ]
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            Field("mode"),
            Field("filter_type"),
            InlineRadios("auto_source", id="select_auto_source"),
            Div("component", css_id="auto_source_others"),
            Div("engines", "threshold", css_id="auto_source_mt"),
        )

    def clean_component(self):
        """Resolve the component input (pk, slug or project/slug) to a pk."""
        component = self.cleaned_data["component"]
        if not component:
            return None
        if component.isdigit():
            # Numeric input is treated as a primary key.
            try:
                result = self.components.get(pk=component)
            except Component.DoesNotExist:
                raise ValidationError(_("Component not found!"))
        else:
            slashes = component.count("/")
            if slashes == 0:
                # Bare slug within the current project.
                try:
                    result = self.components.get(
                        slug=component, project=self.obj.project
                    )
                except Component.DoesNotExist:
                    raise ValidationError(_("Component not found!"))
            elif slashes == 1:
                # project/component slug pair.
                project_slug, component_slug = component.split("/")
                try:
                    result = self.components.get(
                        slug=component_slug, project__slug=project_slug
                    )
                except Component.DoesNotExist:
                    raise ValidationError(_("Component not found!"))
            else:
                raise ValidationError(_("Please provide valid component slug!"))
        return result.pk
class CommentForm(forms.Form):
    """Simple commenting form."""

    scope = forms.ChoiceField(
        label=_("Scope"),
        help_text=_(
            "Is your comment specific to this "
            "translation or generic for all of them?"
        ),
        choices=(
            (
                "report",
                _("Report issue with the source string"),
            ),
            (
                "global",
                _("Source string comment, suggestions for changes to this string"),
            ),
            (
                "translation",
                _("Translation comment, discussions with other translators"),
            ),
        ),
    )
    comment = forms.CharField(
        widget=MarkdownTextarea,
        label=_("New comment"),
        help_text=_("You can use Markdown and mention users by @username."),
        max_length=1000,
    )

    def __init__(self, project, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Remove bug report in case source review is not enabled
        if not project.source_review:
            self.fields["scope"].choices = self.fields["scope"].choices[1:]
class EngageForm(forms.Form):
    """Form to choose language for engagement widgets."""

    lang = forms.ChoiceField(required=False, choices=[("", _("All languages"))])
    component = forms.ChoiceField(required=False, choices=[("", _("All components"))])

    def __init__(self, user, project, *args, **kwargs):
        """Dynamically generate choices for used languages in project."""
        super().__init__(*args, **kwargs)
        self.fields["lang"].choices += project.languages.as_choices()
        # Only list components the user is allowed to access.
        self.fields["component"].choices += (
            project.component_set.filter_access(user)
            .order()
            .values_list("slug", "name")
        )
class NewLanguageOwnerForm(forms.Form):
    """Form for requesting new language."""

    lang = forms.MultipleChoiceField(
        label=_("Languages"), choices=[], widget=forms.SelectMultiple
    )

    def get_lang_objects(self):
        # Exclude languages the component already has (as translation or
        # as a linked component language).
        return Language.objects.exclude(
            Q(translation__component=self.component) | Q(component=self.component)
        )

    def __init__(self, component, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.component = component
        languages = self.get_lang_objects()
        self.fields["lang"].choices = languages.as_choices()
class NewLanguageForm(NewLanguageOwnerForm):
    """Form for requesting new language."""

    # Single-choice variant for users without add_more permission.
    lang = forms.ChoiceField(label=_("Language"), choices=[], widget=forms.Select)

    def get_lang_objects(self):
        # Restrict to the basic language set (optionally overridden in
        # settings).
        codes = BASIC_LANGUAGES
        if settings.BASIC_LANGUAGES is not None:
            codes = settings.BASIC_LANGUAGES
        return super().get_lang_objects().filter(code__in=codes)

    def __init__(self, component, *args, **kwargs):
        super().__init__(component, *args, **kwargs)
        self.fields["lang"].choices = [("", _("Please choose"))] + self.fields[
            "lang"
        ].choices

    def clean_lang(self):
        # Compatibility with NewLanguageOwnerForm
        return [self.cleaned_data["lang"]]
def get_new_language_form(request, component):
    """Return new language form for user."""
    user = request.user
    if not user.has_perm("translation.add", component):
        raise PermissionDenied()
    # Users allowed to add arbitrary languages get the multi-select form.
    return (
        NewLanguageOwnerForm
        if user.has_perm("translation.add_more", component)
        else NewLanguageForm
    )
class ContextForm(forms.ModelForm):
    """Form for editing source-string context (explanation, labels, flags)."""

    class Meta:
        model = Unit
        fields = ("explanation", "labels", "extra_flags")
        widgets = {
            "labels": forms.CheckboxSelectMultiple(),
            "explanation": MarkdownTextarea,
        }

    # Documentation anchors per field, used by get_field_doc().
    doc_links = {
        "explanation": ("admin/translating", "additional-explanation"),
        "labels": ("devel/translations", "labels"),
        "extra_flags": ("admin/translating", "additional-flags"),
    }

    def get_field_doc(self, field):
        return self.doc_links[field.name]

    def __init__(self, data=None, instance=None, user=None, **kwargs):
        # Preselect labels attached to this unit or to its source unit.
        kwargs["initial"] = {
            "labels": Label.objects.filter(
                Q(unit=instance) | Q(unit__source_unit=instance)
            )
        }
        super().__init__(data=data, instance=instance, **kwargs)
        project = instance.translation.component.project
        self.fields["labels"].queryset = project.label_set.all()
        self.helper = FormHelper(self)
        self.helper.disable_csrf = True
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field("explanation"),
            Field("labels"),
            ContextDiv(
                template="snippets/labels_description.html",
                context={"project": project, "user": user},
            ),
            Field("extra_flags"),
        )

    def save(self, commit=True):
        if commit:
            # same_content avoids bumping content-change bookkeeping for a
            # metadata-only save; M2M (labels) still needs explicit saving.
            self.instance.save(same_content=True)
            self._save_m2m()
            return self.instance
        return super().save(commit)
class UserManageForm(forms.Form):
    """Form for adding a user to a project."""

    user = UserField(
        label=_("User to add"),
        help_text=_(
            "Please type in an existing Weblate account name or e-mail address."
        ),
    )
class UserBlockForm(forms.Form):
    """Form for blocking a user, optionally for a limited duration."""

    user = UserField(
        label=_("User to block"),
        help_text=_(
            "Please type in an existing Weblate account name or e-mail address."
        ),
    )
    # Duration in days; empty means an indefinite block.
    expiry = forms.ChoiceField(
        label=_("Block duration"),
        choices=(
            ("", _("Block user until I unblock them")),
            ("1", _("Block user for one day")),
            ("7", _("Block user for one week")),
            ("30", _("Block user for one month")),
        ),
        required=False,
    )
class ReportsForm(forms.Form):
    """Form configuring credit/count reports (format and time period)."""

    style = forms.ChoiceField(
        label=_("Report format"),
        help_text=_("Choose file format for the report"),
        choices=(
            ("rst", _("reStructuredText")),
            ("json", _("JSON")),
            ("html", _("HTML")),
        ),
    )
    period = forms.ChoiceField(
        label=_("Report period"),
        choices=(
            ("30days", _("Last 30 days")),
            ("this-month", _("This month")),
            ("month", _("Last month")),
            ("this-year", _("This year")),
            ("year", _("Last year")),
            ("", _("As specified")),
        ),
        required=False,
    )
    start_date = WeblateDateField(
        label=_("Starting date"), required=False, datepicker=False
    )
    end_date = WeblateDateField(
        label=_("Ending date"), required=False, datepicker=False
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field("style"),
            Field("period"),
            Div(
                "start_date",
                "end_date",
                css_class="input-group input-daterange",
                data_provide="datepicker",
                data_date_format="yyyy-mm-dd",
            ),
        )

    def clean(self):
        """Resolve the selected period into concrete start/end datetimes."""
        super().clean()
        # Invalid value, skip rest of the validation
        if "period" not in self.cleaned_data:
            return
        # Handle predefined periods
        if self.cleaned_data["period"] == "30days":
            end = timezone.now()
            start = end - timedelta(days=30)
        elif self.cleaned_data["period"] == "month":
            # Last day of the previous month, then its first day.
            end = timezone.now().replace(day=1) - timedelta(days=1)
            start = end.replace(day=1)
        elif self.cleaned_data["period"] == "this-month":
            # Jump past month end (day=1 + 31 days), then normalize back
            # to the last day of the current month.
            end = timezone.now().replace(day=1) + timedelta(days=31)
            end = end.replace(day=1) - timedelta(days=1)
            start = end.replace(day=1)
        elif self.cleaned_data["period"] == "year":
            year = timezone.now().year - 1
            end = timezone.make_aware(datetime(year, 12, 31))
            start = timezone.make_aware(datetime(year, 1, 1))
        elif self.cleaned_data["period"] == "this-year":
            year = timezone.now().year
            end = timezone.make_aware(datetime(year, 12, 31))
            start = timezone.make_aware(datetime(year, 1, 1))
        else:
            # Validate custom period
            if not self.cleaned_data.get("start_date"):
                raise ValidationError({"start_date": _("Missing date!")})
            if not self.cleaned_data.get("end_date"):
                raise ValidationError({"end_date": _("Missing date!")})
            start = self.cleaned_data["start_date"]
            end = self.cleaned_data["end_date"]
        # Sanitize timestamps
        self.cleaned_data["start_date"] = start.replace(
            hour=0, minute=0, second=0, microsecond=0
        )
        self.cleaned_data["end_date"] = end.replace(
            hour=23, minute=59, second=59, microsecond=999999
        )
        # Final validation
        if self.cleaned_data["start_date"] > self.cleaned_data["end_date"]:
            msg = _("Starting date has to be before ending date!")
            raise ValidationError({"start_date": msg, "end_date": msg})
class CleanRepoMixin:
    """Mixin validating access to components referenced via repo links."""

    def clean_repo(self):
        repo = self.cleaned_data.get("repo")
        # repo[10:] skips the link scheme prefix — presumably
        # "weblate://" (10 chars); verify against is_repo_link.
        if not repo or not is_repo_link(repo) or "/" not in repo[10:]:
            return repo
        project, component = repo[10:].split("/", 1)
        try:
            obj = Component.objects.get(
                slug__iexact=component, project__slug__iexact=project
            )
        except Component.DoesNotExist:
            # Unknown target: let downstream validation deal with it.
            return repo
        if not self.request.user.has_perm("component.edit", obj):
            raise ValidationError(
                _("You do not have permission to access this component!")
            )
        return repo
class SettingsBaseForm(CleanRepoMixin, forms.ModelForm):
    """Component base form."""

    class Meta:
        model = Component
        fields = []

    def __init__(self, request, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Request is kept for permission checks in CleanRepoMixin.
        self.request = request
        self.helper = FormHelper()
        self.helper.form_tag = False
class SelectChecksWidget(SortedSelectMultiple):
    """Multi-select widget for checks, serialized as JSON in form data."""

    def __init__(self, attrs=None, choices=()):
        # The choices argument is ignored on purpose; the widget always
        # offers the full registry of checks.
        choices = CHECKS.get_choices()
        super().__init__(attrs=attrs, choices=choices)

    def value_from_datadict(self, data, files, name):
        # Submitted value may arrive as a JSON string; decode it then.
        value = super().value_from_datadict(data, files, name)
        if isinstance(value, str):
            return json.loads(value)
        return value

    def format_value(self, value):
        # Render the value as JSON unless it already is a string.
        value = super().format_value(value)
        if isinstance(value, str):
            return value
        return json.dumps(value)
class SelectChecksField(forms.CharField):
    """Field passing through the widget's decoded value unchanged."""

    def to_python(self, value):
        # The widget already decoded JSON; no string coercion wanted.
        return value
class ComponentDocsMixin:
    """Mixin providing documentation anchors for component form fields."""

    @staticmethod
    def get_field_doc(field):
        # Component fields are documented under admin/projects.
        return ("admin/projects", "component-{}".format(field.name))
class ProjectDocsMixin:
    """Mixin providing documentation anchors for project form fields."""

    @staticmethod
    def get_field_doc(field):
        # Project fields are documented under admin/projects.
        return ("admin/projects", "project-{}".format(field.name))
class SpamCheckMixin:
    """Mixin rejecting form values flagged as spam."""

    def spam_check(self, value):
        # Requires self.request to be set by the hosting form.
        if is_spam(value, self.request):
            raise ValidationError(_("This field has been identified as spam!"))
class ComponentAntispamMixin(SpamCheckMixin):
    """Spam-check the component contributor agreement."""

    def clean_agreement(self):
        value = self.cleaned_data["agreement"]
        self.spam_check(value)
        return value
class ProjectAntispamMixin(SpamCheckMixin):
    """Spam-check the project website and instructions fields."""

    def clean_web(self):
        value = self.cleaned_data["web"]
        self.spam_check(value)
        return value

    def clean_instructions(self):
        value = self.cleaned_data["instructions"]
        self.spam_check(value)
        return value
class ComponentSettingsForm(
    SettingsBaseForm, ComponentDocsMixin, ComponentAntispamMixin
):
    """Component settings form."""

    class Meta:
        model = Component
        fields = (
            "name",
            "report_source_bugs",
            "license",
            "agreement",
            "allow_translation_propagation",
            "enable_suggestions",
            "suggestion_voting",
            "suggestion_autoaccept",
            "priority",
            "check_flags",
            "enforced_checks",
            "commit_message",
            "add_message",
            "delete_message",
            "merge_message",
            "addon_message",
            "vcs",
            "repo",
            "branch",
            "push",
            "push_branch",
            "repoweb",
            "push_on_commit",
            "commit_pending_age",
            "merge_style",
            "file_format",
            "edit_template",
            "new_lang",
            "language_code_style",
            "source_language",
            "new_base",
            "filemask",
            "template",
            "intermediate",
            "language_regex",
            "variant_regex",
            "restricted",
            "auto_lock_error",
            "links",
            "manage_units",
            "is_glossary",
            "glossary_color",
        )
        widgets = {
            "enforced_checks": SelectChecksWidget,
            "source_language": SortedSelect,
        }
        field_classes = {"enforced_checks": SelectChecksField}

    def __init__(self, request, *args, **kwargs):
        super().__init__(request, *args, **kwargs)
        # Hide the restricted flag from users without sufficient rights.
        if self.hide_restricted:
            self.fields["restricted"].widget = forms.HiddenInput()
        # Only offer link targets among projects the user manages.
        self.fields["links"].queryset = request.user.managed_projects.exclude(
            pk=self.instance.pk
        )
        self.helper.layout = Layout(
            TabHolder(
                Tab(
                    _("Basic"),
                    Fieldset(_("Name"), "name"),
                    Fieldset(_("License"), "license", "agreement"),
                    Fieldset(_("Upstream links"), "report_source_bugs"),
                    Fieldset(
                        _("Listing and access"),
                        "priority",
                        "restricted",
                        "links",
                    ),
                    Fieldset(
                        _("Glossary"),
                        "is_glossary",
                        "glossary_color",
                    ),
                    css_id="basic",
                ),
                Tab(
                    _("Translation"),
                    Fieldset(
                        _("Suggestions"),
                        "enable_suggestions",
                        "suggestion_voting",
                        "suggestion_autoaccept",
                    ),
                    Fieldset(
                        _("Translation settings"),
                        "allow_translation_propagation",
                        "manage_units",
                        "check_flags",
                        "variant_regex",
                        "enforced_checks",
                    ),
                    css_id="translation",
                ),
                Tab(
                    _("Version control"),
                    Fieldset(
                        _("Locations"),
                        Div(template="trans/repo_help.html"),
                        "vcs",
                        "repo",
                        "branch",
                        "push",
                        "push_branch",
                        "repoweb",
                    ),
                    Fieldset(
                        _("Version control settings"),
                        "push_on_commit",
                        "commit_pending_age",
                        "merge_style",
                        "auto_lock_error",
                    ),
                    css_id="vcs",
                ),
                Tab(
                    _("Commit messages"),
                    Fieldset(
                        _("Commit messages"),
                        ContextDiv(
                            template="trans/messages_help.html",
                            context={"user": request.user},
                        ),
                        "commit_message",
                        "add_message",
                        "delete_message",
                        "merge_message",
                        "addon_message",
                    ),
                    css_id="messages",
                ),
                Tab(
                    _("Files"),
                    Fieldset(
                        _("Translation files"),
                        "file_format",
                        "filemask",
                        "language_regex",
                        "source_language",
                    ),
                    Fieldset(
                        _("Monolingual translations"),
                        "template",
                        "edit_template",
                        "intermediate",
                    ),
                    Fieldset(
                        _("Adding new languages"),
                        "new_base",
                        "new_lang",
                        "language_code_style",
                    ),
                    css_id="files",
                ),
                template="layout/pills.html",
            )
        )
        # Limit the VCS choices to the supported set, always keeping the
        # component's current VCS selectable.
        vcses = (
            "git",
            "gerrit",
            "github",
            "gitlab",
            "pagure",
            "local",
            "git-force-push",
        )
        if self.instance.vcs not in vcses:
            vcses = (self.instance.vcs,)
        self.fields["vcs"].choices = [
            c for c in self.fields["vcs"].choices if c[0] in vcses
        ]

    @property
    def hide_restricted(self):
        """Whether the restricted flag should be hidden for this user."""
        user = self.request.user
        if user.is_superuser:
            return False
        if settings.OFFER_HOSTING:
            return True
        return not any(
            "component.edit" in permissions
            for permissions, _langs in user.component_permissions[self.instance.pk]
        )

    def clean(self):
        data = self.cleaned_data
        # When hidden, keep the stored value to prevent tampering via
        # a crafted POST.
        if self.hide_restricted:
            data["restricted"] = self.instance.restricted
class ComponentCreateForm(SettingsBaseForm, ComponentDocsMixin, ComponentAntispamMixin):
    """Component creation form."""

    class Meta:
        model = Component
        fields = [
            "project",
            "name",
            "slug",
            "vcs",
            "repo",
            "branch",
            "push",
            "push_branch",
            "repoweb",
            "file_format",
            "filemask",
            "template",
            "edit_template",
            "intermediate",
            "new_base",
            "license",
            "new_lang",
            "language_code_style",
            "language_regex",
            "source_language",
            "is_glossary",
        ]
        widgets = {"source_language": SortedSelect}
class ComponentNameForm(forms.Form, ComponentDocsMixin, ComponentAntispamMixin):
    """Base form carrying component name, slug and glossary flag."""

    name = forms.CharField(
        label=_("Component name"),
        max_length=COMPONENT_NAME_LENGTH,
        help_text=_("Display name"),
    )
    slug = forms.SlugField(
        label=_("URL slug"),
        max_length=COMPONENT_NAME_LENGTH,
        help_text=_("Name used in URLs and filenames."),
    )
    is_glossary = forms.BooleanField(
        label=_("Use as a glossary"),
        required=False,
    )

    def __init__(self, request, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        # Kept for spam checks (SpamCheckMixin) in subclasses.
        self.request = request
class ComponentSelectForm(ComponentNameForm):
    """Component creation by copying configuration from an existing one."""

    component = forms.ModelChoiceField(
        queryset=Component.objects.none(),
        label=_("Component"),
        help_text=_("Select existing component to copy configuration from."),
    )

    def __init__(self, request, *args, **kwargs):
        # Plain form: drop the ModelForm-style instance kwarg if present.
        if "instance" in kwargs:
            kwargs.pop("instance")
        if "auto_id" not in kwargs:
            kwargs["auto_id"] = "id_existing_%s"
        super().__init__(request, *args, **kwargs)
class ComponentBranchForm(ComponentSelectForm):
    """Component creation by picking another branch of an existing component."""

    branch = forms.ChoiceField(label=_("Repository branch"))
    # Mapping of component pk to its available branch names, filled by
    # the view before validation.
    branch_data: Dict[int, List[str]] = {}
    instance = None

    def __init__(self, *args, **kwargs):
        kwargs["auto_id"] = "id_branch_%s"
        super().__init__(*args, **kwargs)

    def clean_component(self):
        component = self.cleaned_data["component"]
        # Branch choices depend on the selected component.
        self.fields["branch"].choices = [(x, x) for x in self.branch_data[component.pk]]
        return component

    def clean(self):
        form_fields = ("branch", "slug", "name")
        data = self.cleaned_data
        component = data.get("component")
        if not component or any(field not in data for field in form_fields):
            return
        kwargs = model_to_dict(component, exclude=["id", "links"])
        # We need an object, not an integer here
        kwargs["source_language"] = component.source_language
        kwargs["project"] = component.project
        for field in form_fields:
            kwargs[field] = data[field]
        self.instance = Component(**kwargs)
        try:
            self.instance.full_clean()
        except ValidationError as error:
            # Can not raise directly as this will contain errors
            # from fields not present here
            result = {NON_FIELD_ERRORS: []}
            for key, value in error.message_dict.items():
                if key in self.fields:
                    result[key] = value
                else:
                    result[NON_FIELD_ERRORS].extend(value)
            # Raise the filtered, field-attributed mapping; previously the
            # unfiltered error.messages was raised, discarding the work above.
            raise ValidationError(result)
class ComponentProjectForm(ComponentNameForm):
    """Component creation bound to a target project and source language."""

    project = forms.ModelChoiceField(
        queryset=Project.objects.none(), label=_("Project")
    )
    source_language = forms.ModelChoiceField(
        widget=SortedSelect,
        label=_("Source language"),
        help_text=_("Language used for source strings in all components"),
        queryset=Language.objects.all(),
    )

    def __init__(self, request, *args, **kwargs):
        if "instance" in kwargs:
            kwargs.pop("instance")
        super().__init__(request, *args, **kwargs)
        # It might be overriden based on preset project
        self.fields["source_language"].initial = Language.objects.default_language
        self.request = request
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.instance = None

    def clean(self):
        if "project" not in self.cleaned_data:
            return
        project = self.cleaned_data["project"]
        # Reject duplicate name/slug within the target project.
        name = self.cleaned_data.get("name")
        if name and project.component_set.filter(name__iexact=name).exists():
            raise ValidationError(
                {"name": _("Component with the same name already exists.")}
            )
        slug = self.cleaned_data.get("slug")
        if slug and project.component_set.filter(slug__iexact=slug).exists():
            raise ValidationError(
                {"slug": _("Component with the same name already exists.")}
            )
class ComponentScratchCreateForm(ComponentProjectForm):
    """Component creation from scratch (no existing translation files)."""

    file_format = forms.ChoiceField(
        label=_("File format"),
        initial="po-mono",
        # Only formats that can create new translations are offered.
        choices=FILE_FORMATS.get_choices(
            cond=lambda x: bool(x.new_translation) or hasattr(x, "update_bilingual")
        ),
    )

    def __init__(self, *args, **kwargs):
        kwargs["auto_id"] = "id_scratchcreate_%s"
        super().__init__(*args, **kwargs)
class ComponentZipCreateForm(ComponentProjectForm):
    """Component creation from an uploaded ZIP of translation files."""

    zipfile = forms.FileField(
        label=_("ZIP file containing translations"),
        validators=[FileExtensionValidator(allowed_extensions=["zip"])],
        widget=forms.FileInput(attrs={"accept": ".zip,application/zip"}),
    )
    field_order = ["zipfile", "project", "name", "slug"]

    def __init__(self, *args, **kwargs):
        kwargs["auto_id"] = "id_zipcreate_%s"
        super().__init__(*args, **kwargs)
class ComponentDocCreateForm(ComponentProjectForm):
    """Component creation from a single uploaded document."""

    docfile = forms.FileField(
        label=_("Document to translate"),
        validators=[validate_file_extension],
    )
    field_order = ["docfile", "project", "name", "slug"]

    def __init__(self, *args, **kwargs):
        kwargs["auto_id"] = "id_doccreate_%s"
        super().__init__(*args, **kwargs)
class ComponentInitCreateForm(CleanRepoMixin, ComponentProjectForm):
    """Component creation form.

    This is mostly copy from Component model. Probably should be extracted to standalone
    Repository model...
    """

    project = forms.ModelChoiceField(
        queryset=Project.objects.none(), label=_("Project")
    )
    vcs = forms.ChoiceField(
        label=_("Version control system"),
        help_text=_(
            "Version control system to use to access your "
            "repository with translations."
        ),
        choices=VCS_REGISTRY.get_choices(exclude={"local"}),
        initial=settings.DEFAULT_VCS,
    )
    repo = forms.CharField(
        label=_("Source code repository"),
        max_length=REPO_LENGTH,
        help_text=_(
            "URL of a repository, use weblate://project/component "
            "for sharing with other component."
        ),
    )
    branch = forms.CharField(
        label=_("Repository branch"),
        max_length=REPO_LENGTH,
        help_text=_("Repository branch to translate"),
        required=False,
    )

    def clean_instance(self, data):
        """Validate a Component built from the form data, storing it on self."""
        params = copy.copy(data)
        if "discovery" in params:
            params.pop("discovery")
        instance = Component(**params)
        # File-related fields are validated later in the wizard.
        instance.clean_fields(exclude=("filemask", "file_format", "license"))
        instance.validate_unique()
        instance.clean_repo()
        self.instance = instance
        # Create linked repos automatically
        repo = instance.suggest_repo_link()
        if repo:
            # Re-validate with the weblate:// link substituted in.
            data["repo"] = repo
            data["branch"] = ""
            self.clean_instance(data)

    def clean(self):
        self.clean_instance(self.cleaned_data)
class ComponentDiscoverForm(ComponentInitCreateForm):
    """Component creation step offering automatically discovered setups.

    Runs translation-finder over the checked-out repository and lets the
    user pick one of the discovered configurations (or configure manually).
    """
    discovery = forms.ChoiceField(
        label=_("Choose translation files to import"),
        choices=[("manual", _("Specify configuration manually"))],
        required=True,
        widget=forms.RadioSelect,
    )
    def render_choice(self, value):
        # Render one discovered configuration as an HTML radio label.
        context = copy.copy(value)
        try:
            format_cls = FILE_FORMATS[value["file_format"]]
            context["file_format_name"] = format_cls.name
            context["valid"] = True
        except KeyError:
            # Unknown file format: show the raw identifier, mark invalid.
            context["file_format_name"] = value["file_format"]
            context["valid"] = False
        context["origin"] = value.meta["origin"]
        return render_to_string("trans/discover-choice.html", context)
    def __init__(self, request, *args, **kwargs):
        super().__init__(request, *args, **kwargs)
        # Hide every inherited field; only the discovery choice is shown.
        for field, value in self.fields.items():
            if field == "discovery":
                continue
            value.widget = forms.HiddenInput()
        # Allow all VCS now (to handle zip file upload case)
        self.fields["vcs"].choices = VCS_REGISTRY.get_choices()
        self.discovered = self.perform_discovery(request, kwargs)
        for i, value in enumerate(self.discovered):
            self.fields["discovery"].choices.append((i, self.render_choice(value)))
    def perform_discovery(self, request, kwargs):
        # On form submission reuse results cached in the session to avoid
        # re-running discovery; otherwise discover and cache them.
        if "data" in kwargs and "create_discovery" in request.session:
            discovered = []
            for i, data in enumerate(request.session["create_discovery"]):
                item = DiscoveryResult(data)
                item.meta = request.session["create_discovery_meta"][i]
                discovered.append(item)
            return discovered
        try:
            self.clean_instance(kwargs["initial"])
            discovered = self.discover()
            if not discovered:
                # Retry with eager matching when nothing was found.
                discovered = self.discover(eager=True)
        except ValidationError:
            discovered = []
        request.session["create_discovery"] = discovered
        request.session["create_discovery_meta"] = [x.meta for x in discovered]
        return discovered
    def discover(self, eager: bool = False):
        return discover(
            self.instance.full_path,
            source_language=self.instance.source_language.code,
            eager=eager,
        )
    def clean(self):
        super().clean()
        discovery = self.cleaned_data.get("discovery")
        if discovery and discovery != "manual":
            # Merge the chosen discovered configuration into cleaned_data.
            self.cleaned_data.update(self.discovered[int(discovery)])
class ComponentRenameForm(SettingsBaseForm, ComponentDocsMixin):
    """Component rename form; only the URL slug can be changed."""
    class Meta:
        model = Component
        fields = ["slug"]
class ComponentMoveForm(SettingsBaseForm, ComponentDocsMixin):
    """Component move form; moves a component to another project."""
    class Meta:
        model = Component
        fields = ["project"]
    def __init__(self, request, *args, **kwargs):
        super().__init__(request, *args, **kwargs)
        # Only offer projects the requesting user manages as targets.
        self.fields["project"].queryset = request.user.managed_projects
class ProjectSettingsForm(SettingsBaseForm, ProjectDocsMixin, ProjectAntispamMixin):
    """Project settings form."""
    class Meta:
        model = Project
        fields = (
            "name",
            "web",
            "instructions",
            "set_language_team",
            "use_shared_tm",
            "contribute_shared_tm",
            "enable_hooks",
            "language_aliases",
            "access_control",
            "translation_review",
            "source_review",
        )
        widgets = {
            "access_control": forms.RadioSelect,
            "instructions": MarkdownTextarea,
            "language_aliases": forms.TextInput,
        }
    def clean(self):
        data = self.cleaned_data
        if settings.OFFER_HOSTING:
            # On hosted setups contributing to the shared TM is tied to
            # using it (the field is hidden in __init__).
            data["contribute_shared_tm"] = data["use_shared_tm"]
        if (
            "access_control" not in data
            or data["access_control"] is None
            or data["access_control"] == ""
        ):
            # Field may be absent/blank when the user lacks permission to
            # change it; keep the current value.
            data["access_control"] = self.instance.access_control
        access = data["access_control"]
        self.changed_access = access != self.instance.access_control
        if self.changed_access and not self.user_can_change_access:
            raise ValidationError(
                {
                    "access_control": _(
                        "You do not have permission to change project access control."
                    )
                }
            )
        if self.changed_access and access in (
            Project.ACCESS_PUBLIC,
            Project.ACCESS_PROTECTED,
        ):
            # Publicly visible projects require every component to carry
            # a license.
            unlicensed = self.instance.component_set.filter(license="")
            if unlicensed:
                raise ValidationError(
                    {
                        "access_control": _(
                            "You must specify a license for these components "
                            "to make them publicly accessible: %s"
                        )
                        % ", ".join(unlicensed.values_list("name", flat=True))
                    }
                )
    def save(self, commit: bool = True):
        super().save(commit=commit)
        if self.changed_access:
            # Record access-control changes in the project history.
            Change.objects.create(
                project=self.instance,
                action=Change.ACTION_ACCESS_EDIT,
                user=self.user,
                details={"access_control": self.instance.access_control},
            )
    def __init__(self, request, *args, **kwargs):
        super().__init__(request, *args, **kwargs)
        self.user = request.user
        self.user_can_change_access = request.user.has_perm(
            "billing:project.permissions", self.instance
        )
        self.changed_access = False
        self.helper.form_tag = False
        if not self.user_can_change_access:
            disabled = {"disabled": True}
            self.fields["access_control"].required = False
            self.fields["access_control"].help_text = _(
                "You do not have permission to change project access control."
            )
        else:
            disabled = {}
        self.helper.layout = Layout(
            TabHolder(
                Tab(
                    _("Basic"),
                    "name",
                    "web",
                    "instructions",
                    css_id="basic",
                ),
                Tab(
                    _("Access"),
                    InlineRadios(
                        "access_control",
                        template="%s/layout/radioselect_access.html",
                        **disabled,
                    ),
                    css_id="access",
                ),
                Tab(
                    _("Workflow"),
                    "set_language_team",
                    "use_shared_tm",
                    "contribute_shared_tm",
                    "enable_hooks",
                    "language_aliases",
                    "translation_review",
                    "source_review",
                    css_id="workflow",
                ),
                Tab(
                    _("Components"),
                    ContextDiv(
                        template="snippets/project-component-settings.html",
                        context={"object": self.instance, "user": request.user},
                    ),
                    css_id="components",
                ),
                template="layout/pills.html",
            )
        )
        if settings.OFFER_HOSTING:
            # Hosted setups: hide the contribute toggle (mirrors use_shared_tm
            # in clean()) and drop the custom ACL choice.
            self.fields["contribute_shared_tm"].widget = forms.HiddenInput()
            self.fields["use_shared_tm"].help_text = _(
                "Uses and contributes to the pool of shared translations "
                "between projects."
            )
            self.fields["access_control"].choices = [
                choice
                for choice in self.fields["access_control"].choices
                if choice[0] != Project.ACCESS_CUSTOM
            ]
class ProjectRenameForm(SettingsBaseForm, ProjectDocsMixin):
    """Project rename form; only the URL slug can be changed."""
    class Meta:
        model = Project
        fields = ["slug"]
class ProjectCreateForm(SettingsBaseForm, ProjectDocsMixin, ProjectAntispamMixin):
    """Project creation form."""
    # This is a fake field which is either hidden or configured
    # in the view
    billing = forms.ModelChoiceField(
        label=_("Billing"),
        queryset=User.objects.none(),
        required=True,
        empty_label=None,
    )
    class Meta:
        model = Project
        fields = ("name", "slug", "web", "instructions")
class ReplaceForm(forms.Form):
    """Search & replace form operating on translation strings."""
    q = QueryField(
        required=False, help_text=_("Optional additional filter on the strings")
    )
    search = forms.CharField(
        label=_("Search string"),
        min_length=1,
        required=True,
        strip=False,
        help_text=_("Case sensitive string to search for and replace."),
    )
    replacement = forms.CharField(
        label=_("Replacement string"), min_length=1, required=True, strip=False
    )
    def __init__(self, *args, **kwargs):
        # Prefix widget ids so this form can coexist with others on a page.
        kwargs["auto_id"] = "id_replace_%s"
        super().__init__(*args, **kwargs)
        helper = FormHelper(self)
        helper.form_tag = False
        helper.layout = Layout(
            SearchField("q"),
            Field("search"),
            Field("replacement"),
            Div(template="snippets/replace-help.html"),
        )
        self.helper = helper
class ReplaceConfirmForm(forms.Form):
    """Confirmation step of search & replace, listing affected units."""
    units = forms.ModelMultipleChoiceField(queryset=Unit.objects.none(), required=False)
    confirm = forms.BooleanField(required=True, initial=True, widget=forms.HiddenInput)
    def __init__(self, units, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Restrict the selectable units to those matched by the search.
        units_field = self.fields["units"]
        units_field.queryset = units
class MatrixLanguageForm(forms.Form):
    """Language selection form (matrix view)."""
    lang = forms.MultipleChoiceField(
        label=_("Languages"), choices=[], widget=forms.SelectMultiple
    )
    def __init__(self, component, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Offer only languages the component is translated into,
        # excluding its source language.
        languages = Language.objects.filter(translation__component=component).exclude(
            pk=component.source_language_id
        )
        self.fields["lang"].choices = languages.as_choices()
class NewUnitBaseForm(forms.Form):
    """Base form for adding a new translation unit (string)."""
    variant = forms.ModelChoiceField(
        Unit.objects.none(),
        widget=forms.HiddenInput,
        required=False,
    )
    def __init__(self, translation, user, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.translation = translation
        # A variant can only point at a unit of the same translation.
        self.fields["variant"].queryset = translation.unit_set.all()
        self.user = user
    def clean(self):
        try:
            data = self.as_kwargs()
        except KeyError:
            # Probably some fields validation has failed
            return
        self.translation.validate_new_unit_data(**data)
    def get_glossary_flags(self):
        # Overridden by glossary mixins to add terminology flags.
        return ""
    def as_kwargs(self):
        # Build keyword arguments for Translation.add_unit() /
        # validate_new_unit_data() from the cleaned form data.
        flags = Flags()
        flags.merge(self.get_glossary_flags())
        variant = self.cleaned_data.get("variant")
        if variant:
            flags.set_value("variant", variant.source)
        return {
            "context": self.cleaned_data.get("context", ""),
            "source": self.cleaned_data["source"],
            "target": self.cleaned_data.get("target"),
            "extra_flags": flags.format(),
            "explanation": self.cleaned_data.get("explanation", ""),
            "auto_context": self.cleaned_data.get("auto_context", False),
        }
class NewMonolingualUnitForm(NewUnitBaseForm):
    """Form for adding a unit to a monolingual (template-based) component."""
    context = forms.CharField(
        label=_("Translation key"),
        help_text=_(
            "Key used to identify string in translation file. "
            "File format specific rules might apply."
        ),
        required=True,
    )
    source = PluralField(
        label=_("Source language text"),
        help_text=_(
            "You can edit this later, as with any other string in "
            "the source language."
        ),
        required=True,
    )
    def __init__(self, translation, user, *args, **kwargs):
        super().__init__(translation, user, *args, **kwargs)
        # High tabindex keeps editor keyboard navigation ordering intact.
        self.fields["context"].widget.attrs["tabindex"] = 99
        self.fields["source"].widget.attrs["tabindex"] = 100
        self.fields["source"].widget.profile = user.profile
        # Dummy unit gives the plural editor its language/plural context.
        self.fields["source"].initial = Unit(translation=translation, id_hash=0)
class NewBilingualSourceUnitForm(NewUnitBaseForm):
    """Form for adding a source string to a bilingual component."""
    context = forms.CharField(
        label=_("Context"),
        help_text=_("Optional context to clarify the source strings."),
        required=False,
    )
    auto_context = forms.BooleanField(
        required=False,
        initial=True,
        label=_("Automatically adjust context when same string already exists."),
    )
    source = PluralField(
        label=_("Source string"),
        required=True,
    )
    def __init__(self, translation, user, *args, **kwargs):
        super().__init__(translation, user, *args, **kwargs)
        # High tabindex keeps editor keyboard navigation ordering intact.
        self.fields["context"].widget.attrs["tabindex"] = 99
        # Components can customize how the context field is labelled.
        self.fields["context"].label = translation.component.context_label
        self.fields["source"].widget.attrs["tabindex"] = 100
        self.fields["source"].widget.profile = user.profile
        # Dummy unit of the source translation feeds the plural editor.
        self.fields["source"].initial = Unit(
            translation=translation.component.source_translation, id_hash=0
        )
class NewBilingualUnitForm(NewBilingualSourceUnitForm):
    """Form for adding a translated string to a bilingual component."""
    target = PluralField(
        label=_("Translated string"),
        help_text=_(
            "You can edit this later, as with any other string in the translation."
        ),
        required=True,
    )
    def __init__(self, translation, user, *args, **kwargs):
        super().__init__(translation, user, *args, **kwargs)
        # Follows context (99) and source (100) in the tab order.
        self.fields["target"].widget.attrs["tabindex"] = 101
        self.fields["target"].widget.profile = user.profile
        # Dummy unit gives the plural editor its language/plural context.
        self.fields["target"].initial = Unit(translation=translation, id_hash=0)
class NewBilingualGlossarySourceUnitForm(GlossaryAddMixin, NewBilingualSourceUnitForm):
    """Form for adding a glossary source string.

    Forces the terminology flag on in the initial form data.
    """
    def __init__(self, translation, user, *args, **kwargs):
        # Use get() instead of indexing so the form also works when the
        # caller omits "initial" entirely (indexing raised KeyError).
        if kwargs.get("initial") is None:
            kwargs["initial"] = {}
        kwargs["initial"]["terminology"] = True
        super().__init__(translation, user, *args, **kwargs)
class NewBilingualGlossaryUnitForm(GlossaryAddMixin, NewBilingualUnitForm):
    """Form for adding a translated glossary string; behavior comes from mixins."""
    pass
def get_new_unit_form(translation, user, data=None, initial=None):
    """Instantiate the right new-unit form for the given translation.

    Monolingual components get a key+source form; glossaries and plain
    bilingual components get source-only or source+target forms depending
    on whether the translation is the source language.
    """
    component = translation.component
    if component.has_template():
        form_class = NewMonolingualUnitForm
    elif component.is_glossary:
        form_class = (
            NewBilingualGlossarySourceUnitForm
            if translation.is_source
            else NewBilingualGlossaryUnitForm
        )
    elif translation.is_source:
        form_class = NewBilingualSourceUnitForm
    else:
        form_class = NewBilingualUnitForm
    return form_class(translation, user, data=data, initial=initial)
class CachedQueryIterator(ModelChoiceIterator):
    """
    Choice iterator for cached querysets.
    It assumes the queryset is reused and avoids using iterator or count queries.
    """
    def __iter__(self):
        empty_label = self.field.empty_label
        if empty_label is not None:
            yield ("", empty_label)
        yield from (self.choice(obj) for obj in self.queryset)
    def __len__(self):
        extra = 0 if self.field.empty_label is None else 1
        return len(self.queryset) + extra
    def __bool__(self):
        return bool(self.queryset) or self.field.empty_label is not None
class CachedModelMultipleChoiceField(forms.ModelMultipleChoiceField):
    """Multiple-choice model field using the cached choice iterator."""
    iterator = CachedQueryIterator
    def _get_queryset(self):
        # Return the queryset as stored, without the base class's
        # .all() re-cloning, so caching is preserved.
        return self._queryset
    def _set_queryset(self, queryset):
        self._queryset = queryset
        self.widget.choices = self.choices
    queryset = property(_get_queryset, _set_queryset)
class BulkEditForm(forms.Form):
    """Bulk edit form: apply state/flag/label changes to matched strings."""
    q = QueryField(required=True)
    state = forms.ChoiceField(
        label=_("State to set"), choices=((-1, _("Do not change")),) + STATE_CHOICES
    )
    add_flags = FlagField(label=_("Translation flags to add"), required=False)
    remove_flags = FlagField(label=_("Translation flags to remove"), required=False)
    add_labels = CachedModelMultipleChoiceField(
        queryset=Label.objects.none(),
        label=_("Labels to add"),
        widget=forms.CheckboxSelectMultiple(),
        required=False,
    )
    remove_labels = CachedModelMultipleChoiceField(
        queryset=Label.objects.none(),
        label=_("Labels to remove"),
        widget=forms.CheckboxSelectMultiple(),
        required=False,
    )
    def __init__(self, user, obj, *args, **kwargs):
        project = kwargs.pop("project")
        # Prefix widget ids so this form can coexist with others on a page.
        kwargs["auto_id"] = "id_bulk_%s"
        super().__init__(*args, **kwargs)
        labels = project.label_set.all()
        if labels:
            self.fields["remove_labels"].queryset = labels
            self.fields["add_labels"].queryset = labels
        # Empty/read-only states can never be set manually; approval
        # additionally requires review permission.
        excluded = {STATE_EMPTY, STATE_READONLY}
        if user is not None and not user.has_perm("unit.review", obj):
            excluded.add(STATE_APPROVED)
        # Filter offered states
        self.fields["state"].choices = [
            x for x in self.fields["state"].choices if x[0] not in excluded
        ]
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Div(template="snippets/bulk-help.html"),
            SearchField("q"),
            Field("state"),
            Field("add_flags"),
            Field("remove_flags"),
        )
        if labels:
            self.helper.layout.append(InlineCheckboxes("add_labels"))
            self.helper.layout.append(InlineCheckboxes("remove_labels"))
class ContributorAgreementForm(forms.Form):
    """Form for accepting a component's contributor agreement."""
    confirm = forms.BooleanField(
        label=_("I accept the contributor agreement"), required=True
    )
    # Redirect target after accepting, carried through the POST.
    next = forms.CharField(required=False, widget=forms.HiddenInput)
class BaseDeleteForm(forms.Form):
    """Base removal-confirmation form: the user must type the full slug."""
    confirm = forms.CharField(required=True)
    warning_template = ""
    def __init__(self, obj, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.obj = obj
        helper = FormHelper(self)
        helper.form_tag = False
        helper.layout = Layout(
            ContextDiv(
                template=self.warning_template,
                css_class="form-group",
                context={"object": obj},
            ),
            Field("confirm"),
        )
        self.helper = helper
    def clean(self):
        expected = self.obj.full_slug
        if self.cleaned_data.get("confirm") != expected:
            raise ValidationError(
                _("The slug does not match the one marked for deletion!")
            )
class TranslationDeleteForm(BaseDeleteForm):
    """Removal confirmation for a single translation."""
    confirm = forms.CharField(
        label=_("Removal confirmation"),
        help_text=_("Please type in the full slug of the translation to confirm."),
        required=True,
    )
    warning_template = "trans/delete-translation.html"
class ComponentDeleteForm(BaseDeleteForm):
    """Removal confirmation for a component."""
    confirm = forms.CharField(
        label=_("Removal confirmation"),
        help_text=_("Please type in the full slug of the component to confirm."),
        required=True,
    )
    warning_template = "trans/delete-component.html"
class ProjectDeleteForm(BaseDeleteForm):
    """Removal confirmation for a whole project."""
    confirm = forms.CharField(
        label=_("Removal confirmation"),
        help_text=_("Please type in the slug of the project to confirm."),
        required=True,
    )
    warning_template = "trans/delete-project.html"
class ProjectLanguageDeleteForm(BaseDeleteForm):
    """Removal confirmation for all translations of a language in a project."""
    confirm = forms.CharField(
        label=_("Removal confirmation"),
        help_text=_("Please type in the slug of the project and language to confirm."),
        required=True,
    )
    warning_template = "trans/delete-project-language.html"
class AnnouncementForm(forms.ModelForm):
    """Announcement posting form."""
    class Meta:
        model = Announcement
        fields = ["message", "category", "expiry", "notify"]
        widgets = {
            "expiry": WeblateDateInput(),
            "message": MarkdownTextarea,
        }
class ChangesForm(forms.Form):
    """Filter form for the history (changes) listing."""
    project = forms.ChoiceField(label=_("Project"), choices=[("", "")], required=False)
    lang = forms.ChoiceField(label=_("Language"), choices=[("", "")], required=False)
    action = forms.MultipleChoiceField(
        label=_("Action"),
        required=False,
        widget=SortedSelectMultiple,
        choices=Change.ACTION_CHOICES,
    )
    user = UsernameField(label=_("Author username"), required=False, help_text=None)
    start_date = WeblateDateField(
        label=_("Starting date"), required=False, datepicker=False
    )
    end_date = WeblateDateField(
        label=_("Ending date"), required=False, datepicker=False
    )
    def __init__(self, request, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Extend the blank default choice with actual languages/projects
        # visible to the requesting user.
        self.fields["lang"].choices += Language.objects.have_translation().as_choices()
        self.fields["project"].choices += [
            (project.slug, project.name) for project in request.user.allowed_projects
        ]
class LabelForm(forms.ModelForm):
    """Form for creating or editing a string label."""
    class Meta:
        model = Label
        fields = ("name", "color")
        widgets = {"color": ColorWidget()}
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
class ProjectTokenDeleteForm(forms.Form):
    """Form for deleting a project API token."""
    token = forms.ModelChoiceField(
        ProjectToken.objects.none(),
        widget=forms.HiddenInput,
        required=True,
    )
    def __init__(self, project, *args, **kwargs):
        self.project = project
        super().__init__(*args, **kwargs)
        # Only tokens belonging to this project can be deleted.
        self.fields["token"].queryset = project.projecttoken_set.all()
class ProjectTokenCreateForm(forms.ModelForm):
    """Form for creating a project API token."""
    class Meta:
        model = ProjectToken
        fields = ["name", "expires", "project"]
        widgets = {
            "expires": WeblateDateInput(),
            "project": forms.HiddenInput,
        }
    def __init__(self, project, *args, **kwargs):
        self.project = project
        kwargs["initial"] = {"project": project}
        super().__init__(*args, **kwargs)
    def clean_project(self):
        # Guard against tampering with the hidden project field.
        if self.project != self.cleaned_data["project"]:
            raise ValidationError("Invalid project!")
        return self.cleaned_data["project"]
    def clean_expires(self):
        expires = self.cleaned_data["expires"]
        # Tokens stay valid for the whole of the chosen expiry day.
        expires = expires.replace(hour=23, minute=59, second=59, microsecond=999999)
        if expires < timezone.now():
            raise forms.ValidationError(gettext("Expiry cannot be in the past!"))
        return expires
class ProjectGroupDeleteForm(forms.Form):
    """Form for deleting a project-defined access team."""
    group = forms.ModelChoiceField(
        Group.objects.none(),
        widget=forms.HiddenInput,
        required=True,
    )
    def __init__(self, project, *args, **kwargs):
        self.project = project
        super().__init__(*args, **kwargs)
        # Only teams defined by this project can be removed.
        self.fields["group"].queryset = project.defined_groups.all()
class ProjectUserGroupForm(UserManageForm):
    """Form for editing a user's team membership within a project."""
    groups = forms.ModelMultipleChoiceField(
        Group.objects.none(),
        widget=forms.CheckboxSelectMultiple,
        label=_("Teams"),
        required=False,
    )
    def __init__(self, project, *args, **kwargs):
        self.project = project
        super().__init__(*args, **kwargs)
        # The user is fixed by the view; only the teams are editable here.
        self.fields["user"].widget = forms.HiddenInput()
        self.fields["groups"].queryset = project.defined_groups.all()
| xss | {
"code": [
" label = str(unit.translation.language)"
],
"line_no": [
321
]
} | {
"code": [
"from django.utils.html import escape",
" label = escape(unit.translation.language)"
],
"line_no": [
40,
322
]
} |
import copy
import json
import re
from datetime import date, datetime, timedelta
from typing import Dict, List
from crispy_forms.bootstrap import InlineCheckboxes, InlineRadios, Tab, TabHolder
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Div, Field, Fieldset, Layout
from django import .forms
from django.conf import settings
from django.core.exceptions import NON_FIELD_ERRORS, PermissionDenied, ValidationError
from django.core.validators import FileExtensionValidator
from django.db.models import Q
from django.forms import .model_to_dict
from django.forms.models import ModelChoiceIterator
from django.forms.utils import from_current_timezone
from django.template.loader import .render_to_string
from django.urls import reverse
from django.utils import timezone
from django.utils.http import .urlencode
from django.utils.safestring import mark_safe
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
from translation_finder import DiscoveryResult, FUNC_33
from weblate.auth.models import Group, User
from weblate.checks.flags import Flags
from weblate.checks.models import CHECKS
from weblate.checks.utils import highlight_string
from weblate.formats.models import EXPORTERS, FILE_FORMATS
from weblate.glossary.forms import GlossaryAddMixin
from weblate.lang.data import BASIC_LANGUAGES
from weblate.lang.models import Language
from weblate.machinery import MACHINE_TRANSLATION_SERVICES
from weblate.trans.defines import COMPONENT_NAME_LENGTH, REPO_LENGTH
from weblate.trans.filter import FILTERS, get_filter_choice
from weblate.trans.models import (
Announcement,
Change,
Component,
Label,
Project,
ProjectToken,
Unit,
)
from weblate.trans.specialchars import RTL_CHARS_DATA, get_special_chars
from weblate.trans.util import check_upload_method_permissions, is_repo_link
from weblate.trans.validators import validate_check_flags
from weblate.utils.antispam import is_spam
from weblate.utils.errors import .report_error
from weblate.utils.forms import (
ColorWidget,
ContextDiv,
EmailField,
SearchField,
SortedSelect,
SortedSelectMultiple,
UsernameField,
)
from weblate.utils.hash import .checksum_to_hash, hash_to_checksum
from weblate.utils.search import parse_query
from weblate.utils.state import (
STATE_APPROVED,
STATE_CHOICES,
STATE_EMPTY,
STATE_FUZZY,
STATE_READONLY,
STATE_TRANSLATED,
)
from weblate.utils.validators import validate_file_extension
from weblate.vcs.models import VCS_REGISTRY
VAR_0 = """
<button class="btn btn-default {0}" title="{1}" {2}>{3}</button>
"""
VAR_1 = """
<VAR_134 class="btn btn-default {0}" title="{1}">
<input type="radio" VAR_20="{2}" VAR_13="{3}" {4}/>
{5}
</VAR_134>
"""
VAR_2 = """
<div class="btn-VAR_100 btn-VAR_100-xs" {0}>{1}</div>
"""
VAR_3 = """
<div class="btn-toolbar pull-right flip editor-toolbar">{0}</div>
"""
class CLASS_0(forms.Textarea):
def __init__(self, **VAR_7):
VAR_7["attrs"] = {
"dir": "auto",
"class": "markdown-editor highlight-editor",
"data-mode": "markdown",
}
super().__init__(**VAR_7)
class CLASS_1(forms.DateInput):
def __init__(self, VAR_12=True, **VAR_7):
VAR_21 = {"type": "date"}
if VAR_12:
VAR_21["data-provide"] = "datepicker"
VAR_21["data-date-format"] = "yyyy-mm-dd"
super().__init__(VAR_21=attrs, VAR_36="%Y-%m-%d", **VAR_7)
class CLASS_2(forms.DateField):
def __init__(self, VAR_12=True, **VAR_7):
if "widget" not in VAR_7:
VAR_7["widget"] = CLASS_1(VAR_12=datepicker)
super().__init__(**VAR_7)
def FUNC_3(self, VAR_13):
VAR_13 = super().to_python(VAR_13)
if isinstance(VAR_13, date):
return from_current_timezone(
datetime(VAR_13.year, VAR_13.month, VAR_13.day, 0, 0, 0)
)
return VAR_13
class CLASS_3(forms.CharField):
def __init__(self, *VAR_6, **VAR_7):
VAR_7["widget"] = forms.HiddenInput
super().__init__(*VAR_6, **VAR_7)
def FUNC_4(self, VAR_13):
super().clean(VAR_13)
if not VAR_13:
return None
try:
return checksum_to_hash(VAR_13)
except ValueError:
raise ValidationError(_("Invalid VAR_26 specified!"))
class CLASS_4(forms.CharField):
def FUNC_4(self, VAR_13):
if not VAR_13:
return None
try:
return User.objects.get(Q(username=VAR_13) | Q(email=VAR_13))
except User.DoesNotExist:
raise ValidationError(_("Could not find any such VAR_4."))
except User.MultipleObjectsReturned:
raise ValidationError(_("More possible users were found."))
class CLASS_5(forms.CharField):
def __init__(self, **VAR_7):
if "label" not in VAR_7:
VAR_7["label"] = _("Query")
if "required" not in VAR_7:
VAR_7["required"] = False
super().__init__(**VAR_7)
def FUNC_4(self, VAR_13):
if not VAR_13:
if self.required:
raise ValidationError(_("Missing query string."))
return ""
try:
parse_query(VAR_13)
return VAR_13
except Exception as error:
report_error()
raise ValidationError(_("Could not parse query string: {}").format(error))
class CLASS_6(forms.CharField):
VAR_14 = [validate_check_flags]
class CLASS_7(forms.Textarea):
def __init__(self, *VAR_6, **VAR_7):
self.profile = None
super().__init__(*VAR_6, **VAR_7)
def FUNC_5(self, VAR_15):
VAR_101 = []
chars = []
for VAR_20, char, VAR_13 in RTL_CHARS_DATA:
VAR_102.append(
VAR_0.format(
"specialchar",
VAR_20,
'data-VAR_13="{}"'.format(
VAR_13.encode("ascii", "xmlcharrefreplace").decode("ascii")
),
char,
)
)
VAR_101.append(VAR_2.format("", "\n".join(VAR_102)))
return VAR_3.format("\n".join(VAR_101))
def FUNC_6(self, VAR_16, VAR_15):
if VAR_16.direction != "rtl":
return ""
VAR_103 = f"rtl-{VAR_15}"
VAR_104 = [
VAR_1.format(
"direction-toggle active",
gettext("Toggle text direction"),
VAR_103,
"rtl",
'checked="checked"',
"RTL",
),
VAR_1.format(
"direction-toggle",
gettext("Toggle text direction"),
VAR_103,
"ltr",
"",
"LTR",
),
]
VAR_101 = [VAR_2.format('data-toggle="buttons"', "\n".join(VAR_104))]
return mark_safe(VAR_3.format("\n".join(VAR_101)))
def FUNC_7(self, VAR_16, VAR_15, VAR_17, VAR_18, VAR_19):
VAR_105 = self.profile
VAR_101 = []
chars = [
VAR_0.format(
"specialchar",
VAR_20,
'data-VAR_13="{}"'.format(
VAR_13.encode("ascii", "xmlcharrefreplace").decode("ascii")
),
char,
)
for VAR_20, char, VAR_13 in get_special_chars(
VAR_16, VAR_105.special_chars, VAR_17.source
)
]
VAR_101.append(VAR_2.format("", "\n".join(VAR_102)))
VAR_43 = VAR_3.format("\n".join(VAR_101))
if VAR_16.direction == "rtl":
VAR_43 = self.get_rtl_toolbar(VAR_15) + VAR_43
return mark_safe(VAR_43)
def FUNC_8(self, VAR_20, VAR_13, VAR_21=None, VAR_22=None, **VAR_7):
VAR_17 = VAR_13
VAR_106 = VAR_17.get_target_plurals()
VAR_59 = VAR_17.translation.language
VAR_107 = VAR_17.translation.plural
VAR_108 = self.attrs["tabindex"]
VAR_109 = [hl[2] for hl in highlight_string(VAR_17.source_string, VAR_17)]
VAR_21["class"] = "translation-editor VAR_114-control highlight-editor"
VAR_21["tabindex"] = VAR_108
VAR_21["lang"] = VAR_59.code
VAR_21["dir"] = VAR_59.direction
VAR_21["rows"] = 3
VAR_21["data-max"] = VAR_17.get_max_length()
VAR_21["data-mode"] = VAR_17.edit_mode
VAR_21["data-placeables"] = "|".join(re.escape(pl) for pl in VAR_109 if pl)
if VAR_17.readonly:
VAR_21["readonly"] = 1
VAR_110 = []
VAR_111 = VAR_17.get_source_plurals()
VAR_112 = f"id_{VAR_17.checksum}"
for VAR_18, val in enumerate(VAR_106):
VAR_15 = f"{VAR_20}_{VAR_18}"
VAR_132 = f"{VAR_112}_{VAR_18}"
VAR_21["id"] = VAR_132
VAR_21["tabindex"] = VAR_108 + VAR_18
if VAR_18 and len(VAR_111) > 1:
VAR_19 = VAR_111[1]
else:
VAR_19 = VAR_111[0]
VAR_133 = super().render(VAR_15, val, VAR_21, VAR_22, **VAR_7)
VAR_134 = str(VAR_17.translation.language)
if len(VAR_106) != 1:
VAR_134 = f"{VAR_134}, {VAR_107.get_plural_label(VAR_18)}"
VAR_110.append(
render_to_string(
"snippets/editor.html",
{
"toolbar": self.get_toolbar(VAR_59, VAR_132, VAR_17, VAR_18, VAR_19),
"fieldid": VAR_132,
"label": mark_safe(VAR_134),
"textarea": VAR_133,
"max_length": VAR_21["data-max"],
"length": len(val),
"source_length": len(VAR_19),
"rtl_toggle": self.get_rtl_toggle(VAR_59, VAR_132),
},
)
)
if len(VAR_106) > 1:
VAR_110.append(
render_to_string(
"snippets/VAR_107-formula.html",
{"plural": VAR_107, "user": self.profile.user},
)
)
return mark_safe("".join(VAR_110))
def FUNC_9(self, VAR_10, VAR_23, VAR_20):
VAR_110 = []
for VAR_18 in range(0, 10):
VAR_15 = f"{VAR_20}_{VAR_18:d}"
if VAR_15 not in VAR_10:
break
VAR_110.append(VAR_10.get(VAR_15, ""))
return [r.replace("\r", "") for r in VAR_110]
class CLASS_8(forms.CharField):
def __init__(self, VAR_24=None, VAR_25=None, **VAR_7):
VAR_7["label"] = ""
super().__init__(widget=CLASS_7, **VAR_7)
def FUNC_3(self, VAR_13):
return VAR_13
def FUNC_4(self, VAR_13):
VAR_13 = super().clean(VAR_13)
if not VAR_13 or (self.required and not any(VAR_13)):
raise ValidationError(_("Missing translated string!"))
return VAR_13
class CLASS_9(forms.ChoiceField):
def __init__(self, *VAR_6, **VAR_7):
VAR_7["label"] = _("Search filter")
if "required" not in VAR_7:
VAR_7["required"] = False
VAR_7["choices"] = get_filter_choice()
VAR_7["error_messages"] = {
"invalid_choice": _("Please choose a valid filter type.")
}
super().__init__(*VAR_6, **VAR_7)
def FUNC_3(self, VAR_13):
if VAR_13 == "untranslated":
return "todo"
return super().to_python(VAR_13)
class CLASS_10(forms.Form):
VAR_26 = CLASS_3(VAR_113=True)
def __init__(self, VAR_27, *VAR_6, **VAR_7):
self.unit_set = VAR_27
super().__init__(*VAR_6, **VAR_7)
def FUNC_10(self):
if "checksum" not in self.cleaned_data:
return
VAR_27 = self.unit_set
try:
self.cleaned_data["unit"] = VAR_27.filter(
id_hash=self.cleaned_data["checksum"]
)[0]
except (Unit.DoesNotExist, IndexError):
raise ValidationError(
_("The string you wanted to translate is no longer available.")
)
class CLASS_11(forms.Form):
def __init__(self, VAR_17: Unit, *VAR_6, **VAR_7):
self.unit = VAR_17
super().__init__(*VAR_6, **VAR_7)
class CLASS_12(forms.BooleanField):
VAR_28 = True
def __init__(self, *VAR_6, **VAR_7):
VAR_7["label"] = _("Needs editing")
VAR_7["help_text"] = _(
'Strings are usually marked as "Needs editing" after the VAR_19 '
"string is updated, or when marked as such manually."
)
super().__init__(*VAR_6, **VAR_7)
self.widget.attrs["class"] = "fuzzy_checkbox"
class CLASS_13(CLASS_11):
VAR_29 = CLASS_3(VAR_113=True)
VAR_30 = CLASS_3(VAR_113=True)
VAR_31 = CLASS_8(VAR_113=False)
VAR_32 = CLASS_12(VAR_113=False)
VAR_33 = forms.ChoiceField(
VAR_134=_("Review state"),
VAR_68=[
(STATE_FUZZY, _("Needs editing")),
(STATE_TRANSLATED, _("Waiting for review")),
(STATE_APPROVED, _("Approved")),
],
VAR_113=False,
widget=forms.RadioSelect,
)
VAR_34 = forms.CharField(
widget=CLASS_0,
VAR_134=_("Explanation"),
help_text=_(
"Additional VAR_34 to clarify meaning or usage of the string."
),
VAR_24=1000,
VAR_113=False,
)
def __init__(self, VAR_4, VAR_17: Unit, *VAR_6, **VAR_7):
if VAR_17 is not None:
VAR_7["initial"] = {
"checksum": VAR_17.checksum,
"contentsum": hash_to_checksum(VAR_17.content_hash),
"translationsum": hash_to_checksum(VAR_17.get_target_hash()),
"target": VAR_17,
"fuzzy": VAR_17.fuzzy,
"review": VAR_17.state,
"explanation": VAR_17.explanation,
}
VAR_7["auto_id"] = f"id_{VAR_17.checksum}_%s"
VAR_108 = VAR_7.pop("tabindex", 100)
super().__init__(VAR_17, *VAR_6, **VAR_7)
if VAR_17.readonly:
for VAR_39 in ["target", "fuzzy", "review"]:
self.fields[VAR_39].widget.attrs["readonly"] = 1
self.fields["review"].choices = [
(STATE_READONLY, _("Read only")),
]
self.user = VAR_4
self.fields["target"].widget.attrs["tabindex"] = VAR_108
self.fields["target"].widget.profile = VAR_4.profile
self.fields["review"].widget.attrs["class"] = "review_radio"
if VAR_6:
self.fields["review"].choices.append((STATE_EMPTY, ""))
self.helper = FormHelper()
self.helper.form_method = "post"
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Field("target"),
Field("fuzzy"),
Field("contentsum"),
Field("translationsum"),
InlineRadios("review"),
Field("explanation"),
)
if VAR_17 and VAR_4.has_perm("unit.review", VAR_17.translation):
self.fields["fuzzy"].widget = forms.HiddenInput()
else:
self.fields["review"].widget = forms.HiddenInput()
if not VAR_17.translation.component.is_glossary:
self.fields["explanation"].widget = forms.HiddenInput()
def FUNC_4(self):
super().clean()
VAR_113 = {"target", "contentsum", "translationsum"}
if not VAR_113.issubset(self.cleaned_data):
return
VAR_17 = self.unit
if self.cleaned_data["contentsum"] != VAR_17.content_hash:
raise ValidationError(
_(
"Source string has been changed meanwhile. "
"Please check your changes."
)
)
if self.cleaned_data["translationsum"] != VAR_17.get_target_hash():
raise ValidationError(
_(
"Translation of the string has been changed meanwhile. "
"Please check your changes."
)
)
VAR_24 = VAR_17.get_max_length()
for text in self.cleaned_data["target"]:
if len(text) > VAR_24:
raise ValidationError(_("Translation text too long!"))
if self.user.has_perm(
"unit.review", VAR_17.translation
) and self.cleaned_data.get("review"):
self.cleaned_data["state"] = int(self.cleaned_data["review"])
elif self.cleaned_data["fuzzy"]:
self.cleaned_data["state"] = STATE_FUZZY
else:
self.cleaned_data["state"] = STATE_TRANSLATED
class CLASS_14(CLASS_13):
    """Translation form variant used by the Zen editing mode."""

    VAR_26 = CLASS_3(VAR_113=True)

    def __init__(self, VAR_4, VAR_17, *VAR_6, **VAR_7):
        super().__init__(VAR_4, VAR_17, *VAR_6, **VAR_7)
        # Zen mode posts each unit on its own, so render a standalone
        # form with CSRF protection and an explicit save action.
        helper = self.helper
        helper.form_action = reverse(
            "save_zen", VAR_7=VAR_17.translation.get_reverse_url_kwargs()
        )
        helper.form_tag = True
        helper.disable_csrf = False
        helper.layout.append(Field("checksum"))
class CLASS_15(forms.Form):
    """Form selecting a search query and export file format for download."""

    VAR_35 = CLASS_5()
    # Export format; choices are built from the registered exporters.
    VAR_36 = forms.ChoiceField(
        VAR_134=_("File format"),
        VAR_68=[(x.name, x.verbose) for x in EXPORTERS.values()],
        VAR_11="po",
        VAR_113=True,
        widget=forms.RadioSelect,
    )

    def __init__(self, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            SearchField("q"),
            InlineRadios("format"),
        )
class CLASS_16(forms.Form):
    """Base form for uploading a translation file."""

    VAR_37 = forms.FileField(VAR_134=_("File"), validators=[validate_file_extension])
    # How the uploaded content is merged into the translation.
    VAR_38 = forms.ChoiceField(
        VAR_134=_("File upload mode"),
        VAR_68=(
            ("translate", _("Add as translation")),
            ("approve", _("Add as approved translation")),
            ("suggest", _("Add as suggestion")),
            ("fuzzy", _("Add as VAR_5 needing edit")),
            ("replace", _("Replace existing VAR_5 file")),
            ("source", _("Update VAR_19 strings")),
            ("add", _("Add new strings")),
        ),
        widget=forms.RadioSelect,
        VAR_113=True,
    )
    VAR_32 = forms.ChoiceField(
        VAR_134=_("Processing of strings needing edit"),
        VAR_68=(
            ("", _("Do not import")),
            ("process", _("Import as string needing edit")),
            ("approve", _("Import as translated")),
        ),
        VAR_113=False,
    )

    def __init__(self, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.helper = FormHelper(self)
        self.helper.form_tag = False

    @staticmethod
    def FUNC_11(VAR_39):
        # Documentation page/anchor for each field.
        return ("user/files", f"upload-{VAR_39.name}")

    def FUNC_12(self, VAR_13):
        # Remove a single upload-method choice (permission filtering).
        VAR_68 = self.fields["method"].choices
        self.fields["method"].choices = [
            choice for choice in VAR_68 if choice[0] != VAR_13
        ]
class CLASS_17(CLASS_16):
    """Upload form additionally allowing overwriting existing translations."""

    VAR_40 = forms.ChoiceField(
        VAR_134=_("Conflict handling"),
        help_text=_(
            "Whether to overwrite existing translations if the string is "
            "already translated."
        ),
        VAR_68=(
            ("", _("Update only untranslated strings")),
            ("replace-translated", _("Update translated strings")),
            ("replace-approved", _("Update translated and approved strings")),
        ),
        VAR_113=False,
        VAR_11="replace-translated",
    )
class CLASS_18(CLASS_17):
    """Upload form additionally allowing to override authorship."""

    VAR_41 = forms.CharField(VAR_134=_("Author name"))
    VAR_42 = EmailField(VAR_134=_("Author e-mail"))
def FUNC_0(VAR_4, VAR_5, *VAR_6, **VAR_7):
    """Build the upload form matching the user's permissions on VAR_5."""
    # Pick the richest form class the user is entitled to.
    if VAR_4.has_perm("upload.authorship", VAR_5):
        form_class = CLASS_18
        VAR_7["initial"] = {"author_name": VAR_4.full_name, "author_email": VAR_4.email}
    elif VAR_4.has_perm("upload.overwrite", VAR_5):
        form_class = CLASS_17
    else:
        form_class = CLASS_16
    form = form_class(*VAR_6, **VAR_7)
    # Snapshot the method keys first: removal mutates the choices list.
    method_keys = [item[0] for item in form.fields["method"].choices]
    for method in method_keys:
        if not check_upload_method_permissions(VAR_4, VAR_5, method):
            form.remove_translation_choice(method)
    # Non-reviewers must not import strings as approved.
    if form_class != CLASS_16 and not VAR_4.has_perm("unit.review", VAR_5):
        form.fields["conflicts"].choices = [
            item for item in form.fields["conflicts"].choices if item[0] != "approved"
        ]
    return form
class CLASS_19(forms.Form):
    """Search form carrying the query, checksum and pagination offset."""

    VAR_35 = CLASS_5()
    VAR_44 = forms.CharField(VAR_113=False, widget=forms.HiddenInput)
    VAR_26 = CLASS_3(VAR_113=False)
    VAR_45 = forms.IntegerField(min_value=-1, VAR_113=False, widget=forms.HiddenInput)
    # Extra kwargs for rendering the offset field; overridden by subclasses.
    VAR_46 = {}

    def __init__(self, VAR_4, VAR_16=None, VAR_47=True, **VAR_7):
        self.user = VAR_4
        self.language = VAR_16
        super().__init__(**VAR_7)
        self.helper = FormHelper(self)
        self.helper.disable_csrf = True
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Div(
                Field("offset", **self.offset_kwargs),
                SearchField("q"),
                Field("sort_by", template="snippets/sort-VAR_39.html"),
                css_class="btn-toolbar",
                role="toolbar",
            ),
            ContextDiv(
                template="snippets/query-builder.html",
                VAR_87={
                    "user": self.user,
                    "month_ago": timezone.now() - timedelta(days=31),
                    "show_builder": VAR_47,
                    "language": self.language,
                },
            ),
            Field("checksum"),
        )

    def FUNC_13(self):
        # Human-readable name for the active search filter.
        return FILTERS.get_search_name(self.cleaned_data.get("q", ""))

    def FUNC_14(self):
        return self.cleaned_data["q"]

    def FUNC_15(self):
        # Default to the first page when no offset was submitted.
        if self.cleaned_data.get("offset") is None:
            self.cleaned_data["offset"] = 1
        return self.cleaned_data["offset"]

    def VAR_115(self):
        """Serialize cleaned data into (name, value) pairs for URLs."""
        VAR_115 = []
        # Offset and checksum are position-specific; keep URLs shareable.
        VAR_116 = {"offset", "checksum"}
        for param in sorted(self.cleaned_data):
            VAR_13 = self.cleaned_data[param]
            if VAR_13 is None or param in VAR_116:
                continue
            if isinstance(VAR_13, bool):
                if VAR_13:
                    VAR_115.append((param, "1"))
            elif isinstance(VAR_13, int):
                if VAR_13 > 0:
                    VAR_115.append((param, str(VAR_13)))
            elif isinstance(VAR_13, datetime):
                VAR_115.append((param, VAR_13.date().isoformat()))
            elif isinstance(VAR_13, list):
                for val in VAR_13:
                    VAR_115.append((param, val))
            elif isinstance(VAR_13, User):
                VAR_115.append((param, VAR_13.username))
            else:
                if VAR_13:
                    VAR_115.append((param, VAR_13))
        return VAR_115

    def FUNC_17(self):
        # NOTE(review): as written this name resolves to this method itself,
        # causing infinite recursion; presumably a module-level urlencode
        # helper was intended — verify against the rest of the file.
        return FUNC_17(self.items())

    def FUNC_18(self):
        """Reset pagination so the query starts from the first result."""
        VAR_10 = copy.copy(self.data)
        VAR_10["offset"] = "1"
        VAR_10["checksum"] = ""
        self.data = VAR_10
        return self
class CLASS_20(CLASS_19):
    """Search form with a visible position (offset) widget."""

    VAR_45 = forms.IntegerField(min_value=-1, VAR_113=False)
    VAR_46 = {"template": "snippets/position-VAR_39.html"}
class CLASS_21(CLASS_11):
    """Form merging the current unit with another unit's translation."""

    VAR_48 = forms.IntegerField()

    def FUNC_4(self):
        super().clean()
        if "merge" not in self.cleaned_data:
            return None
        try:
            VAR_17 = self.unit
            VAR_5 = VAR_17.translation
            VAR_58 = VAR_5.component.project
            # The merged unit must live in the same project and language.
            self.cleaned_data["merge_unit"] = VAR_135 = Unit.objects.get(
                pk=self.cleaned_data["merge"],
                translation__component__project=VAR_58,
                translation__language=VAR_5.language,
            )
            # For non-source translations the source strings must match.
            if not VAR_5.is_source and VAR_17.source != VAR_135.source:
                raise ValidationError(_("Could not find merged string."))
        except Unit.DoesNotExist:
            raise ValidationError(_("Could not find merged string."))
        return self.cleaned_data
class CLASS_22(CLASS_11):
    """Form reverting the unit to the state of a previous change."""

    VAR_49 = forms.IntegerField()

    def FUNC_4(self):
        super().clean()
        if "revert" not in self.cleaned_data:
            return None
        try:
            # The change must belong to the unit being edited.
            self.cleaned_data["revert_change"] = Change.objects.get(
                pk=self.cleaned_data["revert"], VAR_17=self.unit
            )
        except Change.DoesNotExist:
            raise ValidationError(_("Could not find reverted change."))
        return self.cleaned_data
class CLASS_23(forms.Form):
    """Automatic translation form: mode, filter, source and engines."""

    VAR_50 = forms.ChoiceField(
        VAR_134=_("Automatic VAR_5 mode"),
        VAR_68=[
            ("suggest", _("Add as suggestion")),
            ("translate", _("Add as translation")),
            ("fuzzy", _("Add as needing edit")),
        ],
        VAR_11="suggest",
    )
    VAR_51 = CLASS_9(
        VAR_113=True,
        VAR_11="todo",
        help_text=_(
            "Please note that translating all strings will "
            "discard all existing translations."
        ),
    )
    VAR_52 = forms.ChoiceField(
        VAR_134=_("Automatic VAR_5 source"),
        VAR_68=[
            ("others", _("Other VAR_5 components")),
            ("mt", _("Machine translation")),
        ],
        VAR_11="others",
    )
    VAR_9 = forms.ChoiceField(
        VAR_134=_("Components"),
        VAR_113=False,
        help_text=_(
            "Turn on contribution to shared VAR_5 memory for the VAR_58 to "
            "get VAR_127 to additional components."
        ),
        VAR_11="",
    )
    VAR_53 = forms.MultipleChoiceField(
        VAR_134=_("Machine VAR_5 engines"), VAR_68=[], VAR_113=False
    )
    VAR_54 = forms.IntegerField(
        VAR_134=_("Score threshold"), VAR_11=80, min_value=1, max_value=100
    )

    def __init__(self, VAR_55, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.obj = VAR_55
        # Candidate source components: same project (same source language)
        # plus components of projects contributing to shared TM.
        self.components = VAR_55.project.component_set.filter(
            VAR_72=VAR_55.source_language
        ) | Component.objects.filter(
            source_language_id=VAR_55.source_language_id,
            project__contribute_shared_tm=True,
        ).exclude(
            VAR_58=VAR_55.project
        )
        # With too many candidates a select widget is unusable; fall back
        # to a free-text component field.
        if len(self.components.values_list("id")[:30]) == 30:
            self.fields["component"] = forms.CharField(
                VAR_113=False,
                VAR_134=_("Components"),
                help_text=_(
                    "Enter VAR_9 to use as VAR_19, "
                    "keep blank to use all components in current VAR_58."
                ),
            )
        else:
            VAR_68 = [
                (s.id, str(s))
                for s in self.components.order_project().prefetch_related("project")
            ]
            self.fields["component"].choices = [
                ("", _("All components in current project"))
            ] + VAR_68
        self.fields["engines"].choices = [
            (VAR_148, mt.name) for VAR_148, mt in MACHINE_TRANSLATION_SERVICES.items()
        ]
        if "weblate" in MACHINE_TRANSLATION_SERVICES.keys():
            self.fields["engines"].initial = "weblate"
        # Only a subset of search filters makes sense for auto-translation.
        VAR_117 = {"all", "nottranslated", "todo", "fuzzy", "check:inconsistent"}
        self.fields["filter_type"].choices = [
            x for x in self.fields["filter_type"].choices if x[0] in VAR_117
        ]
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            Field("mode"),
            Field("filter_type"),
            InlineRadios("auto_source", id="select_auto_source"),
            Div("component", css_id="auto_source_others"),
            Div("engines", "threshold", css_id="auto_source_mt"),
        )

    def FUNC_19(self):
        """Resolve the component field (pk, slug or project/slug) to a pk."""
        VAR_9 = self.cleaned_data["component"]
        if not VAR_9:
            return None
        if VAR_9.isdigit():
            try:
                VAR_43 = self.components.get(pk=VAR_9)
            except Component.DoesNotExist:
                raise ValidationError(_("Component not found!"))
        else:
            VAR_136 = VAR_9.count("/")
            if VAR_136 == 0:
                # Bare slug: resolve within the current project.
                try:
                    VAR_43 = self.components.get(
                        VAR_69=VAR_9, VAR_58=self.obj.project
                    )
                except Component.DoesNotExist:
                    raise ValidationError(_("Component not found!"))
            elif VAR_136 == 1:
                # "project/component" form.
                VAR_145, VAR_146 = VAR_9.split("/")
                try:
                    VAR_43 = self.components.get(
                        VAR_69=VAR_146, project__slug=VAR_145
                    )
                except Component.DoesNotExist:
                    raise ValidationError(_("Component not found!"))
            else:
                raise ValidationError(_("Please provide valid VAR_9 VAR_69!"))
        return VAR_43.pk
class CLASS_24(forms.Form):
    """Form posting a new comment on a unit, with a selectable scope."""

    VAR_56 = forms.ChoiceField(
        VAR_134=_("Scope"),
        help_text=_(
            "Is your VAR_57 specific to this "
            "translation or generic for all of them?"
        ),
        VAR_68=(
            (
                "report",
                _("Report issue with the VAR_19 string"),
            ),
            (
                "global",
                _("Source string VAR_57, suggestions for changes to this string"),
            ),
            (
                "translation",
                _("Translation VAR_57, discussions with other translators"),
            ),
        ),
    )
    VAR_57 = forms.CharField(
        widget=CLASS_0,
        VAR_134=_("New comment"),
        help_text=_("You can use Markdown and mention users by @username."),
        VAR_24=1000,
    )

    def __init__(self, VAR_58, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        # Without source review enabled, hide the "report" scope choice.
        if not VAR_58.source_review:
            self.fields["scope"].choices = self.fields["scope"].choices[1:]
class CLASS_25(forms.Form):
    """Filter form by language and component within a project."""

    VAR_59 = forms.ChoiceField(VAR_113=False, VAR_68=[("", _("All languages"))])
    VAR_9 = forms.ChoiceField(VAR_113=False, VAR_68=[("", _("All components"))])

    def __init__(self, VAR_4, VAR_58, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.fields["lang"].choices += VAR_58.languages.as_choices()
        # Offer only components the user may access.
        self.fields["component"].choices += (
            VAR_58.component_set.filter_access(VAR_4)
            .order()
            .values_list("slug", "name")
        )
class CLASS_26(forms.Form):
    """Form for adding one or more new languages to a component."""

    VAR_59 = forms.MultipleChoiceField(
        VAR_134=_("Languages"), VAR_68=[], widget=forms.SelectMultiple
    )

    def FUNC_20(self):
        # Languages not yet translated in this component.
        return Language.objects.exclude(
            Q(translation__component=self.component) | Q(VAR_9=self.component)
        )

    def __init__(self, VAR_9, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.component = VAR_9
        VAR_118 = self.get_lang_objects()
        self.fields["lang"].choices = VAR_118.as_choices()
class CLASS_27(CLASS_26):
    """Single-language variant limited to the basic languages set."""

    VAR_59 = forms.ChoiceField(VAR_134=_("Language"), VAR_68=[], widget=forms.Select)

    def FUNC_20(self):
        # Settings may override the default basic languages list.
        VAR_119 = BASIC_LANGUAGES
        if settings.BASIC_LANGUAGES is not None:
            VAR_119 = settings.BASIC_LANGUAGES
        return super().get_lang_objects().filter(code__in=VAR_119)

    def __init__(self, VAR_9, *VAR_6, **VAR_7):
        super().__init__(VAR_9, *VAR_6, **VAR_7)
        self.fields["lang"].choices = [("", _("Please choose"))] + self.fields[
            "lang"
        ].choices

    def FUNC_21(self):
        # Normalize to a list to match the multi-language form's API.
        return [self.cleaned_data["lang"]]
def FUNC_1(VAR_8, VAR_9):
    """Return the new-language form class the requesting user may use."""
    requesting_user = VAR_8.user
    if not requesting_user.has_perm("translation.add", VAR_9):
        raise PermissionDenied()
    # Users allowed to add many languages at once get the multi-select form.
    if requesting_user.has_perm("translation.add_more", VAR_9):
        return CLASS_26
    return CLASS_27
class CLASS_28(forms.ModelForm):
    """Edit additional unit data: explanation, labels and extra flags."""

    class CLASS_82:
        VAR_120 = Unit
        VAR_121 = ("explanation", "labels", "extra_flags")
        VAR_122 = {
            "labels": forms.CheckboxSelectMultiple(),
            "explanation": CLASS_0,
        }

    # Documentation page/anchor per field.
    VAR_60 = {
        "explanation": ("admin/translating", "additional-explanation"),
        "labels": ("devel/translations", "labels"),
        "extra_flags": ("admin/translating", "additional-flags"),
    }

    def FUNC_11(self, VAR_39):
        return self.doc_links[VAR_39.name]

    def __init__(self, VAR_10=None, VAR_61=None, VAR_4=None, **VAR_7):
        # Pre-select labels attached to the unit or its source unit.
        VAR_7["initial"] = {
            "labels": Label.objects.filter(
                Q(VAR_17=VAR_61) | Q(unit__source_unit=VAR_61)
            )
        }
        # BUG FIX: forward the actual parameters; the previous code passed
        # undefined names to super().__init__, raising NameError at runtime.
        super().__init__(VAR_10=VAR_10, VAR_61=VAR_61, **VAR_7)
        VAR_58 = VAR_61.translation.component.project
        # Only labels of the unit's own project may be assigned.
        self.fields["labels"].queryset = VAR_58.label_set.all()
        self.helper = FormHelper(self)
        self.helper.disable_csrf = True
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field("explanation"),
            Field("labels"),
            ContextDiv(
                template="snippets/labels_description.html",
                VAR_87={"project": VAR_58, "user": VAR_4},
            ),
            Field("extra_flags"),
        )

    def FUNC_22(self, VAR_62=True):
        """Save without content bump (same_content) and persist M2M labels."""
        if VAR_62:
            self.instance.save(same_content=True)
            self._save_m2m()
            return self.instance
        return super().save(VAR_62)
class CLASS_29(forms.Form):
    """Form looking up an existing user to add."""

    VAR_4 = CLASS_4(
        VAR_134=_("User to add"),
        help_text=_(
            "Please type in an existing Weblate account VAR_20 or e-mail address."
        ),
    )
class CLASS_30(forms.Form):
    """Form blocking a user, optionally for a limited duration."""

    VAR_4 = CLASS_4(
        VAR_134=_("User to block"),
        help_text=_(
            "Please type in an existing Weblate account VAR_20 or e-mail address."
        ),
    )
    # Empty value means an indefinite block.
    VAR_63 = forms.ChoiceField(
        VAR_134=_("Block duration"),
        VAR_68=(
            ("", _("Block VAR_4 until I unblock them")),
            ("1", _("Block VAR_4 for one day")),
            ("7", _("Block VAR_4 for one week")),
            ("30", _("Block VAR_4 for one month")),
        ),
        VAR_113=False,
    )
class CLASS_31(forms.Form):
    """Reporting form: output style plus a reporting period.

    ``clean`` resolves the chosen period into normalized ``start_date``
    (00:00:00.000000) and ``end_date`` (23:59:59.999999) values.
    """

    VAR_64 = forms.ChoiceField(
        VAR_134=_("Report format"),
        help_text=_("Choose VAR_37 VAR_36 for the report"),
        VAR_68=(
            ("rst", _("reStructuredText")),
            ("json", _("JSON")),
            ("html", _("HTML")),
        ),
    )
    VAR_65 = forms.ChoiceField(
        VAR_134=_("Report period"),
        VAR_68=(
            ("30days", _("Last 30 days")),
            ("this-month", _("This month")),
            ("month", _("Last month")),
            ("this-year", _("This year")),
            ("year", _("Last year")),
            ("", _("As specified")),
        ),
        VAR_113=False,
    )
    VAR_66 = CLASS_2(
        VAR_134=_("Starting date"), VAR_113=False, VAR_12=False
    )
    VAR_67 = CLASS_2(
        VAR_134=_("Ending date"), VAR_113=False, VAR_12=False
    )

    def __init__(self, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field("style"),
            Field("period"),
            Div(
                "start_date",
                "end_date",
                css_class="input-VAR_100 input-daterange",
                data_provide="datepicker",
                data_date_format="yyyy-mm-dd",
            ),
        )

    def FUNC_4(self):
        super().clean()
        if "period" not in self.cleaned_data:
            return
        if self.cleaned_data["period"] == "30days":
            VAR_137 = timezone.now()
            VAR_138 = VAR_137 - timedelta(days=30)
        elif self.cleaned_data["period"] == "month":
            # Last day of previous month, then its first day.
            VAR_137 = timezone.now().replace(day=1) - timedelta(days=1)
            VAR_138 = VAR_137.replace(day=1)
        elif self.cleaned_data["period"] == "this-month":
            # Jump into next month, then step back to this month's last day.
            VAR_137 = timezone.now().replace(day=1) + timedelta(days=31)
            VAR_137 = VAR_137.replace(day=1) - timedelta(days=1)
            VAR_138 = VAR_137.replace(day=1)
        elif self.cleaned_data["period"] == "year":
            VAR_147 = timezone.now().year - 1
            VAR_137 = timezone.make_aware(datetime(VAR_147, 12, 31))
            VAR_138 = timezone.make_aware(datetime(VAR_147, 1, 1))
        elif self.cleaned_data["period"] == "this-year":
            VAR_147 = timezone.now().year
            VAR_137 = timezone.make_aware(datetime(VAR_147, 12, 31))
            VAR_138 = timezone.make_aware(datetime(VAR_147, 1, 1))
        else:
            # Custom period requires both explicit dates.
            if not self.cleaned_data.get("start_date"):
                raise ValidationError({"start_date": _("Missing date!")})
            if not self.cleaned_data.get("end_date"):
                raise ValidationError({"end_date": _("Missing date!")})
            VAR_138 = self.cleaned_data["start_date"]
            VAR_137 = self.cleaned_data["end_date"]
        # Normalize boundaries to whole days.
        self.cleaned_data["start_date"] = VAR_138.replace(
            hour=0, minute=0, second=0, microsecond=0
        )
        self.cleaned_data["end_date"] = VAR_137.replace(
            hour=23, minute=59, second=59, microsecond=999999
        )
        if self.cleaned_data["start_date"] > self.cleaned_data["end_date"]:
            VAR_139 = _("Starting date has to be before ending date!")
            raise ValidationError({"start_date": VAR_139, "end_date": VAR_139})
class CLASS_32:
    """Mixin validating weblate:// repository links against permissions."""

    def FUNC_23(self):
        VAR_78 = self.cleaned_data.get("repo")
        # Only validate weblate:// style links with a project/component part.
        if not VAR_78 or not is_repo_link(VAR_78) or "/" not in VAR_78[10:]:
            return VAR_78
        VAR_58, VAR_9 = VAR_78[10:].split("/", 1)
        try:
            VAR_55 = Component.objects.get(
                slug__iexact=VAR_9, project__slug__iexact=VAR_58
            )
        except Component.DoesNotExist:
            # Unknown target: let further validation deal with it.
            return VAR_78
        if not self.request.user.has_perm("component.edit", VAR_55):
            raise ValidationError(
                _("You do not have permission to VAR_127 this VAR_9!")
            )
        return VAR_78
class CLASS_33(CLASS_32, forms.ModelForm):
    """Base settings ModelForm that remembers the originating request."""

    class CLASS_82:
        VAR_120 = Component
        VAR_121 = []

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        # Keep the request around for permission checks during cleaning.
        self.request = VAR_8
        helper = FormHelper()
        helper.form_tag = False
        self.helper = helper
class CLASS_34(SortedSelectMultiple):
    """Multi-select widget for checks that (de)serializes its value as JSON."""

    def __init__(self, VAR_21=None, VAR_68=()):
        # Always offer the full set of known checks, ignoring passed choices.
        VAR_68 = CHECKS.get_choices()
        # BUG FIX: forward the local names; the previous code passed
        # undefined identifiers to super().__init__ (NameError at runtime).
        super().__init__(VAR_21=VAR_21, VAR_68=VAR_68)

    def FUNC_9(self, VAR_10, VAR_23, VAR_20):
        VAR_13 = super().value_from_datadict(VAR_10, VAR_23, VAR_20)
        if isinstance(VAR_13, str):
            # Submitted as a JSON-encoded list of check names.
            return json.loads(VAR_13)
        return VAR_13

    def FUNC_24(self, VAR_13):
        VAR_13 = super().format_value(VAR_13)
        if isinstance(VAR_13, str):
            return VAR_13
        return json.dumps(VAR_13)
class CLASS_35(forms.CharField):
    """CharField performing no coercion — the raw value passes through."""

    def FUNC_3(self, VAR_13):
        # Identity conversion: keep the submitted value untouched.
        return VAR_13
class CLASS_36:
    """Mixin providing documentation links for component fields."""

    @staticmethod
    def FUNC_11(VAR_39):
        return ("admin/projects", f"component-{VAR_39.name}")
class CLASS_37:
    """Mixin providing documentation links for project fields."""

    @staticmethod
    def FUNC_11(VAR_39):
        return ("admin/projects", f"project-{VAR_39.name}")
class CLASS_38:
    """Mixin rejecting field values identified as spam."""

    def FUNC_25(self, VAR_13):
        if is_spam(VAR_13, self.request):
            raise ValidationError(_("This VAR_39 has been identified as spam!"))
class CLASS_39(CLASS_38):
    """Spam-checks the contributor agreement field."""

    def FUNC_26(self):
        VAR_13 = self.cleaned_data["agreement"]
        self.spam_check(VAR_13)
        return VAR_13
class CLASS_40(CLASS_38):
    """Spam-checks the project website and instructions fields."""

    def FUNC_27(self):
        VAR_13 = self.cleaned_data["web"]
        self.spam_check(VAR_13)
        return VAR_13

    def FUNC_28(self):
        VAR_13 = self.cleaned_data["instructions"]
        self.spam_check(VAR_13)
        return VAR_13
class CLASS_41(
    CLASS_33, CLASS_36, CLASS_39
):
    """Component settings form with a tabbed layout.

    Hides the ``restricted`` flag from users who may not change it and
    limits the VCS choices to supported backends (always keeping the
    component's current VCS selectable).
    """

    class CLASS_82:
        VAR_120 = Component
        VAR_121 = (
            "name",
            "report_source_bugs",
            "license",
            "agreement",
            "allow_translation_propagation",
            "enable_suggestions",
            "suggestion_voting",
            "suggestion_autoaccept",
            "priority",
            "check_flags",
            "enforced_checks",
            "commit_message",
            "add_message",
            "delete_message",
            "merge_message",
            "addon_message",
            "vcs",
            "repo",
            "branch",
            "push",
            "push_branch",
            "repoweb",
            "push_on_commit",
            "commit_pending_age",
            "merge_style",
            "file_format",
            "edit_template",
            "new_lang",
            "language_code_style",
            "source_language",
            "new_base",
            "filemask",
            "template",
            "intermediate",
            "language_regex",
            "variant_regex",
            "restricted",
            "auto_lock_error",
            "links",
            "manage_units",
            "is_glossary",
            "glossary_color",
        )
        VAR_122 = {
            "enforced_checks": CLASS_34,
            "source_language": SortedSelect,
        }
        VAR_123 = {"enforced_checks": CLASS_35}

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(VAR_8, *VAR_6, **VAR_7)
        if self.hide_restricted:
            self.fields["restricted"].widget = forms.HiddenInput()
        # Only offer link targets among projects the user manages.
        self.fields["links"].queryset = VAR_8.user.managed_projects.exclude(
            pk=self.instance.pk
        )
        self.helper.layout = Layout(
            TabHolder(
                Tab(
                    _("Basic"),
                    Fieldset(_("Name"), "name"),
                    Fieldset(_("License"), "license", "agreement"),
                    Fieldset(_("Upstream links"), "report_source_bugs"),
                    Fieldset(
                        _("Listing and access"),
                        "priority",
                        "restricted",
                        "links",
                    ),
                    Fieldset(
                        _("Glossary"),
                        "is_glossary",
                        "glossary_color",
                    ),
                    css_id="basic",
                ),
                Tab(
                    _("Translation"),
                    Fieldset(
                        _("Suggestions"),
                        "enable_suggestions",
                        "suggestion_voting",
                        "suggestion_autoaccept",
                    ),
                    Fieldset(
                        _("Translation settings"),
                        "allow_translation_propagation",
                        "manage_units",
                        "check_flags",
                        "variant_regex",
                        "enforced_checks",
                    ),
                    css_id="translation",
                ),
                Tab(
                    _("Version control"),
                    Fieldset(
                        _("Locations"),
                        Div(template="trans/repo_help.html"),
                        "vcs",
                        "repo",
                        "branch",
                        "push",
                        "push_branch",
                        "repoweb",
                    ),
                    Fieldset(
                        _("Version control settings"),
                        "push_on_commit",
                        "commit_pending_age",
                        "merge_style",
                        "auto_lock_error",
                    ),
                    css_id="vcs",
                ),
                Tab(
                    _("Commit messages"),
                    Fieldset(
                        _("Commit messages"),
                        ContextDiv(
                            template="trans/messages_help.html",
                            VAR_87={"user": VAR_8.user},
                        ),
                        "commit_message",
                        "add_message",
                        "delete_message",
                        "merge_message",
                        "addon_message",
                    ),
                    css_id="messages",
                ),
                Tab(
                    _("Files"),
                    Fieldset(
                        _("Translation files"),
                        "file_format",
                        "filemask",
                        "language_regex",
                        "source_language",
                    ),
                    Fieldset(
                        _("Monolingual translations"),
                        "template",
                        "edit_template",
                        "intermediate",
                    ),
                    Fieldset(
                        _("Adding new languages"),
                        "new_base",
                        "new_lang",
                        "language_code_style",
                    ),
                    css_id="files",
                ),
                template="layout/pills.html",
            )
        )
        VAR_124 = (
            "git",
            "gerrit",
            "github",
            "gitlab",
            "pagure",
            "local",
            "git-force-push",
        )
        # BUG FIX: the fallback tuple was assigned to an unused name, so a
        # component using another VCS lost its own backend from the choices.
        if self.instance.vcs not in VAR_124:
            VAR_124 = (self.instance.vcs,)
        self.fields["vcs"].choices = [
            c for c in self.fields["vcs"].choices if c[0] in VAR_124
        ]

    @property
    def FUNC_29(self):
        """Whether the restricted flag must be hidden from this user."""
        VAR_4 = self.request.user
        if VAR_4.is_superuser:
            return False
        if settings.OFFER_HOSTING:
            return True
        # Hide unless the user can edit the component somewhere.
        return not any(
            "component.edit" in permissions
            for permissions, _langs in VAR_4.component_permissions[self.instance.pk]
        )

    def FUNC_4(self):
        VAR_10 = self.cleaned_data
        # Never accept restricted changes from users who cannot see the flag.
        if self.hide_restricted:
            VAR_10["restricted"] = self.instance.restricted
class CLASS_42(CLASS_33, CLASS_36, CLASS_39):
    """Component form exposing the full creation/editing field set."""

    class CLASS_82:
        VAR_120 = Component
        VAR_121 = [
            "project",
            "name",
            "slug",
            "vcs",
            "repo",
            "branch",
            "push",
            "push_branch",
            "repoweb",
            "file_format",
            "filemask",
            "template",
            "edit_template",
            "intermediate",
            "new_base",
            "license",
            "new_lang",
            "language_code_style",
            "language_regex",
            "source_language",
            "is_glossary",
        ]
        VAR_122 = {"source_language": SortedSelect}
class CLASS_43(forms.Form, CLASS_36, CLASS_39):
    """Base form for the component creation wizard (name, slug, glossary)."""

    VAR_20 = forms.CharField(
        VAR_134=_("Component name"),
        VAR_24=COMPONENT_NAME_LENGTH,
        help_text=_("Display name"),
    )
    VAR_69 = forms.SlugField(
        VAR_134=_("URL slug"),
        VAR_24=COMPONENT_NAME_LENGTH,
        help_text=_("Name used in URLs and filenames."),
    )
    VAR_70 = forms.BooleanField(
        VAR_134=_("Use as a glossary"),
        VAR_113=False,
    )

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.request = VAR_8
class CLASS_44(CLASS_43):
    """Create a component by copying configuration from an existing one."""

    VAR_9 = forms.ModelChoiceField(
        VAR_90=Component.objects.none(),
        VAR_134=_("Component"),
        help_text=_("Select existing VAR_9 to copy configuration from."),
    )

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        # BUG FIX: drop any "instance" kwarg from the received kwargs dict;
        # the previous code popped from an undefined name (NameError).
        if "instance" in VAR_7:
            VAR_7.pop("instance")
        if "auto_id" not in VAR_7:
            VAR_7["auto_id"] = "id_existing_%s"
        super().__init__(VAR_8, *VAR_6, **VAR_7)
class CLASS_45(CLASS_44):
    """Create a component from another branch of an existing component."""

    VAR_71 = forms.ChoiceField(VAR_134=_("Repository branch"))
    # Available branches per component pk, filled in by the caller.
    branch_data: Dict[int, List[str]] = {}
    VAR_61 = None

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["auto_id"] = "id_branch_%s"
        super().__init__(*VAR_6, **VAR_7)

    def FUNC_19(self):
        # Restrict branch choices to the selected component's branches.
        VAR_9 = self.cleaned_data["component"]
        self.fields["branch"].choices = [(x, x) for x in self.branch_data[VAR_9.pk]]
        return VAR_9

    def FUNC_4(self):
        """Build a transient Component from the source one and validate it."""
        VAR_125 = ("branch", "slug", "name")
        VAR_10 = self.cleaned_data
        VAR_9 = VAR_10.get("component")
        if not VAR_9 or any(VAR_39 not in VAR_10 for VAR_39 in VAR_125):
            return
        VAR_7 = model_to_dict(VAR_9, exclude=["id", "links"])
        VAR_7["source_language"] = VAR_9.source_language
        VAR_7["project"] = VAR_9.project
        for VAR_39 in VAR_125:
            VAR_7[VAR_39] = VAR_10[VAR_39]
        self.instance = Component(**VAR_7)
        try:
            self.instance.full_clean()
        except ValidationError as error:
            # Remap model errors onto visible form fields where possible.
            VAR_43 = {NON_FIELD_ERRORS: []}
            for VAR_148, VAR_13 in error.message_dict.items():
                if VAR_148 in self.fields:
                    VAR_43[VAR_148] = VAR_13
                else:
                    VAR_43[NON_FIELD_ERRORS].extend(VAR_13)
            raise ValidationError(error.messages)
class CLASS_46(CLASS_43):
    """Base form for creating a brand new component in a project."""

    VAR_58 = forms.ModelChoiceField(
        VAR_90=Project.objects.none(), VAR_134=_("Project")
    )
    VAR_72 = forms.ModelChoiceField(
        widget=SortedSelect,
        VAR_134=_("Source language"),
        help_text=_("Language used for VAR_19 strings in all components"),
        VAR_90=Language.objects.all(),
    )

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        # BUG FIX: discard any "instance" kwarg from VAR_7; the previous
        # code popped from an undefined name (NameError at runtime).
        if "instance" in VAR_7:
            VAR_7.pop("instance")
        super().__init__(VAR_8, *VAR_6, **VAR_7)
        self.fields["source_language"].initial = Language.objects.default_language
        self.request = VAR_8
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.instance = None

    def FUNC_4(self):
        """Reject duplicate component names or slugs within the project."""
        if "project" not in self.cleaned_data:
            return
        VAR_58 = self.cleaned_data["project"]
        VAR_20 = self.cleaned_data.get("name")
        if VAR_20 and VAR_58.component_set.filter(name__iexact=VAR_20).exists():
            raise ValidationError(
                {"name": _("Component with the same VAR_20 already exists.")}
            )
        VAR_69 = self.cleaned_data.get("slug")
        if VAR_69 and VAR_58.component_set.filter(slug__iexact=VAR_69).exists():
            raise ValidationError(
                {"slug": _("Component with the same VAR_20 already exists.")}
            )
class CLASS_47(CLASS_46):
    """Create a component from scratch (no existing translation files)."""

    # Only formats that can start empty or update a bilingual base.
    VAR_73 = forms.ChoiceField(
        VAR_134=_("File format"),
        VAR_11="po-mono",
        VAR_68=FILE_FORMATS.get_choices(
            cond=lambda x: bool(x.new_translation) or hasattr(x, "update_bilingual")
        ),
    )

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["auto_id"] = "id_scratchcreate_%s"
        super().__init__(*VAR_6, **VAR_7)
class CLASS_48(CLASS_46):
    """Create a component from an uploaded ZIP archive of translations."""

    VAR_74 = forms.FileField(
        VAR_134=_("ZIP VAR_37 containing translations"),
        validators=[FileExtensionValidator(allowed_extensions=["zip"])],
        widget=forms.FileInput(VAR_21={"accept": ".zip,application/zip"}),
    )
    # Fields rendered by this wizard step.
    VAR_75 = ["zipfile", "project", "name", "slug"]

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["auto_id"] = "id_zipcreate_%s"
        super().__init__(*VAR_6, **VAR_7)
class CLASS_49(CLASS_46):
    """Create a component from a single uploaded document."""

    VAR_76 = forms.FileField(
        VAR_134=_("Document to translate"),
        validators=[validate_file_extension],
    )
    # Fields rendered by this wizard step.
    VAR_75 = ["docfile", "project", "name", "slug"]

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["auto_id"] = "id_doccreate_%s"
        super().__init__(*VAR_6, **VAR_7)
class CLASS_50(CLASS_32, CLASS_46):
    """Create a component backed by a version control repository."""

    VAR_58 = forms.ModelChoiceField(
        VAR_90=Project.objects.none(), VAR_134=_("Project")
    )
    VAR_77 = forms.ChoiceField(
        VAR_134=_("Version control system"),
        help_text=_(
            "Version control system to use to VAR_127 your "
            "repository with translations."
        ),
        VAR_68=VCS_REGISTRY.get_choices(exclude={"local"}),
        VAR_11=settings.DEFAULT_VCS,
    )
    VAR_78 = forms.CharField(
        VAR_134=_("Source code repository"),
        VAR_24=REPO_LENGTH,
        help_text=_(
            "URL of a repository, use weblate://VAR_58/VAR_9 "
            "for sharing with other VAR_9."
        ),
    )
    VAR_71 = forms.CharField(
        VAR_134=_("Repository branch"),
        VAR_24=REPO_LENGTH,
        help_text=_("Repository VAR_71 to translate"),
        VAR_113=False,
    )

    def FUNC_30(self, VAR_10):
        """Validate the form data by instantiating a transient Component."""
        VAR_126 = copy.copy(VAR_10)
        if "discovery" in VAR_126:
            # BUG FIX: pop from the copied dict; the previous code
            # referenced an undefined name here (NameError at runtime).
            VAR_126.pop("discovery")
        VAR_61 = Component(**VAR_126)
        VAR_61.clean_fields(exclude=("filemask", "file_format", "license"))
        VAR_61.validate_unique()
        VAR_61.clean_repo()
        self.instance = VAR_61
        # Prefer a weblate:// link when a component already uses this repo,
        # then re-validate with the rewritten location.
        VAR_78 = VAR_61.suggest_repo_link()
        if VAR_78:
            VAR_10["repo"] = VAR_78
            VAR_10["branch"] = ""
            self.clean_instance(VAR_10)

    def FUNC_4(self):
        self.clean_instance(self.cleaned_data)
class CLASS_51(CLASS_50):
    """Component creation form offering auto-discovered setups to pick from."""

    VAR_79 = forms.ChoiceField(
        VAR_134=_("Choose VAR_5 VAR_23 to import"),
        VAR_68=[("manual", _("Specify configuration manually"))],
        VAR_113=True,
        widget=forms.RadioSelect,
    )

    def FUNC_31(self, VAR_13):
        """Render one discovery result as an HTML radio choice."""
        VAR_87 = copy.copy(VAR_13)
        try:
            VAR_140 = FILE_FORMATS[VAR_13["file_format"]]
            VAR_87["file_format_name"] = VAR_140.name
            VAR_87["valid"] = True
        except KeyError:
            # Unknown format: show it but mark the choice as invalid.
            VAR_87["file_format_name"] = VAR_13["file_format"]
            VAR_87["valid"] = False
        VAR_87["origin"] = VAR_13.meta["origin"]
        return render_to_string("trans/FUNC_33-choice.html", VAR_87)

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(VAR_8, *VAR_6, **VAR_7)
        # Only the discovery selector is shown; everything else is hidden.
        for VAR_39, VAR_13 in self.fields.items():
            if VAR_39 == "discovery":
                continue
            VAR_13.widget = forms.HiddenInput()
        self.fields["vcs"].choices = VCS_REGISTRY.get_choices()
        self.discovered = self.perform_discovery(VAR_8, VAR_7)
        for i, VAR_13 in enumerate(self.discovered):
            self.fields["discovery"].choices.append((i, self.render_choice(VAR_13)))

    def FUNC_32(self, VAR_8, VAR_7):
        """Run repository discovery, or restore results from the session."""
        if "data" in VAR_7 and "create_discovery" in VAR_8.session:
            VAR_141 = []
            for i, VAR_10 in enumerate(VAR_8.session["create_discovery"]):
                VAR_144 = DiscoveryResult(VAR_10)
                VAR_144.meta = VAR_8.session["create_discovery_meta"][i]
                VAR_141.append(VAR_144)
            return VAR_141
        try:
            self.clean_instance(VAR_7["initial"])
            VAR_141 = self.discover()
            if not VAR_141:
                # BUG FIX: keep the eager results; the previous code stored
                # them into an unused local and discarded them.
                VAR_141 = self.discover(VAR_80=True)
        except ValidationError:
            VAR_141 = []
        VAR_8.session["create_discovery"] = VAR_141
        VAR_8.session["create_discovery_meta"] = [x.meta for x in VAR_141]
        return VAR_141

    def FUNC_33(self, VAR_80: bool = False):
        # BUG FIX: forward the eager flag; the previous code referenced an
        # undefined name here (NameError at runtime).
        return FUNC_33(
            self.instance.full_path,
            VAR_72=self.instance.source_language.code,
            VAR_80=VAR_80,
        )

    def FUNC_4(self):
        super().clean()
        VAR_79 = self.cleaned_data.get("discovery")
        if VAR_79 and VAR_79 != "manual":
            # Apply the chosen discovered configuration over the form data.
            self.cleaned_data.update(self.discovered[int(VAR_79)])
class CLASS_52(CLASS_33, CLASS_36):
    """Form renaming a component (slug only)."""

    class CLASS_82:
        VAR_120 = Component
        VAR_121 = ["slug"]
class CLASS_53(CLASS_33, CLASS_36):
    """Form moving a component to a different project."""

    class CLASS_82:
        VAR_120 = Component
        VAR_121 = ["project"]

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(VAR_8, *VAR_6, **VAR_7)
        # Only allow targets the user manages.
        self.fields["project"].queryset = VAR_8.user.managed_projects
class CLASS_54(CLASS_33, CLASS_37, CLASS_40):
    """Project settings form with audited access-control changes."""

    class CLASS_82:
        VAR_120 = Project
        VAR_121 = (
            "name",
            "web",
            "instructions",
            "set_language_team",
            "use_shared_tm",
            "contribute_shared_tm",
            "enable_hooks",
            "language_aliases",
            "access_control",
            "translation_review",
            "source_review",
        )
        VAR_122 = {
            "access_control": forms.RadioSelect,
            "instructions": CLASS_0,
            "language_aliases": forms.TextInput,
        }

    def FUNC_4(self):
        VAR_10 = self.cleaned_data
        if settings.OFFER_HOSTING:
            # Hosted setups couple TM contribution to TM usage.
            VAR_10["contribute_shared_tm"] = VAR_10["use_shared_tm"]
        if (
            "access_control" not in VAR_10
            or VAR_10["access_control"] is None
            or VAR_10["access_control"] == ""
        ):
            VAR_10["access_control"] = self.instance.access_control
        VAR_127 = VAR_10["access_control"]
        self.changed_access = VAR_127 != self.instance.access_control
        if self.changed_access and not self.user_can_change_access:
            raise ValidationError(
                {
                    "access_control": _(
                        "You do not have permission to change VAR_58 VAR_127 control."
                    )
                }
            )
        # Making a project public requires every component to carry a license.
        if self.changed_access and VAR_127 in (
            Project.ACCESS_PUBLIC,
            Project.ACCESS_PROTECTED,
        ):
            VAR_142 = self.instance.component_set.filter(license="")
            if VAR_142:
                raise ValidationError(
                    {
                        "access_control": _(
                            "You must specify a license for these components "
                            "to make them publicly accessible: %s"
                        )
                        % ", ".join(VAR_142.values_list("name", flat=True))
                    }
                )

    def FUNC_22(self, VAR_62: bool = True):
        """Save the project and record an audit entry on access changes."""
        # BUG FIX: forward the commit flag; the previous code referenced an
        # undefined name here (NameError at runtime).
        super().save(VAR_62=VAR_62)
        if self.changed_access:
            Change.objects.create(
                VAR_58=self.instance,
                VAR_98=Change.ACTION_ACCESS_EDIT,
                VAR_4=self.user,
                details={"access_control": self.instance.access_control},
            )

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(VAR_8, *VAR_6, **VAR_7)
        self.user = VAR_8.user
        self.user_can_change_access = VAR_8.user.has_perm(
            "billing:VAR_58.permissions", self.instance
        )
        self.changed_access = False
        self.helper.form_tag = False
        if not self.user_can_change_access:
            VAR_143 = {"disabled": True}
            self.fields["access_control"].required = False
            self.fields["access_control"].help_text = _(
                "You do not have permission to change VAR_58 VAR_127 control."
            )
        else:
            VAR_143 = {}
        self.helper.layout = Layout(
            TabHolder(
                Tab(
                    _("Basic"),
                    "name",
                    "web",
                    "instructions",
                    css_id="basic",
                ),
                Tab(
                    _("Access"),
                    InlineRadios(
                        "access_control",
                        template="%s/layout/radioselect_access.html",
                        **VAR_143,
                    ),
                    css_id="access",
                ),
                Tab(
                    _("Workflow"),
                    "set_language_team",
                    "use_shared_tm",
                    "contribute_shared_tm",
                    "enable_hooks",
                    "language_aliases",
                    "translation_review",
                    "source_review",
                    css_id="workflow",
                ),
                Tab(
                    _("Components"),
                    ContextDiv(
                        template="snippets/VAR_58-VAR_9-settings.html",
                        VAR_87={"object": self.instance, "user": VAR_8.user},
                    ),
                    css_id="components",
                ),
                template="layout/pills.html",
            )
        )
        if settings.OFFER_HOSTING:
            # Hosted setups hide the contribution toggle and custom ACLs.
            self.fields["contribute_shared_tm"].widget = forms.HiddenInput()
            self.fields["use_shared_tm"].help_text = _(
                "Uses and contributes to the pool of shared translations "
                "between projects."
            )
            self.fields["access_control"].choices = [
                choice
                for choice in self.fields["access_control"].choices
                if choice[0] != Project.ACCESS_CUSTOM
            ]
class CLASS_55(CLASS_33, CLASS_37):
    """ModelForm over Project exposing only the slug field (rename form)."""

    class CLASS_82:
        VAR_120 = Project
        VAR_121 = ["slug"]
class CLASS_56(CLASS_33, CLASS_37, CLASS_40):
    """Project creation form with a required billing selector.

    The billing queryset starts empty here; presumably the view narrows it
    to the requesting user's billings — verify against callers.
    """

    VAR_81 = forms.ModelChoiceField(
        VAR_134=_("Billing"),
        VAR_90=User.objects.none(),
        VAR_113=True,
        empty_label=None,
    )

    class CLASS_82:
        VAR_120 = Project
        VAR_121 = ("name", "slug", "web", "instructions")
class CLASS_57(forms.Form):
    """Search-and-replace form: optional query filter, search and replacement."""

    # Optional query narrowing the affected strings.
    VAR_35 = CLASS_5(
        VAR_113=False, help_text=_("Optional additional filter on the strings")
    )
    # Case-sensitive text to look for; whitespace is preserved (strip=False).
    VAR_82 = forms.CharField(
        VAR_134=_("Search string"),
        VAR_25=1,
        VAR_113=True,
        strip=False,
        help_text=_("Case sensitive string to VAR_82 for and replace."),
    )
    VAR_83 = forms.CharField(
        VAR_134=_("Replacement string"), VAR_25=1, VAR_113=True, strip=False
    )

    def __init__(self, *VAR_6, **VAR_7):
        """Set a distinct auto_id prefix and build the crispy layout."""
        VAR_7["auto_id"] = "id_replace_%s"
        super().__init__(*VAR_6, **VAR_7)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            SearchField("q"),
            Field("search"),
            Field("replacement"),
            Div(template="snippets/replace-help.html"),
        )
class CLASS_58(forms.Form):
    """Confirmation form selecting units from a caller-provided queryset."""

    VAR_84 = forms.ModelMultipleChoiceField(VAR_90=Unit.objects.none(), VAR_113=False)
    # Hidden always-true flag acting as an explicit confirmation marker.
    VAR_85 = forms.BooleanField(VAR_113=True, VAR_11=True, widget=forms.HiddenInput)

    def __init__(self, VAR_84, *VAR_6, **VAR_7):
        """Restrict the units field to the given queryset VAR_84."""
        super().__init__(*VAR_6, **VAR_7)
        self.fields["units"].queryset = VAR_84
class CLASS_59(forms.Form):
    """Multi-select of languages already translated in a component."""

    VAR_59 = forms.MultipleChoiceField(
        VAR_134=_("Languages"), VAR_68=[], widget=forms.SelectMultiple
    )

    def __init__(self, VAR_9, *VAR_6, **VAR_7):
        """Populate choices with the component's languages, minus the source."""
        super().__init__(*VAR_6, **VAR_7)
        VAR_118 = Language.objects.filter(translation__component=VAR_9).exclude(
            pk=VAR_9.source_language_id
        )
        self.fields["lang"].choices = VAR_118.as_choices()
class CLASS_60(forms.Form):
    """Base form for adding a new unit to a translation."""

    # Optional existing unit used as the variant anchor.
    VAR_86 = forms.ModelChoiceField(
        Unit.objects.none(),
        widget=forms.HiddenInput,
        VAR_113=False,
    )

    def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):
        """Bind the form to a translation VAR_5 and user VAR_4."""
        super().__init__(*VAR_6, **VAR_7)
        self.translation = VAR_5
        self.fields["variant"].queryset = VAR_5.unit_set.all()
        self.user = VAR_4

    def FUNC_4(self):
        """Form-level clean: delegate validation of the new unit data."""
        try:
            VAR_10 = self.as_kwargs()
        except KeyError:
            # Some required cleaned_data key is missing; field-level errors
            # already cover this case.
            return
        self.translation.validate_new_unit_data(**VAR_10)

    def FUNC_34(self):
        """Return extra glossary flags; empty in the base form."""
        return ""

    def FUNC_35(self):
        """Assemble the keyword arguments describing the new unit."""
        VAR_128 = Flags()
        VAR_128.merge(self.get_glossary_flags())
        VAR_86 = self.cleaned_data.get("variant")
        if VAR_86:
            VAR_128.set_value("variant", VAR_86.source)
        return {
            "context": self.cleaned_data.get("context", ""),
            "source": self.cleaned_data["source"],
            "target": self.cleaned_data.get("target"),
            "extra_flags": VAR_128.format(),
            "explanation": self.cleaned_data.get("explanation", ""),
            "auto_context": self.cleaned_data.get("auto_context", False),
        }
class CLASS_61(CLASS_60):
    """New-unit form for monolingual components: key plus source text."""

    VAR_87 = forms.CharField(
        VAR_134=_("Translation key"),
        help_text=_(
            "Key used to identify string in VAR_5 VAR_37. "
            "File VAR_36 specific rules might apply."
        ),
        VAR_113=True,
    )
    VAR_19 = CLASS_8(
        VAR_134=_("Source VAR_16 text"),
        help_text=_(
            "You can edit this later, as with any other string in "
            "the VAR_19 VAR_16."
        ),
        VAR_113=True,
    )

    def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):
        """Wire up widget tab order, profile and the blank initial unit."""
        super().__init__(VAR_5, VAR_4, *VAR_6, **VAR_7)
        self.fields["context"].widget.attrs["tabindex"] = 99
        self.fields["source"].widget.attrs["tabindex"] = 100
        self.fields["source"].widget.profile = VAR_4.profile
        # Fix: previous code referenced the undefined name "translation";
        # the constructor parameter is VAR_5.
        self.fields["source"].initial = Unit(VAR_5=VAR_5, id_hash=0)
class CLASS_62(CLASS_60):
    """New-unit form for bilingual source translations."""

    # Optional disambiguation context for the source string.
    VAR_87 = forms.CharField(
        VAR_134=_("Context"),
        help_text=_("Optional VAR_87 to clarify the VAR_19 strings."),
        VAR_113=False,
    )
    # When set, an existing duplicate string gets its context auto-adjusted.
    VAR_88 = forms.BooleanField(
        VAR_113=False,
        VAR_11=True,
        VAR_134=_("Automatically adjust VAR_87 when same string already exists."),
    )
    VAR_19 = CLASS_8(
        VAR_134=_("Source string"),
        VAR_113=True,
    )

    def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):
        """Configure tab order, context label and the blank source unit."""
        super().__init__(VAR_5, VAR_4, *VAR_6, **VAR_7)
        self.fields["context"].widget.attrs["tabindex"] = 99
        # Use the component's own wording for the context field label.
        self.fields["context"].label = VAR_5.component.context_label
        self.fields["source"].widget.attrs["tabindex"] = 100
        self.fields["source"].widget.profile = VAR_4.profile
        self.fields["source"].initial = Unit(
            VAR_5=VAR_5.component.source_translation, id_hash=0
        )
class CLASS_63(CLASS_62):
    """Bilingual new-unit form that also asks for the translated string."""

    VAR_31 = CLASS_8(
        VAR_134=_("Translated string"),
        help_text=_(
            "You can edit this later, as with any other string in the VAR_5."
        ),
        VAR_113=True,
    )

    def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):
        """Configure the target editor widget for this translation/user."""
        super().__init__(VAR_5, VAR_4, *VAR_6, **VAR_7)
        self.fields["target"].widget.attrs["tabindex"] = 101
        self.fields["target"].widget.profile = VAR_4.profile
        # Fix: previous code referenced the undefined name "translation";
        # the constructor parameter is VAR_5.
        self.fields["target"].initial = Unit(VAR_5=VAR_5, id_hash=0)
class CLASS_64(GlossaryAddMixin, CLASS_62):
    """Glossary variant of the bilingual source new-unit form.

    Forces the "terminology" flag on in the initial data before delegating
    to the parent constructor.
    """

    def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):
        VAR_144 = VAR_7["initial"]
        if VAR_144 is None:
            VAR_144 = {}
            VAR_7["initial"] = VAR_144
        VAR_144["terminology"] = True
        super().__init__(VAR_5, VAR_4, *VAR_6, **VAR_7)
class CLASS_65(GlossaryAddMixin, CLASS_63):
    """Glossary variant of the bilingual new-unit form; adds no extra logic."""

    pass
def FUNC_2(VAR_5, VAR_4, VAR_10=None, VAR_11=None):
    """Select and instantiate the right new-unit form for VAR_5.

    Monolingual components get the key+source form; glossaries get their
    dedicated variants; otherwise the plain source or bilingual form is used.

    Note: the previous code forwarded the undefined names "data"/"initial";
    the parameters are VAR_10 and VAR_11.
    """
    if VAR_5.component.has_template():
        return CLASS_61(VAR_5, VAR_4, VAR_10=VAR_10, VAR_11=VAR_11)
    if VAR_5.component.is_glossary:
        if VAR_5.is_source:
            return CLASS_64(VAR_5, VAR_4, VAR_10=VAR_10, VAR_11=VAR_11)
        return CLASS_65(VAR_5, VAR_4, VAR_10=VAR_10, VAR_11=VAR_11)
    if VAR_5.is_source:
        return CLASS_62(VAR_5, VAR_4, VAR_10=VAR_10, VAR_11=VAR_11)
    return CLASS_63(VAR_5, VAR_4, VAR_10=VAR_10, VAR_11=VAR_11)
class CLASS_66(ModelChoiceIterator):
    """Model choice iterator that prepends the empty choice when one is set."""

    def __iter__(self):
        VAR_160 = self.field.empty_label
        if VAR_160 is not None:
            yield ("", VAR_160)
        for VAR_55 in self.queryset:
            yield self.choice(VAR_55)

    def __len__(self):
        VAR_161 = 0 if self.field.empty_label is None else 1
        return len(self.queryset) + VAR_161

    def __bool__(self):
        if self.field.empty_label is not None:
            return True
        return bool(self.queryset)
class CLASS_67(forms.ModelMultipleChoiceField):
    """Multiple-choice field that caches its queryset without re-evaluation.

    The custom property setter stores the queryset as-is (no .all() clone)
    and refreshes the widget choices.
    """

    VAR_89 = CLASS_66

    def FUNC_36(self):
        # Property getter for the cached queryset.
        return self._queryset

    def FUNC_37(self, VAR_90):
        # Property setter: store and propagate choices to the widget.
        self._queryset = VAR_90
        self.widget.choices = self.choices

    VAR_90 = property(FUNC_36, FUNC_37)
class CLASS_68(forms.Form):
    """Bulk edit form: change state, flags and labels on matching strings."""

    VAR_35 = CLASS_5(VAR_113=True)
    VAR_91 = forms.ChoiceField(
        VAR_134=_("State to set"), VAR_68=((-1, _("Do not change")),) + STATE_CHOICES
    )
    VAR_92 = CLASS_6(VAR_134=_("Translation VAR_128 to add"), VAR_113=False)
    VAR_93 = CLASS_6(VAR_134=_("Translation VAR_128 to remove"), VAR_113=False)
    VAR_94 = CLASS_67(
        VAR_90=Label.objects.none(),
        VAR_134=_("Labels to add"),
        widget=forms.CheckboxSelectMultiple(),
        VAR_113=False,
    )
    VAR_95 = CLASS_67(
        VAR_90=Label.objects.none(),
        VAR_134=_("Labels to remove"),
        widget=forms.CheckboxSelectMultiple(),
        VAR_113=False,
    )

    def __init__(self, VAR_4, VAR_55, *VAR_6, **VAR_7):
        """Scope labels to the project and hide states the user cannot set."""
        VAR_58 = VAR_7.pop("project")
        VAR_7["auto_id"] = "id_bulk_%s"
        super().__init__(*VAR_6, **VAR_7)
        VAR_129 = VAR_58.label_set.all()
        if VAR_129:
            self.fields["remove_labels"].queryset = VAR_129
            self.fields["add_labels"].queryset = VAR_129
        # Empty/read-only are never directly settable; approved requires
        # review permission.
        VAR_130 = {STATE_EMPTY, STATE_READONLY}
        if VAR_4 is not None and not VAR_4.has_perm("unit.review", VAR_55):
            VAR_130.add(STATE_APPROVED)
        self.fields["state"].choices = [
            x for x in self.fields["state"].choices if x[0] not in VAR_130
        ]
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Div(template="snippets/bulk-help.html"),
            SearchField("q"),
            Field("state"),
            Field("add_flags"),
            Field("remove_flags"),
        )
        if VAR_129:
            self.helper.layout.append(InlineCheckboxes("add_labels"))
            self.helper.layout.append(InlineCheckboxes("remove_labels"))
class CLASS_69(forms.Form):
    """Contributor agreement acceptance form with a hidden redirect target."""

    VAR_85 = forms.BooleanField(
        VAR_134=_("I accept the contributor agreement"), VAR_113=True
    )
    # Where to send the user after accepting.
    VAR_96 = forms.CharField(VAR_113=False, widget=forms.HiddenInput)
class CLASS_70(forms.Form):
    """Base deletion form: the user must retype the object's full slug."""

    VAR_85 = forms.CharField(VAR_113=True)
    # Subclasses point this at the warning snippet to render.
    VAR_97 = ""

    def __init__(self, VAR_55, *VAR_6, **VAR_7):
        """Remember the object being deleted and build the warning layout."""
        super().__init__(*VAR_6, **VAR_7)
        self.obj = VAR_55
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            ContextDiv(
                template=self.warning_template,
                css_class="form-group",
                VAR_87={"object": VAR_55},
            ),
            Field("confirm"),
        )
        self.helper.form_tag = False

    def FUNC_4(self):
        """Reject the form unless the typed slug matches the object."""
        if self.cleaned_data.get("confirm") != self.obj.full_slug:
            raise ValidationError(
                _("The VAR_69 does not match the one marked for deletion!")
            )
class CLASS_71(CLASS_70):
    """Deletion confirmation for a translation."""

    VAR_85 = forms.CharField(
        VAR_134=_("Removal confirmation"),
        help_text=_("Please type in the full VAR_69 of the VAR_5 to VAR_85."),
        VAR_113=True,
    )
    VAR_97 = "trans/delete-VAR_5.html"
class CLASS_72(CLASS_70):
    """Deletion confirmation for a component."""

    VAR_85 = forms.CharField(
        VAR_134=_("Removal confirmation"),
        help_text=_("Please type in the full VAR_69 of the VAR_9 to VAR_85."),
        VAR_113=True,
    )
    VAR_97 = "trans/delete-VAR_9.html"
class CLASS_73(CLASS_70):
    """Deletion confirmation for a project."""

    VAR_85 = forms.CharField(
        VAR_134=_("Removal confirmation"),
        help_text=_("Please type in the VAR_69 of the VAR_58 to VAR_85."),
        VAR_113=True,
    )
    VAR_97 = "trans/delete-VAR_58.html"
class CLASS_74(CLASS_70):
    """Deletion confirmation for all of a project's strings in one language."""

    VAR_85 = forms.CharField(
        VAR_134=_("Removal confirmation"),
        help_text=_("Please type in the VAR_69 of the VAR_58 and VAR_16 to VAR_85."),
        VAR_113=True,
    )
    VAR_97 = "trans/delete-VAR_58-VAR_16.html"
class CLASS_75(forms.ModelForm):
    """ModelForm for site/project announcements with markdown message editor."""

    class CLASS_82:
        VAR_120 = Announcement
        VAR_121 = ["message", "category", "expiry", "notify"]
        VAR_122 = {
            "expiry": CLASS_1(),
            "message": CLASS_0,
        }
class CLASS_76(forms.Form):
    """History filtering form: project, language, action, author and dates."""

    VAR_58 = forms.ChoiceField(VAR_134=_("Project"), VAR_68=[("", "")], VAR_113=False)
    VAR_59 = forms.ChoiceField(VAR_134=_("Language"), VAR_68=[("", "")], VAR_113=False)
    VAR_98 = forms.MultipleChoiceField(
        VAR_134=_("Action"),
        VAR_113=False,
        widget=SortedSelectMultiple,
        VAR_68=Change.ACTION_CHOICES,
    )
    VAR_4 = UsernameField(VAR_134=_("Author username"), VAR_113=False, help_text=None)
    VAR_66 = CLASS_2(
        VAR_134=_("Starting date"), VAR_113=False, VAR_12=False
    )
    VAR_67 = CLASS_2(
        VAR_134=_("Ending date"), VAR_113=False, VAR_12=False
    )

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        """Extend choices with translated languages and the user's projects."""
        super().__init__(*VAR_6, **VAR_7)
        self.fields["lang"].choices += Language.objects.have_translation().as_choices()
        self.fields["project"].choices += [
            (VAR_58.slug, VAR_58.name) for VAR_58 in VAR_8.user.allowed_projects
        ]
class CLASS_77(forms.ModelForm):
    """ModelForm for creating/editing a label (name plus color picker)."""

    class CLASS_82:
        VAR_120 = Label
        VAR_121 = ("name", "color")
        VAR_122 = {"color": ColorWidget()}

    def __init__(self, *VAR_6, **VAR_7):
        """Attach a crispy helper without a wrapping <form> tag."""
        super().__init__(*VAR_6, **VAR_7)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
class CLASS_78(forms.Form):
    """Form selecting one of a project's API tokens (for deletion)."""

    VAR_99 = forms.ModelChoiceField(
        ProjectToken.objects.none(),
        widget=forms.HiddenInput,
        VAR_113=True,
    )

    def __init__(self, VAR_58, *VAR_6, **VAR_7):
        """Limit the token choices to the given project's tokens."""
        self.project = VAR_58
        super().__init__(*VAR_6, **VAR_7)
        self.fields["token"].queryset = VAR_58.projecttoken_set.all()
class CLASS_79(forms.ModelForm):
    """ModelForm creating a project API token tied to a fixed project."""

    class CLASS_82:
        VAR_120 = ProjectToken
        VAR_121 = ["name", "expires", "project"]
        VAR_122 = {
            "expires": CLASS_1(),
            "project": forms.HiddenInput,
        }

    def __init__(self, VAR_58, *VAR_6, **VAR_7):
        """Pin the token's project to VAR_58 via initial data."""
        self.project = VAR_58
        VAR_7["initial"] = {"project": VAR_58}
        super().__init__(*VAR_6, **VAR_7)

    def FUNC_38(self):
        """Reject attempts to submit a different project than the bound one."""
        if self.project != self.cleaned_data["project"]:
            raise ValidationError("Invalid VAR_58!")
        return self.cleaned_data["project"]

    def FUNC_39(self):
        """Normalize expiry to end-of-day and refuse past dates."""
        VAR_131 = self.cleaned_data["expires"]
        # Fix: previous code called .replace() on the undefined name
        # "expires"; operate on the cleaned value VAR_131.
        VAR_131 = VAR_131.replace(hour=23, minute=59, second=59, microsecond=999999)
        if VAR_131 < timezone.now():
            raise forms.ValidationError(gettext("Expiry cannot be in the past!"))
        return VAR_131
class CLASS_80(forms.Form):
    """Form selecting one of a project's defined groups (teams)."""

    VAR_100 = forms.ModelChoiceField(
        Group.objects.none(),
        widget=forms.HiddenInput,
        VAR_113=True,
    )

    def __init__(self, VAR_58, *VAR_6, **VAR_7):
        """Limit the group choices to the project's defined groups."""
        self.project = VAR_58
        super().__init__(*VAR_6, **VAR_7)
        self.fields["group"].queryset = VAR_58.defined_groups.all()
class CLASS_81(CLASS_29):
    """User management form assigning a project's teams to a user."""

    VAR_101 = forms.ModelMultipleChoiceField(
        Group.objects.none(),
        widget=forms.CheckboxSelectMultiple,
        VAR_134=_("Teams"),
        VAR_113=False,
    )

    def __init__(self, VAR_58, *VAR_6, **VAR_7):
        """Hide the user field and scope teams to the project's groups."""
        self.project = VAR_58
        super().__init__(*VAR_6, **VAR_7)
        self.fields["user"].widget = forms.HiddenInput()
        self.fields["groups"].queryset = VAR_58.defined_groups.all()
|
import copy
import json
import re
from datetime import date, datetime, timedelta
from typing import Dict, List
from crispy_forms.bootstrap import InlineCheckboxes, InlineRadios, Tab, TabHolder
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Div, Field, Fieldset, Layout
from django import .forms
from django.conf import settings
from django.core.exceptions import NON_FIELD_ERRORS, PermissionDenied, ValidationError
from django.core.validators import FileExtensionValidator
from django.db.models import Q
from django.forms import .model_to_dict
from django.forms.models import ModelChoiceIterator
from django.forms.utils import from_current_timezone
from django.template.loader import .render_to_string
from django.urls import reverse
from django.utils import timezone
from django.utils.html import escape
from django.utils.http import .urlencode
from django.utils.safestring import mark_safe
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
from translation_finder import DiscoveryResult, FUNC_33
from weblate.auth.models import Group, User
from weblate.checks.flags import Flags
from weblate.checks.models import CHECKS
from weblate.checks.utils import highlight_string
from weblate.formats.models import EXPORTERS, FILE_FORMATS
from weblate.glossary.forms import GlossaryAddMixin
from weblate.lang.data import BASIC_LANGUAGES
from weblate.lang.models import Language
from weblate.machinery import MACHINE_TRANSLATION_SERVICES
from weblate.trans.defines import COMPONENT_NAME_LENGTH, REPO_LENGTH
from weblate.trans.filter import FILTERS, get_filter_choice
from weblate.trans.models import (
Announcement,
Change,
Component,
Label,
Project,
ProjectToken,
Unit,
)
from weblate.trans.specialchars import RTL_CHARS_DATA, get_special_chars
from weblate.trans.util import check_upload_method_permissions, is_repo_link
from weblate.trans.validators import validate_check_flags
from weblate.utils.antispam import is_spam
from weblate.utils.errors import .report_error
from weblate.utils.forms import (
ColorWidget,
ContextDiv,
EmailField,
SearchField,
SortedSelect,
SortedSelectMultiple,
UsernameField,
)
from weblate.utils.hash import .checksum_to_hash, hash_to_checksum
from weblate.utils.search import parse_query
from weblate.utils.state import (
STATE_APPROVED,
STATE_CHOICES,
STATE_EMPTY,
STATE_FUZZY,
STATE_READONLY,
STATE_TRANSLATED,
)
from weblate.utils.validators import validate_file_extension
from weblate.vcs.models import VCS_REGISTRY
# HTML template for a toolbar button ({0}=extra css class, {1}=title,
# {2}=extra attributes, {3}=button content).
VAR_0 = """
<button class="btn btn-default {0}" title="{1}" {2}>{3}</button>
"""
# HTML template for a radio toggle button inside a button group.
# NOTE(review): tag/attribute names look garbled by identifier renaming
# (e.g. "<VAR_134") — confirm against the original template.
VAR_1 = """
<VAR_134 class="btn btn-default {0}" title="{1}">
<input type="radio" VAR_20="{2}" VAR_13="{3}" {4}/>
{5}
</VAR_134>
"""
# HTML template wrapping a group of buttons.
VAR_2 = """
<div class="btn-VAR_100 btn-VAR_100-xs" {0}>{1}</div>
"""
# HTML template for the editor toolbar container.
VAR_3 = """
<div class="btn-toolbar pull-right flip editor-toolbar">{0}</div>
"""
class CLASS_0(forms.Textarea):
    """Textarea preconfigured for the markdown highlight editor."""

    def __init__(self, **VAR_7):
        VAR_21 = {}
        VAR_21["dir"] = "auto"
        VAR_21["class"] = "markdown-editor highlight-editor"
        VAR_21["data-mode"] = "markdown"
        VAR_7["attrs"] = VAR_21
        super().__init__(**VAR_7)
class CLASS_1(forms.DateInput):
    """HTML5 date input, optionally enabling the JS datepicker attributes."""

    def __init__(self, VAR_12=True, **VAR_7):
        VAR_21 = {"type": "date"}
        if VAR_12:
            VAR_21["data-provide"] = "datepicker"
            VAR_21["data-date-format"] = "yyyy-mm-dd"
        # Fix: previous code passed the undefined name "attrs"; pass the
        # local attribute dict VAR_21.
        super().__init__(VAR_21=VAR_21, VAR_36="%Y-%m-%d", **VAR_7)
class CLASS_2(forms.DateField):
    """Date field defaulting to CLASS_1 as widget; cleans to aware datetime."""

    def __init__(self, VAR_12=True, **VAR_7):
        if "widget" not in VAR_7:
            # Fix: previous code forwarded the undefined name "datepicker";
            # the parameter is VAR_12.
            VAR_7["widget"] = CLASS_1(VAR_12=VAR_12)
        super().__init__(**VAR_7)

    def FUNC_3(self, VAR_13):
        """Produce a timezone-aware midnight datetime from the parsed date."""
        VAR_13 = super().to_python(VAR_13)
        if isinstance(VAR_13, date):
            return from_current_timezone(
                datetime(VAR_13.year, VAR_13.month, VAR_13.day, 0, 0, 0)
            )
        return VAR_13
class CLASS_3(forms.CharField):
    """Hidden checksum field decoding the value via checksum_to_hash."""

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["widget"] = forms.HiddenInput
        super().__init__(*VAR_6, **VAR_7)

    def FUNC_4(self, VAR_13):
        """Return the decoded hash, None for empty, or raise ValidationError."""
        super().clean(VAR_13)
        if not VAR_13:
            return None
        try:
            return checksum_to_hash(VAR_13)
        except ValueError:
            raise ValidationError(_("Invalid VAR_26 specified!"))
class CLASS_4(forms.CharField):
    """Char field resolving its value to a User by username or e-mail."""

    def FUNC_4(self, VAR_13):
        """Return the matched User, None for empty input, or raise."""
        if not VAR_13:
            return None
        try:
            return User.objects.get(Q(username=VAR_13) | Q(email=VAR_13))
        except User.DoesNotExist:
            raise ValidationError(_("Could not find any such VAR_4."))
        except User.MultipleObjectsReturned:
            raise ValidationError(_("More possible users were found."))
class CLASS_5(forms.CharField):
    """Search query field validated by parsing with parse_query."""

    def __init__(self, **VAR_7):
        # Default label/required unless the caller overrides them.
        if "label" not in VAR_7:
            VAR_7["label"] = _("Query")
        if "required" not in VAR_7:
            VAR_7["required"] = False
        super().__init__(**VAR_7)

    def FUNC_4(self, VAR_13):
        """Validate that the query parses; empty is OK unless required."""
        if not VAR_13:
            if self.required:
                raise ValidationError(_("Missing query string."))
            return ""
        try:
            parse_query(VAR_13)
            return VAR_13
        except Exception as error:
            # Log the parse failure, then surface a user-facing error.
            report_error()
            raise ValidationError(_("Could not parse query string: {}").format(error))
class CLASS_6(forms.CharField):
    """Char field validating its content as translation check flags."""

    VAR_14 = [validate_check_flags]
class CLASS_7(forms.Textarea):
    """Translation editor textarea rendering per-plural editors and toolbars."""

    def __init__(self, *VAR_6, **VAR_7):
        # The user profile is attached later by the owning form.
        self.profile = None
        super().__init__(*VAR_6, **VAR_7)

    def FUNC_5(self, VAR_15):
        """Build the RTL special-characters toolbar HTML."""
        VAR_101 = []
        # Fix: this list was assigned to the undefined-elsewhere name
        # "chars" while the code below reads VAR_102 (half-applied rename).
        VAR_102 = []
        for VAR_20, char, VAR_13 in RTL_CHARS_DATA:
            VAR_102.append(
                VAR_0.format(
                    "specialchar",
                    VAR_20,
                    'data-VAR_13="{}"'.format(
                        VAR_13.encode("ascii", "xmlcharrefreplace").decode("ascii")
                    ),
                    char,
                )
            )
        VAR_101.append(VAR_2.format("", "\n".join(VAR_102)))
        return VAR_3.format("\n".join(VAR_101))

    def FUNC_6(self, VAR_16, VAR_15):
        """Build the RTL/LTR direction toggle for RTL languages (else '')."""
        if VAR_16.direction != "rtl":
            return ""
        VAR_103 = f"rtl-{VAR_15}"
        VAR_104 = [
            VAR_1.format(
                "direction-toggle active",
                gettext("Toggle text direction"),
                VAR_103,
                "rtl",
                'checked="checked"',
                "RTL",
            ),
            VAR_1.format(
                "direction-toggle",
                gettext("Toggle text direction"),
                VAR_103,
                "ltr",
                "",
                "LTR",
            ),
        ]
        VAR_101 = [VAR_2.format('data-toggle="buttons"', "\n".join(VAR_104))]
        return mark_safe(VAR_3.format("\n".join(VAR_101)))

    def FUNC_7(self, VAR_16, VAR_15, VAR_17, VAR_18, VAR_19):
        """Build the special-characters toolbar for one plural editor."""
        VAR_105 = self.profile
        VAR_101 = []
        # Fix: same half-applied rename as in FUNC_5 — the comprehension
        # result must be bound to VAR_102, which is read below.
        VAR_102 = [
            VAR_0.format(
                "specialchar",
                VAR_20,
                'data-VAR_13="{}"'.format(
                    VAR_13.encode("ascii", "xmlcharrefreplace").decode("ascii")
                ),
                char,
            )
            for VAR_20, char, VAR_13 in get_special_chars(
                VAR_16, VAR_105.special_chars, VAR_17.source
            )
        ]
        VAR_101.append(VAR_2.format("", "\n".join(VAR_102)))
        VAR_43 = VAR_3.format("\n".join(VAR_101))
        if VAR_16.direction == "rtl":
            VAR_43 = self.get_rtl_toolbar(VAR_15) + VAR_43
        return mark_safe(VAR_43)

    def FUNC_8(self, VAR_20, VAR_13, VAR_21=None, VAR_22=None, **VAR_7):
        """Render one editor (textarea + toolbar) per target plural."""
        VAR_17 = VAR_13
        VAR_106 = VAR_17.get_target_plurals()
        VAR_59 = VAR_17.translation.language
        VAR_107 = VAR_17.translation.plural
        VAR_108 = self.attrs["tabindex"]
        VAR_109 = [hl[2] for hl in highlight_string(VAR_17.source_string, VAR_17)]
        VAR_21["class"] = "translation-editor VAR_114-control highlight-editor"
        VAR_21["tabindex"] = VAR_108
        VAR_21["lang"] = VAR_59.code
        VAR_21["dir"] = VAR_59.direction
        VAR_21["rows"] = 3
        VAR_21["data-max"] = VAR_17.get_max_length()
        VAR_21["data-mode"] = VAR_17.edit_mode
        VAR_21["data-placeables"] = "|".join(re.escape(pl) for pl in VAR_109 if pl)
        if VAR_17.readonly:
            VAR_21["readonly"] = 1
        VAR_110 = []
        VAR_111 = VAR_17.get_source_plurals()
        VAR_112 = f"id_{VAR_17.checksum}"
        for VAR_18, val in enumerate(VAR_106):
            VAR_15 = f"{VAR_20}_{VAR_18}"
            VAR_132 = f"{VAR_112}_{VAR_18}"
            VAR_21["id"] = VAR_132
            VAR_21["tabindex"] = VAR_108 + VAR_18
            # Plural forms beyond the first use the plural source as reference.
            if VAR_18 and len(VAR_111) > 1:
                VAR_19 = VAR_111[1]
            else:
                VAR_19 = VAR_111[0]
            VAR_133 = super().render(VAR_15, val, VAR_21, VAR_22, **VAR_7)
            VAR_134 = escape(VAR_17.translation.language)
            if len(VAR_106) != 1:
                VAR_134 = f"{VAR_134}, {VAR_107.get_plural_label(VAR_18)}"
            VAR_110.append(
                render_to_string(
                    "snippets/editor.html",
                    {
                        "toolbar": self.get_toolbar(VAR_59, VAR_132, VAR_17, VAR_18, VAR_19),
                        "fieldid": VAR_132,
                        "label": mark_safe(VAR_134),
                        "textarea": VAR_133,
                        "max_length": VAR_21["data-max"],
                        "length": len(val),
                        "source_length": len(VAR_19),
                        "rtl_toggle": self.get_rtl_toggle(VAR_59, VAR_132),
                    },
                )
            )
        if len(VAR_106) > 1:
            VAR_110.append(
                render_to_string(
                    "snippets/VAR_107-formula.html",
                    {"plural": VAR_107, "user": self.profile.user},
                )
            )
        return mark_safe("".join(VAR_110))

    def FUNC_9(self, VAR_10, VAR_23, VAR_20):
        """Collect the submitted per-plural values (up to 10) as a list."""
        VAR_110 = []
        for VAR_18 in range(0, 10):
            VAR_15 = f"{VAR_20}_{VAR_18:d}"
            if VAR_15 not in VAR_10:
                break
            VAR_110.append(VAR_10.get(VAR_15, ""))
        # Normalize line endings submitted by browsers.
        return [r.replace("\r", "") for r in VAR_110]
class CLASS_8(forms.CharField):
    """Field holding the per-plural list produced by the CLASS_7 widget."""

    def __init__(self, VAR_24=None, VAR_25=None, **VAR_7):
        # max/min length are accepted but intentionally not forwarded; the
        # label is rendered by the widget itself.
        VAR_7["label"] = ""
        super().__init__(widget=CLASS_7, **VAR_7)

    def FUNC_3(self, VAR_13):
        """Keep the list of plural strings as-is (no coercion)."""
        return VAR_13

    def FUNC_4(self, VAR_13):
        """Require at least one non-empty plural when the field is required."""
        VAR_13 = super().clean(VAR_13)
        if not VAR_13 or (self.required and not any(VAR_13)):
            raise ValidationError(_("Missing translated string!"))
        return VAR_13
class CLASS_9(forms.ChoiceField):
    """Choice field over the available search filters."""

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["label"] = _("Search filter")
        if "required" not in VAR_7:
            VAR_7["required"] = False
        VAR_7["choices"] = get_filter_choice()
        VAR_7["error_messages"] = {
            "invalid_choice": _("Please choose a valid filter type.")
        }
        super().__init__(*VAR_6, **VAR_7)

    def FUNC_3(self, VAR_13):
        """Map the legacy "untranslated" filter name to "todo"."""
        if VAR_13 == "untranslated":
            return "todo"
        return super().to_python(VAR_13)
class CLASS_10(forms.Form):
    """Form resolving a checksum to a unit within a given unit queryset."""

    VAR_26 = CLASS_3(VAR_113=True)

    def __init__(self, VAR_27, *VAR_6, **VAR_7):
        """Remember the unit queryset used for lookup."""
        self.unit_set = VAR_27
        super().__init__(*VAR_6, **VAR_7)

    def FUNC_10(self):
        """Store the matching unit in cleaned_data, or raise if gone."""
        if "checksum" not in self.cleaned_data:
            return
        VAR_27 = self.unit_set
        try:
            self.cleaned_data["unit"] = VAR_27.filter(
                id_hash=self.cleaned_data["checksum"]
            )[0]
        except (Unit.DoesNotExist, IndexError):
            raise ValidationError(
                _("The string you wanted to translate is no longer available.")
            )
class CLASS_11(forms.Form):
    """Base form bound to a single Unit instance."""

    def __init__(self, VAR_17: Unit, *VAR_6, **VAR_7):
        self.unit = VAR_17
        super().__init__(*VAR_6, **VAR_7)
class CLASS_12(forms.BooleanField):
    """Checkbox for the "needs editing" (fuzzy) state."""

    VAR_28 = True

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["label"] = _("Needs editing")
        VAR_7["help_text"] = _(
            'Strings are usually marked as "Needs editing" after the VAR_19 '
            "string is updated, or when marked as such manually."
        )
        super().__init__(*VAR_6, **VAR_7)
        self.widget.attrs["class"] = "fuzzy_checkbox"
class CLASS_13(CLASS_11):
    """Main translation editing form for a single unit."""

    # Concurrency guards: hashes of the source and current target.
    VAR_29 = CLASS_3(VAR_113=True)
    VAR_30 = CLASS_3(VAR_113=True)
    VAR_31 = CLASS_8(VAR_113=False)
    VAR_32 = CLASS_12(VAR_113=False)
    VAR_33 = forms.ChoiceField(
        VAR_134=_("Review state"),
        VAR_68=[
            (STATE_FUZZY, _("Needs editing")),
            (STATE_TRANSLATED, _("Waiting for review")),
            (STATE_APPROVED, _("Approved")),
        ],
        VAR_113=False,
        widget=forms.RadioSelect,
    )
    VAR_34 = forms.CharField(
        widget=CLASS_0,
        VAR_134=_("Explanation"),
        help_text=_(
            "Additional VAR_34 to clarify meaning or usage of the string."
        ),
        VAR_24=1000,
        VAR_113=False,
    )

    def __init__(self, VAR_4, VAR_17: Unit, *VAR_6, **VAR_7):
        """Prefill from the unit, wire up widgets and the review layout."""
        if VAR_17 is not None:
            VAR_7["initial"] = {
                "checksum": VAR_17.checksum,
                "contentsum": hash_to_checksum(VAR_17.content_hash),
                "translationsum": hash_to_checksum(VAR_17.get_target_hash()),
                "target": VAR_17,
                "fuzzy": VAR_17.fuzzy,
                "review": VAR_17.state,
                "explanation": VAR_17.explanation,
            }
            VAR_7["auto_id"] = f"id_{VAR_17.checksum}_%s"
        VAR_108 = VAR_7.pop("tabindex", 100)
        super().__init__(VAR_17, *VAR_6, **VAR_7)
        if VAR_17.readonly:
            for VAR_39 in ["target", "fuzzy", "review"]:
                self.fields[VAR_39].widget.attrs["readonly"] = 1
            self.fields["review"].choices = [
                (STATE_READONLY, _("Read only")),
            ]
        self.user = VAR_4
        self.fields["target"].widget.attrs["tabindex"] = VAR_108
        self.fields["target"].widget.profile = VAR_4.profile
        self.fields["review"].widget.attrs["class"] = "review_radio"
        # NOTE(review): appending an empty-state choice only when positional
        # args were given looks odd — confirm the intended condition.
        if VAR_6:
            self.fields["review"].choices.append((STATE_EMPTY, ""))
        self.helper = FormHelper()
        self.helper.form_method = "post"
        self.helper.form_tag = False
        self.helper.disable_csrf = True
        self.helper.layout = Layout(
            Field("target"),
            Field("fuzzy"),
            Field("contentsum"),
            Field("translationsum"),
            InlineRadios("review"),
            Field("explanation"),
        )
        # Reviewers use the review radios; others keep the fuzzy checkbox.
        if VAR_17 and VAR_4.has_perm("unit.review", VAR_17.translation):
            self.fields["fuzzy"].widget = forms.HiddenInput()
        else:
            self.fields["review"].widget = forms.HiddenInput()
            if not VAR_17.translation.component.is_glossary:
                self.fields["explanation"].widget = forms.HiddenInput()

    def FUNC_4(self):
        """Detect concurrent edits, enforce length, and derive the state."""
        super().clean()
        VAR_113 = {"target", "contentsum", "translationsum"}
        if not VAR_113.issubset(self.cleaned_data):
            return
        VAR_17 = self.unit
        if self.cleaned_data["contentsum"] != VAR_17.content_hash:
            raise ValidationError(
                _(
                    "Source string has been changed meanwhile. "
                    "Please check your changes."
                )
            )
        if self.cleaned_data["translationsum"] != VAR_17.get_target_hash():
            raise ValidationError(
                _(
                    "Translation of the string has been changed meanwhile. "
                    "Please check your changes."
                )
            )
        VAR_24 = VAR_17.get_max_length()
        for text in self.cleaned_data["target"]:
            if len(text) > VAR_24:
                raise ValidationError(_("Translation text too long!"))
        if self.user.has_perm(
            "unit.review", VAR_17.translation
        ) and self.cleaned_data.get("review"):
            self.cleaned_data["state"] = int(self.cleaned_data["review"])
        elif self.cleaned_data["fuzzy"]:
            self.cleaned_data["state"] = STATE_FUZZY
        else:
            self.cleaned_data["state"] = STATE_TRANSLATED
class CLASS_14(CLASS_13):
    """Zen-mode variant of the translation form posting to save_zen."""

    VAR_26 = CLASS_3(VAR_113=True)

    def __init__(self, VAR_4, VAR_17, *VAR_6, **VAR_7):
        """Enable a standalone form tag with CSRF and the checksum field."""
        super().__init__(VAR_4, VAR_17, *VAR_6, **VAR_7)
        self.helper.form_action = reverse(
            "save_zen", VAR_7=VAR_17.translation.get_reverse_url_kwargs()
        )
        self.helper.form_tag = True
        self.helper.disable_csrf = False
        self.helper.layout.append(Field("checksum"))
class CLASS_15(forms.Form):
    """Download form: query filter plus exporter file format choice."""

    VAR_35 = CLASS_5()
    VAR_36 = forms.ChoiceField(
        VAR_134=_("File format"),
        VAR_68=[(x.name, x.verbose) for x in EXPORTERS.values()],
        VAR_11="po",
        VAR_113=True,
        widget=forms.RadioSelect,
    )

    def __init__(self, *VAR_6, **VAR_7):
        """Build the crispy layout without a wrapping form tag."""
        super().__init__(*VAR_6, **VAR_7)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            SearchField("q"),
            InlineRadios("format"),
        )
class CLASS_16(forms.Form):
    """Base translation-file upload form (file, mode, fuzzy handling)."""

    VAR_37 = forms.FileField(VAR_134=_("File"), validators=[validate_file_extension])
    VAR_38 = forms.ChoiceField(
        VAR_134=_("File upload mode"),
        VAR_68=(
            ("translate", _("Add as translation")),
            ("approve", _("Add as approved translation")),
            ("suggest", _("Add as suggestion")),
            ("fuzzy", _("Add as VAR_5 needing edit")),
            ("replace", _("Replace existing VAR_5 file")),
            ("source", _("Update VAR_19 strings")),
            ("add", _("Add new strings")),
        ),
        widget=forms.RadioSelect,
        VAR_113=True,
    )
    VAR_32 = forms.ChoiceField(
        VAR_134=_("Processing of strings needing edit"),
        VAR_68=(
            ("", _("Do not import")),
            ("process", _("Import as string needing edit")),
            ("approve", _("Import as translated")),
        ),
        VAR_113=False,
    )

    def __init__(self, *VAR_6, **VAR_7):
        """Attach a crispy helper without a wrapping form tag."""
        super().__init__(*VAR_6, **VAR_7)
        self.helper = FormHelper(self)
        self.helper.form_tag = False

    @staticmethod
    def FUNC_11(VAR_39):
        """Return the docs page anchor for the given upload-method field."""
        return ("user/files", f"upload-{VAR_39.name}")

    def FUNC_12(self, VAR_13):
        """Drop one upload method from the available choices."""
        VAR_68 = self.fields["method"].choices
        self.fields["method"].choices = [
            choice for choice in VAR_68 if choice[0] != VAR_13
        ]
class CLASS_17(CLASS_16):
    """Upload form adding conflict-handling policy for existing strings."""

    VAR_40 = forms.ChoiceField(
        VAR_134=_("Conflict handling"),
        help_text=_(
            "Whether to overwrite existing translations if the string is "
            "already translated."
        ),
        VAR_68=(
            ("", _("Update only untranslated strings")),
            ("replace-translated", _("Update translated strings")),
            ("replace-approved", _("Update translated and approved strings")),
        ),
        VAR_113=False,
        VAR_11="replace-translated",
    )
class CLASS_18(CLASS_17):
    """Upload form additionally collecting an author name and e-mail."""

    VAR_41 = forms.CharField(VAR_134=_("Author name"))
    VAR_42 = EmailField(VAR_134=_("Author e-mail"))
def FUNC_0(VAR_4, VAR_5, *VAR_6, **VAR_7):
    """Return the upload form matching the user's permissions on VAR_5.

    Picks the richest form class the user may use, then strips upload
    methods and conflict choices the user is not allowed to select.
    """
    if VAR_4.has_perm("upload.authorship", VAR_5):
        VAR_114 = CLASS_18
        VAR_7["initial"] = {"author_name": VAR_4.full_name, "author_email": VAR_4.email}
    elif VAR_4.has_perm("upload.overwrite", VAR_5):
        VAR_114 = CLASS_17
    else:
        VAR_114 = CLASS_16
    VAR_43 = VAR_114(*VAR_6, **VAR_7)
    # Remove upload methods the user cannot perform on this translation.
    for VAR_38 in [x[0] for x in VAR_43.fields["method"].choices]:
        if not check_upload_method_permissions(VAR_4, VAR_5, VAR_38):
            VAR_43.remove_translation_choice(VAR_38)
    # Without review permission, overwriting approved strings is not offered.
    if not VAR_4.has_perm("unit.review", VAR_5) and not VAR_114 == CLASS_16:
        VAR_43.fields["conflicts"].choices = [
            choice
            for choice in VAR_43.fields["conflicts"].choices
            if choice[0] != "approved"
        ]
    return VAR_43
class CLASS_19(forms.Form):
    """Search form with query, sorting, checksum and offset handling."""

    VAR_35 = CLASS_5()
    VAR_44 = forms.CharField(VAR_113=False, widget=forms.HiddenInput)
    VAR_26 = CLASS_3(VAR_113=False)
    VAR_45 = forms.IntegerField(min_value=-1, VAR_113=False, widget=forms.HiddenInput)
    # Extra kwargs for rendering the offset field; overridden in subclasses.
    VAR_46 = {}

    def __init__(self, VAR_4, VAR_16=None, VAR_47=True, **VAR_7):
        """Bind user/language context and build the search toolbar layout."""
        self.user = VAR_4
        self.language = VAR_16
        super().__init__(**VAR_7)
        self.helper = FormHelper(self)
        self.helper.disable_csrf = True
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Div(
                Field("offset", **self.offset_kwargs),
                SearchField("q"),
                Field("sort_by", template="snippets/sort-VAR_39.html"),
                css_class="btn-toolbar",
                role="toolbar",
            ),
            ContextDiv(
                template="snippets/query-builder.html",
                VAR_87={
                    "user": self.user,
                    "month_ago": timezone.now() - timedelta(days=31),
                    "show_builder": VAR_47,
                    "language": self.language,
                },
            ),
            Field("checksum"),
        )

    def FUNC_13(self):
        """Return the human-readable name of the current search."""
        return FILTERS.get_search_name(self.cleaned_data.get("q", ""))

    def FUNC_14(self):
        """Return the cleaned query string."""
        return self.cleaned_data["q"]

    def FUNC_15(self):
        """Return the cleaned offset, defaulting to 1."""
        if self.cleaned_data.get("offset") is None:
            self.cleaned_data["offset"] = 1
        return self.cleaned_data["offset"]

    def VAR_115(self):
        """Serialize cleaned data to (name, value) pairs for URL building."""
        VAR_115 = []
        VAR_116 = {"offset", "checksum"}
        for param in sorted(self.cleaned_data):
            VAR_13 = self.cleaned_data[param]
            if VAR_13 is None or param in VAR_116:
                continue
            if isinstance(VAR_13, bool):
                if VAR_13:
                    VAR_115.append((param, "1"))
            elif isinstance(VAR_13, int):
                if VAR_13 > 0:
                    VAR_115.append((param, str(VAR_13)))
            elif isinstance(VAR_13, datetime):
                VAR_115.append((param, VAR_13.date().isoformat()))
            elif isinstance(VAR_13, list):
                for val in VAR_13:
                    VAR_115.append((param, val))
            elif isinstance(VAR_13, User):
                VAR_115.append((param, VAR_13.username))
            else:
                if VAR_13:
                    VAR_115.append((param, VAR_13))
        return VAR_115

    def FUNC_17(self):
        """URL-encode the serialized form items.

        Fix: the previous code called the undefined global FUNC_17 (itself)
        on self.items(), which this form does not define — the serializer
        method above is VAR_115 and the encoder is the imported urlencode.
        """
        return urlencode(self.VAR_115())

    def FUNC_18(self):
        """Reset offset and checksum in the bound data; return self."""
        VAR_10 = copy.copy(self.data)
        VAR_10["offset"] = "1"
        VAR_10["checksum"] = ""
        self.data = VAR_10
        return self
class CLASS_20(CLASS_19):
    """Search form variant with a visible, templated position/offset field."""

    VAR_45 = forms.IntegerField(min_value=-1, VAR_113=False)
    VAR_46 = {"template": "snippets/position-VAR_39.html"}
class CLASS_21(CLASS_11):
    """Form validating a merge of the bound unit with another unit."""

    # Primary key of the unit to merge with.
    VAR_48 = forms.IntegerField()

    def FUNC_4(self):
        """Resolve the merge target within the same project and language."""
        super().clean()
        if "merge" not in self.cleaned_data:
            return None
        try:
            VAR_17 = self.unit
            VAR_5 = VAR_17.translation
            VAR_58 = VAR_5.component.project
            self.cleaned_data["merge_unit"] = VAR_135 = Unit.objects.get(
                pk=self.cleaned_data["merge"],
                translation__component__project=VAR_58,
                translation__language=VAR_5.language,
            )
            # Non-source units must share the source string to be mergeable.
            if not VAR_5.is_source and VAR_17.source != VAR_135.source:
                raise ValidationError(_("Could not find merged string."))
        except Unit.DoesNotExist:
            raise ValidationError(_("Could not find merged string."))
        return self.cleaned_data
class CLASS_22(CLASS_11):
    """Form validating a revert of the bound unit to an earlier change."""

    # Primary key of the change to revert to.
    VAR_49 = forms.IntegerField()

    def FUNC_4(self):
        """Resolve the change belonging to this unit, or fail validation."""
        super().clean()
        if "revert" not in self.cleaned_data:
            return None
        try:
            self.cleaned_data["revert_change"] = Change.objects.get(
                pk=self.cleaned_data["revert"], VAR_17=self.unit
            )
        except Change.DoesNotExist:
            raise ValidationError(_("Could not find reverted change."))
        return self.cleaned_data
class CLASS_23(forms.Form):
    """Automatic translation form (mode, filter, source and MT engines)."""

    VAR_50 = forms.ChoiceField(
        VAR_134=_("Automatic VAR_5 mode"),
        VAR_68=[
            ("suggest", _("Add as suggestion")),
            ("translate", _("Add as translation")),
            ("fuzzy", _("Add as needing edit")),
        ],
        VAR_11="suggest",
    )
    VAR_51 = CLASS_9(
        VAR_113=True,
        VAR_11="todo",
        help_text=_(
            "Please note that translating all strings will "
            "discard all existing translations."
        ),
    )
    VAR_52 = forms.ChoiceField(
        VAR_134=_("Automatic VAR_5 source"),
        VAR_68=[
            ("others", _("Other VAR_5 components")),
            ("mt", _("Machine translation")),
        ],
        VAR_11="others",
    )
    VAR_9 = forms.ChoiceField(
        VAR_134=_("Components"),
        VAR_113=False,
        help_text=_(
            "Turn on contribution to shared VAR_5 memory for the VAR_58 to "
            "get VAR_127 to additional components."
        ),
        VAR_11="",
    )
    VAR_53 = forms.MultipleChoiceField(
        VAR_134=_("Machine VAR_5 engines"), VAR_68=[], VAR_113=False
    )
    VAR_54 = forms.IntegerField(
        VAR_134=_("Score threshold"), VAR_11=80, min_value=1, max_value=100
    )

    def __init__(self, VAR_55, *VAR_6, **VAR_7):
        """Populate component and MT engine choices for translation VAR_55."""
        super().__init__(*VAR_6, **VAR_7)
        self.obj = VAR_55
        # Candidate source components: same project with matching source
        # language, plus shared-TM components from other projects.
        self.components = VAR_55.project.component_set.filter(
            VAR_72=VAR_55.source_language
        ) | Component.objects.filter(
            source_language_id=VAR_55.source_language_id,
            project__contribute_shared_tm=True,
        ).exclude(
            VAR_58=VAR_55.project
        )
        # With 30+ candidates fall back to a free-text component field to
        # avoid rendering a huge choice list.
        if len(self.components.values_list("id")[:30]) == 30:
            self.fields["component"] = forms.CharField(
                VAR_113=False,
                VAR_134=_("Components"),
                help_text=_(
                    "Enter VAR_9 to use as VAR_19, "
                    "keep blank to use all components in current VAR_58."
                ),
            )
        else:
            VAR_68 = [
                (s.id, str(s))
                for s in self.components.order_project().prefetch_related("project")
            ]
            self.fields["component"].choices = [
                ("", _("All components in current project"))
            ] + VAR_68
        self.fields["engines"].choices = [
            (VAR_148, mt.name) for VAR_148, mt in MACHINE_TRANSLATION_SERVICES.items()
        ]
        if "weblate" in MACHINE_TRANSLATION_SERVICES.keys():
            self.fields["engines"].initial = "weblate"
        VAR_117 = {"all", "nottranslated", "todo", "fuzzy", "check:inconsistent"}
        self.fields["filter_type"].choices = [
            x for x in self.fields["filter_type"].choices if x[0] in VAR_117
        ]
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            Field("mode"),
            Field("filter_type"),
            InlineRadios("auto_source", id="select_auto_source"),
            Div("component", css_id="auto_source_others"),
            Div("engines", "threshold", css_id="auto_source_mt"),
        )
def FUNC_19(self):
VAR_9 = self.cleaned_data["component"]
if not VAR_9:
return None
if VAR_9.isdigit():
try:
VAR_43 = self.components.get(pk=VAR_9)
except Component.DoesNotExist:
raise ValidationError(_("Component not found!"))
else:
VAR_136 = VAR_9.count("/")
if VAR_136 == 0:
try:
VAR_43 = self.components.get(
VAR_69=VAR_9, VAR_58=self.obj.project
)
except Component.DoesNotExist:
raise ValidationError(_("Component not found!"))
elif VAR_136 == 1:
VAR_145, VAR_146 = VAR_9.split("/")
try:
VAR_43 = self.components.get(
VAR_69=VAR_146, project__slug=VAR_145
)
except Component.DoesNotExist:
raise ValidationError(_("Component not found!"))
else:
raise ValidationError(_("Please provide valid VAR_9 VAR_69!"))
return VAR_43.pk
class CLASS_24(forms.Form):
    """Comment form with a scope selector (report / source / translation)."""

    VAR_56 = forms.ChoiceField(
        VAR_134=_("Scope"),
        help_text=_(
            "Is your VAR_57 specific to this "
            "translation or generic for all of them?"
        ),
        VAR_68=(
            (
                "report",
                _("Report issue with the VAR_19 string"),
            ),
            (
                "global",
                _("Source string VAR_57, suggestions for changes to this string"),
            ),
            (
                "translation",
                _("Translation VAR_57, discussions with other translators"),
            ),
        ),
    )
    VAR_57 = forms.CharField(
        widget=CLASS_0,
        VAR_134=_("New comment"),
        help_text=_("You can use Markdown and mention users by @username."),
        VAR_24=1000,
    )

    def __init__(self, VAR_58, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        # Without source review the first ("report") scope choice is hidden.
        if not VAR_58.source_review:
            self.fields["scope"].choices = self.fields["scope"].choices[1:]
class CLASS_25(forms.Form):
    """Language/component filter form populated from a project."""

    VAR_59 = forms.ChoiceField(VAR_113=False, VAR_68=[("", _("All languages"))])
    VAR_9 = forms.ChoiceField(VAR_113=False, VAR_68=[("", _("All components"))])

    def __init__(self, VAR_4, VAR_58, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.fields["lang"].choices += VAR_58.languages.as_choices()
        # Only components the user may access are offered.
        self.fields["component"].choices += (
            VAR_58.component_set.filter_access(VAR_4)
            .order()
            .values_list("slug", "name")
        )
class CLASS_26(forms.Form):
    """Form for adding one or more new languages to a component."""

    VAR_59 = forms.MultipleChoiceField(
        VAR_134=_("Languages"), VAR_68=[], widget=forms.SelectMultiple
    )

    def FUNC_20(self):
        # Languages not yet translated in (nor otherwise tied to) the component.
        return Language.objects.exclude(
            Q(translation__component=self.component) | Q(VAR_9=self.component)
        )

    def __init__(self, VAR_9, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.component = VAR_9
        # Fix: the helper was renamed to FUNC_20; the original call to
        # self.get_lang_objects() raised AttributeError.
        VAR_118 = self.FUNC_20()
        self.fields["lang"].choices = VAR_118.as_choices()
class CLASS_27(CLASS_26):
    """Single-language variant of CLASS_26, limited to basic languages."""

    VAR_59 = forms.ChoiceField(VAR_134=_("Language"), VAR_68=[], widget=forms.Select)

    def FUNC_20(self):
        VAR_119 = BASIC_LANGUAGES
        if settings.BASIC_LANGUAGES is not None:
            VAR_119 = settings.BASIC_LANGUAGES
        # Fix: parent helper was renamed to FUNC_20; the original call to
        # super().get_lang_objects() raised AttributeError.
        return super().FUNC_20().filter(code__in=VAR_119)

    def __init__(self, VAR_9, *VAR_6, **VAR_7):
        super().__init__(VAR_9, *VAR_6, **VAR_7)
        # Prepend an empty "Please choose" entry for the single select.
        self.fields["lang"].choices = [("", _("Please choose"))] + self.fields[
            "lang"
        ].choices

    def FUNC_21(self):
        # Single selection is normalized to a one-element list.
        return [self.cleaned_data["lang"]]
def FUNC_1(VAR_8, VAR_9):
    """Pick the language-addition form class for the requesting user.

    Raises PermissionDenied when the user may not add translations at all;
    otherwise returns the multi-language form for users with the broader
    permission and the restricted single-language form for everyone else.
    """
    acting_user = VAR_8.user
    if not acting_user.has_perm("translation.add", VAR_9):
        raise PermissionDenied()
    has_more = acting_user.has_perm("translation.add_more", VAR_9)
    return CLASS_26 if has_more else CLASS_27
class CLASS_28(forms.ModelForm):
    """ModelForm editing a Unit's explanation, labels and extra flags."""

    class CLASS_82:
        VAR_120 = Unit
        VAR_121 = ("explanation", "labels", "extra_flags")
        VAR_122 = {
            "labels": forms.CheckboxSelectMultiple(),
            "explanation": CLASS_0,
        }

    # Per-field documentation links used by FUNC_11.
    VAR_60 = {
        "explanation": ("admin/translating", "additional-explanation"),
        "labels": ("devel/translations", "labels"),
        "extra_flags": ("admin/translating", "additional-flags"),
    }

    def FUNC_11(self, VAR_39):
        # Fix: the mapping attribute is VAR_60; the original read
        # self.doc_links, which no longer exists.
        return self.VAR_60[VAR_39.name]

    def __init__(self, VAR_10=None, VAR_61=None, VAR_4=None, **VAR_7):
        # Preselect labels attached to this unit or to its source unit.
        VAR_7["initial"] = {
            "labels": Label.objects.filter(
                Q(VAR_17=VAR_61) | Q(unit__source_unit=VAR_61)
            )
        }
        # Fix: forward the actual parameters; the original passed the
        # undefined names `data` and `instance` (NameError).
        super().__init__(VAR_10=VAR_10, VAR_61=VAR_61, **VAR_7)
        VAR_58 = VAR_61.translation.component.project
        self.fields["labels"].queryset = VAR_58.label_set.all()
        self.helper = FormHelper(self)
        self.helper.disable_csrf = True
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field("explanation"),
            Field("labels"),
            ContextDiv(
                template="snippets/labels_description.html",
                VAR_87={"project": VAR_58, "user": VAR_4},
            ),
            Field("extra_flags"),
        )

    def FUNC_22(self, VAR_62=True):
        if VAR_62:
            # Avoid bumping content hashes; persist M2M (labels) explicitly.
            self.instance.save(same_content=True)
            self._save_m2m()
            return self.instance
        return super().save(VAR_62)
class CLASS_29(forms.Form):
    """Form for adding an existing user (by username or e-mail)."""

    VAR_4 = CLASS_4(
        VAR_134=_("User to add"),
        help_text=_(
            "Please type in an existing Weblate account VAR_20 or e-mail address."
        ),
    )
class CLASS_30(forms.Form):
    """Form for blocking a user, with an optional block duration in days."""

    VAR_4 = CLASS_4(
        VAR_134=_("User to block"),
        help_text=_(
            "Please type in an existing Weblate account VAR_20 or e-mail address."
        ),
    )
    # Empty value means an indefinite block.
    VAR_63 = forms.ChoiceField(
        VAR_134=_("Block duration"),
        VAR_68=(
            ("", _("Block VAR_4 until I unblock them")),
            ("1", _("Block VAR_4 for one day")),
            ("7", _("Block VAR_4 for one week")),
            ("30", _("Block VAR_4 for one month")),
        ),
        VAR_113=False,
    )
class CLASS_31(forms.Form):
    """Reporting form: output style plus a (possibly preset) date range."""

    VAR_64 = forms.ChoiceField(
        VAR_134=_("Report format"),
        help_text=_("Choose VAR_37 VAR_36 for the report"),
        VAR_68=(
            ("rst", _("reStructuredText")),
            ("json", _("JSON")),
            ("html", _("HTML")),
        ),
    )
    VAR_65 = forms.ChoiceField(
        VAR_134=_("Report period"),
        VAR_68=(
            ("30days", _("Last 30 days")),
            ("this-month", _("This month")),
            ("month", _("Last month")),
            ("this-year", _("This year")),
            ("year", _("Last year")),
            ("", _("As specified")),
        ),
        VAR_113=False,
    )
    VAR_66 = CLASS_2(
        VAR_134=_("Starting date"), VAR_113=False, VAR_12=False
    )
    VAR_67 = CLASS_2(
        VAR_134=_("Ending date"), VAR_113=False, VAR_12=False
    )

    def __init__(self, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field("style"),
            Field("period"),
            Div(
                "start_date",
                "end_date",
                css_class="input-VAR_100 input-daterange",
                data_provide="datepicker",
                data_date_format="yyyy-mm-dd",
            ),
        )

    def FUNC_4(self):
        # Translate the selected preset period into concrete start/end dates;
        # an empty period requires both explicit dates.
        super().clean()
        if "period" not in self.cleaned_data:
            return
        if self.cleaned_data["period"] == "30days":
            VAR_137 = timezone.now()
            VAR_138 = VAR_137 - timedelta(days=30)
        elif self.cleaned_data["period"] == "month":
            # Last day of previous month, then its first day.
            VAR_137 = timezone.now().replace(day=1) - timedelta(days=1)
            VAR_138 = VAR_137.replace(day=1)
        elif self.cleaned_data["period"] == "this-month":
            # Jump past month end, snap back to its last day.
            VAR_137 = timezone.now().replace(day=1) + timedelta(days=31)
            VAR_137 = VAR_137.replace(day=1) - timedelta(days=1)
            VAR_138 = VAR_137.replace(day=1)
        elif self.cleaned_data["period"] == "year":
            VAR_147 = timezone.now().year - 1
            VAR_137 = timezone.make_aware(datetime(VAR_147, 12, 31))
            VAR_138 = timezone.make_aware(datetime(VAR_147, 1, 1))
        elif self.cleaned_data["period"] == "this-year":
            VAR_147 = timezone.now().year
            VAR_137 = timezone.make_aware(datetime(VAR_147, 12, 31))
            VAR_138 = timezone.make_aware(datetime(VAR_147, 1, 1))
        else:
            if not self.cleaned_data.get("start_date"):
                raise ValidationError({"start_date": _("Missing date!")})
            if not self.cleaned_data.get("end_date"):
                raise ValidationError({"end_date": _("Missing date!")})
            VAR_138 = self.cleaned_data["start_date"]
            VAR_137 = self.cleaned_data["end_date"]
        # Expand to whole-day boundaries.
        self.cleaned_data["start_date"] = VAR_138.replace(
            hour=0, minute=0, second=0, microsecond=0
        )
        self.cleaned_data["end_date"] = VAR_137.replace(
            hour=23, minute=59, second=59, microsecond=999999
        )
        if self.cleaned_data["start_date"] > self.cleaned_data["end_date"]:
            VAR_139 = _("Starting date has to be before ending date!")
            raise ValidationError({"start_date": VAR_139, "end_date": VAR_139})
class CLASS_32:
    """Mixin validating weblate:// repo links against edit permission.

    Expects self.request to be set by the consuming form.
    """

    def FUNC_23(self):
        VAR_78 = self.cleaned_data.get("repo")
        # Only weblate://project/component links need extra checks.
        if not VAR_78 or not is_repo_link(VAR_78) or "/" not in VAR_78[10:]:
            return VAR_78
        VAR_58, VAR_9 = VAR_78[10:].split("/", 1)
        try:
            VAR_55 = Component.objects.get(
                slug__iexact=VAR_9, project__slug__iexact=VAR_58
            )
        except Component.DoesNotExist:
            # Nonexistent target: leave it to other validation layers.
            return VAR_78
        if not self.request.user.has_perm("component.edit", VAR_55):
            raise ValidationError(
                _("You do not have permission to VAR_127 this VAR_9!")
            )
        return VAR_78
class CLASS_33(CLASS_32, forms.ModelForm):
    """Base Component ModelForm storing the request and a bare crispy helper."""

    class CLASS_82:
        VAR_120 = Component
        VAR_121 = []

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.request = VAR_8
        self.helper = FormHelper()
        self.helper.form_tag = False
class CLASS_34(SortedSelectMultiple):
    """Multi-select widget for checks, round-tripping its value as JSON."""

    def __init__(self, VAR_21=None, VAR_68=()):
        # Choices always come from the check registry, ignoring the argument.
        VAR_68 = CHECKS.get_choices()
        # Fix: forward the actual parameters; the original passed the
        # undefined names `attrs` and `choices` (NameError).
        super().__init__(VAR_21=VAR_21, VAR_68=VAR_68)

    def FUNC_9(self, VAR_10, VAR_23, VAR_20):
        # Submitted data arrives JSON-encoded; decode back to a list.
        VAR_13 = super().value_from_datadict(VAR_10, VAR_23, VAR_20)
        if isinstance(VAR_13, str):
            return json.loads(VAR_13)
        return VAR_13

    def FUNC_24(self, VAR_13):
        # Render the value as JSON unless it is already a string.
        VAR_13 = super().format_value(VAR_13)
        if isinstance(VAR_13, str):
            return VAR_13
        return json.dumps(VAR_13)
class CLASS_35(forms.CharField):
    """CharField that returns the raw value unchanged (no string coercion)."""

    def FUNC_3(self, VAR_13):
        return VAR_13
class CLASS_36:
    """Mixin providing per-field docs links for component forms."""

    @staticmethod
    def FUNC_11(VAR_39):
        return ("admin/projects", f"component-{VAR_39.name}")
class CLASS_37:
    """Mixin providing per-field docs links for project forms."""

    @staticmethod
    def FUNC_11(VAR_39):
        return ("admin/projects", f"project-{VAR_39.name}")
class CLASS_38:
    """Mixin rejecting values classified as spam (uses self.request)."""

    def FUNC_25(self, VAR_13):
        if is_spam(VAR_13, self.request):
            raise ValidationError(_("This VAR_39 has been identified as spam!"))
class CLASS_39(CLASS_38):
    """Spam-checks the "agreement" field.

    NOTE(review): calls self.spam_check, the pre-rename name of FUNC_25 —
    confirm whether a base class still provides it.
    """

    def FUNC_26(self):
        VAR_13 = self.cleaned_data["agreement"]
        self.spam_check(VAR_13)
        return VAR_13
class CLASS_40(CLASS_38):
    """Spam-checks the "web" and "instructions" fields.

    NOTE(review): calls self.spam_check, the pre-rename name of FUNC_25 —
    confirm whether a base class still provides it.
    """

    def FUNC_27(self):
        VAR_13 = self.cleaned_data["web"]
        self.spam_check(VAR_13)
        return VAR_13

    def FUNC_28(self):
        VAR_13 = self.cleaned_data["instructions"]
        self.spam_check(VAR_13)
        return VAR_13
class CLASS_41(
    CLASS_33, CLASS_36, CLASS_39
):
    """Full component settings form with a tabbed crispy layout."""

    class CLASS_82:
        VAR_120 = Component
        VAR_121 = (
            "name",
            "report_source_bugs",
            "license",
            "agreement",
            "allow_translation_propagation",
            "enable_suggestions",
            "suggestion_voting",
            "suggestion_autoaccept",
            "priority",
            "check_flags",
            "enforced_checks",
            "commit_message",
            "add_message",
            "delete_message",
            "merge_message",
            "addon_message",
            "vcs",
            "repo",
            "branch",
            "push",
            "push_branch",
            "repoweb",
            "push_on_commit",
            "commit_pending_age",
            "merge_style",
            "file_format",
            "edit_template",
            "new_lang",
            "language_code_style",
            "source_language",
            "new_base",
            "filemask",
            "template",
            "intermediate",
            "language_regex",
            "variant_regex",
            "restricted",
            "auto_lock_error",
            "links",
            "manage_units",
            "is_glossary",
            "glossary_color",
        )
        VAR_122 = {
            "enforced_checks": CLASS_34,
            "source_language": SortedSelect,
        }
        VAR_123 = {"enforced_checks": CLASS_35}

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(VAR_8, *VAR_6, **VAR_7)
        # Fix: the property was renamed to FUNC_29; the original read
        # self.hide_restricted, which no longer exists.
        if self.FUNC_29:
            self.fields["restricted"].widget = forms.HiddenInput()
        # Only projects the user manages can be linked (excluding self).
        self.fields["links"].queryset = VAR_8.user.managed_projects.exclude(
            pk=self.instance.pk
        )
        self.helper.layout = Layout(
            TabHolder(
                Tab(
                    _("Basic"),
                    Fieldset(_("Name"), "name"),
                    Fieldset(_("License"), "license", "agreement"),
                    Fieldset(_("Upstream links"), "report_source_bugs"),
                    Fieldset(
                        _("Listing and access"),
                        "priority",
                        "restricted",
                        "links",
                    ),
                    Fieldset(
                        _("Glossary"),
                        "is_glossary",
                        "glossary_color",
                    ),
                    css_id="basic",
                ),
                Tab(
                    _("Translation"),
                    Fieldset(
                        _("Suggestions"),
                        "enable_suggestions",
                        "suggestion_voting",
                        "suggestion_autoaccept",
                    ),
                    Fieldset(
                        _("Translation settings"),
                        "allow_translation_propagation",
                        "manage_units",
                        "check_flags",
                        "variant_regex",
                        "enforced_checks",
                    ),
                    css_id="translation",
                ),
                Tab(
                    _("Version control"),
                    Fieldset(
                        _("Locations"),
                        Div(template="trans/repo_help.html"),
                        "vcs",
                        "repo",
                        "branch",
                        "push",
                        "push_branch",
                        "repoweb",
                    ),
                    Fieldset(
                        _("Version control settings"),
                        "push_on_commit",
                        "commit_pending_age",
                        "merge_style",
                        "auto_lock_error",
                    ),
                    css_id="vcs",
                ),
                Tab(
                    _("Commit messages"),
                    Fieldset(
                        _("Commit messages"),
                        ContextDiv(
                            template="trans/messages_help.html",
                            VAR_87={"user": VAR_8.user},
                        ),
                        "commit_message",
                        "add_message",
                        "delete_message",
                        "merge_message",
                        "addon_message",
                    ),
                    css_id="messages",
                ),
                Tab(
                    _("Files"),
                    Fieldset(
                        _("Translation files"),
                        "file_format",
                        "filemask",
                        "language_regex",
                        "source_language",
                    ),
                    Fieldset(
                        _("Monolingual translations"),
                        "template",
                        "edit_template",
                        "intermediate",
                    ),
                    Fieldset(
                        _("Adding new languages"),
                        "new_base",
                        "new_lang",
                        "language_code_style",
                    ),
                    css_id="files",
                ),
                template="layout/pills.html",
            )
        )
        VAR_124 = (
            "git",
            "gerrit",
            "github",
            "gitlab",
            "pagure",
            "local",
            "git-force-push",
        )
        # Fix: keep the current VCS selectable even when it is not in the
        # default set; the original assigned the dead name `vcses` while the
        # filter below used VAR_124, silently dropping the current choice.
        if self.instance.vcs not in VAR_124:
            VAR_124 = (self.instance.vcs,)
        self.fields["vcs"].choices = [
            c for c in self.fields["vcs"].choices if c[0] in VAR_124
        ]

    @property
    def FUNC_29(self):
        """Whether the "restricted" toggle should be hidden for this user."""
        VAR_4 = self.request.user
        if VAR_4.is_superuser:
            return False
        if settings.OFFER_HOSTING:
            return True
        return not any(
            "component.edit" in permissions
            for permissions, _langs in VAR_4.component_permissions[self.instance.pk]
        )

    def FUNC_4(self):
        VAR_10 = self.cleaned_data
        # When the toggle is hidden, keep the stored value.
        if self.FUNC_29:
            VAR_10["restricted"] = self.instance.restricted
class CLASS_42(CLASS_33, CLASS_36, CLASS_39):
    """Component ModelForm exposing the creation-time subset of fields."""

    class CLASS_82:
        VAR_120 = Component
        VAR_121 = [
            "project",
            "name",
            "slug",
            "vcs",
            "repo",
            "branch",
            "push",
            "push_branch",
            "repoweb",
            "file_format",
            "filemask",
            "template",
            "edit_template",
            "intermediate",
            "new_base",
            "license",
            "new_lang",
            "language_code_style",
            "language_regex",
            "source_language",
            "is_glossary",
        ]
        VAR_122 = {"source_language": SortedSelect}
class CLASS_43(forms.Form, CLASS_36, CLASS_39):
    """Base plain form for creating a component: name, slug, glossary flag."""

    VAR_20 = forms.CharField(
        VAR_134=_("Component name"),
        VAR_24=COMPONENT_NAME_LENGTH,
        help_text=_("Display name"),
    )
    VAR_69 = forms.SlugField(
        VAR_134=_("URL slug"),
        VAR_24=COMPONENT_NAME_LENGTH,
        help_text=_("Name used in URLs and filenames."),
    )
    VAR_70 = forms.BooleanField(
        VAR_134=_("Use as a glossary"),
        VAR_113=False,
    )

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.request = VAR_8
class CLASS_44(CLASS_43):
    """Create a component by copying configuration from an existing one."""

    VAR_9 = forms.ModelChoiceField(
        VAR_90=Component.objects.none(),
        VAR_134=_("Component"),
        help_text=_("Select existing VAR_9 to copy configuration from."),
    )

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        # Fix: the kwargs dict is named VAR_7 here; the original called
        # kwargs.pop("instance"), an undefined name (NameError).
        if "instance" in VAR_7:
            VAR_7.pop("instance")
        if "auto_id" not in VAR_7:
            VAR_7["auto_id"] = "id_existing_%s"
        super().__init__(VAR_8, *VAR_6, **VAR_7)
class CLASS_45(CLASS_44):
    """Create a component from another branch of an existing component."""

    VAR_71 = forms.ChoiceField(VAR_134=_("Repository branch"))
    # Mapping of component pk -> available branch names, filled by callers.
    branch_data: Dict[int, List[str]] = {}
    VAR_61 = None

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["auto_id"] = "id_branch_%s"
        super().__init__(*VAR_6, **VAR_7)

    def FUNC_19(self):
        VAR_9 = self.cleaned_data["component"]
        # Branch choices depend on the selected component.
        self.fields["branch"].choices = [(x, x) for x in self.branch_data[VAR_9.pk]]
        return VAR_9

    def FUNC_4(self):
        VAR_125 = ("branch", "slug", "name")
        VAR_10 = self.cleaned_data
        VAR_9 = VAR_10.get("component")
        if not VAR_9 or any(VAR_39 not in VAR_10 for VAR_39 in VAR_125):
            return
        # Build the prospective component from the chosen one plus overrides.
        VAR_7 = model_to_dict(VAR_9, exclude=["id", "links"])
        VAR_7["source_language"] = VAR_9.source_language
        VAR_7["project"] = VAR_9.project
        for VAR_39 in VAR_125:
            VAR_7[VAR_39] = VAR_10[VAR_39]
        self.instance = Component(**VAR_7)
        try:
            self.instance.full_clean()
        except ValidationError as error:
            # Map errors onto fields present on this form; everything else
            # becomes a non-field error.
            VAR_43 = {NON_FIELD_ERRORS: []}
            for VAR_148, VAR_13 in error.message_dict.items():
                if VAR_148 in self.fields:
                    VAR_43[VAR_148] = VAR_13
                else:
                    VAR_43[NON_FIELD_ERRORS].extend(VAR_13)
            # Fix: raise the per-field mapping built above; the original
            # raised error.messages, discarding VAR_43 and the field mapping.
            raise ValidationError(VAR_43)
class CLASS_46(CLASS_43):
    """Base form for creating a component inside a chosen project."""

    VAR_58 = forms.ModelChoiceField(
        VAR_90=Project.objects.none(), VAR_134=_("Project")
    )
    VAR_72 = forms.ModelChoiceField(
        widget=SortedSelect,
        VAR_134=_("Source language"),
        help_text=_("Language used for VAR_19 strings in all components"),
        VAR_90=Language.objects.all(),
    )

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        # Fix: the kwargs dict is named VAR_7 here; the original called
        # kwargs.pop("instance"), an undefined name (NameError).
        if "instance" in VAR_7:
            VAR_7.pop("instance")
        super().__init__(VAR_8, *VAR_6, **VAR_7)
        self.fields["source_language"].initial = Language.objects.default_language
        self.request = VAR_8
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.instance = None

    def FUNC_4(self):
        if "project" not in self.cleaned_data:
            return
        VAR_58 = self.cleaned_data["project"]
        # Enforce per-project uniqueness of both name and slug.
        VAR_20 = self.cleaned_data.get("name")
        if VAR_20 and VAR_58.component_set.filter(name__iexact=VAR_20).exists():
            raise ValidationError(
                {"name": _("Component with the same VAR_20 already exists.")}
            )
        VAR_69 = self.cleaned_data.get("slug")
        if VAR_69 and VAR_58.component_set.filter(slug__iexact=VAR_69).exists():
            raise ValidationError(
                {"slug": _("Component with the same VAR_20 already exists.")}
            )
class CLASS_47(CLASS_46):
    """Create a component from scratch, restricted to writable file formats."""

    VAR_73 = forms.ChoiceField(
        VAR_134=_("File format"),
        VAR_11="po-mono",
        # Only formats that can create or update translation files qualify.
        VAR_68=FILE_FORMATS.get_choices(
            cond=lambda x: bool(x.new_translation) or hasattr(x, "update_bilingual")
        ),
    )

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["auto_id"] = "id_scratchcreate_%s"
        super().__init__(*VAR_6, **VAR_7)
class CLASS_48(CLASS_46):
    """Create a component from an uploaded ZIP file of translations."""

    VAR_74 = forms.FileField(
        VAR_134=_("ZIP VAR_37 containing translations"),
        validators=[FileExtensionValidator(allowed_extensions=["zip"])],
        widget=forms.FileInput(VAR_21={"accept": ".zip,application/zip"}),
    )
    VAR_75 = ["zipfile", "project", "name", "slug"]

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["auto_id"] = "id_zipcreate_%s"
        super().__init__(*VAR_6, **VAR_7)
class CLASS_49(CLASS_46):
    """Create a component from a single uploaded document."""

    VAR_76 = forms.FileField(
        VAR_134=_("Document to translate"),
        validators=[validate_file_extension],
    )
    VAR_75 = ["docfile", "project", "name", "slug"]

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["auto_id"] = "id_doccreate_%s"
        super().__init__(*VAR_6, **VAR_7)
class CLASS_50(CLASS_32, CLASS_46):
    """Create a component from a VCS repository URL."""

    VAR_58 = forms.ModelChoiceField(
        VAR_90=Project.objects.none(), VAR_134=_("Project")
    )
    VAR_77 = forms.ChoiceField(
        VAR_134=_("Version control system"),
        help_text=_(
            "Version control system to use to VAR_127 your "
            "repository with translations."
        ),
        VAR_68=VCS_REGISTRY.get_choices(exclude={"local"}),
        VAR_11=settings.DEFAULT_VCS,
    )
    VAR_78 = forms.CharField(
        VAR_134=_("Source code repository"),
        VAR_24=REPO_LENGTH,
        help_text=_(
            "URL of a repository, use weblate://VAR_58/VAR_9 "
            "for sharing with other VAR_9."
        ),
    )
    VAR_71 = forms.CharField(
        VAR_134=_("Repository branch"),
        VAR_24=REPO_LENGTH,
        help_text=_("Repository VAR_71 to translate"),
        VAR_113=False,
    )

    def FUNC_30(self, VAR_10):
        """Validate VAR_10 by instantiating a Component from it.

        Recurses once when a weblate:// link can be suggested for the repo.
        """
        VAR_126 = copy.copy(VAR_10)
        # Fix: the copy is named VAR_126; the original called params.pop,
        # an undefined name (NameError).
        if "discovery" in VAR_126:
            VAR_126.pop("discovery")
        VAR_61 = Component(**VAR_126)
        # File-related fields are not known yet at this stage.
        VAR_61.clean_fields(exclude=("filemask", "file_format", "license"))
        VAR_61.validate_unique()
        VAR_61.clean_repo()
        self.instance = VAR_61
        # Prefer an internal weblate:// link when one matches this repo.
        VAR_78 = VAR_61.suggest_repo_link()
        if VAR_78:
            VAR_10["repo"] = VAR_78
            VAR_10["branch"] = ""
            # Fix: this method was renamed to FUNC_30; the original called
            # self.clean_instance, which no longer exists.
            self.FUNC_30(VAR_10)

    def FUNC_4(self):
        self.FUNC_30(self.cleaned_data)
class CLASS_51(CLASS_50):
    """Repository-create form with automatic discovery of translation setups."""

    VAR_79 = forms.ChoiceField(
        VAR_134=_("Choose VAR_5 VAR_23 to import"),
        VAR_68=[("manual", _("Specify configuration manually"))],
        VAR_113=True,
        widget=forms.RadioSelect,
    )

    def FUNC_31(self, VAR_13):
        """Render one discovered setup as an HTML choice label."""
        VAR_87 = copy.copy(VAR_13)
        try:
            VAR_140 = FILE_FORMATS[VAR_13["file_format"]]
            VAR_87["file_format_name"] = VAR_140.name
            VAR_87["valid"] = True
        except KeyError:
            # Unknown format: show the raw identifier, flag as invalid.
            VAR_87["file_format_name"] = VAR_13["file_format"]
            VAR_87["valid"] = False
        VAR_87["origin"] = VAR_13.meta["origin"]
        return render_to_string("trans/FUNC_33-choice.html", VAR_87)

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(VAR_8, *VAR_6, **VAR_7)
        # All fields except the discovery radio are carried over hidden.
        for VAR_39, VAR_13 in self.fields.items():
            if VAR_39 == "discovery":
                continue
            VAR_13.widget = forms.HiddenInput()
        self.fields["vcs"].choices = VCS_REGISTRY.get_choices()
        # Fix: methods were renamed (FUNC_32/FUNC_31); the original called
        # self.perform_discovery / self.render_choice, which no longer exist.
        self.discovered = self.FUNC_32(VAR_8, VAR_7)
        for i, VAR_13 in enumerate(self.discovered):
            self.fields["discovery"].choices.append((i, self.FUNC_31(VAR_13)))

    def FUNC_32(self, VAR_8, VAR_7):
        """Run (or restore from session) discovery over the repository."""
        if "data" in VAR_7 and "create_discovery" in VAR_8.session:
            # Form re-submission: reuse the cached discovery results.
            VAR_141 = []
            for i, VAR_10 in enumerate(VAR_8.session["create_discovery"]):
                VAR_144 = DiscoveryResult(VAR_10)
                VAR_144.meta = VAR_8.session["create_discovery_meta"][i]
                VAR_141.append(VAR_144)
            return VAR_141
        try:
            # Fix: renamed methods FUNC_30/FUNC_33; the originals called
            # self.clean_instance / self.discover, which no longer exist.
            self.FUNC_30(VAR_7["initial"])
            VAR_141 = self.FUNC_33()
            if not VAR_141:
                # Fix: assign back to VAR_141; the original assigned the
                # dead name `discovered`, discarding the eager result.
                VAR_141 = self.FUNC_33(VAR_80=True)
        except ValidationError:
            VAR_141 = []
        VAR_8.session["create_discovery"] = VAR_141
        VAR_8.session["create_discovery_meta"] = [x.meta for x in VAR_141]
        return VAR_141

    def FUNC_33(self, VAR_80: bool = False):
        # Delegates to the module-level discovery helper.
        return FUNC_33(
            self.instance.full_path,
            VAR_72=self.instance.source_language.code,
            # Fix: forward the parameter; the original passed the undefined
            # name `eager` (NameError).
            VAR_80=VAR_80,
        )

    def FUNC_4(self):
        super().clean()
        VAR_79 = self.cleaned_data.get("discovery")
        if VAR_79 and VAR_79 != "manual":
            # Apply the chosen discovered configuration over the form data.
            self.cleaned_data.update(self.discovered[int(VAR_79)])
class CLASS_52(CLASS_33, CLASS_36):
    """Form for renaming a component (slug only)."""

    class CLASS_82:
        VAR_120 = Component
        VAR_121 = ["slug"]
class CLASS_53(CLASS_33, CLASS_36):
    """Form for moving a component to another managed project."""

    class CLASS_82:
        VAR_120 = Component
        VAR_121 = ["project"]

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(VAR_8, *VAR_6, **VAR_7)
        # Only projects the user manages are valid targets.
        self.fields["project"].queryset = VAR_8.user.managed_projects
class CLASS_54(CLASS_33, CLASS_37, CLASS_40):
    """Project settings form, including access-control handling and audit."""

    class CLASS_82:
        VAR_120 = Project
        VAR_121 = (
            "name",
            "web",
            "instructions",
            "set_language_team",
            "use_shared_tm",
            "contribute_shared_tm",
            "enable_hooks",
            "language_aliases",
            "access_control",
            "translation_review",
            "source_review",
        )
        VAR_122 = {
            "access_control": forms.RadioSelect,
            "instructions": CLASS_0,
            "language_aliases": forms.TextInput,
        }

    def FUNC_4(self):
        VAR_10 = self.cleaned_data
        if settings.OFFER_HOSTING:
            # Hosted mode ties contribution to usage of the shared TM.
            VAR_10["contribute_shared_tm"] = VAR_10["use_shared_tm"]
        if (
            "access_control" not in VAR_10
            or VAR_10["access_control"] is None
            or VAR_10["access_control"] == ""
        ):
            VAR_10["access_control"] = self.instance.access_control
        VAR_127 = VAR_10["access_control"]
        self.changed_access = VAR_127 != self.instance.access_control
        if self.changed_access and not self.user_can_change_access:
            raise ValidationError(
                {
                    "access_control": _(
                        "You do not have permission to change VAR_58 VAR_127 control."
                    )
                }
            )
        if self.changed_access and VAR_127 in (
            Project.ACCESS_PUBLIC,
            Project.ACCESS_PROTECTED,
        ):
            # Public/protected projects require every component to be licensed.
            VAR_142 = self.instance.component_set.filter(license="")
            if VAR_142:
                raise ValidationError(
                    {
                        "access_control": _(
                            "You must specify a license for these components "
                            "to make them publicly accessible: %s"
                        )
                        % ", ".join(VAR_142.values_list("name", flat=True))
                    }
                )

    def FUNC_22(self, VAR_62: bool = True):
        # Fix: forward the parameter; the original passed the undefined
        # name `commit` (NameError).
        super().save(VAR_62=VAR_62)
        if self.changed_access:
            # Audit trail for access-control changes.
            Change.objects.create(
                VAR_58=self.instance,
                VAR_98=Change.ACTION_ACCESS_EDIT,
                VAR_4=self.user,
                details={"access_control": self.instance.access_control},
            )

    def __init__(self, VAR_8, *VAR_6, **VAR_7):
        super().__init__(VAR_8, *VAR_6, **VAR_7)
        self.user = VAR_8.user
        self.user_can_change_access = VAR_8.user.has_perm(
            "billing:VAR_58.permissions", self.instance
        )
        self.changed_access = False
        self.helper.form_tag = False
        if not self.user_can_change_access:
            VAR_143 = {"disabled": True}
            self.fields["access_control"].required = False
            self.fields["access_control"].help_text = _(
                "You do not have permission to change VAR_58 VAR_127 control."
            )
        else:
            VAR_143 = {}
        self.helper.layout = Layout(
            TabHolder(
                Tab(
                    _("Basic"),
                    "name",
                    "web",
                    "instructions",
                    css_id="basic",
                ),
                Tab(
                    _("Access"),
                    InlineRadios(
                        "access_control",
                        template="%s/layout/radioselect_access.html",
                        **VAR_143,
                    ),
                    css_id="access",
                ),
                Tab(
                    _("Workflow"),
                    "set_language_team",
                    "use_shared_tm",
                    "contribute_shared_tm",
                    "enable_hooks",
                    "language_aliases",
                    "translation_review",
                    "source_review",
                    css_id="workflow",
                ),
                Tab(
                    _("Components"),
                    ContextDiv(
                        template="snippets/VAR_58-VAR_9-settings.html",
                        VAR_87={"object": self.instance, "user": VAR_8.user},
                    ),
                    css_id="components",
                ),
                template="layout/pills.html",
            )
        )
        if settings.OFFER_HOSTING:
            # Hosted mode hides the contribution toggle and custom access.
            self.fields["contribute_shared_tm"].widget = forms.HiddenInput()
            self.fields["use_shared_tm"].help_text = _(
                "Uses and contributes to the pool of shared translations "
                "between projects."
            )
            self.fields["access_control"].choices = [
                choice
                for choice in self.fields["access_control"].choices
                if choice[0] != Project.ACCESS_CUSTOM
            ]
class CLASS_55(CLASS_33, CLASS_37):
    """Form for renaming a project (slug only)."""

    class CLASS_82:
        VAR_120 = Project
        VAR_121 = ["slug"]
class CLASS_56(CLASS_33, CLASS_37, CLASS_40):
    """Project creation form with a mandatory billing selection."""

    VAR_81 = forms.ModelChoiceField(
        VAR_134=_("Billing"),
        VAR_90=User.objects.none(),
        VAR_113=True,
        empty_label=None,
    )

    class CLASS_82:
        VAR_120 = Project
        VAR_121 = ("name", "slug", "web", "instructions")
class CLASS_57(forms.Form):
    """Search-and-replace form with an optional query filter."""

    VAR_35 = CLASS_5(
        VAR_113=False, help_text=_("Optional additional filter on the strings")
    )
    VAR_82 = forms.CharField(
        VAR_134=_("Search string"),
        VAR_25=1,
        VAR_113=True,
        strip=False,
        help_text=_("Case sensitive string to VAR_82 for and replace."),
    )
    VAR_83 = forms.CharField(
        VAR_134=_("Replacement string"), VAR_25=1, VAR_113=True, strip=False
    )

    def __init__(self, *VAR_6, **VAR_7):
        VAR_7["auto_id"] = "id_replace_%s"
        super().__init__(*VAR_6, **VAR_7)
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            SearchField("q"),
            Field("search"),
            Field("replacement"),
            Div(template="snippets/replace-help.html"),
        )
class CLASS_58(forms.Form):
    """Confirmation form carrying the set of units to operate on."""

    VAR_84 = forms.ModelMultipleChoiceField(VAR_90=Unit.objects.none(), VAR_113=False)
    VAR_85 = forms.BooleanField(VAR_113=True, VAR_11=True, widget=forms.HiddenInput)

    def __init__(self, VAR_84, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.fields["units"].queryset = VAR_84
class CLASS_59(forms.Form):
    """Multiple-language selector over a component's existing translations."""

    VAR_59 = forms.MultipleChoiceField(
        VAR_134=_("Languages"), VAR_68=[], widget=forms.SelectMultiple
    )

    def __init__(self, VAR_9, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        # Offer translated languages, excluding the source language itself.
        VAR_118 = Language.objects.filter(translation__component=VAR_9).exclude(
            pk=VAR_9.source_language_id
        )
        self.fields["lang"].choices = VAR_118.as_choices()
class CLASS_60(forms.Form):
    """Base form for adding a new unit to a translation."""

    VAR_86 = forms.ModelChoiceField(
        Unit.objects.none(),
        widget=forms.HiddenInput,
        VAR_113=False,
    )

    def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):
        super().__init__(*VAR_6, **VAR_7)
        self.translation = VAR_5
        self.fields["variant"].queryset = VAR_5.unit_set.all()
        self.user = VAR_4

    def FUNC_4(self):
        try:
            # Fix: the builder was renamed to FUNC_35; the original called
            # self.as_kwargs(), which no longer exists.
            VAR_10 = self.FUNC_35()
        except KeyError:
            # Earlier field validation failed; nothing further to check.
            return
        self.translation.validate_new_unit_data(**VAR_10)

    def FUNC_34(self):
        # Hook returning extra flags for the new unit; empty by default.
        # NOTE(review): a mixin may override this under its pre-rename name
        # (get_glossary_flags) — confirm.
        return ""

    def FUNC_35(self):
        """Collect validated data into kwargs for unit creation."""
        VAR_128 = Flags()
        # Fix: the hook was renamed to FUNC_34; the original called
        # self.get_glossary_flags(), which no longer exists.
        VAR_128.merge(self.FUNC_34())
        VAR_86 = self.cleaned_data.get("variant")
        if VAR_86:
            VAR_128.set_value("variant", VAR_86.source)
        return {
            "context": self.cleaned_data.get("context", ""),
            "source": self.cleaned_data["source"],
            "target": self.cleaned_data.get("target"),
            "extra_flags": VAR_128.format(),
            "explanation": self.cleaned_data.get("explanation", ""),
            "auto_context": self.cleaned_data.get("auto_context", False),
        }
class CLASS_61(CLASS_60):
    """New-unit form for monolingual (template-based) translations."""

    VAR_87 = forms.CharField(
        VAR_134=_("Translation key"),
        help_text=_(
            "Key used to identify string in VAR_5 VAR_37. "
            "File VAR_36 specific rules might apply."
        ),
        VAR_113=True,
    )
    VAR_19 = CLASS_8(
        VAR_134=_("Source VAR_16 text"),
        help_text=_(
            "You can edit this later, as with any other string in "
            "the VAR_19 VAR_16."
        ),
        VAR_113=True,
    )

    def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):
        super().__init__(VAR_5, VAR_4, *VAR_6, **VAR_7)
        self.fields["context"].widget.attrs["tabindex"] = 99
        self.fields["source"].widget.attrs["tabindex"] = 100
        self.fields["source"].widget.profile = VAR_4.profile
        # Fix: use the parameter; the original passed the undefined name
        # `translation` (NameError).
        self.fields["source"].initial = Unit(VAR_5=VAR_5, id_hash=0)
class CLASS_62(CLASS_60):
    """New-unit form for source (bilingual) translations."""

    VAR_87 = forms.CharField(
        VAR_134=_("Context"),
        help_text=_("Optional VAR_87 to clarify the VAR_19 strings."),
        VAR_113=False,
    )
    VAR_88 = forms.BooleanField(
        VAR_113=False,
        VAR_11=True,
        VAR_134=_("Automatically adjust VAR_87 when same string already exists."),
    )
    VAR_19 = CLASS_8(
        VAR_134=_("Source string"),
        VAR_113=True,
    )

    def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):
        super().__init__(VAR_5, VAR_4, *VAR_6, **VAR_7)
        self.fields["context"].widget.attrs["tabindex"] = 99
        # Context label follows the component's terminology.
        self.fields["context"].label = VAR_5.component.context_label
        self.fields["source"].widget.attrs["tabindex"] = 100
        self.fields["source"].widget.profile = VAR_4.profile
        self.fields["source"].initial = Unit(
            VAR_5=VAR_5.component.source_translation, id_hash=0
        )
class CLASS_63(CLASS_62):
    """New-unit form that also collects the translated string."""

    VAR_31 = CLASS_8(
        VAR_134=_("Translated string"),
        help_text=_(
            "You can edit this later, as with any other string in the VAR_5."
        ),
        VAR_113=True,
    )

    def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):
        super().__init__(VAR_5, VAR_4, *VAR_6, **VAR_7)
        self.fields["target"].widget.attrs["tabindex"] = 101
        self.fields["target"].widget.profile = VAR_4.profile
        # Fix: use the parameter; the original passed the undefined name
        # `translation` (NameError).
        self.fields["target"].initial = Unit(VAR_5=VAR_5, id_hash=0)
class CLASS_64(GlossaryAddMixin, CLASS_62):
    """Glossary variant of the source new-unit form; forces terminology on.

    NOTE(review): indexing VAR_7["initial"] raises KeyError when the kwarg
    is absent — presumably all callers pass it (see FUNC_2); confirm.
    """

    def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):
        if VAR_7["initial"] is None:
            VAR_7["initial"] = {}
        VAR_7["initial"]["terminology"] = True
        super().__init__(VAR_5, VAR_4, *VAR_6, **VAR_7)
class CLASS_65(GlossaryAddMixin, CLASS_63):
    """Glossary variant of the bilingual new-unit form."""

    pass
def FUNC_2(VAR_5, VAR_4, VAR_10=None, VAR_11=None):
    """Return the appropriate new-unit form instance for a translation.

    Selection depends on whether the component is monolingual, a glossary,
    and whether this is the source translation.

    Fix: every constructor call forwarded the undefined names `data` and
    `initial`; they now forward the actual parameters VAR_10 / VAR_11.
    """
    if VAR_5.component.has_template():
        return CLASS_61(VAR_5, VAR_4, VAR_10=VAR_10, VAR_11=VAR_11)
    if VAR_5.component.is_glossary:
        if VAR_5.is_source:
            return CLASS_64(
                VAR_5, VAR_4, VAR_10=VAR_10, VAR_11=VAR_11
            )
        return CLASS_65(
            VAR_5, VAR_4, VAR_10=VAR_10, VAR_11=VAR_11
        )
    if VAR_5.is_source:
        return CLASS_62(VAR_5, VAR_4, VAR_10=VAR_10, VAR_11=VAR_11)
    return CLASS_63(VAR_5, VAR_4, VAR_10=VAR_10, VAR_11=VAR_11)
class CLASS_66(ModelChoiceIterator):
    """Choice iterator that avoids re-querying: iterates the cached queryset."""

    def __iter__(self):
        if self.field.empty_label is not None:
            yield ("", self.field.empty_label)
        for VAR_55 in self.queryset:
            yield self.choice(VAR_55)

    def __len__(self):
        # Account for the optional empty choice.
        return len(self.queryset) + (1 if self.field.empty_label is not None else 0)

    def __bool__(self):
        return self.field.empty_label is not None or bool(self.queryset)
class CLASS_67(forms.ModelMultipleChoiceField):
    """ModelMultipleChoiceField whose queryset setter skips .all() cloning.

    NOTE(review): the iterator attribute was renamed to VAR_89 — Django
    looks the iterator up under its original attribute name; confirm.
    """

    VAR_89 = CLASS_66

    def FUNC_36(self):
        return self._queryset

    def FUNC_37(self, VAR_90):
        # Store as-is (no .all()) and refresh widget choices.
        self._queryset = VAR_90
        self.widget.choices = self.choices

    VAR_90 = property(FUNC_36, FUNC_37)
class CLASS_68(forms.Form):
    """Bulk-edit form: query, state and flag/label changes for matched units."""

    VAR_35 = CLASS_5(VAR_113=True)
    VAR_91 = forms.ChoiceField(
        VAR_134=_("State to set"), VAR_68=((-1, _("Do not change")),) + STATE_CHOICES
    )
    VAR_92 = CLASS_6(VAR_134=_("Translation VAR_128 to add"), VAR_113=False)
    VAR_93 = CLASS_6(VAR_134=_("Translation VAR_128 to remove"), VAR_113=False)
    VAR_94 = CLASS_67(
        VAR_90=Label.objects.none(),
        VAR_134=_("Labels to add"),
        widget=forms.CheckboxSelectMultiple(),
        VAR_113=False,
    )
    VAR_95 = CLASS_67(
        VAR_90=Label.objects.none(),
        VAR_134=_("Labels to remove"),
        widget=forms.CheckboxSelectMultiple(),
        VAR_113=False,
    )

    def __init__(self, VAR_4, VAR_55, *VAR_6, **VAR_7):
        VAR_58 = VAR_7.pop("project")
        VAR_7["auto_id"] = "id_bulk_%s"
        super().__init__(*VAR_6, **VAR_7)
        VAR_129 = VAR_58.label_set.all()
        if VAR_129:
            self.fields["remove_labels"].queryset = VAR_129
            self.fields["add_labels"].queryset = VAR_129
        # States the user may not set: always empty/readonly, plus approved
        # unless the user holds review permission.
        VAR_130 = {STATE_EMPTY, STATE_READONLY}
        if VAR_4 is not None and not VAR_4.has_perm("unit.review", VAR_55):
            VAR_130.add(STATE_APPROVED)
        self.fields["state"].choices = [
            x for x in self.fields["state"].choices if x[0] not in VAR_130
        ]
        self.helper = FormHelper(self)
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Div(template="snippets/bulk-help.html"),
            SearchField("q"),
            Field("state"),
            Field("add_flags"),
            Field("remove_flags"),
        )
        if VAR_129:
            self.helper.layout.append(InlineCheckboxes("add_labels"))
            self.helper.layout.append(InlineCheckboxes("remove_labels"))
class CLASS_69(forms.Form):
VAR_85 = forms.BooleanField(
VAR_134=_("I accept the contributor agreement"), VAR_113=True
)
VAR_96 = forms.CharField(VAR_113=False, widget=forms.HiddenInput)
class CLASS_70(forms.Form):
VAR_85 = forms.CharField(VAR_113=True)
VAR_97 = ""
def __init__(self, VAR_55, *VAR_6, **VAR_7):
super().__init__(*VAR_6, **VAR_7)
self.obj = VAR_55
self.helper = FormHelper(self)
self.helper.layout = Layout(
ContextDiv(
template=self.warning_template,
css_class="form-group",
VAR_87={"object": VAR_55},
),
Field("confirm"),
)
self.helper.form_tag = False
def FUNC_4(self):
if self.cleaned_data.get("confirm") != self.obj.full_slug:
raise ValidationError(
_("The VAR_69 does not match the one marked for deletion!")
)
class CLASS_71(CLASS_70):
VAR_85 = forms.CharField(
VAR_134=_("Removal confirmation"),
help_text=_("Please type in the full VAR_69 of the VAR_5 to VAR_85."),
VAR_113=True,
)
VAR_97 = "trans/delete-VAR_5.html"
class CLASS_72(CLASS_70):
VAR_85 = forms.CharField(
VAR_134=_("Removal confirmation"),
help_text=_("Please type in the full VAR_69 of the VAR_9 to VAR_85."),
VAR_113=True,
)
VAR_97 = "trans/delete-VAR_9.html"
class CLASS_73(CLASS_70):
VAR_85 = forms.CharField(
VAR_134=_("Removal confirmation"),
help_text=_("Please type in the VAR_69 of the VAR_58 to VAR_85."),
VAR_113=True,
)
VAR_97 = "trans/delete-VAR_58.html"
class CLASS_74(CLASS_70):
VAR_85 = forms.CharField(
VAR_134=_("Removal confirmation"),
help_text=_("Please type in the VAR_69 of the VAR_58 and VAR_16 to VAR_85."),
VAR_113=True,
)
VAR_97 = "trans/delete-VAR_58-VAR_16.html"
class CLASS_75(forms.ModelForm):
class CLASS_82:
VAR_120 = Announcement
VAR_121 = ["message", "category", "expiry", "notify"]
VAR_122 = {
"expiry": CLASS_1(),
"message": CLASS_0,
}
class CLASS_76(forms.Form):
VAR_58 = forms.ChoiceField(VAR_134=_("Project"), VAR_68=[("", "")], VAR_113=False)
VAR_59 = forms.ChoiceField(VAR_134=_("Language"), VAR_68=[("", "")], VAR_113=False)
VAR_98 = forms.MultipleChoiceField(
VAR_134=_("Action"),
VAR_113=False,
widget=SortedSelectMultiple,
VAR_68=Change.ACTION_CHOICES,
)
VAR_4 = UsernameField(VAR_134=_("Author username"), VAR_113=False, help_text=None)
VAR_66 = CLASS_2(
VAR_134=_("Starting date"), VAR_113=False, VAR_12=False
)
VAR_67 = CLASS_2(
VAR_134=_("Ending date"), VAR_113=False, VAR_12=False
)
def __init__(self, VAR_8, *VAR_6, **VAR_7):
super().__init__(*VAR_6, **VAR_7)
self.fields["lang"].choices += Language.objects.have_translation().as_choices()
self.fields["project"].choices += [
(VAR_58.slug, VAR_58.name) for VAR_58 in VAR_8.user.allowed_projects
]
class CLASS_77(forms.ModelForm):
class CLASS_82:
VAR_120 = Label
VAR_121 = ("name", "color")
VAR_122 = {"color": ColorWidget()}
def __init__(self, *VAR_6, **VAR_7):
super().__init__(*VAR_6, **VAR_7)
self.helper = FormHelper(self)
self.helper.form_tag = False
class CLASS_78(forms.Form):
VAR_99 = forms.ModelChoiceField(
ProjectToken.objects.none(),
widget=forms.HiddenInput,
VAR_113=True,
)
def __init__(self, VAR_58, *VAR_6, **VAR_7):
self.project = VAR_58
super().__init__(*VAR_6, **VAR_7)
self.fields["token"].queryset = VAR_58.projecttoken_set.all()
class CLASS_79(forms.ModelForm):
class CLASS_82:
VAR_120 = ProjectToken
VAR_121 = ["name", "expires", "project"]
VAR_122 = {
"expires": CLASS_1(),
"project": forms.HiddenInput,
}
def __init__(self, VAR_58, *VAR_6, **VAR_7):
self.project = VAR_58
VAR_7["initial"] = {"project": VAR_58}
super().__init__(*VAR_6, **VAR_7)
def FUNC_38(self):
if self.project != self.cleaned_data["project"]:
raise ValidationError("Invalid VAR_58!")
return self.cleaned_data["project"]
def FUNC_39(self):
VAR_131 = self.cleaned_data["expires"]
VAR_131 = expires.replace(hour=23, minute=59, second=59, microsecond=999999)
if VAR_131 < timezone.now():
raise forms.ValidationError(gettext("Expiry cannot be in the past!"))
return VAR_131
class CLASS_80(forms.Form):
VAR_100 = forms.ModelChoiceField(
Group.objects.none(),
widget=forms.HiddenInput,
VAR_113=True,
)
def __init__(self, VAR_58, *VAR_6, **VAR_7):
self.project = VAR_58
super().__init__(*VAR_6, **VAR_7)
self.fields["group"].queryset = VAR_58.defined_groups.all()
class CLASS_81(CLASS_29):
VAR_101 = forms.ModelMultipleChoiceField(
Group.objects.none(),
widget=forms.CheckboxSelectMultiple,
VAR_134=_("Teams"),
VAR_113=False,
)
def __init__(self, VAR_58, *VAR_6, **VAR_7):
self.project = VAR_58
super().__init__(*VAR_6, **VAR_7)
self.fields["user"].widget = forms.HiddenInput()
self.fields["groups"].queryset = VAR_58.defined_groups.all()
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
25,
45,
92,
108,
109,
118,
119,
127,
128,
134,
143,
144,
147,
151,
160,
161,
172,
173,
181,
193,
194,
197,
198,
201,
205,
208,
209,
222,
225,
229,
230,
252,
257,
258,
272,
274,
276,
279,
281,
290,
291,
302,
303,
308,
317,
318,
320,
339,
340,
348,
349,
351,
361,
362,
365,
368,
372,
376,
382,
383,
394,
399,
400,
403,
405,
409,
414,
416,
425,
426,
431,
432,
435,
444,
445,
448,
472,
497,
518,
521,
522,
526,
528,
536,
544,
557,
558,
561,
570,
571,
581,
590,
591,
594,
619,
624,
628,
635,
636,
639,
654,
655,
658,
661,
662,
676,
684,
685,
688,
689,
695,
701,
724,
728,
731,
736,
739,
743,
747,
751,
755,
763,
767,
770,
778,
779,
783,
784,
787,
789,
803,
809,
810,
813,
815,
827,
828,
831,
872,
877,
878,
887,
888,
890,
904,
908,
914,
916,
920,
929,
959,
960,
963,
991,
994,
997,
998,
1001,
1004,
1008,
1015,
1016,
1019,
1023,
1028,
1034,
1035,
1038,
1040,
1046,
1052,
1054,
1056,
1057,
1065,
1066,
1075,
1081,
1084,
1106,
1113,
1114,
1122,
1123,
1141,
1142,
1171,
1187,
1190,
1193,
1194,
1214,
1221,
1228,
1232,
1233,
1251,
1252,
1255,
1259,
1265,
1266,
1271,
1277,
1283,
1284,
1288,
1289,
1294,
1295,
1300,
1301,
1306,
1307,
1313,
1314,
1320,
1325,
1326,
1331,
1383,
1506,
1518,
1523,
1524,
1527,
1554,
1555,
1571,
1577,
1578,
1585,
1592,
1593,
1596,
1599,
1603,
1608,
1616,
1625,
1626,
1634,
1635,
1646,
1651,
1657,
1672,
1673,
1682,
1686,
1687,
1694,
1696,
1700,
1701,
1707,
1709,
1713,
1714,
1717,
1721,
1748,
1753,
1759,
1760,
1766,
1769,
1770,
1778,
1790,
1797,
1802,
1821,
1828,
1834,
1835,
1838,
1842,
1843,
1846,
1850,
1854,
1855,
1858,
1879,
1891,
1893,
1917,
1927,
1984,
1996,
1997,
2000,
2004,
2005,
2008,
2009,
2010,
2017,
2021,
2022,
2037,
2049,
2050,
2054,
2058,
2059,
2062,
2066,
2073,
2074,
2081,
2087,
2092,
2095,
2098,
2113,
2114,
2132,
2139,
2140,
2156,
2166,
2167,
2176,
2182,
2183,
2190,
2191,
2194,
2195,
2210,
2211,
2215,
2218,
2224,
2227,
2230,
2231,
2234,
2237,
2241,
2243,
2244,
2264,
2273,
2277,
2278,
2282,
2295,
2296,
2302,
2303,
2307,
2321,
2327,
2328,
2336,
2337,
2345,
2346,
2354,
2355,
2363,
2364,
2367,
2375,
2376,
2393,
2400,
2401,
2407,
2412,
2413,
2420,
2425,
2426,
2435,
2440,
2445,
2452,
2453,
2460,
2465,
2466,
2474,
2480,
146,
200,
364,
365,
366,
367,
402,
447,
593,
638,
657,
664,
687,
786,
812,
830,
962,
1000,
1018,
1037,
1059,
1254,
1330,
1526,
1716,
1717,
1718,
1719,
1720,
1837,
1845,
1857,
1999,
2007,
2061,
2213,
2214,
2215,
2216,
2217,
2366,
136,
254,
283,
353,
374,
411,
630,
697,
726,
772,
874,
1006
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
25,
46,
93,
109,
110,
119,
120,
128,
129,
135,
144,
145,
148,
152,
161,
162,
173,
174,
182,
194,
195,
198,
199,
202,
206,
209,
210,
223,
226,
230,
231,
253,
258,
259,
273,
275,
277,
280,
282,
291,
292,
303,
304,
309,
318,
319,
321,
340,
341,
349,
350,
352,
362,
363,
366,
369,
373,
377,
383,
384,
395,
400,
401,
404,
406,
410,
415,
417,
426,
427,
432,
433,
436,
445,
446,
449,
473,
498,
519,
522,
523,
527,
529,
537,
545,
558,
559,
562,
571,
572,
582,
591,
592,
595,
620,
625,
629,
636,
637,
640,
655,
656,
659,
662,
663,
677,
685,
686,
689,
690,
696,
702,
725,
729,
732,
737,
740,
744,
748,
752,
756,
764,
768,
771,
779,
780,
784,
785,
788,
790,
804,
810,
811,
814,
816,
828,
829,
832,
873,
878,
879,
888,
889,
891,
905,
909,
915,
917,
921,
930,
960,
961,
964,
992,
995,
998,
999,
1002,
1005,
1009,
1016,
1017,
1020,
1024,
1029,
1035,
1036,
1039,
1041,
1047,
1053,
1055,
1057,
1058,
1066,
1067,
1076,
1082,
1085,
1107,
1114,
1115,
1123,
1124,
1142,
1143,
1172,
1188,
1191,
1194,
1195,
1215,
1222,
1229,
1233,
1234,
1252,
1253,
1256,
1260,
1266,
1267,
1272,
1278,
1284,
1285,
1289,
1290,
1295,
1296,
1301,
1302,
1307,
1308,
1314,
1315,
1321,
1326,
1327,
1332,
1384,
1507,
1519,
1524,
1525,
1528,
1555,
1556,
1572,
1578,
1579,
1586,
1593,
1594,
1597,
1600,
1604,
1609,
1617,
1626,
1627,
1635,
1636,
1647,
1652,
1658,
1673,
1674,
1683,
1687,
1688,
1695,
1697,
1701,
1702,
1708,
1710,
1714,
1715,
1718,
1722,
1749,
1754,
1760,
1761,
1767,
1770,
1771,
1779,
1791,
1798,
1803,
1822,
1829,
1835,
1836,
1839,
1843,
1844,
1847,
1851,
1855,
1856,
1859,
1880,
1892,
1894,
1918,
1928,
1985,
1997,
1998,
2001,
2005,
2006,
2009,
2010,
2011,
2018,
2022,
2023,
2038,
2050,
2051,
2055,
2059,
2060,
2063,
2067,
2074,
2075,
2082,
2088,
2093,
2096,
2099,
2114,
2115,
2133,
2140,
2141,
2157,
2167,
2168,
2177,
2183,
2184,
2191,
2192,
2195,
2196,
2211,
2212,
2216,
2219,
2225,
2228,
2231,
2232,
2235,
2238,
2242,
2244,
2245,
2265,
2274,
2278,
2279,
2283,
2296,
2297,
2303,
2304,
2308,
2322,
2328,
2329,
2337,
2338,
2346,
2347,
2355,
2356,
2364,
2365,
2368,
2376,
2377,
2394,
2401,
2402,
2408,
2413,
2414,
2421,
2426,
2427,
2436,
2441,
2446,
2453,
2454,
2461,
2466,
2467,
2475,
2481,
147,
201,
365,
366,
367,
368,
403,
448,
594,
639,
658,
665,
688,
787,
813,
831,
963,
1001,
1019,
1038,
1060,
1255,
1331,
1527,
1717,
1718,
1719,
1720,
1721,
1838,
1846,
1858,
2000,
2008,
2062,
2214,
2215,
2216,
2217,
2218,
2367,
137,
255,
284,
354,
375,
412,
631,
698,
727,
773,
875,
1007
] |
0CWE-22
| import abc
import logging
import os
import shutil
import tarfile
from typing import Optional, Text, Tuple, TYPE_CHECKING
import rasa.shared.utils.common
import rasa.utils.common
if TYPE_CHECKING:
from azure.storage.blob import ContainerClient
logger = logging.getLogger(__name__)
def get_persistor(name: Text) -> Optional["Persistor"]:
"""Returns an instance of the requested persistor.
Currently, `aws`, `gcs`, `azure` and providing module paths are supported remote
storages.
"""
if name == "aws":
return AWSPersistor(
os.environ.get("BUCKET_NAME"), os.environ.get("AWS_ENDPOINT_URL")
)
if name == "gcs":
return GCSPersistor(os.environ.get("BUCKET_NAME"))
if name == "azure":
return AzurePersistor(
os.environ.get("AZURE_CONTAINER"),
os.environ.get("AZURE_ACCOUNT_NAME"),
os.environ.get("AZURE_ACCOUNT_KEY"),
)
if name:
try:
persistor = rasa.shared.utils.common.class_from_module_path(name)
return persistor()
except ImportError:
raise ImportError(
f"Unknown model persistor {name}. Please make sure to "
"either use an included model persistor (`aws`, `gcs` "
"or `azure`) or specify the module path to an external "
"model persistor."
)
return None
class Persistor(abc.ABC):
"""Store models in cloud and fetch them when needed."""
def persist(self, model_directory: Text, model_name: Text) -> None:
"""Uploads a model persisted in the `target_dir` to cloud storage."""
if not os.path.isdir(model_directory):
raise ValueError(f"Target directory '{model_directory}' not found.")
file_key, tar_path = self._compress(model_directory, model_name)
self._persist_tar(file_key, tar_path)
def retrieve(self, model_name: Text, target_path: Text) -> None:
"""Downloads a model that has been persisted to cloud storage."""
tar_name = model_name
if not model_name.endswith("tar.gz"):
# ensure backward compatibility
tar_name = self._tar_name(model_name)
self._retrieve_tar(tar_name)
self._decompress(os.path.basename(tar_name), target_path)
@abc.abstractmethod
def _retrieve_tar(self, filename: Text) -> Text:
"""Downloads a model previously persisted to cloud storage."""
raise NotImplementedError
@abc.abstractmethod
def _persist_tar(self, filekey: Text, tarname: Text) -> None: # noqa: F841
"""Uploads a model persisted in the `target_dir` to cloud storage."""
raise NotImplementedError
def _compress(self, model_directory: Text, model_name: Text) -> Tuple[Text, Text]:
"""Creates a compressed archive and returns key and tar."""
import tempfile
dirpath = tempfile.mkdtemp()
base_name = self._tar_name(model_name, include_extension=False)
tar_name = shutil.make_archive(
os.path.join(dirpath, base_name),
"gztar",
root_dir=model_directory,
base_dir=".",
)
file_key = os.path.basename(tar_name)
return file_key, tar_name
@staticmethod
def _tar_name(model_name: Text, include_extension: bool = True) -> Text:
ext = ".tar.gz" if include_extension else ""
return f"{model_name}{ext}"
@staticmethod
def _decompress(compressed_path: Text, target_path: Text) -> None:
with tarfile.open(compressed_path, "r:gz") as tar:
tar.extractall(target_path) # target dir will be created if it not exists
class AWSPersistor(Persistor):
"""Store models on S3.
Fetches them when needed, instead of storing them on the local disk."""
def __init__(
self,
bucket_name: Text,
endpoint_url: Optional[Text] = None,
region_name: Optional[Text] = None,
) -> None:
import boto3
super().__init__()
self.s3 = boto3.resource(
"s3", endpoint_url=endpoint_url, region_name=region_name
)
self._ensure_bucket_exists(bucket_name, region_name)
self.bucket_name = bucket_name
self.bucket = self.s3.Bucket(bucket_name)
def _ensure_bucket_exists(
self, bucket_name: Text, region_name: Optional[Text] = None
) -> None:
import boto3
import botocore
if not region_name:
region_name = boto3.DEFAULT_SESSION.region_name
bucket_config = {"LocationConstraint": region_name}
# noinspection PyUnresolvedReferences
try:
self.s3.create_bucket(
Bucket=bucket_name, CreateBucketConfiguration=bucket_config
)
except botocore.exceptions.ClientError:
pass # bucket already exists
def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
"""Uploads a model persisted in the `target_dir` to s3."""
with open(tar_path, "rb") as f:
self.s3.Object(self.bucket_name, file_key).put(Body=f)
def _retrieve_tar(self, model_path: Text) -> None:
"""Downloads a model that has previously been persisted to s3."""
tar_name = os.path.basename(model_path)
with open(tar_name, "wb") as f:
self.bucket.download_fileobj(model_path, f)
class GCSPersistor(Persistor):
"""Store models on Google Cloud Storage.
Fetches them when needed, instead of storing them on the local disk."""
def __init__(self, bucket_name: Text) -> None:
from google.cloud import storage
super().__init__()
self.storage_client = storage.Client()
self._ensure_bucket_exists(bucket_name)
self.bucket_name = bucket_name
self.bucket = self.storage_client.bucket(bucket_name)
def _ensure_bucket_exists(self, bucket_name: Text) -> None:
from google.cloud import exceptions
try:
self.storage_client.create_bucket(bucket_name)
except exceptions.Conflict:
# bucket exists
pass
def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
"""Uploads a model persisted in the `target_dir` to GCS."""
blob = self.bucket.blob(file_key)
blob.upload_from_filename(tar_path)
def _retrieve_tar(self, target_filename: Text) -> None:
"""Downloads a model that has previously been persisted to GCS."""
blob = self.bucket.blob(target_filename)
blob.download_to_filename(target_filename)
class AzurePersistor(Persistor):
"""Store models on Azure"""
def __init__(
self, azure_container: Text, azure_account_name: Text, azure_account_key: Text
) -> None:
from azure.storage.blob import BlobServiceClient
super().__init__()
self.blob_service = BlobServiceClient(
account_url=f"https://{azure_account_name}.blob.core.windows.net/",
credential=azure_account_key,
)
self._ensure_container_exists(azure_container)
self.container_name = azure_container
def _ensure_container_exists(self, container_name: Text) -> None:
from azure.core.exceptions import ResourceExistsError
try:
self.blob_service.create_container(container_name)
except ResourceExistsError:
# no need to create the container, it already exists
pass
def _container_client(self) -> "ContainerClient":
return self.blob_service.get_container_client(self.container_name)
def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
"""Uploads a model persisted in the `target_dir` to Azure."""
with open(tar_path, "rb") as data:
self._container_client().upload_blob(name=file_key, data=data)
def _retrieve_tar(self, target_filename: Text) -> None:
"""Downloads a model that has previously been persisted to Azure."""
blob_client = self._container_client().get_blob_client(target_filename)
with open(target_filename, "wb") as blob:
download_stream = blob_client.download_blob()
blob.write(download_stream.readall())
| import abc
import logging
import os
import shutil
from tarsafe import TarSafe
from typing import Optional, Text, Tuple, TYPE_CHECKING
import rasa.shared.utils.common
import rasa.utils.common
if TYPE_CHECKING:
from azure.storage.blob import ContainerClient
logger = logging.getLogger(__name__)
def get_persistor(name: Text) -> Optional["Persistor"]:
"""Returns an instance of the requested persistor.
Currently, `aws`, `gcs`, `azure` and providing module paths are supported remote
storages.
"""
if name == "aws":
return AWSPersistor(
os.environ.get("BUCKET_NAME"), os.environ.get("AWS_ENDPOINT_URL")
)
if name == "gcs":
return GCSPersistor(os.environ.get("BUCKET_NAME"))
if name == "azure":
return AzurePersistor(
os.environ.get("AZURE_CONTAINER"),
os.environ.get("AZURE_ACCOUNT_NAME"),
os.environ.get("AZURE_ACCOUNT_KEY"),
)
if name:
try:
persistor = rasa.shared.utils.common.class_from_module_path(name)
return persistor()
except ImportError:
raise ImportError(
f"Unknown model persistor {name}. Please make sure to "
"either use an included model persistor (`aws`, `gcs` "
"or `azure`) or specify the module path to an external "
"model persistor."
)
return None
class Persistor(abc.ABC):
"""Store models in cloud and fetch them when needed."""
def persist(self, model_directory: Text, model_name: Text) -> None:
"""Uploads a model persisted in the `target_dir` to cloud storage."""
if not os.path.isdir(model_directory):
raise ValueError(f"Target directory '{model_directory}' not found.")
file_key, tar_path = self._compress(model_directory, model_name)
self._persist_tar(file_key, tar_path)
def retrieve(self, model_name: Text, target_path: Text) -> None:
"""Downloads a model that has been persisted to cloud storage."""
tar_name = model_name
if not model_name.endswith("tar.gz"):
# ensure backward compatibility
tar_name = self._tar_name(model_name)
self._retrieve_tar(tar_name)
self._decompress(os.path.basename(tar_name), target_path)
@abc.abstractmethod
def _retrieve_tar(self, filename: Text) -> Text:
"""Downloads a model previously persisted to cloud storage."""
raise NotImplementedError
@abc.abstractmethod
def _persist_tar(self, filekey: Text, tarname: Text) -> None: # noqa: F841
"""Uploads a model persisted in the `target_dir` to cloud storage."""
raise NotImplementedError
def _compress(self, model_directory: Text, model_name: Text) -> Tuple[Text, Text]:
"""Creates a compressed archive and returns key and tar."""
import tempfile
dirpath = tempfile.mkdtemp()
base_name = self._tar_name(model_name, include_extension=False)
tar_name = shutil.make_archive(
os.path.join(dirpath, base_name),
"gztar",
root_dir=model_directory,
base_dir=".",
)
file_key = os.path.basename(tar_name)
return file_key, tar_name
@staticmethod
def _tar_name(model_name: Text, include_extension: bool = True) -> Text:
ext = ".tar.gz" if include_extension else ""
return f"{model_name}{ext}"
@staticmethod
def _decompress(compressed_path: Text, target_path: Text) -> None:
with TarSafe.open(compressed_path, "r:gz") as tar:
tar.extractall(target_path) # target dir will be created if it not exists
class AWSPersistor(Persistor):
"""Store models on S3.
Fetches them when needed, instead of storing them on the local disk."""
def __init__(
self,
bucket_name: Text,
endpoint_url: Optional[Text] = None,
region_name: Optional[Text] = None,
) -> None:
import boto3
super().__init__()
self.s3 = boto3.resource(
"s3", endpoint_url=endpoint_url, region_name=region_name
)
self._ensure_bucket_exists(bucket_name, region_name)
self.bucket_name = bucket_name
self.bucket = self.s3.Bucket(bucket_name)
def _ensure_bucket_exists(
self, bucket_name: Text, region_name: Optional[Text] = None
) -> None:
import boto3
import botocore
if not region_name:
region_name = boto3.DEFAULT_SESSION.region_name
bucket_config = {"LocationConstraint": region_name}
# noinspection PyUnresolvedReferences
try:
self.s3.create_bucket(
Bucket=bucket_name, CreateBucketConfiguration=bucket_config
)
except botocore.exceptions.ClientError:
pass # bucket already exists
def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
"""Uploads a model persisted in the `target_dir` to s3."""
with open(tar_path, "rb") as f:
self.s3.Object(self.bucket_name, file_key).put(Body=f)
def _retrieve_tar(self, model_path: Text) -> None:
"""Downloads a model that has previously been persisted to s3."""
tar_name = os.path.basename(model_path)
with open(tar_name, "wb") as f:
self.bucket.download_fileobj(model_path, f)
class GCSPersistor(Persistor):
"""Store models on Google Cloud Storage.
Fetches them when needed, instead of storing them on the local disk."""
def __init__(self, bucket_name: Text) -> None:
from google.cloud import storage
super().__init__()
self.storage_client = storage.Client()
self._ensure_bucket_exists(bucket_name)
self.bucket_name = bucket_name
self.bucket = self.storage_client.bucket(bucket_name)
def _ensure_bucket_exists(self, bucket_name: Text) -> None:
from google.cloud import exceptions
try:
self.storage_client.create_bucket(bucket_name)
except exceptions.Conflict:
# bucket exists
pass
def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
"""Uploads a model persisted in the `target_dir` to GCS."""
blob = self.bucket.blob(file_key)
blob.upload_from_filename(tar_path)
def _retrieve_tar(self, target_filename: Text) -> None:
"""Downloads a model that has previously been persisted to GCS."""
blob = self.bucket.blob(target_filename)
blob.download_to_filename(target_filename)
class AzurePersistor(Persistor):
"""Store models on Azure"""
def __init__(
self, azure_container: Text, azure_account_name: Text, azure_account_key: Text
) -> None:
from azure.storage.blob import BlobServiceClient
super().__init__()
self.blob_service = BlobServiceClient(
account_url=f"https://{azure_account_name}.blob.core.windows.net/",
credential=azure_account_key,
)
self._ensure_container_exists(azure_container)
self.container_name = azure_container
def _ensure_container_exists(self, container_name: Text) -> None:
from azure.core.exceptions import ResourceExistsError
try:
self.blob_service.create_container(container_name)
except ResourceExistsError:
# no need to create the container, it already exists
pass
def _container_client(self) -> "ContainerClient":
return self.blob_service.get_container_client(self.container_name)
def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
"""Uploads a model persisted in the `target_dir` to Azure."""
with open(tar_path, "rb") as data:
self._container_client().upload_blob(name=file_key, data=data)
def _retrieve_tar(self, target_filename: Text) -> None:
"""Downloads a model that has previously been persisted to Azure."""
blob_client = self._container_client().get_blob_client(target_filename)
with open(target_filename, "wb") as blob:
download_stream = blob_client.download_blob()
blob.write(download_stream.readall())
| path_disclosure | {
"code": [
"import tarfile",
" with tarfile.open(compressed_path, \"r:gz\") as tar:"
],
"line_no": [
5,
106
]
} | {
"code": [
"from tarsafe import TarSafe",
" with TarSafe.open(compressed_path, \"r:gz\") as tar:"
],
"line_no": [
5,
106
]
} | import abc
import logging
import os
import shutil
import tarfile
from typing import Optional, Text, Tuple, TYPE_CHECKING
import rasa.shared.utils.common
import rasa.utils.common
if TYPE_CHECKING:
from azure.storage.blob import ContainerClient
VAR_0 = logging.getLogger(__name__)
def FUNC_0(VAR_1: Text) -> Optional["Persistor"]:
if VAR_1 == "aws":
return CLASS_1(
os.environ.get("BUCKET_NAME"), os.environ.get("AWS_ENDPOINT_URL")
)
if VAR_1 == "gcs":
return CLASS_2(os.environ.get("BUCKET_NAME"))
if VAR_1 == "azure":
return CLASS_3(
os.environ.get("AZURE_CONTAINER"),
os.environ.get("AZURE_ACCOUNT_NAME"),
os.environ.get("AZURE_ACCOUNT_KEY"),
)
if VAR_1:
try:
VAR_28 = rasa.shared.utils.common.class_from_module_path(VAR_1)
return VAR_28()
except ImportError:
raise ImportError(
f"Unknown model VAR_28 {VAR_1}. Please make sure to "
"either use an included model VAR_28 (`aws`, `gcs` "
"or `azure`) or specify the module path to an external "
"model VAR_28."
)
return None
class CLASS_0(abc.ABC):
def FUNC_1(self, VAR_2: Text, VAR_3: Text) -> None:
if not os.path.isdir(VAR_2):
raise ValueError(f"Target directory '{VAR_2}' not found.")
VAR_13, VAR_14 = self._compress(VAR_2, VAR_3)
self._persist_tar(VAR_13, VAR_14)
def FUNC_2(self, VAR_3: Text, VAR_4: Text) -> None:
VAR_21 = VAR_3
if not VAR_3.endswith("tar.gz"):
VAR_21 = self._tar_name(VAR_3)
self._retrieve_tar(VAR_21)
self._decompress(os.path.basename(VAR_21), VAR_4)
@abc.abstractmethod
def FUNC_3(self, VAR_5: Text) -> Text:
raise NotImplementedError
@abc.abstractmethod
def FUNC_4(self, VAR_6: Text, VAR_7: Text) -> None: # noqa: F841
raise NotImplementedError
def FUNC_5(self, VAR_2: Text, VAR_3: Text) -> Tuple[Text, Text]:
import tempfile
VAR_22 = tempfile.mkdtemp()
VAR_23 = self._tar_name(VAR_3, VAR_8=False)
VAR_21 = shutil.make_archive(
os.path.join(VAR_22, VAR_23),
"gztar",
root_dir=VAR_2,
base_dir=".",
)
VAR_13 = os.path.basename(VAR_21)
return VAR_13, VAR_21
@staticmethod
def FUNC_6(VAR_3: Text, VAR_8: bool = True) -> Text:
VAR_24 = ".tar.gz" if VAR_8 else ""
return f"{VAR_3}{VAR_24}"
@staticmethod
def FUNC_7(VAR_9: Text, VAR_4: Text) -> None:
with tarfile.open(VAR_9, "r:gz") as tar:
tar.extractall(VAR_4) # target dir will be created if it not exists
class CLASS_1(CLASS_0):
def __init__(
self,
VAR_10: Text,
VAR_11: Optional[Text] = None,
VAR_12: Optional[Text] = None,
) -> None:
import boto3
super().__init__()
self.s3 = boto3.resource(
"s3", VAR_11=endpoint_url, VAR_12=region_name
)
self._ensure_bucket_exists(VAR_10, VAR_12)
self.bucket_name = VAR_10
self.bucket = self.s3.Bucket(VAR_10)
def FUNC_8(
self, VAR_10: Text, VAR_12: Optional[Text] = None
) -> None:
import boto3
import botocore
if not VAR_12:
region_name = boto3.DEFAULT_SESSION.region_name
VAR_25 = {"LocationConstraint": VAR_12}
try:
self.s3.create_bucket(
Bucket=VAR_10, CreateBucketConfiguration=VAR_25
)
except botocore.exceptions.ClientError:
pass # bucket already exists
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
with open(VAR_14, "rb") as f:
self.s3.Object(self.bucket_name, VAR_13).put(Body=f)
def FUNC_3(self, VAR_15: Text) -> None:
VAR_21 = os.path.basename(VAR_15)
with open(VAR_21, "wb") as f:
self.bucket.download_fileobj(VAR_15, f)
class CLASS_2(CLASS_0):
def __init__(self, VAR_10: Text) -> None:
from google.cloud import storage
super().__init__()
self.storage_client = storage.Client()
self._ensure_bucket_exists(VAR_10)
self.bucket_name = VAR_10
self.bucket = self.storage_client.bucket(VAR_10)
def FUNC_8(self, VAR_10: Text) -> None:
from google.cloud import exceptions
try:
self.storage_client.create_bucket(VAR_10)
except exceptions.Conflict:
pass
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
VAR_26 = self.bucket.blob(VAR_13)
VAR_26.upload_from_filename(VAR_14)
def FUNC_3(self, VAR_16: Text) -> None:
VAR_26 = self.bucket.blob(VAR_16)
VAR_26.download_to_filename(VAR_16)
class CLASS_3(CLASS_0):
def __init__(
self, VAR_17: Text, VAR_18: Text, VAR_19: Text
) -> None:
from azure.storage.blob import BlobServiceClient
super().__init__()
self.blob_service = BlobServiceClient(
account_url=f"https://{VAR_18}.blob.core.windows.net/",
credential=VAR_19,
)
self._ensure_container_exists(VAR_17)
self.container_name = VAR_17
def FUNC_9(self, VAR_20: Text) -> None:
from azure.core.exceptions import ResourceExistsError
try:
self.blob_service.create_container(VAR_20)
except ResourceExistsError:
pass
def FUNC_10(self) -> "ContainerClient":
return self.blob_service.get_container_client(self.container_name)
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
with open(VAR_14, "rb") as data:
self._container_client().upload_blob(VAR_1=VAR_13, data=data)
def FUNC_3(self, VAR_16: Text) -> None:
VAR_27 = self._container_client().get_blob_client(VAR_16)
with open(VAR_16, "wb") as VAR_26:
VAR_29 = VAR_27.download_blob()
VAR_26.write(VAR_29.readall())
| import abc
import logging
import os
import shutil
from tarsafe import TarSafe
from typing import Optional, Text, Tuple, TYPE_CHECKING
import rasa.shared.utils.common
import rasa.utils.common
if TYPE_CHECKING:
from azure.storage.blob import ContainerClient
VAR_0 = logging.getLogger(__name__)
def FUNC_0(VAR_1: Text) -> Optional["Persistor"]:
if VAR_1 == "aws":
return CLASS_1(
os.environ.get("BUCKET_NAME"), os.environ.get("AWS_ENDPOINT_URL")
)
if VAR_1 == "gcs":
return CLASS_2(os.environ.get("BUCKET_NAME"))
if VAR_1 == "azure":
return CLASS_3(
os.environ.get("AZURE_CONTAINER"),
os.environ.get("AZURE_ACCOUNT_NAME"),
os.environ.get("AZURE_ACCOUNT_KEY"),
)
if VAR_1:
try:
VAR_28 = rasa.shared.utils.common.class_from_module_path(VAR_1)
return VAR_28()
except ImportError:
raise ImportError(
f"Unknown model VAR_28 {VAR_1}. Please make sure to "
"either use an included model VAR_28 (`aws`, `gcs` "
"or `azure`) or specify the module path to an external "
"model VAR_28."
)
return None
class CLASS_0(abc.ABC):
def FUNC_1(self, VAR_2: Text, VAR_3: Text) -> None:
if not os.path.isdir(VAR_2):
raise ValueError(f"Target directory '{VAR_2}' not found.")
VAR_13, VAR_14 = self._compress(VAR_2, VAR_3)
self._persist_tar(VAR_13, VAR_14)
def FUNC_2(self, VAR_3: Text, VAR_4: Text) -> None:
VAR_21 = VAR_3
if not VAR_3.endswith("tar.gz"):
VAR_21 = self._tar_name(VAR_3)
self._retrieve_tar(VAR_21)
self._decompress(os.path.basename(VAR_21), VAR_4)
@abc.abstractmethod
def FUNC_3(self, VAR_5: Text) -> Text:
raise NotImplementedError
@abc.abstractmethod
def FUNC_4(self, VAR_6: Text, VAR_7: Text) -> None: # noqa: F841
raise NotImplementedError
def FUNC_5(self, VAR_2: Text, VAR_3: Text) -> Tuple[Text, Text]:
import tempfile
VAR_22 = tempfile.mkdtemp()
VAR_23 = self._tar_name(VAR_3, VAR_8=False)
VAR_21 = shutil.make_archive(
os.path.join(VAR_22, VAR_23),
"gztar",
root_dir=VAR_2,
base_dir=".",
)
VAR_13 = os.path.basename(VAR_21)
return VAR_13, VAR_21
@staticmethod
def FUNC_6(VAR_3: Text, VAR_8: bool = True) -> Text:
VAR_24 = ".tar.gz" if VAR_8 else ""
return f"{VAR_3}{VAR_24}"
@staticmethod
def FUNC_7(VAR_9: Text, VAR_4: Text) -> None:
with TarSafe.open(VAR_9, "r:gz") as tar:
tar.extractall(VAR_4) # target dir will be created if it not exists
class CLASS_1(CLASS_0):
def __init__(
self,
VAR_10: Text,
VAR_11: Optional[Text] = None,
VAR_12: Optional[Text] = None,
) -> None:
import boto3
super().__init__()
self.s3 = boto3.resource(
"s3", VAR_11=endpoint_url, VAR_12=region_name
)
self._ensure_bucket_exists(VAR_10, VAR_12)
self.bucket_name = VAR_10
self.bucket = self.s3.Bucket(VAR_10)
def FUNC_8(
self, VAR_10: Text, VAR_12: Optional[Text] = None
) -> None:
import boto3
import botocore
if not VAR_12:
region_name = boto3.DEFAULT_SESSION.region_name
VAR_25 = {"LocationConstraint": VAR_12}
try:
self.s3.create_bucket(
Bucket=VAR_10, CreateBucketConfiguration=VAR_25
)
except botocore.exceptions.ClientError:
pass # bucket already exists
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
with open(VAR_14, "rb") as f:
self.s3.Object(self.bucket_name, VAR_13).put(Body=f)
def FUNC_3(self, VAR_15: Text) -> None:
VAR_21 = os.path.basename(VAR_15)
with open(VAR_21, "wb") as f:
self.bucket.download_fileobj(VAR_15, f)
class CLASS_2(CLASS_0):
def __init__(self, VAR_10: Text) -> None:
from google.cloud import storage
super().__init__()
self.storage_client = storage.Client()
self._ensure_bucket_exists(VAR_10)
self.bucket_name = VAR_10
self.bucket = self.storage_client.bucket(VAR_10)
def FUNC_8(self, VAR_10: Text) -> None:
from google.cloud import exceptions
try:
self.storage_client.create_bucket(VAR_10)
except exceptions.Conflict:
pass
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
VAR_26 = self.bucket.blob(VAR_13)
VAR_26.upload_from_filename(VAR_14)
def FUNC_3(self, VAR_16: Text) -> None:
VAR_26 = self.bucket.blob(VAR_16)
VAR_26.download_to_filename(VAR_16)
class CLASS_3(CLASS_0):
def __init__(
self, VAR_17: Text, VAR_18: Text, VAR_19: Text
) -> None:
from azure.storage.blob import BlobServiceClient
super().__init__()
self.blob_service = BlobServiceClient(
account_url=f"https://{VAR_18}.blob.core.windows.net/",
credential=VAR_19,
)
self._ensure_container_exists(VAR_17)
self.container_name = VAR_17
def FUNC_9(self, VAR_20: Text) -> None:
from azure.core.exceptions import ResourceExistsError
try:
self.blob_service.create_container(VAR_20)
except ResourceExistsError:
pass
def FUNC_10(self) -> "ContainerClient":
return self.blob_service.get_container_client(self.container_name)
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
with open(VAR_14, "rb") as data:
self._container_client().upload_blob(VAR_1=VAR_13, data=data)
def FUNC_3(self, VAR_16: Text) -> None:
VAR_27 = self._container_client().get_blob_client(VAR_16)
with open(VAR_16, "wb") as VAR_26:
VAR_29 = VAR_27.download_blob()
VAR_26.write(VAR_29.readall())
| [
7,
10,
13,
15,
16,
19,
29,
48,
49,
52,
57,
60,
64,
66,
68,
71,
76,
81,
85,
96,
99,
102,
105,
108,
109,
112,
114,
122,
130,
136,
139,
141,
148,
151,
154,
160,
161,
164,
166,
169,
171,
174,
177,
180,
184,
186,
189,
192,
195,
198,
199,
202,
207,
209,
214,
217,
220,
224,
226,
229,
234,
238,
242,
18,
19,
20,
21,
22,
51,
111,
112,
113,
163,
164,
165,
201,
54,
62,
74,
79,
83,
150,
156,
188,
194,
231,
236
] | [
7,
10,
13,
15,
16,
19,
29,
48,
49,
52,
57,
60,
64,
66,
68,
71,
76,
81,
85,
96,
99,
102,
105,
108,
109,
112,
114,
122,
130,
136,
139,
141,
148,
151,
154,
160,
161,
164,
166,
169,
171,
174,
177,
180,
184,
186,
189,
192,
195,
198,
199,
202,
207,
209,
214,
217,
220,
224,
226,
229,
234,
238,
242,
18,
19,
20,
21,
22,
51,
111,
112,
113,
163,
164,
165,
201,
54,
62,
74,
79,
83,
150,
156,
188,
194,
231,
236
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import json
from django.http.response import HttpResponse, HttpResponseRedirect
from django.middleware.csrf import get_token
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView
from shuup.utils.excs import Problem
from shuup.xtheme import XTHEME_GLOBAL_VIEW_NAME
from shuup.xtheme._theme import get_theme_by_identifier
from shuup.xtheme.editing import could_edit
from shuup.xtheme.layout import Layout
from shuup.xtheme.layout.utils import get_provided_layouts
from shuup.xtheme.view_config import ViewConfig
from shuup.xtheme.views.forms import LayoutCellFormGroup
# since layouts will most likely break with multiple cells per row, we are
# limiting the amount.
ROW_CELL_LIMIT = 4
class EditorView(TemplateView):
template_name = "shuup/xtheme/editor.jinja"
xtheme_injection = False # We don't need the editing injection here, so opt-out
changed = False # Overridden in `save_layout`
def _get_default_layout(self):
try:
return json.loads(self.request.GET["default_config"])
except (ValueError, KeyError):
return None
def get_context_data(self, **kwargs): # doccov: ignore
ctx = super(EditorView, self).get_context_data(**kwargs)
ctx["layout"] = self.layout
ctx["csrf_token_str"] = get_token(self.request)
# ctx["layout_debug"] = pformat(ctx["layout"].serialize())
ctx["current_cell_coords"] = self.current_cell_coords
ctx["current_cell"] = self.current_cell
ctx["form"] = self.form
ctx["changed"] = self.changed
ctx["cell_limit"] = ROW_CELL_LIMIT
return ctx
def dispatch(self, request, *args, **kwargs): # doccov: ignore
if not could_edit(request):
raise Problem(_("No access to editing."))
self._populate_vars()
if self.default_layout:
self.view_config.save_default_placeholder_layout(self.placeholder_name, self.default_layout)
# We saved the default layout, so get rid of the humongous GET arg and try again
get_args = dict(self.request.GET.items())
get_args.pop("default_config", None)
global_type = get_args.pop("global_type", None)
if global_type:
get_args["view"] = XTHEME_GLOBAL_VIEW_NAME
# We are overriding the view with XTHEME_GLOBAL_VIEW_NAME if this is a global placeholder
return HttpResponseRedirect("%s?%s" % (self.request.path, urlencode(get_args)))
return super(EditorView, self).dispatch(request, *args, **kwargs)
def post(self, request, *args, **kwargs): # doccov: ignore
command = request.POST.get("command")
if command:
dispatcher = getattr(self, "dispatch_%s" % command, None)
if not callable(dispatcher):
raise Problem(_("Unknown command: `%s`.") % command)
dispatch_kwargs = dict(request.POST.items())
rv = dispatcher(**dispatch_kwargs)
if rv:
return rv
self.request.method = "GET" # At this point, we won't want to cause form validation
self.build_form() # and it's not a bad idea to rebuild the form
return super(EditorView, self).get(request, *args, **kwargs)
if request.POST.get("save") and self.form and self.form.is_valid():
self.form.save()
self.save_layout()
# after we save the new layout configs, make sure to reload the saved data in forms
# so the returned get() response contains updated data
self.build_form()
if request.POST.get("publish") == "1":
return self.dispatch_publish()
return self.get(request, *args, **kwargs)
def _populate_vars(self):
theme = get_theme_by_identifier(self.request.GET["theme"], self.request.shop)
if not theme:
raise Problem(_("Unable to determine the current theme."))
view_name = self.request.GET["view"]
global_type = self.request.GET.get("global_type", None)
self.view_config = ViewConfig(
theme=theme,
shop=self.request.shop,
view_name=view_name,
draft=True,
global_type=global_type,
)
# Let's store the layout data key for save here
self.layout_data_key = self.request.GET.get("layout_data_key", None)
# Let's use the layout identifier passed by the view to
# fetch correct layout
layout_identifier = self.request.GET.get("layout_identifier", None)
layout_cls = Layout
for provided_layout in get_provided_layouts():
if provided_layout.identifier == layout_identifier:
layout_cls = provided_layout
self.placeholder_name = self.request.GET["ph"]
self.default_layout = self._get_default_layout()
self.layout = self.view_config.get_placeholder_layout(
layout_cls=layout_cls,
placeholder_name=self.placeholder_name,
default_layout=self.default_layout,
layout_data_key=self.layout_data_key,
)
(x, y) = self.current_cell_coords = (
int(self.request.GET.get("x", -1)),
int(self.request.GET.get("y", -1)),
)
self.current_cell = self.layout.get_cell(x=x, y=y)
self.build_form()
def build_form(self):
if not self.current_cell:
self.form = None
return
kwargs = {"layout_cell": self.current_cell, "theme": self.view_config.theme, "request": self.request}
if self.request.method == "POST":
kwargs["data"] = self.request.POST
kwargs["files"] = self.request.FILES
self.form = LayoutCellFormGroup(**kwargs)
def save_layout(self, layout=None):
self.view_config.save_placeholder_layout(layout_data_key=self.layout_data_key, layout=(layout or self.layout))
self.changed = True
def dispatch_add_cell(self, y, **kwargs):
y = int(y)
if len(self.layout.rows[y].cells) >= ROW_CELL_LIMIT:
raise ValueError(_("Can't add more than %d cells in one row.") % ROW_CELL_LIMIT)
if not (0 <= y < len(self.layout.rows)):
# No need to raise an exception, really.
# It must have been a honest mistake.
return
self.layout.rows[y].add_cell()
self.save_layout()
def dispatch_add_row(self, y=None, **kwargs):
row = self.layout.insert_row(y)
row.add_cell() # For convenience, add a cell to the row.
self.save_layout()
def dispatch_del_row(self, y, **kwargs):
self.layout.delete_row(y)
self.save_layout()
def dispatch_move_row_to_index(self, from_y, to_y, **kwargs):
self.layout.move_row_to_index(from_y, to_y)
self.save_layout()
def dispatch_move_cell_to_position(self, from_x, from_y, to_x, to_y, **kwargs):
self.layout.move_cell_to_position(from_x, from_y, to_x, to_y)
self.save_layout()
def dispatch_del_cell(self, x, y, **kwargs):
self.layout.delete_cell(x, y)
self.save_layout()
def dispatch_change_plugin(self, plugin="", **kwargs):
if self.current_cell:
if not plugin:
plugin = None
self.current_cell.plugin_identifier = plugin
self.save_layout()
def dispatch_publish(self, **kwargs):
self.view_config.publish()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Published"))
def dispatch_revert(self, **kwargs):
self.view_config.revert()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Reverted"))
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import json
from django.http.response import HttpResponse, HttpResponseRedirect
from django.middleware.csrf import get_token
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView
from shuup.utils.excs import Problem
from shuup.xtheme import XTHEME_GLOBAL_VIEW_NAME
from shuup.xtheme._theme import get_theme_by_identifier
from shuup.xtheme.editing import could_edit
from shuup.xtheme.layout import Layout
from shuup.xtheme.layout.utils import get_provided_layouts
from shuup.xtheme.view_config import ViewConfig
from shuup.xtheme.views.forms import LayoutCellFormGroup
# since layouts will most likely break with multiple cells per row, we are
# limiting the amount.
ROW_CELL_LIMIT = 4
class EditorView(TemplateView):
template_name = "shuup/xtheme/editor.jinja"
xtheme_injection = False # We don't need the editing injection here, so opt-out
changed = False # Overridden in `save_layout`
def _get_default_layout(self):
try:
return json.loads(self.request.GET["default_config"])
except (ValueError, KeyError):
return None
def get_context_data(self, **kwargs): # doccov: ignore
ctx = super(EditorView, self).get_context_data(**kwargs)
ctx["layout"] = self.layout
ctx["csrf_token_str"] = get_token(self.request)
# ctx["layout_debug"] = pformat(ctx["layout"].serialize())
ctx["current_cell_coords"] = self.current_cell_coords
ctx["current_cell"] = self.current_cell
ctx["form"] = self.form
ctx["changed"] = self.changed
ctx["cell_limit"] = ROW_CELL_LIMIT
return ctx
def dispatch(self, request, *args, **kwargs): # doccov: ignore
if not could_edit(request):
raise Problem(_("No access to editing."))
self._populate_vars()
if self.default_layout:
self.view_config.save_default_placeholder_layout(self.placeholder_name, self.default_layout)
# We saved the default layout, so get rid of the humongous GET arg and try again
get_args = dict(self.request.GET.items())
get_args.pop("default_config", None)
global_type = get_args.pop("global_type", None)
if global_type:
get_args["view"] = XTHEME_GLOBAL_VIEW_NAME
# We are overriding the view with XTHEME_GLOBAL_VIEW_NAME if this is a global placeholder
return HttpResponseRedirect("%s?%s" % (self.request.path, urlencode(get_args)))
return super(EditorView, self).dispatch(request, *args, **kwargs)
def post(self, request, *args, **kwargs): # doccov: ignore
command = request.POST.get("command")
if command:
dispatcher = getattr(self, "dispatch_%s" % command, None)
if not callable(dispatcher):
raise Problem(_("Unknown command: `%s`.") % escape(command))
dispatch_kwargs = dict(request.POST.items())
rv = dispatcher(**dispatch_kwargs)
if rv:
return rv
self.request.method = "GET" # At this point, we won't want to cause form validation
self.build_form() # and it's not a bad idea to rebuild the form
return super(EditorView, self).get(request, *args, **kwargs)
if request.POST.get("save") and self.form and self.form.is_valid():
self.form.save()
self.save_layout()
# after we save the new layout configs, make sure to reload the saved data in forms
# so the returned get() response contains updated data
self.build_form()
if request.POST.get("publish") == "1":
return self.dispatch_publish()
return self.get(request, *args, **kwargs)
def _populate_vars(self):
theme = get_theme_by_identifier(self.request.GET["theme"], self.request.shop)
if not theme:
raise Problem(_("Unable to determine the current theme."))
view_name = self.request.GET["view"]
global_type = self.request.GET.get("global_type", None)
self.view_config = ViewConfig(
theme=theme,
shop=self.request.shop,
view_name=view_name,
draft=True,
global_type=global_type,
)
# Let's store the layout data key for save here
self.layout_data_key = self.request.GET.get("layout_data_key", None)
# Let's use the layout identifier passed by the view to
# fetch correct layout
layout_identifier = self.request.GET.get("layout_identifier", None)
layout_cls = Layout
for provided_layout in get_provided_layouts():
if provided_layout.identifier == layout_identifier:
layout_cls = provided_layout
self.placeholder_name = self.request.GET["ph"]
self.default_layout = self._get_default_layout()
self.layout = self.view_config.get_placeholder_layout(
layout_cls=layout_cls,
placeholder_name=self.placeholder_name,
default_layout=self.default_layout,
layout_data_key=self.layout_data_key,
)
(x, y) = self.current_cell_coords = (
int(self.request.GET.get("x", -1)),
int(self.request.GET.get("y", -1)),
)
self.current_cell = self.layout.get_cell(x=x, y=y)
self.build_form()
def build_form(self):
if not self.current_cell:
self.form = None
return
kwargs = {"layout_cell": self.current_cell, "theme": self.view_config.theme, "request": self.request}
if self.request.method == "POST":
kwargs["data"] = self.request.POST
kwargs["files"] = self.request.FILES
self.form = LayoutCellFormGroup(**kwargs)
def save_layout(self, layout=None):
self.view_config.save_placeholder_layout(layout_data_key=self.layout_data_key, layout=(layout or self.layout))
self.changed = True
def dispatch_add_cell(self, y, **kwargs):
y = int(y)
if len(self.layout.rows[y].cells) >= ROW_CELL_LIMIT:
raise ValueError(_("Can't add more than %d cells in one row.") % ROW_CELL_LIMIT)
if not (0 <= y < len(self.layout.rows)):
# No need to raise an exception, really.
# It must have been a honest mistake.
return
self.layout.rows[y].add_cell()
self.save_layout()
def dispatch_add_row(self, y=None, **kwargs):
row = self.layout.insert_row(y)
row.add_cell() # For convenience, add a cell to the row.
self.save_layout()
def dispatch_del_row(self, y, **kwargs):
self.layout.delete_row(y)
self.save_layout()
def dispatch_move_row_to_index(self, from_y, to_y, **kwargs):
self.layout.move_row_to_index(from_y, to_y)
self.save_layout()
def dispatch_move_cell_to_position(self, from_x, from_y, to_x, to_y, **kwargs):
self.layout.move_cell_to_position(from_x, from_y, to_x, to_y)
self.save_layout()
def dispatch_del_cell(self, x, y, **kwargs):
self.layout.delete_cell(x, y)
self.save_layout()
def dispatch_change_plugin(self, plugin="", **kwargs):
if self.current_cell:
if not plugin:
plugin = None
self.current_cell.plugin_identifier = plugin
self.save_layout()
def dispatch_publish(self, **kwargs):
self.view_config.publish()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Published"))
def dispatch_revert(self, **kwargs):
self.view_config.revert()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Reverted"))
| xss | {
"code": [
" raise Problem(_(\"Unknown command: `%s`.\") % command)"
],
"line_no": [
73
]
} | {
"code": [
"from django.utils.html import escape",
" raise Problem(_(\"Unknown command: `%s`.\") % escape(command))"
],
"line_no": [
11,
74
]
} |
import json
from django.http.response import HttpResponse, HttpResponseRedirect
from django.middleware.csrf import get_token
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView
from shuup.utils.excs import Problem
from shuup.xtheme import XTHEME_GLOBAL_VIEW_NAME
from shuup.xtheme._theme import get_theme_by_identifier
from shuup.xtheme.editing import could_edit
from shuup.xtheme.layout import Layout
from shuup.xtheme.layout.utils import get_provided_layouts
from shuup.xtheme.view_config import ViewConfig
from shuup.xtheme.views.forms import LayoutCellFormGroup
VAR_0 = 4
class CLASS_0(TemplateView):
VAR_1 = "shuup/xtheme/editor.jinja"
VAR_2 = False # We don't need the editing injection here, so opt-out
VAR_3 = False # Overridden in `FUNC_6`
def FUNC_0(self):
try:
return json.loads(self.request.GET["default_config"])
except (ValueError, KeyError):
return None
def FUNC_1(self, **VAR_4): # doccov: ignore
VAR_15 = super(CLASS_0, self).get_context_data(**VAR_4)
VAR_15["layout"] = self.layout
VAR_15["csrf_token_str"] = get_token(self.request)
VAR_15["current_cell_coords"] = self.current_cell_coords
VAR_15["current_cell"] = self.current_cell
VAR_15["form"] = self.form
VAR_15["changed"] = self.changed
VAR_15["cell_limit"] = VAR_0
return VAR_15
def FUNC_2(self, VAR_5, *VAR_6, **VAR_4): # doccov: ignore
if not could_edit(VAR_5):
raise Problem(_("No access to editing."))
self._populate_vars()
if self.default_layout:
self.view_config.save_default_placeholder_layout(self.placeholder_name, self.default_layout)
VAR_23 = dict(self.request.GET.items())
VAR_23.pop("default_config", None)
VAR_19 = VAR_23.pop("global_type", None)
if VAR_19:
VAR_23["view"] = XTHEME_GLOBAL_VIEW_NAME
return HttpResponseRedirect("%s?%s" % (self.request.path, urlencode(VAR_23)))
return super(CLASS_0, self).dispatch(VAR_5, *VAR_6, **VAR_4)
def FUNC_3(self, VAR_5, *VAR_6, **VAR_4): # doccov: ignore
VAR_16 = VAR_5.POST.get("command")
if VAR_16:
VAR_24 = getattr(self, "dispatch_%s" % VAR_16, None)
if not callable(VAR_24):
raise Problem(_("Unknown VAR_16: `%s`.") % VAR_16)
VAR_25 = dict(VAR_5.POST.items())
VAR_26 = VAR_24(**VAR_25)
if VAR_26:
return VAR_26
self.request.method = "GET" # At this point, we won't want to cause form validation
self.build_form() # and it's not a bad idea to rebuild the form
return super(CLASS_0, self).get(VAR_5, *VAR_6, **VAR_4)
if VAR_5.POST.get("save") and self.form and self.form.is_valid():
self.form.save()
self.save_layout()
self.build_form()
if VAR_5.POST.get("publish") == "1":
return self.dispatch_publish()
return self.get(VAR_5, *VAR_6, **VAR_4)
def FUNC_4(self):
VAR_17 = get_theme_by_identifier(self.request.GET["theme"], self.request.shop)
if not VAR_17:
raise Problem(_("Unable to determine the current VAR_17."))
VAR_18 = self.request.GET["view"]
VAR_19 = self.request.GET.get("global_type", None)
self.view_config = ViewConfig(
VAR_17=theme,
shop=self.request.shop,
VAR_18=view_name,
draft=True,
VAR_19=global_type,
)
self.layout_data_key = self.request.GET.get("layout_data_key", None)
VAR_20 = self.request.GET.get("layout_identifier", None)
VAR_21 = Layout
for provided_layout in get_provided_layouts():
if provided_layout.identifier == VAR_20:
VAR_21 = provided_layout
self.placeholder_name = self.request.GET["ph"]
self.default_layout = self._get_default_layout()
self.layout = self.view_config.get_placeholder_layout(
VAR_21=layout_cls,
placeholder_name=self.placeholder_name,
default_layout=self.default_layout,
layout_data_key=self.layout_data_key,
)
(VAR_13, VAR_8) = self.current_cell_coords = (
int(self.request.GET.get("x", -1)),
int(self.request.GET.get("y", -1)),
)
self.current_cell = self.layout.get_cell(VAR_13=x, VAR_8=y)
self.build_form()
def FUNC_5(self):
if not self.current_cell:
self.form = None
return
VAR_4 = {"layout_cell": self.current_cell, "theme": self.view_config.theme, "request": self.request}
if self.request.method == "POST":
VAR_4["data"] = self.request.POST
VAR_4["files"] = self.request.FILES
self.form = LayoutCellFormGroup(**VAR_4)
def FUNC_6(self, VAR_7=None):
self.view_config.save_placeholder_layout(layout_data_key=self.layout_data_key, VAR_7=(layout or self.layout))
self.changed = True
def FUNC_7(self, VAR_8, **VAR_4):
VAR_8 = int(VAR_8)
if len(self.layout.rows[VAR_8].cells) >= VAR_0:
raise ValueError(_("Can't add more than %d cells in one VAR_22.") % VAR_0)
if not (0 <= VAR_8 < len(self.layout.rows)):
return
self.layout.rows[VAR_8].add_cell()
self.save_layout()
def FUNC_8(self, VAR_8=None, **VAR_4):
VAR_22 = self.layout.insert_row(VAR_8)
VAR_22.add_cell() # For convenience, add a cell to the VAR_22.
self.save_layout()
def FUNC_9(self, VAR_8, **VAR_4):
self.layout.delete_row(VAR_8)
self.save_layout()
def FUNC_10(self, VAR_9, VAR_10, **VAR_4):
self.layout.move_row_to_index(VAR_9, VAR_10)
self.save_layout()
def FUNC_11(self, VAR_11, VAR_9, VAR_12, VAR_10, **VAR_4):
self.layout.move_cell_to_position(VAR_11, VAR_9, VAR_12, VAR_10)
self.save_layout()
def FUNC_12(self, VAR_13, VAR_8, **VAR_4):
self.layout.delete_cell(VAR_13, VAR_8)
self.save_layout()
def FUNC_13(self, VAR_14="", **VAR_4):
if self.current_cell:
if not VAR_14:
plugin = None
self.current_cell.plugin_identifier = VAR_14
self.save_layout()
def FUNC_14(self, **VAR_4):
self.view_config.publish()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Published"))
def FUNC_15(self, **VAR_4):
self.view_config.revert()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Reverted"))
|
import json
from django.http.response import HttpResponse, HttpResponseRedirect
from django.middleware.csrf import get_token
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView
from shuup.utils.excs import Problem
from shuup.xtheme import XTHEME_GLOBAL_VIEW_NAME
from shuup.xtheme._theme import get_theme_by_identifier
from shuup.xtheme.editing import could_edit
from shuup.xtheme.layout import Layout
from shuup.xtheme.layout.utils import get_provided_layouts
from shuup.xtheme.view_config import ViewConfig
from shuup.xtheme.views.forms import LayoutCellFormGroup
VAR_0 = 4
class CLASS_0(TemplateView):
VAR_1 = "shuup/xtheme/editor.jinja"
VAR_2 = False # We don't need the editing injection here, so opt-out
VAR_3 = False # Overridden in `FUNC_6`
def FUNC_0(self):
try:
return json.loads(self.request.GET["default_config"])
except (ValueError, KeyError):
return None
def FUNC_1(self, **VAR_4): # doccov: ignore
VAR_15 = super(CLASS_0, self).get_context_data(**VAR_4)
VAR_15["layout"] = self.layout
VAR_15["csrf_token_str"] = get_token(self.request)
VAR_15["current_cell_coords"] = self.current_cell_coords
VAR_15["current_cell"] = self.current_cell
VAR_15["form"] = self.form
VAR_15["changed"] = self.changed
VAR_15["cell_limit"] = VAR_0
return VAR_15
def FUNC_2(self, VAR_5, *VAR_6, **VAR_4): # doccov: ignore
if not could_edit(VAR_5):
raise Problem(_("No access to editing."))
self._populate_vars()
if self.default_layout:
self.view_config.save_default_placeholder_layout(self.placeholder_name, self.default_layout)
VAR_23 = dict(self.request.GET.items())
VAR_23.pop("default_config", None)
VAR_19 = VAR_23.pop("global_type", None)
if VAR_19:
VAR_23["view"] = XTHEME_GLOBAL_VIEW_NAME
return HttpResponseRedirect("%s?%s" % (self.request.path, urlencode(VAR_23)))
return super(CLASS_0, self).dispatch(VAR_5, *VAR_6, **VAR_4)
def FUNC_3(self, VAR_5, *VAR_6, **VAR_4): # doccov: ignore
VAR_16 = VAR_5.POST.get("command")
if VAR_16:
VAR_24 = getattr(self, "dispatch_%s" % VAR_16, None)
if not callable(VAR_24):
raise Problem(_("Unknown VAR_16: `%s`.") % escape(VAR_16))
VAR_25 = dict(VAR_5.POST.items())
VAR_26 = VAR_24(**VAR_25)
if VAR_26:
return VAR_26
self.request.method = "GET" # At this point, we won't want to cause form validation
self.build_form() # and it's not a bad idea to rebuild the form
return super(CLASS_0, self).get(VAR_5, *VAR_6, **VAR_4)
if VAR_5.POST.get("save") and self.form and self.form.is_valid():
self.form.save()
self.save_layout()
self.build_form()
if VAR_5.POST.get("publish") == "1":
return self.dispatch_publish()
return self.get(VAR_5, *VAR_6, **VAR_4)
def FUNC_4(self):
VAR_17 = get_theme_by_identifier(self.request.GET["theme"], self.request.shop)
if not VAR_17:
raise Problem(_("Unable to determine the current VAR_17."))
VAR_18 = self.request.GET["view"]
VAR_19 = self.request.GET.get("global_type", None)
self.view_config = ViewConfig(
VAR_17=theme,
shop=self.request.shop,
VAR_18=view_name,
draft=True,
VAR_19=global_type,
)
self.layout_data_key = self.request.GET.get("layout_data_key", None)
VAR_20 = self.request.GET.get("layout_identifier", None)
VAR_21 = Layout
for provided_layout in get_provided_layouts():
if provided_layout.identifier == VAR_20:
VAR_21 = provided_layout
self.placeholder_name = self.request.GET["ph"]
self.default_layout = self._get_default_layout()
self.layout = self.view_config.get_placeholder_layout(
VAR_21=layout_cls,
placeholder_name=self.placeholder_name,
default_layout=self.default_layout,
layout_data_key=self.layout_data_key,
)
(VAR_13, VAR_8) = self.current_cell_coords = (
int(self.request.GET.get("x", -1)),
int(self.request.GET.get("y", -1)),
)
self.current_cell = self.layout.get_cell(VAR_13=x, VAR_8=y)
self.build_form()
def FUNC_5(self):
if not self.current_cell:
self.form = None
return
VAR_4 = {"layout_cell": self.current_cell, "theme": self.view_config.theme, "request": self.request}
if self.request.method == "POST":
VAR_4["data"] = self.request.POST
VAR_4["files"] = self.request.FILES
self.form = LayoutCellFormGroup(**VAR_4)
def FUNC_6(self, VAR_7=None):
self.view_config.save_placeholder_layout(layout_data_key=self.layout_data_key, VAR_7=(layout or self.layout))
self.changed = True
def FUNC_7(self, VAR_8, **VAR_4):
VAR_8 = int(VAR_8)
if len(self.layout.rows[VAR_8].cells) >= VAR_0:
raise ValueError(_("Can't add more than %d cells in one VAR_22.") % VAR_0)
if not (0 <= VAR_8 < len(self.layout.rows)):
return
self.layout.rows[VAR_8].add_cell()
self.save_layout()
def FUNC_8(self, VAR_8=None, **VAR_4):
VAR_22 = self.layout.insert_row(VAR_8)
VAR_22.add_cell() # For convenience, add a cell to the VAR_22.
self.save_layout()
def FUNC_9(self, VAR_8, **VAR_4):
self.layout.delete_row(VAR_8)
self.save_layout()
def FUNC_10(self, VAR_9, VAR_10, **VAR_4):
self.layout.move_row_to_index(VAR_9, VAR_10)
self.save_layout()
def FUNC_11(self, VAR_11, VAR_9, VAR_12, VAR_10, **VAR_4):
self.layout.move_cell_to_position(VAR_11, VAR_9, VAR_12, VAR_10)
self.save_layout()
def FUNC_12(self, VAR_13, VAR_8, **VAR_4):
self.layout.delete_cell(VAR_13, VAR_8)
self.save_layout()
def FUNC_13(self, VAR_14="", **VAR_4):
if self.current_cell:
if not VAR_14:
plugin = None
self.current_cell.plugin_identifier = VAR_14
self.save_layout()
def FUNC_14(self, **VAR_4):
self.view_config.publish()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Published"))
def FUNC_15(self, **VAR_4):
self.view_config.revert()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Reverted"))
| [
1,
2,
3,
4,
5,
6,
7,
14,
23,
24,
25,
27,
28,
33,
39,
44,
51,
58,
64,
67,
81,
85,
86,
87,
89,
92,
94,
108,
109,
111,
112,
113,
119,
134,
144,
148,
153,
155,
156,
160,
165,
169,
173,
177,
181,
188,
192,
196
] | [
1,
2,
3,
4,
5,
6,
7,
15,
24,
25,
26,
28,
29,
34,
40,
45,
52,
59,
65,
68,
82,
86,
87,
88,
90,
93,
95,
109,
110,
112,
113,
114,
120,
135,
145,
149,
154,
156,
157,
161,
166,
170,
174,
178,
182,
189,
193,
197
] |
1CWE-79
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# Search
from __future__ import unicode_literals
import frappe, json
from frappe.utils import cstr, unique, cint
from frappe.permissions import has_permission
from frappe.handler import is_whitelisted
from frappe import _
from six import string_types
import re
import wrapt
UNTRANSLATED_DOCTYPES = ["DocType", "Role"]
def sanitize_searchfield(searchfield):
blacklisted_keywords = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']
def _raise_exception(searchfield):
frappe.throw(_('Invalid Search Field {0}').format(searchfield), frappe.DataError)
if len(searchfield) == 1:
# do not allow special characters to pass as searchfields
regex = re.compile(r'^.*[=;*,\'"$\-+%#@()_].*')
if regex.match(searchfield):
_raise_exception(searchfield)
if len(searchfield) >= 3:
# to avoid 1=1
if '=' in searchfield:
_raise_exception(searchfield)
# in mysql -- is used for commenting the query
elif ' --' in searchfield:
_raise_exception(searchfield)
# to avoid and, or and like
elif any(' {0} '.format(keyword) in searchfield.split() for keyword in blacklisted_keywords):
_raise_exception(searchfield)
# to avoid select, delete, drop, update and case
elif any(keyword in searchfield.split() for keyword in blacklisted_keywords):
_raise_exception(searchfield)
else:
regex = re.compile(r'^.*[=;*,\'"$\-+%#@()].*')
if any(regex.match(f) for f in searchfield.split()):
_raise_exception(searchfield)
# this is called by the Link Field
@frappe.whitelist()
def search_link(doctype, txt, query=None, filters=None, page_length=20, searchfield=None, reference_doctype=None, ignore_user_permissions=False):
search_widget(doctype, txt.strip(), query, searchfield=searchfield, page_length=page_length, filters=filters, reference_doctype=reference_doctype, ignore_user_permissions=ignore_user_permissions)
frappe.response['results'] = build_for_autosuggest(frappe.response["values"])
del frappe.response["values"]
# this is called by the search box
@frappe.whitelist()
def search_widget(doctype, txt, query=None, searchfield=None, start=0,
page_length=20, filters=None, filter_fields=None, as_dict=False, reference_doctype=None, ignore_user_permissions=False):
start = cint(start)
if isinstance(filters, string_types):
filters = json.loads(filters)
if searchfield:
sanitize_searchfield(searchfield)
if not searchfield:
searchfield = "name"
standard_queries = frappe.get_hooks().standard_queries or {}
if query and query.split()[0].lower()!="select":
# by method
try:
is_whitelisted(frappe.get_attr(query))
frappe.response["values"] = frappe.call(query, doctype, txt,
searchfield, start, page_length, filters, as_dict=as_dict)
except frappe.exceptions.PermissionError as e:
if frappe.local.conf.developer_mode:
raise e
else:
frappe.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
except Exception as e:
raise e
elif not query and doctype in standard_queries:
# from standard queries
search_widget(doctype, txt, standard_queries[doctype][0],
searchfield, start, page_length, filters)
else:
meta = frappe.get_meta(doctype)
if query:
frappe.throw(_("This query style is discontinued"))
# custom query
# frappe.response["values"] = frappe.db.sql(scrub_custom_query(query, searchfield, txt))
else:
if isinstance(filters, dict):
filters_items = filters.items()
filters = []
for f in filters_items:
if isinstance(f[1], (list, tuple)):
filters.append([doctype, f[0], f[1][0], f[1][1]])
else:
filters.append([doctype, f[0], "=", f[1]])
if filters==None:
filters = []
or_filters = []
# build from doctype
if txt:
search_fields = ["name"]
if meta.title_field:
search_fields.append(meta.title_field)
if meta.search_fields:
search_fields.extend(meta.get_search_fields())
for f in search_fields:
fmeta = meta.get_field(f.strip())
if (doctype not in UNTRANSLATED_DOCTYPES) and (f == "name" or (fmeta and fmeta.fieldtype in ["Data", "Text", "Small Text", "Long Text",
"Link", "Select", "Read Only", "Text Editor"])):
or_filters.append([doctype, f.strip(), "like", "%{0}%".format(txt)])
if meta.get("fields", {"fieldname":"enabled", "fieldtype":"Check"}):
filters.append([doctype, "enabled", "=", 1])
if meta.get("fields", {"fieldname":"disabled", "fieldtype":"Check"}):
filters.append([doctype, "disabled", "!=", 1])
# format a list of fields combining search fields and filter fields
fields = get_std_fields_list(meta, searchfield or "name")
if filter_fields:
fields = list(set(fields + json.loads(filter_fields)))
formatted_fields = ['`tab%s`.`%s`' % (meta.name, f.strip()) for f in fields]
# find relevance as location of search term from the beginning of string `name`. used for sorting results.
formatted_fields.append("""locate({_txt}, `tab{doctype}`.`name`) as `_relevance`""".format(
_txt=frappe.db.escape((txt or "").replace("%", "").replace("@", "")), doctype=doctype))
# In order_by, `idx` gets second priority, because it stores link count
from frappe.model.db_query import get_order_by
order_by_based_on_meta = get_order_by(doctype, meta)
# 2 is the index of _relevance column
order_by = "_relevance, {0}, `tab{1}`.idx desc".format(order_by_based_on_meta, doctype)
ptype = 'select' if frappe.only_has_select_perm(doctype) else 'read'
ignore_permissions = True if doctype == "DocType" else (cint(ignore_user_permissions) and has_permission(doctype, ptype=ptype))
if doctype in UNTRANSLATED_DOCTYPES:
page_length = None
values = frappe.get_list(doctype,
filters=filters,
fields=formatted_fields,
or_filters=or_filters,
limit_start=start,
limit_page_length=page_length,
order_by=order_by,
ignore_permissions=ignore_permissions,
reference_doctype=reference_doctype,
as_list=not as_dict,
strict=False)
if doctype in UNTRANSLATED_DOCTYPES:
values = tuple([v for v in list(values) if re.search(re.escape(txt)+".*", (_(v.name) if as_dict else _(v[0])), re.IGNORECASE)])
# remove _relevance from results
if as_dict:
for r in values:
r.pop("_relevance")
frappe.response["values"] = values
else:
frappe.response["values"] = [r[:-1] for r in values]
def get_std_fields_list(meta, key):
# get additional search fields
sflist = ["name"]
if meta.search_fields:
for d in meta.search_fields.split(","):
if d.strip() not in sflist:
sflist.append(d.strip())
if meta.title_field and meta.title_field not in sflist:
sflist.append(meta.title_field)
if key not in sflist:
sflist.append(key)
return sflist
def build_for_autosuggest(res):
results = []
for r in res:
out = {"value": r[0], "description": ", ".join(unique(cstr(d) for d in r if d)[1:])}
results.append(out)
return results
def scrub_custom_query(query, key, txt):
if '%(key)s' in query:
query = query.replace('%(key)s', key)
if '%s' in query:
query = query.replace('%s', ((txt or '') + '%'))
return query
@wrapt.decorator
def validate_and_sanitize_search_inputs(fn, instance, args, kwargs):
kwargs.update(dict(zip(fn.__code__.co_varnames, args)))
sanitize_searchfield(kwargs['searchfield'])
kwargs['start'] = cint(kwargs['start'])
kwargs['page_len'] = cint(kwargs['page_len'])
if kwargs['doctype'] and not frappe.db.exists('DocType', kwargs['doctype']):
return []
return fn(**kwargs) | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# Search
from __future__ import unicode_literals
import frappe, json
from frappe.utils import cstr, unique, cint
from frappe.permissions import has_permission
from frappe import _, is_whitelisted
from six import string_types
import re
import wrapt
UNTRANSLATED_DOCTYPES = ["DocType", "Role"]
def sanitize_searchfield(searchfield):
blacklisted_keywords = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']
def _raise_exception(searchfield):
frappe.throw(_('Invalid Search Field {0}').format(searchfield), frappe.DataError)
if len(searchfield) == 1:
# do not allow special characters to pass as searchfields
regex = re.compile(r'^.*[=;*,\'"$\-+%#@()_].*')
if regex.match(searchfield):
_raise_exception(searchfield)
if len(searchfield) >= 3:
# to avoid 1=1
if '=' in searchfield:
_raise_exception(searchfield)
# in mysql -- is used for commenting the query
elif ' --' in searchfield:
_raise_exception(searchfield)
# to avoid and, or and like
elif any(' {0} '.format(keyword) in searchfield.split() for keyword in blacklisted_keywords):
_raise_exception(searchfield)
# to avoid select, delete, drop, update and case
elif any(keyword in searchfield.split() for keyword in blacklisted_keywords):
_raise_exception(searchfield)
else:
regex = re.compile(r'^.*[=;*,\'"$\-+%#@()].*')
if any(regex.match(f) for f in searchfield.split()):
_raise_exception(searchfield)
# this is called by the Link Field
@frappe.whitelist()
def search_link(doctype, txt, query=None, filters=None, page_length=20, searchfield=None, reference_doctype=None, ignore_user_permissions=False):
search_widget(doctype, txt.strip(), query, searchfield=searchfield, page_length=page_length, filters=filters, reference_doctype=reference_doctype, ignore_user_permissions=ignore_user_permissions)
frappe.response['results'] = build_for_autosuggest(frappe.response["values"])
del frappe.response["values"]
# this is called by the search box
@frappe.whitelist()
def search_widget(doctype, txt, query=None, searchfield=None, start=0,
page_length=20, filters=None, filter_fields=None, as_dict=False, reference_doctype=None, ignore_user_permissions=False):
start = cint(start)
if isinstance(filters, string_types):
filters = json.loads(filters)
if searchfield:
sanitize_searchfield(searchfield)
if not searchfield:
searchfield = "name"
standard_queries = frappe.get_hooks().standard_queries or {}
if query and query.split()[0].lower()!="select":
# by method
try:
is_whitelisted(frappe.get_attr(query))
frappe.response["values"] = frappe.call(query, doctype, txt,
searchfield, start, page_length, filters, as_dict=as_dict)
except frappe.exceptions.PermissionError as e:
if frappe.local.conf.developer_mode:
raise e
else:
frappe.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
except Exception as e:
raise e
elif not query and doctype in standard_queries:
# from standard queries
search_widget(doctype, txt, standard_queries[doctype][0],
searchfield, start, page_length, filters)
else:
meta = frappe.get_meta(doctype)
if query:
frappe.throw(_("This query style is discontinued"))
# custom query
# frappe.response["values"] = frappe.db.sql(scrub_custom_query(query, searchfield, txt))
else:
if isinstance(filters, dict):
filters_items = filters.items()
filters = []
for f in filters_items:
if isinstance(f[1], (list, tuple)):
filters.append([doctype, f[0], f[1][0], f[1][1]])
else:
filters.append([doctype, f[0], "=", f[1]])
if filters==None:
filters = []
or_filters = []
# build from doctype
if txt:
search_fields = ["name"]
if meta.title_field:
search_fields.append(meta.title_field)
if meta.search_fields:
search_fields.extend(meta.get_search_fields())
for f in search_fields:
fmeta = meta.get_field(f.strip())
if (doctype not in UNTRANSLATED_DOCTYPES) and (f == "name" or (fmeta and fmeta.fieldtype in ["Data", "Text", "Small Text", "Long Text",
"Link", "Select", "Read Only", "Text Editor"])):
or_filters.append([doctype, f.strip(), "like", "%{0}%".format(txt)])
if meta.get("fields", {"fieldname":"enabled", "fieldtype":"Check"}):
filters.append([doctype, "enabled", "=", 1])
if meta.get("fields", {"fieldname":"disabled", "fieldtype":"Check"}):
filters.append([doctype, "disabled", "!=", 1])
# format a list of fields combining search fields and filter fields
fields = get_std_fields_list(meta, searchfield or "name")
if filter_fields:
fields = list(set(fields + json.loads(filter_fields)))
formatted_fields = ['`tab%s`.`%s`' % (meta.name, f.strip()) for f in fields]
# find relevance as location of search term from the beginning of string `name`. used for sorting results.
formatted_fields.append("""locate({_txt}, `tab{doctype}`.`name`) as `_relevance`""".format(
_txt=frappe.db.escape((txt or "").replace("%", "").replace("@", "")), doctype=doctype))
# In order_by, `idx` gets second priority, because it stores link count
from frappe.model.db_query import get_order_by
order_by_based_on_meta = get_order_by(doctype, meta)
# 2 is the index of _relevance column
order_by = "_relevance, {0}, `tab{1}`.idx desc".format(order_by_based_on_meta, doctype)
ptype = 'select' if frappe.only_has_select_perm(doctype) else 'read'
ignore_permissions = True if doctype == "DocType" else (cint(ignore_user_permissions) and has_permission(doctype, ptype=ptype))
if doctype in UNTRANSLATED_DOCTYPES:
page_length = None
values = frappe.get_list(doctype,
filters=filters,
fields=formatted_fields,
or_filters=or_filters,
limit_start=start,
limit_page_length=page_length,
order_by=order_by,
ignore_permissions=ignore_permissions,
reference_doctype=reference_doctype,
as_list=not as_dict,
strict=False)
if doctype in UNTRANSLATED_DOCTYPES:
values = tuple([v for v in list(values) if re.search(re.escape(txt)+".*", (_(v.name) if as_dict else _(v[0])), re.IGNORECASE)])
# remove _relevance from results
if as_dict:
for r in values:
r.pop("_relevance")
frappe.response["values"] = values
else:
frappe.response["values"] = [r[:-1] for r in values]
def get_std_fields_list(meta, key):
# get additional search fields
sflist = ["name"]
if meta.search_fields:
for d in meta.search_fields.split(","):
if d.strip() not in sflist:
sflist.append(d.strip())
if meta.title_field and meta.title_field not in sflist:
sflist.append(meta.title_field)
if key not in sflist:
sflist.append(key)
return sflist
def build_for_autosuggest(res):
results = []
for r in res:
out = {"value": r[0], "description": ", ".join(unique(cstr(d) for d in r if d)[1:])}
results.append(out)
return results
def scrub_custom_query(query, key, txt):
if '%(key)s' in query:
query = query.replace('%(key)s', key)
if '%s' in query:
query = query.replace('%s', ((txt or '') + '%'))
return query
@wrapt.decorator
def validate_and_sanitize_search_inputs(fn, instance, args, kwargs):
kwargs.update(dict(zip(fn.__code__.co_varnames, args)))
sanitize_searchfield(kwargs['searchfield'])
kwargs['start'] = cint(kwargs['start'])
kwargs['page_len'] = cint(kwargs['page_len'])
if kwargs['doctype'] and not frappe.db.exists('DocType', kwargs['doctype']):
return []
return fn(**kwargs)
| xss | {
"code": [
"from frappe.handler import is_whitelisted",
"from frappe import _",
"\treturn fn(**kwargs)"
],
"line_no": [
9,
10,
224
]
} | {
"code": [
"from frappe import _, is_whitelisted"
],
"line_no": [
9
]
} |
from __future__ import unicode_literals
import .frappe, json
from VAR_20.utils import cstr, unique, cint
from VAR_20.permissions import has_permission
from VAR_20.handler import is_whitelisted
from VAR_20 import _
from six import string_types
import re
import wrapt
VAR_0 = ["DocType", "Role"]
def FUNC_0(VAR_1):
VAR_19 = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']
def FUNC_7(VAR_1):
VAR_20.throw(_('Invalid Search Field {0}').format(VAR_1), VAR_20.DataError)
if len(VAR_1) == 1:
VAR_24 = re.compile(r'^.*[=;*,\'"$\-+%#@()_].*')
if VAR_24.match(VAR_1):
FUNC_7(VAR_1)
if len(VAR_1) >= 3:
if '=' in VAR_1:
FUNC_7(VAR_1)
elif ' --' in VAR_1:
FUNC_7(VAR_1)
elif any(' {0} '.format(keyword) in VAR_1.split() for keyword in VAR_19):
FUNC_7(VAR_1)
elif any(keyword in VAR_1.split() for keyword in VAR_19):
FUNC_7(VAR_1)
else:
VAR_24 = re.compile(r'^.*[=;*,\'"$\-+%#@()].*')
if any(VAR_24.match(f) for f in VAR_1.split()):
FUNC_7(VAR_1)
@VAR_20.whitelist()
def FUNC_1(VAR_2, VAR_3, VAR_4=None, VAR_5=None, VAR_6=20, VAR_1=None, VAR_7=None, VAR_8=False):
FUNC_2(VAR_2, VAR_3.strip(), VAR_4, VAR_1=searchfield, VAR_6=page_length, VAR_5=filters, VAR_7=reference_doctype, VAR_8=ignore_user_permissions)
VAR_20.response['results'] = FUNC_4(VAR_20.response["values"])
del VAR_20.response["values"]
@VAR_20.whitelist()
def FUNC_2(VAR_2, VAR_3, VAR_4=None, VAR_1=None, VAR_9=0,
VAR_6=20, VAR_5=None, VAR_10=None, VAR_11=False, VAR_7=None, VAR_8=False):
VAR_9 = cint(VAR_9)
if isinstance(VAR_5, string_types):
VAR_5 = json.loads(VAR_5)
if VAR_1:
FUNC_0(VAR_1)
if not VAR_1:
searchfield = "name"
VAR_21 = VAR_20.get_hooks().standard_queries or {}
if VAR_4 and VAR_4.split()[0].lower()!="select":
try:
is_whitelisted(VAR_20.get_attr(VAR_4))
VAR_20.response["values"] = VAR_20.call(VAR_4, VAR_2, VAR_3,
VAR_1, VAR_9, VAR_6, VAR_5, VAR_11=as_dict)
except VAR_20.exceptions.PermissionError as e:
if VAR_20.local.conf.developer_mode:
raise e
else:
VAR_20.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
except Exception as e:
raise e
elif not VAR_4 and VAR_2 in VAR_21:
FUNC_2(VAR_2, VAR_3, VAR_21[VAR_2][0],
VAR_1, VAR_9, VAR_6, VAR_5)
else:
VAR_12 = VAR_20.get_meta(VAR_2)
if VAR_4:
VAR_20.throw(_("This VAR_4 style is discontinued"))
else:
if isinstance(VAR_5, dict):
VAR_34 = VAR_5.items()
VAR_5 = []
for f in VAR_34:
if isinstance(f[1], (list, tuple)):
VAR_5.append([VAR_2, f[0], f[1][0], f[1][1]])
else:
VAR_5.append([VAR_2, f[0], "=", f[1]])
if VAR_5==None:
VAR_5 = []
VAR_26 = []
if VAR_3:
VAR_35 = ["name"]
if VAR_12.title_field:
VAR_35.append(VAR_12.title_field)
if VAR_12.search_fields:
VAR_35.extend(VAR_12.get_search_fields())
for f in VAR_35:
VAR_36 = VAR_12.get_field(f.strip())
if (VAR_2 not in VAR_0) and (f == "name" or (VAR_36 and VAR_36.fieldtype in ["Data", "Text", "Small Text", "Long Text",
"Link", "Select", "Read Only", "Text Editor"])):
VAR_26.append([VAR_2, f.strip(), "like", "%{0}%".format(VAR_3)])
if VAR_12.get("fields", {"fieldname":"enabled", "fieldtype":"Check"}):
VAR_5.append([VAR_2, "enabled", "=", 1])
if VAR_12.get("fields", {"fieldname":"disabled", "fieldtype":"Check"}):
VAR_5.append([VAR_2, "disabled", "!=", 1])
fields = FUNC_3(VAR_12, VAR_1 or "name")
if VAR_10:
VAR_27 = list(set(VAR_27 + json.loads(VAR_10)))
VAR_28 = ['`tab%s`.`%s`' % (VAR_12.name, f.strip()) for f in VAR_27]
VAR_28.append("""locate({_txt}, `tab{VAR_2}`.`name`) as `_relevance`""".format(
_txt=VAR_20.db.escape((VAR_3 or "").replace("%", "").replace("@", "")), VAR_2=doctype))
from VAR_20.model.db_query import get_order_by
VAR_29 = get_order_by(VAR_2, VAR_12)
VAR_30 = "_relevance, {0}, `tab{1}`.idx desc".format(VAR_29, VAR_2)
VAR_31 = 'select' if VAR_20.only_has_select_perm(VAR_2) else 'read'
VAR_32 = True if VAR_2 == "DocType" else (cint(VAR_8) and has_permission(VAR_2, VAR_31=ptype))
if VAR_2 in VAR_0:
VAR_6 = None
VAR_33 = VAR_20.get_list(VAR_2,
VAR_5=filters,
VAR_27=VAR_28,
VAR_26=or_filters,
limit_start=VAR_9,
limit_page_length=VAR_6,
VAR_30=order_by,
VAR_32=ignore_permissions,
VAR_7=reference_doctype,
as_list=not VAR_11,
strict=False)
if VAR_2 in VAR_0:
VAR_33 = tuple([v for v in list(VAR_33) if re.search(re.escape(VAR_3)+".*", (_(v.name) if VAR_11 else _(v[0])), re.IGNORECASE)])
if VAR_11:
for r in VAR_33:
r.pop("_relevance")
VAR_20.response["values"] = VAR_33
else:
VAR_20.response["values"] = [r[:-1] for r in VAR_33]
def FUNC_3(VAR_12, VAR_13):
VAR_22 = ["name"]
if VAR_12.search_fields:
for d in VAR_12.search_fields.split(","):
if d.strip() not in VAR_22:
sflist.append(d.strip())
if VAR_12.title_field and VAR_12.title_field not in VAR_22:
sflist.append(VAR_12.title_field)
if VAR_13 not in VAR_22:
sflist.append(VAR_13)
return VAR_22
def FUNC_4(VAR_14):
VAR_23 = []
for r in VAR_14:
VAR_25 = {"value": r[0], "description": ", ".join(unique(cstr(d) for d in r if d)[1:])}
VAR_23.append(VAR_25)
return VAR_23
def FUNC_5(VAR_4, VAR_13, VAR_3):
if '%(VAR_13)s' in VAR_4:
query = VAR_4.replace('%(VAR_13)s', VAR_13)
if '%s' in VAR_4:
query = VAR_4.replace('%s', ((VAR_3 or '') + '%'))
return VAR_4
@wrapt.decorator
def FUNC_6(VAR_15, VAR_16, VAR_17, VAR_18):
kwargs.update(dict(zip(VAR_15.__code__.co_varnames, VAR_17)))
FUNC_0(VAR_18['searchfield'])
VAR_18['start'] = cint(VAR_18['start'])
VAR_18['page_len'] = cint(VAR_18['page_len'])
if VAR_18['doctype'] and not VAR_20.db.exists('DocType', VAR_18['doctype']):
return []
return VAR_15(**VAR_18) |
from __future__ import unicode_literals
import .frappe, json
from VAR_20.utils import cstr, unique, cint
from VAR_20.permissions import has_permission
from VAR_20 import _, is_whitelisted
from six import string_types
import re
import wrapt
VAR_0 = ["DocType", "Role"]
def FUNC_0(VAR_1):
VAR_19 = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']
def FUNC_7(VAR_1):
VAR_20.throw(_('Invalid Search Field {0}').format(VAR_1), VAR_20.DataError)
if len(VAR_1) == 1:
VAR_24 = re.compile(r'^.*[=;*,\'"$\-+%#@()_].*')
if VAR_24.match(VAR_1):
FUNC_7(VAR_1)
if len(VAR_1) >= 3:
if '=' in VAR_1:
FUNC_7(VAR_1)
elif ' --' in VAR_1:
FUNC_7(VAR_1)
elif any(' {0} '.format(keyword) in VAR_1.split() for keyword in VAR_19):
FUNC_7(VAR_1)
elif any(keyword in VAR_1.split() for keyword in VAR_19):
FUNC_7(VAR_1)
else:
VAR_24 = re.compile(r'^.*[=;*,\'"$\-+%#@()].*')
if any(VAR_24.match(f) for f in VAR_1.split()):
FUNC_7(VAR_1)
@VAR_20.whitelist()
def FUNC_1(VAR_2, VAR_3, VAR_4=None, VAR_5=None, VAR_6=20, VAR_1=None, VAR_7=None, VAR_8=False):
FUNC_2(VAR_2, VAR_3.strip(), VAR_4, VAR_1=searchfield, VAR_6=page_length, VAR_5=filters, VAR_7=reference_doctype, VAR_8=ignore_user_permissions)
VAR_20.response['results'] = FUNC_4(VAR_20.response["values"])
del VAR_20.response["values"]
@VAR_20.whitelist()
def FUNC_2(VAR_2, VAR_3, VAR_4=None, VAR_1=None, VAR_9=0,
VAR_6=20, VAR_5=None, VAR_10=None, VAR_11=False, VAR_7=None, VAR_8=False):
VAR_9 = cint(VAR_9)
if isinstance(VAR_5, string_types):
VAR_5 = json.loads(VAR_5)
if VAR_1:
FUNC_0(VAR_1)
if not VAR_1:
searchfield = "name"
VAR_21 = VAR_20.get_hooks().standard_queries or {}
if VAR_4 and VAR_4.split()[0].lower()!="select":
try:
is_whitelisted(VAR_20.get_attr(VAR_4))
VAR_20.response["values"] = VAR_20.call(VAR_4, VAR_2, VAR_3,
VAR_1, VAR_9, VAR_6, VAR_5, VAR_11=as_dict)
except VAR_20.exceptions.PermissionError as e:
if VAR_20.local.conf.developer_mode:
raise e
else:
VAR_20.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
except Exception as e:
raise e
elif not VAR_4 and VAR_2 in VAR_21:
FUNC_2(VAR_2, VAR_3, VAR_21[VAR_2][0],
VAR_1, VAR_9, VAR_6, VAR_5)
else:
VAR_12 = VAR_20.get_meta(VAR_2)
if VAR_4:
VAR_20.throw(_("This VAR_4 style is discontinued"))
else:
if isinstance(VAR_5, dict):
VAR_34 = VAR_5.items()
VAR_5 = []
for f in VAR_34:
if isinstance(f[1], (list, tuple)):
VAR_5.append([VAR_2, f[0], f[1][0], f[1][1]])
else:
VAR_5.append([VAR_2, f[0], "=", f[1]])
if VAR_5==None:
VAR_5 = []
VAR_26 = []
if VAR_3:
VAR_35 = ["name"]
if VAR_12.title_field:
VAR_35.append(VAR_12.title_field)
if VAR_12.search_fields:
VAR_35.extend(VAR_12.get_search_fields())
for f in VAR_35:
VAR_36 = VAR_12.get_field(f.strip())
if (VAR_2 not in VAR_0) and (f == "name" or (VAR_36 and VAR_36.fieldtype in ["Data", "Text", "Small Text", "Long Text",
"Link", "Select", "Read Only", "Text Editor"])):
VAR_26.append([VAR_2, f.strip(), "like", "%{0}%".format(VAR_3)])
if VAR_12.get("fields", {"fieldname":"enabled", "fieldtype":"Check"}):
VAR_5.append([VAR_2, "enabled", "=", 1])
if VAR_12.get("fields", {"fieldname":"disabled", "fieldtype":"Check"}):
VAR_5.append([VAR_2, "disabled", "!=", 1])
fields = FUNC_3(VAR_12, VAR_1 or "name")
if VAR_10:
VAR_27 = list(set(VAR_27 + json.loads(VAR_10)))
VAR_28 = ['`tab%s`.`%s`' % (VAR_12.name, f.strip()) for f in VAR_27]
VAR_28.append("""locate({_txt}, `tab{VAR_2}`.`name`) as `_relevance`""".format(
_txt=VAR_20.db.escape((VAR_3 or "").replace("%", "").replace("@", "")), VAR_2=doctype))
from VAR_20.model.db_query import get_order_by
VAR_29 = get_order_by(VAR_2, VAR_12)
VAR_30 = "_relevance, {0}, `tab{1}`.idx desc".format(VAR_29, VAR_2)
VAR_31 = 'select' if VAR_20.only_has_select_perm(VAR_2) else 'read'
VAR_32 = True if VAR_2 == "DocType" else (cint(VAR_8) and has_permission(VAR_2, VAR_31=ptype))
if VAR_2 in VAR_0:
VAR_6 = None
VAR_33 = VAR_20.get_list(VAR_2,
VAR_5=filters,
VAR_27=VAR_28,
VAR_26=or_filters,
limit_start=VAR_9,
limit_page_length=VAR_6,
VAR_30=order_by,
VAR_32=ignore_permissions,
VAR_7=reference_doctype,
as_list=not VAR_11,
strict=False)
if VAR_2 in VAR_0:
VAR_33 = tuple([v for v in list(VAR_33) if re.search(re.escape(VAR_3)+".*", (_(v.name) if VAR_11 else _(v[0])), re.IGNORECASE)])
if VAR_11:
for r in VAR_33:
r.pop("_relevance")
VAR_20.response["values"] = VAR_33
else:
VAR_20.response["values"] = [r[:-1] for r in VAR_33]
def FUNC_3(VAR_12, VAR_13):
VAR_22 = ["name"]
if VAR_12.search_fields:
for d in VAR_12.search_fields.split(","):
if d.strip() not in VAR_22:
sflist.append(d.strip())
if VAR_12.title_field and VAR_12.title_field not in VAR_22:
sflist.append(VAR_12.title_field)
if VAR_13 not in VAR_22:
sflist.append(VAR_13)
return VAR_22
def FUNC_4(VAR_14):
VAR_23 = []
for r in VAR_14:
VAR_25 = {"value": r[0], "description": ", ".join(unique(cstr(d) for d in r if d)[1:])}
VAR_23.append(VAR_25)
return VAR_23
def FUNC_5(VAR_4, VAR_13, VAR_3):
if '%(VAR_13)s' in VAR_4:
query = VAR_4.replace('%(VAR_13)s', VAR_13)
if '%s' in VAR_4:
query = VAR_4.replace('%s', ((VAR_3 or '') + '%'))
return VAR_4
@wrapt.decorator
def FUNC_6(VAR_15, VAR_16, VAR_17, VAR_18):
kwargs.update(dict(zip(VAR_15.__code__.co_varnames, VAR_17)))
FUNC_0(VAR_18['searchfield'])
VAR_18['start'] = cint(VAR_18['start'])
VAR_18['page_len'] = cint(VAR_18['page_len'])
if VAR_18['doctype'] and not VAR_20.db.exists('DocType', VAR_18['doctype']):
return []
return VAR_15(**VAR_18)
| [
1,
2,
3,
4,
14,
16,
19,
22,
24,
28,
30,
31,
34,
35,
38,
39,
42,
43,
46,
51,
52,
58,
59,
63,
65,
68,
71,
74,
76,
78,
93,
98,
101,
102,
112,
116,
117,
118,
123,
126,
132,
137,
138,
143,
144,
147,
148,
149,
152,
154,
157,
160,
172,
175,
176,
183,
185,
191,
194,
197,
199,
206,
213,
220,
223
] | [
1,
2,
3,
4,
13,
15,
18,
21,
23,
27,
29,
30,
33,
34,
37,
38,
41,
42,
45,
50,
51,
57,
58,
62,
64,
67,
70,
73,
75,
77,
92,
97,
100,
101,
111,
115,
116,
117,
122,
125,
131,
136,
137,
142,
143,
146,
147,
148,
151,
153,
156,
159,
171,
174,
175,
182,
184,
190,
193,
196,
198,
205,
212,
219,
222,
224
] |
2CWE-601
| # -*- coding: utf-8 -*-
from django.contrib import messages
from django.contrib.auth import views as django_views
from django.urls import reverse
from django.shortcuts import redirect, render, get_object_or_404
from django.utils.translation import gettext as _
from django.contrib.auth import get_user_model
from django.urls import reverse_lazy
from spirit.core.conf import settings
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.user.utils.email import send_activation_email
from spirit.user.utils.tokens import UserActivationTokenGenerator
from .forms import (
RegistrationForm,
LoginForm,
ResendActivationForm,
CustomPasswordResetForm)
User = get_user_model()
# I wish django would not force its crappy CBV on me
class _CustomPasswordResetView(django_views.PasswordResetView):
template_name = 'spirit/user/auth/password_reset_form.html'
email_template_name = 'spirit/user/auth/password_reset_email.html'
subject_template_name = 'spirit/user/auth/password_reset_subject.txt'
success_url = reverse_lazy('spirit:user:auth:password-reset-done')
form_class = CustomPasswordResetForm
class _CustomPasswordResetConfirmView(django_views.PasswordResetConfirmView):
template_name = 'spirit/user/auth/password_reset_confirm.html'
success_url = reverse_lazy('spirit:user:auth:password-reset-complete')
class _CustomPasswordResetCompleteView(django_views.PasswordResetCompleteView):
template_name = 'spirit/user/auth/password_reset_complete.html'
class _CustomPasswordResetDoneView(django_views.PasswordResetDoneView):
template_name = 'spirit/user/auth/password_reset_done.html'
class _CustomLoginView(django_views.LoginView):
template_name = 'spirit/user/auth/login.html'
# Make views sane again
_login_view = _CustomLoginView.as_view()
_logout_view = django_views.LogoutView.as_view()
_password_reset_view = _CustomPasswordResetView.as_view()
custom_password_reset_confirm = _CustomPasswordResetConfirmView.as_view()
custom_password_reset_complete = _CustomPasswordResetCompleteView.as_view()
custom_password_reset_done = _CustomPasswordResetDoneView.as_view()
@ratelimit(field='username', rate='5/5m')
# TODO: @guest_only
def custom_login(request, **kwargs):
# Currently, Django 1.5 login view does not redirect somewhere if the user is logged in
if request.user.is_authenticated:
return redirect(request.GET.get('next', request.user.st.get_absolute_url()))
if request.method == "POST" and request.is_limited():
return redirect(request.get_full_path())
return _login_view(request, authentication_form=LoginForm, **kwargs)
# TODO: @login_required ?
def custom_logout(request, **kwargs):
if not request.user.is_authenticated:
return redirect(request.GET.get('next', reverse(settings.LOGIN_URL)))
if request.method == 'POST':
return _logout_view(request, **kwargs)
return render(request, 'spirit/user/auth/logout.html')
@ratelimit(field='email', rate='5/5m')
def custom_password_reset(request, **kwargs):
if request.method == "POST" and request.is_limited():
return redirect(reverse("spirit:user:auth:password-reset"))
return _password_reset_view(request, **kwargs)
@ratelimit(rate='2/10s')
# TODO: @guest_only
def register(request, registration_form=RegistrationForm):
if request.user.is_authenticated:
return redirect(request.GET.get('next', reverse('spirit:user:update')))
form = registration_form(data=post_data(request))
if (is_post(request) and
not request.is_limited() and
form.is_valid()):
user = form.save()
send_activation_email(request, user)
messages.info(
request, _(
"We have sent you an email to %(email)s "
"so you can activate your account!") % {'email': form.get_email()})
# TODO: email-less activation
# if not settings.REGISTER_EMAIL_ACTIVATION_REQUIRED:
# login(request, user)
# return redirect(request.GET.get('next', reverse('spirit:user:update')))
return redirect(reverse(settings.LOGIN_URL))
return render(
request=request,
template_name='spirit/user/auth/register.html',
context={'form': form})
def registration_activation(request, pk, token):
user = get_object_or_404(User, pk=pk)
activation = UserActivationTokenGenerator()
if activation.is_valid(user, token):
user.st.is_verified = True
user.is_active = True
user.save()
messages.info(request, _("Your account has been activated!"))
return redirect(reverse(settings.LOGIN_URL))
@ratelimit(field='email', rate='5/5m')
# TODO: @guest_only
def resend_activation_email(request):
if request.user.is_authenticated:
return redirect(request.GET.get('next', reverse('spirit:user:update')))
form = ResendActivationForm(data=post_data(request))
if is_post(request):
if not request.is_limited() and form.is_valid():
user = form.get_user()
send_activation_email(request, user)
# TODO: show if is_valid only
messages.info(
request, _(
"If you don't receive an email, please make sure you've entered "
"the address you registered with, and check your spam folder."))
return redirect(reverse(settings.LOGIN_URL))
return render(
request=request,
template_name='spirit/user/auth/activation_resend.html',
context={'form': form})
| # -*- coding: utf-8 -*-
from django.contrib import messages
from django.contrib.auth import views as django_views
from django.urls import reverse
from django.shortcuts import redirect, render, get_object_or_404
from django.utils.translation import gettext as _
from django.contrib.auth import get_user_model
from django.urls import reverse_lazy
from spirit.core.conf import settings
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.user.utils.email import send_activation_email
from spirit.user.utils.tokens import UserActivationTokenGenerator
from .forms import (
RegistrationForm,
LoginForm,
ResendActivationForm,
CustomPasswordResetForm)
User = get_user_model()
# I wish django would not force its crappy CBV on me
class _CustomPasswordResetView(django_views.PasswordResetView):
template_name = 'spirit/user/auth/password_reset_form.html'
email_template_name = 'spirit/user/auth/password_reset_email.html'
subject_template_name = 'spirit/user/auth/password_reset_subject.txt'
success_url = reverse_lazy('spirit:user:auth:password-reset-done')
form_class = CustomPasswordResetForm
class _CustomPasswordResetConfirmView(django_views.PasswordResetConfirmView):
template_name = 'spirit/user/auth/password_reset_confirm.html'
success_url = reverse_lazy('spirit:user:auth:password-reset-complete')
class _CustomPasswordResetCompleteView(django_views.PasswordResetCompleteView):
template_name = 'spirit/user/auth/password_reset_complete.html'
class _CustomPasswordResetDoneView(django_views.PasswordResetDoneView):
template_name = 'spirit/user/auth/password_reset_done.html'
class _CustomLoginView(django_views.LoginView):
template_name = 'spirit/user/auth/login.html'
# Make views sane again
_login_view = _CustomLoginView.as_view()
_logout_view = django_views.LogoutView.as_view()
_password_reset_view = _CustomPasswordResetView.as_view()
custom_password_reset_confirm = _CustomPasswordResetConfirmView.as_view()
custom_password_reset_complete = _CustomPasswordResetCompleteView.as_view()
custom_password_reset_done = _CustomPasswordResetDoneView.as_view()
@ratelimit(field='username', rate='5/5m')
# TODO: @guest_only
def custom_login(request, **kwargs):
# Currently, Django 1.5 login view does not redirect somewhere if the user is logged in
if request.user.is_authenticated:
return safe_redirect(
request, 'next', request.user.st.get_absolute_url())
if request.method == "POST" and request.is_limited():
return redirect(request.get_full_path())
return _login_view(request, authentication_form=LoginForm, **kwargs)
# TODO: @login_required ?
def custom_logout(request, **kwargs):
if not request.user.is_authenticated:
return safe_redirect(request, 'next', reverse(settings.LOGIN_URL))
if request.method == 'POST':
return _logout_view(request, **kwargs)
return render(request, 'spirit/user/auth/logout.html')
@ratelimit(field='email', rate='5/5m')
def custom_password_reset(request, **kwargs):
if request.method == "POST" and request.is_limited():
return redirect(reverse("spirit:user:auth:password-reset"))
return _password_reset_view(request, **kwargs)
@ratelimit(rate='2/10s')
# TODO: @guest_only
def register(request, registration_form=RegistrationForm):
if request.user.is_authenticated:
return safe_redirect(request, 'next', reverse('spirit:user:update'))
form = registration_form(data=post_data(request))
if (is_post(request) and
not request.is_limited() and
form.is_valid()):
user = form.save()
send_activation_email(request, user)
messages.info(
request, _(
"We have sent you an email to %(email)s "
"so you can activate your account!") % {'email': form.get_email()})
# TODO: email-less activation
# if not settings.REGISTER_EMAIL_ACTIVATION_REQUIRED:
# login(request, user)
# return safe_redirect(request, 'next', reverse('spirit:user:update'))
return redirect(reverse(settings.LOGIN_URL))
return render(
request=request,
template_name='spirit/user/auth/register.html',
context={'form': form})
def registration_activation(request, pk, token):
user = get_object_or_404(User, pk=pk)
activation = UserActivationTokenGenerator()
if activation.is_valid(user, token):
user.st.is_verified = True
user.is_active = True
user.save()
messages.info(request, _("Your account has been activated!"))
return redirect(reverse(settings.LOGIN_URL))
@ratelimit(field='email', rate='5/5m')
# TODO: @guest_only
def resend_activation_email(request):
if request.user.is_authenticated:
return safe_redirect(request, 'next', reverse('spirit:user:update'))
form = ResendActivationForm(data=post_data(request))
if is_post(request):
if not request.is_limited() and form.is_valid():
user = form.get_user()
send_activation_email(request, user)
# TODO: show if is_valid only
messages.info(
request, _(
"If you don't receive an email, please make sure you've entered "
"the address you registered with, and check your spam folder."))
return redirect(reverse(settings.LOGIN_URL))
return render(
request=request,
template_name='spirit/user/auth/activation_resend.html',
context={'form': form})
| open_redirect | {
"code": [
" return redirect(request.GET.get('next', request.user.st.get_absolute_url()))",
" return redirect(request.GET.get('next', reverse(settings.LOGIN_URL)))",
" return redirect(request.GET.get('next', reverse('spirit:user:update')))",
" return redirect(request.GET.get('next', reverse('spirit:user:update')))"
],
"line_no": [
65,
76,
96,
138
]
} | {
"code": [
"from spirit.core.utils.http import safe_redirect",
" request, 'next', request.user.st.get_absolute_url())",
" return safe_redirect(request, 'next', reverse(settings.LOGIN_URL))",
" return safe_redirect(request, 'next', reverse('spirit:user:update'))",
" return safe_redirect(request, 'next', reverse('spirit:user:update'))"
],
"line_no": [
12,
67,
78,
98,
140
]
} |
from django.contrib import messages
from django.contrib.auth import views as django_views
from django.urls import reverse
from django.shortcuts import redirect, render, get_object_or_404
from django.utils.translation import gettext as _
from django.contrib.auth import get_user_model
from django.urls import reverse_lazy
from spirit.core.conf import settings
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.user.utils.email import send_activation_email
from spirit.user.utils.tokens import .UserActivationTokenGenerator
from .forms import (
RegistrationForm,
LoginForm,
ResendActivationForm,
CustomPasswordResetForm)
VAR_0 = get_user_model()
class CLASS_0(django_views.PasswordResetView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_form.html'
VAR_13 = 'spirit/VAR_18/auth/password_reset_email.html'
VAR_14 = 'spirit/VAR_18/auth/password_reset_subject.txt'
VAR_15 = reverse_lazy('spirit:VAR_18:auth:password-reset-done')
VAR_16 = CustomPasswordResetForm
class CLASS_1(django_views.PasswordResetConfirmView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_confirm.html'
VAR_15 = reverse_lazy('spirit:VAR_18:auth:password-reset-complete')
class CLASS_2(django_views.PasswordResetCompleteView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_complete.html'
class CLASS_3(django_views.PasswordResetDoneView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_done.html'
class CLASS_4(django_views.LoginView):
VAR_12 = 'spirit/VAR_18/auth/login.html'
VAR_1 = CLASS_4.as_view()
VAR_2 = django_views.LogoutView.as_view()
VAR_3 = CLASS_0.as_view()
VAR_4 = CLASS_1.as_view()
VAR_5 = CLASS_2.as_view()
VAR_6 = CLASS_3.as_view()
@ratelimit(field='username', rate='5/5m')
def FUNC_0(VAR_7, **VAR_8):
if VAR_7.user.is_authenticated:
return redirect(VAR_7.GET.get('next', VAR_7.user.st.get_absolute_url()))
if VAR_7.method == "POST" and VAR_7.is_limited():
return redirect(VAR_7.get_full_path())
return VAR_1(VAR_7, authentication_form=LoginForm, **VAR_8)
def FUNC_1(VAR_7, **VAR_8):
if not VAR_7.user.is_authenticated:
return redirect(VAR_7.GET.get('next', reverse(settings.LOGIN_URL)))
if VAR_7.method == 'POST':
return VAR_2(VAR_7, **VAR_8)
return render(VAR_7, 'spirit/VAR_18/auth/logout.html')
@ratelimit(field='email', rate='5/5m')
def FUNC_2(VAR_7, **VAR_8):
if VAR_7.method == "POST" and VAR_7.is_limited():
return redirect(reverse("spirit:VAR_18:auth:password-reset"))
return VAR_3(VAR_7, **VAR_8)
@ratelimit(rate='2/10s')
def FUNC_3(VAR_7, VAR_9=RegistrationForm):
if VAR_7.user.is_authenticated:
return redirect(VAR_7.GET.get('next', reverse('spirit:VAR_18:update')))
VAR_17 = VAR_9(data=post_data(VAR_7))
if (is_post(VAR_7) and
not VAR_7.is_limited() and
VAR_17.is_valid()):
VAR_18 = VAR_17.save()
send_activation_email(VAR_7, VAR_18)
messages.info(
VAR_7, _(
"We have sent you an email to %(email)s "
"so you can activate your account!") % {'email': VAR_17.get_email()})
return redirect(reverse(settings.LOGIN_URL))
return render(
VAR_7=request,
VAR_12='spirit/VAR_18/auth/FUNC_3.html',
context={'form': VAR_17})
def FUNC_4(VAR_7, VAR_10, VAR_11):
VAR_18 = get_object_or_404(VAR_0, VAR_10=pk)
VAR_19 = UserActivationTokenGenerator()
if VAR_19.is_valid(VAR_18, VAR_11):
VAR_18.st.is_verified = True
VAR_18.is_active = True
VAR_18.save()
messages.info(VAR_7, _("Your account has been activated!"))
return redirect(reverse(settings.LOGIN_URL))
@ratelimit(field='email', rate='5/5m')
def FUNC_5(VAR_7):
if VAR_7.user.is_authenticated:
return redirect(VAR_7.GET.get('next', reverse('spirit:VAR_18:update')))
VAR_17 = ResendActivationForm(data=post_data(VAR_7))
if is_post(VAR_7):
if not VAR_7.is_limited() and VAR_17.is_valid():
VAR_18 = VAR_17.get_user()
send_activation_email(VAR_7, VAR_18)
messages.info(
VAR_7, _(
"If you don't receive an email, please make sure you've entered "
"the address you registered with, and check your spam folder."))
return redirect(reverse(settings.LOGIN_URL))
return render(
VAR_7=request,
VAR_12='spirit/VAR_18/auth/activation_resend.html',
context={'form': VAR_17})
|
from django.contrib import messages
from django.contrib.auth import views as django_views
from django.urls import reverse
from django.shortcuts import redirect, render, get_object_or_404
from django.utils.translation import gettext as _
from django.contrib.auth import get_user_model
from django.urls import reverse_lazy
from spirit.core.conf import settings
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.user.utils.email import send_activation_email
from spirit.user.utils.tokens import .UserActivationTokenGenerator
from .forms import (
RegistrationForm,
LoginForm,
ResendActivationForm,
CustomPasswordResetForm)
VAR_0 = get_user_model()
class CLASS_0(django_views.PasswordResetView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_form.html'
VAR_13 = 'spirit/VAR_18/auth/password_reset_email.html'
VAR_14 = 'spirit/VAR_18/auth/password_reset_subject.txt'
VAR_15 = reverse_lazy('spirit:VAR_18:auth:password-reset-done')
VAR_16 = CustomPasswordResetForm
class CLASS_1(django_views.PasswordResetConfirmView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_confirm.html'
VAR_15 = reverse_lazy('spirit:VAR_18:auth:password-reset-complete')
class CLASS_2(django_views.PasswordResetCompleteView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_complete.html'
class CLASS_3(django_views.PasswordResetDoneView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_done.html'
class CLASS_4(django_views.LoginView):
VAR_12 = 'spirit/VAR_18/auth/login.html'
VAR_1 = CLASS_4.as_view()
VAR_2 = django_views.LogoutView.as_view()
VAR_3 = CLASS_0.as_view()
VAR_4 = CLASS_1.as_view()
VAR_5 = CLASS_2.as_view()
VAR_6 = CLASS_3.as_view()
@ratelimit(field='username', rate='5/5m')
def FUNC_0(VAR_7, **VAR_8):
if VAR_7.user.is_authenticated:
return safe_redirect(
VAR_7, 'next', VAR_7.user.st.get_absolute_url())
if VAR_7.method == "POST" and VAR_7.is_limited():
return redirect(VAR_7.get_full_path())
return VAR_1(VAR_7, authentication_form=LoginForm, **VAR_8)
def FUNC_1(VAR_7, **VAR_8):
if not VAR_7.user.is_authenticated:
return safe_redirect(VAR_7, 'next', reverse(settings.LOGIN_URL))
if VAR_7.method == 'POST':
return VAR_2(VAR_7, **VAR_8)
return render(VAR_7, 'spirit/VAR_18/auth/logout.html')
@ratelimit(field='email', rate='5/5m')
def FUNC_2(VAR_7, **VAR_8):
if VAR_7.method == "POST" and VAR_7.is_limited():
return redirect(reverse("spirit:VAR_18:auth:password-reset"))
return VAR_3(VAR_7, **VAR_8)
@ratelimit(rate='2/10s')
def FUNC_3(VAR_7, VAR_9=RegistrationForm):
if VAR_7.user.is_authenticated:
return safe_redirect(VAR_7, 'next', reverse('spirit:VAR_18:update'))
VAR_17 = VAR_9(data=post_data(VAR_7))
if (is_post(VAR_7) and
not VAR_7.is_limited() and
VAR_17.is_valid()):
VAR_18 = VAR_17.save()
send_activation_email(VAR_7, VAR_18)
messages.info(
VAR_7, _(
"We have sent you an email to %(email)s "
"so you can activate your account!") % {'email': VAR_17.get_email()})
return redirect(reverse(settings.LOGIN_URL))
return render(
VAR_7=request,
VAR_12='spirit/VAR_18/auth/FUNC_3.html',
context={'form': VAR_17})
def FUNC_4(VAR_7, VAR_10, VAR_11):
VAR_18 = get_object_or_404(VAR_0, VAR_10=pk)
VAR_19 = UserActivationTokenGenerator()
if VAR_19.is_valid(VAR_18, VAR_11):
VAR_18.st.is_verified = True
VAR_18.is_active = True
VAR_18.save()
messages.info(VAR_7, _("Your account has been activated!"))
return redirect(reverse(settings.LOGIN_URL))
@ratelimit(field='email', rate='5/5m')
def FUNC_5(VAR_7):
if VAR_7.user.is_authenticated:
return safe_redirect(VAR_7, 'next', reverse('spirit:VAR_18:update'))
VAR_17 = ResendActivationForm(data=post_data(VAR_7))
if is_post(VAR_7):
if not VAR_7.is_limited() and VAR_17.is_valid():
VAR_18 = VAR_17.get_user()
send_activation_email(VAR_7, VAR_18)
messages.info(
VAR_7, _(
"If you don't receive an email, please make sure you've entered "
"the address you registered with, and check your spam folder."))
return redirect(reverse(settings.LOGIN_URL))
return render(
VAR_7=request,
VAR_12='spirit/VAR_18/auth/activation_resend.html',
context={'form': VAR_17})
| [
1,
2,
10,
21,
23,
24,
25,
32,
33,
37,
38,
41,
42,
45,
46,
49,
50,
51,
58,
59,
61,
63,
66,
69,
71,
72,
73,
77,
80,
82,
83,
88,
90,
91,
93,
97,
108,
109,
110,
111,
112,
113,
119,
120,
124,
130,
132,
133,
135,
139,
145,
146,
156
] | [
1,
2,
10,
22,
24,
25,
26,
33,
34,
38,
39,
42,
43,
46,
47,
50,
51,
52,
59,
60,
62,
64,
68,
71,
73,
74,
75,
79,
82,
84,
85,
90,
92,
93,
95,
99,
110,
111,
112,
113,
114,
115,
121,
122,
126,
132,
134,
135,
137,
141,
147,
148,
158
] |
1CWE-79
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import json, datetime
from frappe import _, scrub
import frappe.desk.query_report
from frappe.utils import cint, cstr
from frappe.model.document import Document
from frappe.modules.export_file import export_to_files
from frappe.modules import make_boilerplate
from frappe.core.doctype.page.page import delete_custom_role
from frappe.core.doctype.custom_role.custom_role import get_custom_allowed_roles
from frappe.desk.reportview import append_totals_row
from six import iteritems
from frappe.utils.safe_exec import safe_exec
class Report(Document):
def validate(self):
"""only administrator can save standard report"""
if not self.module:
self.module = frappe.db.get_value("DocType", self.ref_doctype, "module")
if not self.is_standard:
self.is_standard = "No"
if frappe.session.user=="Administrator" and getattr(frappe.local.conf, 'developer_mode',0)==1:
self.is_standard = "Yes"
if self.is_standard == "No":
# allow only script manager to edit scripts
if self.report_type != 'Report Builder':
frappe.only_for('Script Manager', True)
if frappe.db.get_value("Report", self.name, "is_standard") == "Yes":
frappe.throw(_("Cannot edit a standard report. Please duplicate and create a new report"))
if self.is_standard == "Yes" and frappe.session.user!="Administrator":
frappe.throw(_("Only Administrator can save a standard report. Please rename and save."))
if self.report_type == "Report Builder":
self.update_report_json()
def before_insert(self):
self.set_doctype_roles()
def on_update(self):
self.export_doc()
def on_trash(self):
if (self.is_standard == 'Yes'
and not cint(getattr(frappe.local.conf, 'developer_mode', 0))
and not frappe.flags.in_patch):
frappe.throw(_("You are not allowed to delete Standard Report"))
delete_custom_role('report', self.name)
def get_columns(self):
return [d.as_dict(no_default_fields = True) for d in self.columns]
def set_doctype_roles(self):
if not self.get('roles') and self.is_standard == 'No':
meta = frappe.get_meta(self.ref_doctype)
if not meta.istable:
roles = [{'role': d.role} for d in meta.permissions if d.permlevel==0]
self.set('roles', roles)
def is_permitted(self):
"""Returns true if Has Role is not set or the user is allowed."""
from frappe.utils import has_common
allowed = [d.role for d in frappe.get_all("Has Role", fields=["role"],
filters={"parent": self.name})]
custom_roles = get_custom_allowed_roles('report', self.name)
allowed.extend(custom_roles)
if not allowed:
return True
if has_common(frappe.get_roles(), allowed):
return True
def update_report_json(self):
if not self.json:
self.json = '{}'
def export_doc(self):
if frappe.flags.in_import:
return
if self.is_standard == 'Yes' and (frappe.local.conf.get('developer_mode') or 0) == 1:
export_to_files(record_list=[['Report', self.name]],
record_module=self.module, create_init=True)
self.create_report_py()
def create_report_py(self):
if self.report_type == "Script Report":
make_boilerplate("controller.py", self, {"name": self.name})
make_boilerplate("controller.js", self, {"name": self.name})
def execute_query_report(self, filters):
if not self.query:
frappe.throw(_("Must specify a Query to run"), title=_('Report Document Error'))
if not self.query.lower().startswith("select"):
frappe.throw(_("Query must be a SELECT"), title=_('Report Document Error'))
result = [list(t) for t in frappe.db.sql(self.query, filters, debug=True)]
columns = self.get_columns() or [cstr(c[0]) for c in frappe.db.get_description()]
return [columns, result]
def execute_script_report(self, filters):
# save the timestamp to automatically set to prepared
threshold = 30
res = []
start_time = datetime.datetime.now()
# The JOB
if self.is_standard == 'Yes':
res = self.execute_module(filters)
else:
res = self.execute_script(filters)
# automatically set as prepared
execution_time = (datetime.datetime.now() - start_time).total_seconds()
if execution_time > threshold and not self.prepared_report:
self.db_set('prepared_report', 1)
frappe.cache().hset('report_execution_time', self.name, execution_time)
return res
def execute_module(self, filters):
# report in python module
module = self.module or frappe.db.get_value("DocType", self.ref_doctype, "module")
method_name = get_report_module_dotted_path(module, self.name) + ".execute"
return frappe.get_attr(method_name)(frappe._dict(filters))
def execute_script(self, filters):
# server script
loc = {"filters": frappe._dict(filters), 'data':None, 'result':None}
safe_exec(self.report_script, None, loc)
if loc['data']:
return loc['data']
else:
return self.get_columns(), loc['result']
def get_data(self, filters=None, limit=None, user=None, as_dict=False, ignore_prepared_report=False):
if self.report_type in ('Query Report', 'Script Report', 'Custom Report'):
columns, result = self.run_query_report(filters, user, ignore_prepared_report)
else:
columns, result = self.run_standard_report(filters, limit, user)
if as_dict:
result = self.build_data_dict(result, columns)
return columns, result
def run_query_report(self, filters, user, ignore_prepared_report=False):
columns, result = [], []
data = frappe.desk.query_report.run(self.name,
filters=filters, user=user, ignore_prepared_report=ignore_prepared_report)
for d in data.get('columns'):
if isinstance(d, dict):
col = frappe._dict(d)
if not col.fieldname:
col.fieldname = col.label
columns.append(col)
else:
fieldtype, options = "Data", None
parts = d.split(':')
if len(parts) > 1:
if parts[1]:
fieldtype, options = parts[1], None
if fieldtype and '/' in fieldtype:
fieldtype, options = fieldtype.split('/')
columns.append(frappe._dict(label=parts[0], fieldtype=fieldtype, fieldname=parts[0], options=options))
result += data.get('result')
return columns, result
def run_standard_report(self, filters, limit, user):
params = json.loads(self.json)
columns = self.get_standard_report_columns(params)
result = []
order_by, group_by, group_by_args = self.get_standard_report_order_by(params)
_result = frappe.get_list(self.ref_doctype,
fields = [
get_group_by_field(group_by_args, c[1]) if c[0] == '_aggregate_column' and group_by_args
else Report._format([c[1], c[0]]) for c in columns
],
filters = self.get_standard_report_filters(params, filters),
order_by = order_by,
group_by = group_by,
as_list = True,
limit = limit,
user = user)
columns = self.build_standard_report_columns(columns, group_by_args)
result = result + [list(d) for d in _result]
if params.get('add_totals_row'):
result = append_totals_row(result)
return columns, result
@staticmethod
def _format(parts):
# sort by is saved as DocType.fieldname, covert it to sql
return '`tab{0}`.`{1}`'.format(*parts)
def get_standard_report_columns(self, params):
if params.get('fields'):
columns = params.get('fields')
elif params.get('columns'):
columns = params.get('columns')
elif params.get('fields'):
columns = params.get('fields')
else:
columns = [['name', self.ref_doctype]]
for df in frappe.get_meta(self.ref_doctype).fields:
if df.in_list_view:
columns.append([df.fieldname, self.ref_doctype])
return columns
def get_standard_report_filters(self, params, filters):
_filters = params.get('filters') or []
if filters:
for key, value in iteritems(filters):
condition, _value = '=', value
if isinstance(value, (list, tuple)):
condition, _value = value
_filters.append([key, condition, _value])
return _filters
def get_standard_report_order_by(self, params):
group_by_args = None
if params.get('sort_by'):
order_by = Report._format(params.get('sort_by').split('.')) + ' ' + params.get('sort_order')
elif params.get('order_by'):
order_by = params.get('order_by')
else:
order_by = Report._format([self.ref_doctype, 'modified']) + ' desc'
if params.get('sort_by_next'):
order_by += ', ' + Report._format(params.get('sort_by_next').split('.')) + ' ' + params.get('sort_order_next')
group_by = None
if params.get('group_by'):
group_by_args = frappe._dict(params['group_by'])
group_by = group_by_args['group_by']
order_by = '_aggregate_column desc'
return order_by, group_by, group_by_args
def build_standard_report_columns(self, columns, group_by_args):
_columns = []
for (fieldname, doctype) in columns:
meta = frappe.get_meta(doctype)
if meta.get_field(fieldname):
field = meta.get_field(fieldname)
else:
if fieldname == '_aggregate_column':
label = get_group_by_column_label(group_by_args, meta)
else:
label = meta.get_label(fieldname)
field = frappe._dict(fieldname=fieldname, label=label)
# since name is the primary key for a document, it will always be a Link datatype
if fieldname == "name":
field.fieldtype = "Link"
field.options = doctype
_columns.append(field)
return _columns
def build_data_dict(self, result, columns):
data = []
for row in result:
if isinstance(row, (list, tuple)):
_row = frappe._dict()
for i, val in enumerate(row):
_row[columns[i].get('fieldname')] = val
elif isinstance(row, dict):
# no need to convert from dict to dict
_row = frappe._dict(row)
data.append(_row)
return data
@Document.whitelist
def toggle_disable(self, disable):
self.db_set("disabled", cint(disable))
@frappe.whitelist()
def is_prepared_report_disabled(report):
return frappe.db.get_value('Report',
report, 'disable_prepared_report') or 0
def get_report_module_dotted_path(module, report_name):
return frappe.local.module_app[scrub(module)] + "." + scrub(module) \
+ ".report." + scrub(report_name) + "." + scrub(report_name)
def get_group_by_field(args, doctype):
if args['aggregate_function'] == 'count':
group_by_field = 'count(*) as _aggregate_column'
else:
group_by_field = '{0}(`tab{1}`.{2}) as _aggregate_column'.format(
args.aggregate_function,
doctype,
args.aggregate_on
)
return group_by_field
def get_group_by_column_label(args, meta):
if args['aggregate_function'] == 'count':
label = 'Count'
else:
sql_fn_map = {
'avg': 'Average',
'sum': 'Sum'
}
aggregate_on_label = meta.get_label(args.aggregate_on)
label = _('{function} of {fieldlabel}').format(
function=sql_fn_map[args.aggregate_function],
fieldlabel = aggregate_on_label
)
return label
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import json, datetime
from frappe import _, scrub
import frappe.desk.query_report
from frappe.utils import cint, cstr
from frappe.model.document import Document
from frappe.modules.export_file import export_to_files
from frappe.modules import make_boilerplate
from frappe.core.doctype.page.page import delete_custom_role
from frappe.core.doctype.custom_role.custom_role import get_custom_allowed_roles
from frappe.desk.reportview import append_totals_row
from six import iteritems
from frappe.utils.safe_exec import safe_exec
class Report(Document):
def validate(self):
"""only administrator can save standard report"""
if not self.module:
self.module = frappe.db.get_value("DocType", self.ref_doctype, "module")
if not self.is_standard:
self.is_standard = "No"
if frappe.session.user=="Administrator" and getattr(frappe.local.conf, 'developer_mode',0)==1:
self.is_standard = "Yes"
if self.is_standard == "No":
# allow only script manager to edit scripts
if self.report_type != 'Report Builder':
frappe.only_for('Script Manager', True)
if frappe.db.get_value("Report", self.name, "is_standard") == "Yes":
frappe.throw(_("Cannot edit a standard report. Please duplicate and create a new report"))
if self.is_standard == "Yes" and frappe.session.user!="Administrator":
frappe.throw(_("Only Administrator can save a standard report. Please rename and save."))
if self.report_type == "Report Builder":
self.update_report_json()
def before_insert(self):
self.set_doctype_roles()
def on_update(self):
self.export_doc()
def on_trash(self):
if (self.is_standard == 'Yes'
and not cint(getattr(frappe.local.conf, 'developer_mode', 0))
and not frappe.flags.in_patch):
frappe.throw(_("You are not allowed to delete Standard Report"))
delete_custom_role('report', self.name)
def get_columns(self):
return [d.as_dict(no_default_fields = True) for d in self.columns]
@frappe.whitelist()
def set_doctype_roles(self):
if not self.get('roles') and self.is_standard == 'No':
meta = frappe.get_meta(self.ref_doctype)
if not meta.istable:
roles = [{'role': d.role} for d in meta.permissions if d.permlevel==0]
self.set('roles', roles)
def is_permitted(self):
"""Returns true if Has Role is not set or the user is allowed."""
from frappe.utils import has_common
allowed = [d.role for d in frappe.get_all("Has Role", fields=["role"],
filters={"parent": self.name})]
custom_roles = get_custom_allowed_roles('report', self.name)
allowed.extend(custom_roles)
if not allowed:
return True
if has_common(frappe.get_roles(), allowed):
return True
def update_report_json(self):
if not self.json:
self.json = '{}'
def export_doc(self):
if frappe.flags.in_import:
return
if self.is_standard == 'Yes' and (frappe.local.conf.get('developer_mode') or 0) == 1:
export_to_files(record_list=[['Report', self.name]],
record_module=self.module, create_init=True)
self.create_report_py()
def create_report_py(self):
if self.report_type == "Script Report":
make_boilerplate("controller.py", self, {"name": self.name})
make_boilerplate("controller.js", self, {"name": self.name})
def execute_query_report(self, filters):
if not self.query:
frappe.throw(_("Must specify a Query to run"), title=_('Report Document Error'))
if not self.query.lower().startswith("select"):
frappe.throw(_("Query must be a SELECT"), title=_('Report Document Error'))
result = [list(t) for t in frappe.db.sql(self.query, filters, debug=True)]
columns = self.get_columns() or [cstr(c[0]) for c in frappe.db.get_description()]
return [columns, result]
def execute_script_report(self, filters):
# save the timestamp to automatically set to prepared
threshold = 30
res = []
start_time = datetime.datetime.now()
# The JOB
if self.is_standard == 'Yes':
res = self.execute_module(filters)
else:
res = self.execute_script(filters)
# automatically set as prepared
execution_time = (datetime.datetime.now() - start_time).total_seconds()
if execution_time > threshold and not self.prepared_report:
self.db_set('prepared_report', 1)
frappe.cache().hset('report_execution_time', self.name, execution_time)
return res
def execute_module(self, filters):
# report in python module
module = self.module or frappe.db.get_value("DocType", self.ref_doctype, "module")
method_name = get_report_module_dotted_path(module, self.name) + ".execute"
return frappe.get_attr(method_name)(frappe._dict(filters))
def execute_script(self, filters):
# server script
loc = {"filters": frappe._dict(filters), 'data':None, 'result':None}
safe_exec(self.report_script, None, loc)
if loc['data']:
return loc['data']
else:
return self.get_columns(), loc['result']
def get_data(self, filters=None, limit=None, user=None, as_dict=False, ignore_prepared_report=False):
if self.report_type in ('Query Report', 'Script Report', 'Custom Report'):
columns, result = self.run_query_report(filters, user, ignore_prepared_report)
else:
columns, result = self.run_standard_report(filters, limit, user)
if as_dict:
result = self.build_data_dict(result, columns)
return columns, result
def run_query_report(self, filters, user, ignore_prepared_report=False):
columns, result = [], []
data = frappe.desk.query_report.run(self.name,
filters=filters, user=user, ignore_prepared_report=ignore_prepared_report)
for d in data.get('columns'):
if isinstance(d, dict):
col = frappe._dict(d)
if not col.fieldname:
col.fieldname = col.label
columns.append(col)
else:
fieldtype, options = "Data", None
parts = d.split(':')
if len(parts) > 1:
if parts[1]:
fieldtype, options = parts[1], None
if fieldtype and '/' in fieldtype:
fieldtype, options = fieldtype.split('/')
columns.append(frappe._dict(label=parts[0], fieldtype=fieldtype, fieldname=parts[0], options=options))
result += data.get('result')
return columns, result
def run_standard_report(self, filters, limit, user):
params = json.loads(self.json)
columns = self.get_standard_report_columns(params)
result = []
order_by, group_by, group_by_args = self.get_standard_report_order_by(params)
_result = frappe.get_list(self.ref_doctype,
fields = [
get_group_by_field(group_by_args, c[1]) if c[0] == '_aggregate_column' and group_by_args
else Report._format([c[1], c[0]]) for c in columns
],
filters = self.get_standard_report_filters(params, filters),
order_by = order_by,
group_by = group_by,
as_list = True,
limit = limit,
user = user)
columns = self.build_standard_report_columns(columns, group_by_args)
result = result + [list(d) for d in _result]
if params.get('add_totals_row'):
result = append_totals_row(result)
return columns, result
@staticmethod
def _format(parts):
# sort by is saved as DocType.fieldname, covert it to sql
return '`tab{0}`.`{1}`'.format(*parts)
def get_standard_report_columns(self, params):
if params.get('fields'):
columns = params.get('fields')
elif params.get('columns'):
columns = params.get('columns')
elif params.get('fields'):
columns = params.get('fields')
else:
columns = [['name', self.ref_doctype]]
for df in frappe.get_meta(self.ref_doctype).fields:
if df.in_list_view:
columns.append([df.fieldname, self.ref_doctype])
return columns
def get_standard_report_filters(self, params, filters):
_filters = params.get('filters') or []
if filters:
for key, value in iteritems(filters):
condition, _value = '=', value
if isinstance(value, (list, tuple)):
condition, _value = value
_filters.append([key, condition, _value])
return _filters
def get_standard_report_order_by(self, params):
group_by_args = None
if params.get('sort_by'):
order_by = Report._format(params.get('sort_by').split('.')) + ' ' + params.get('sort_order')
elif params.get('order_by'):
order_by = params.get('order_by')
else:
order_by = Report._format([self.ref_doctype, 'modified']) + ' desc'
if params.get('sort_by_next'):
order_by += ', ' + Report._format(params.get('sort_by_next').split('.')) + ' ' + params.get('sort_order_next')
group_by = None
if params.get('group_by'):
group_by_args = frappe._dict(params['group_by'])
group_by = group_by_args['group_by']
order_by = '_aggregate_column desc'
return order_by, group_by, group_by_args
def build_standard_report_columns(self, columns, group_by_args):
_columns = []
for (fieldname, doctype) in columns:
meta = frappe.get_meta(doctype)
if meta.get_field(fieldname):
field = meta.get_field(fieldname)
else:
if fieldname == '_aggregate_column':
label = get_group_by_column_label(group_by_args, meta)
else:
label = meta.get_label(fieldname)
field = frappe._dict(fieldname=fieldname, label=label)
# since name is the primary key for a document, it will always be a Link datatype
if fieldname == "name":
field.fieldtype = "Link"
field.options = doctype
_columns.append(field)
return _columns
def build_data_dict(self, result, columns):
data = []
for row in result:
if isinstance(row, (list, tuple)):
_row = frappe._dict()
for i, val in enumerate(row):
_row[columns[i].get('fieldname')] = val
elif isinstance(row, dict):
# no need to convert from dict to dict
_row = frappe._dict(row)
data.append(_row)
return data
@frappe.whitelist()
def toggle_disable(self, disable):
self.db_set("disabled", cint(disable))
@frappe.whitelist()
def is_prepared_report_disabled(report):
return frappe.db.get_value('Report',
report, 'disable_prepared_report') or 0
def get_report_module_dotted_path(module, report_name):
return frappe.local.module_app[scrub(module)] + "." + scrub(module) \
+ ".report." + scrub(report_name) + "." + scrub(report_name)
def get_group_by_field(args, doctype):
if args['aggregate_function'] == 'count':
group_by_field = 'count(*) as _aggregate_column'
else:
group_by_field = '{0}(`tab{1}`.{2}) as _aggregate_column'.format(
args.aggregate_function,
doctype,
args.aggregate_on
)
return group_by_field
def get_group_by_column_label(args, meta):
if args['aggregate_function'] == 'count':
label = 'Count'
else:
sql_fn_map = {
'avg': 'Average',
'sum': 'Sum'
}
aggregate_on_label = meta.get_label(args.aggregate_on)
label = _('{function} of {fieldlabel}').format(
function=sql_fn_map[args.aggregate_function],
fieldlabel = aggregate_on_label
)
return label
| xss | {
"code": [
"\t@Document.whitelist"
],
"line_no": [
307
]
} | {
"code": [
"\t@frappe.whitelist()",
"\t@frappe.whitelist()"
],
"line_no": [
61,
308
]
} |
from __future__ import unicode_literals
import frappe
import json, datetime
from frappe import _, scrub
import frappe.desk.query_report
from frappe.utils import cint, cstr
from frappe.model.document import Document
from frappe.modules.export_file import export_to_files
from frappe.modules import make_boilerplate
from frappe.core.doctype.page.page import delete_custom_role
from frappe.core.doctype.custom_role.custom_role import get_custom_allowed_roles
from frappe.desk.reportview import append_totals_row
from six import .iteritems
from frappe.utils.safe_exec import safe_exec
class CLASS_0(Document):
def FUNC_4(self):
if not self.module:
self.module = frappe.db.get_value("DocType", self.ref_doctype, "module")
if not self.is_standard:
self.is_standard = "No"
if frappe.session.user=="Administrator" and getattr(frappe.local.conf, 'developer_mode',0)==1:
self.is_standard = "Yes"
if self.is_standard == "No":
if self.report_type != 'Report Builder':
frappe.only_for('Script Manager', True)
if frappe.db.get_value("Report", self.name, "is_standard") == "Yes":
frappe.throw(_("Cannot edit a standard VAR_0. Please duplicate and create a new report"))
if self.is_standard == "Yes" and frappe.session.user!="Administrator":
frappe.throw(_("Only Administrator can save a standard VAR_0. Please rename and save."))
if self.report_type == "Report Builder":
self.update_report_json()
def FUNC_5(self):
self.set_doctype_roles()
def FUNC_6(self):
self.export_doc()
def FUNC_7(self):
if (self.is_standard == 'Yes'
and not cint(getattr(frappe.local.conf, 'developer_mode', 0))
and not frappe.flags.in_patch):
frappe.throw(_("You are not VAR_17 to delete Standard Report"))
delete_custom_role('report', self.name)
def FUNC_8(self):
return [d.as_dict(no_default_fields = True) for d in self.columns]
def FUNC_9(self):
if not self.get('roles') and self.is_standard == 'No':
VAR_5 = frappe.get_meta(self.ref_doctype)
if not VAR_5.istable:
VAR_35 = [{'role': d.role} for d in VAR_5.permissions if d.permlevel==0]
self.set('roles', VAR_35)
def FUNC_10(self):
from frappe.utils import has_common
VAR_17 = [d.role for d in frappe.get_all("Has Role", fields=["role"],
VAR_6={"parent": self.name})]
VAR_18 = get_custom_allowed_roles('report', self.name)
VAR_17.extend(VAR_18)
if not VAR_17:
return True
if has_common(frappe.get_roles(), VAR_17):
return True
def FUNC_11(self):
if not self.json:
self.json = '{}'
def FUNC_12(self):
if frappe.flags.in_import:
return
if self.is_standard == 'Yes' and (frappe.local.conf.get('developer_mode') or 0) == 1:
export_to_files(record_list=[['Report', self.name]],
record_module=self.module, create_init=True)
self.create_report_py()
def FUNC_13(self):
if self.report_type == "Script Report":
make_boilerplate("controller.py", self, {"name": self.name})
make_boilerplate("controller.js", self, {"name": self.name})
def FUNC_14(self, VAR_6):
if not self.query:
frappe.throw(_("Must specify a Query to run"), title=_('Report Document Error'))
if not self.query.lower().startswith("select"):
frappe.throw(_("Query must be a SELECT"), title=_('Report Document Error'))
VAR_15 = [list(t) for t in frappe.db.sql(self.query, VAR_6, debug=True)]
VAR_13 = self.get_columns() or [cstr(c[0]) for c in frappe.db.get_description()]
return [VAR_13, VAR_15]
def FUNC_15(self, VAR_6):
VAR_19 = 30
VAR_20 = []
VAR_21 = datetime.datetime.now()
if self.is_standard == 'Yes':
VAR_20 = self.execute_module(VAR_6)
else:
VAR_20 = self.execute_script(VAR_6)
VAR_22 = (datetime.datetime.now() - VAR_21).total_seconds()
if VAR_22 > VAR_19 and not self.prepared_report:
self.db_set('prepared_report', 1)
frappe.cache().hset('report_execution_time', self.name, VAR_22)
return VAR_20
def FUNC_16(self, VAR_6):
module = self.module or frappe.db.get_value("DocType", self.ref_doctype, "module")
VAR_23 = FUNC_1(VAR_1, self.name) + ".execute"
return frappe.get_attr(VAR_23)(frappe._dict(VAR_6))
def FUNC_17(self, VAR_6):
VAR_24 = {"filters": frappe._dict(VAR_6), 'data':None, 'result':None}
safe_exec(self.report_script, None, VAR_24)
if VAR_24['data']:
return VAR_24['data']
else:
return self.get_columns(), VAR_24['result']
def FUNC_18(self, VAR_6=None, VAR_7=None, VAR_8=None, VAR_9=False, VAR_10=False):
if self.report_type in ('Query Report', 'Script Report', 'Custom Report'):
VAR_13, VAR_15 = self.run_query_report(VAR_6, VAR_8, VAR_10)
else:
VAR_13, VAR_15 = self.run_standard_report(VAR_6, VAR_7, VAR_8)
if VAR_9:
VAR_15 = self.build_data_dict(VAR_15, VAR_13)
return VAR_13, VAR_15
def FUNC_19(self, VAR_6, VAR_8, VAR_10=False):
VAR_13, VAR_15 = [], []
VAR_25 = frappe.desk.query_report.run(self.name,
VAR_6=filters, VAR_8=user, VAR_10=ignore_prepared_report)
for d in VAR_25.get('columns'):
if isinstance(d, dict):
VAR_36 = frappe._dict(d)
if not VAR_36.fieldname:
VAR_36.fieldname = VAR_36.label
VAR_13.append(VAR_36)
else:
VAR_37, VAR_38 = "Data", None
VAR_11 = d.split(':')
if len(VAR_11) > 1:
if VAR_11[1]:
VAR_37, VAR_38 = VAR_11[1], None
if VAR_37 and '/' in VAR_37:
fieldtype, VAR_38 = VAR_37.split('/')
VAR_13.append(frappe._dict(VAR_32=VAR_11[0], VAR_37=fieldtype, fieldname=VAR_11[0], VAR_38=options))
VAR_15 += VAR_25.get('result')
return VAR_13, VAR_15
def FUNC_20(self, VAR_6, VAR_7, VAR_8):
VAR_12 = json.loads(self.json)
VAR_13 = self.get_standard_report_columns(VAR_12)
VAR_15 = []
VAR_26, VAR_27, VAR_14 = self.get_standard_report_order_by(VAR_12)
VAR_28 = frappe.get_list(self.ref_doctype,
fields = [
FUNC_2(VAR_14, c[1]) if c[0] == '_aggregate_column' and VAR_14
else CLASS_0._format([c[1], c[0]]) for c in VAR_13
],
VAR_6 = self.get_standard_report_filters(VAR_12, VAR_6),
VAR_26 = order_by,
VAR_27 = group_by,
as_list = True,
VAR_7 = limit,
VAR_8 = user)
VAR_13 = self.build_standard_report_columns(VAR_13, VAR_14)
VAR_15 = result + [list(d) for d in VAR_28]
if VAR_12.get('add_totals_row'):
VAR_15 = append_totals_row(VAR_15)
return VAR_13, VAR_15
@staticmethod
def FUNC_21(VAR_11):
return '`tab{0}`.`{1}`'.format(*VAR_11)
def FUNC_22(self, VAR_12):
if VAR_12.get('fields'):
VAR_13 = VAR_12.get('fields')
elif VAR_12.get('columns'):
VAR_13 = VAR_12.get('columns')
elif VAR_12.get('fields'):
VAR_13 = VAR_12.get('fields')
else:
VAR_13 = [['name', self.ref_doctype]]
for df in frappe.get_meta(self.ref_doctype).fields:
if df.in_list_view:
VAR_13.append([df.fieldname, self.ref_doctype])
return VAR_13
def FUNC_23(self, VAR_12, VAR_6):
VAR_29 = VAR_12.get('filters') or []
if VAR_6:
for key, value in iteritems(VAR_6):
VAR_39, VAR_40 = '=', value
if isinstance(value, (list, tuple)):
VAR_39, VAR_40 = value
VAR_29.append([key, VAR_39, VAR_40])
return VAR_29
def FUNC_24(self, VAR_12):
VAR_14 = None
if VAR_12.get('sort_by'):
VAR_26 = CLASS_0._format(VAR_12.get('sort_by').split('.')) + ' ' + VAR_12.get('sort_order')
elif VAR_12.get('order_by'):
VAR_26 = VAR_12.get('order_by')
else:
VAR_26 = CLASS_0._format([self.ref_doctype, 'modified']) + ' desc'
if VAR_12.get('sort_by_next'):
VAR_26 += ', ' + CLASS_0._format(VAR_12.get('sort_by_next').split('.')) + ' ' + VAR_12.get('sort_order_next')
VAR_27 = None
if VAR_12.get('group_by'):
VAR_14 = frappe._dict(VAR_12['group_by'])
VAR_27 = VAR_14['group_by']
VAR_26 = '_aggregate_column desc'
return VAR_26, VAR_27, VAR_14
def FUNC_25(self, VAR_13, VAR_14):
VAR_30 = []
for (fieldname, VAR_4) in VAR_13:
VAR_5 = frappe.get_meta(VAR_4)
if VAR_5.get_field(fieldname):
VAR_41 = VAR_5.get_field(fieldname)
else:
if fieldname == '_aggregate_column':
VAR_32 = FUNC_3(VAR_14, VAR_5)
else:
VAR_32 = VAR_5.get_label(fieldname)
VAR_41 = frappe._dict(fieldname=fieldname, VAR_32=label)
if fieldname == "name":
VAR_41.fieldtype = "Link"
VAR_41.options = VAR_4
VAR_30.append(VAR_41)
return VAR_30
def FUNC_26(self, VAR_15, VAR_13):
VAR_25 = []
for row in VAR_15:
if isinstance(row, (list, tuple)):
VAR_42 = frappe._dict()
for VAR_43, val in enumerate(row):
VAR_42[VAR_13[VAR_43].get('fieldname')] = val
elif isinstance(row, dict):
VAR_42 = frappe._dict(row)
VAR_25.append(VAR_42)
return VAR_25
@Document.whitelist
def FUNC_27(self, VAR_16):
self.db_set("disabled", cint(VAR_16))
@frappe.whitelist()
def FUNC_0(VAR_0):
return frappe.db.get_value('Report',
VAR_0, 'disable_prepared_report') or 0
def FUNC_1(VAR_1, VAR_2):
return frappe.local.module_app[scrub(VAR_1)] + "." + scrub(VAR_1) \
+ ".report." + scrub(VAR_2) + "." + scrub(VAR_2)
def FUNC_2(VAR_3, VAR_4):
if VAR_3['aggregate_function'] == 'count':
VAR_31 = 'count(*) as _aggregate_column'
else:
VAR_31 = '{0}(`tab{1}`.{2}) as _aggregate_column'.format(
VAR_3.aggregate_function,
VAR_4,
VAR_3.aggregate_on
)
return VAR_31
def FUNC_3(VAR_3, VAR_5):
if VAR_3['aggregate_function'] == 'count':
VAR_32 = 'Count'
else:
VAR_33 = {
'avg': 'Average',
'sum': 'Sum'
}
VAR_34 = VAR_5.get_label(VAR_3.aggregate_on)
VAR_32 = _('{function} of {fieldlabel}').format(
function=VAR_33[VAR_3.aggregate_function],
fieldlabel = VAR_34
)
return VAR_32
|
from __future__ import unicode_literals
import frappe
import json, datetime
from frappe import _, scrub
import frappe.desk.query_report
from frappe.utils import cint, cstr
from frappe.model.document import Document
from frappe.modules.export_file import export_to_files
from frappe.modules import make_boilerplate
from frappe.core.doctype.page.page import delete_custom_role
from frappe.core.doctype.custom_role.custom_role import get_custom_allowed_roles
from frappe.desk.reportview import append_totals_row
from six import .iteritems
from frappe.utils.safe_exec import safe_exec
class CLASS_0(Document):
def FUNC_4(self):
if not self.module:
self.module = frappe.db.get_value("DocType", self.ref_doctype, "module")
if not self.is_standard:
self.is_standard = "No"
if frappe.session.user=="Administrator" and getattr(frappe.local.conf, 'developer_mode',0)==1:
self.is_standard = "Yes"
if self.is_standard == "No":
if self.report_type != 'Report Builder':
frappe.only_for('Script Manager', True)
if frappe.db.get_value("Report", self.name, "is_standard") == "Yes":
frappe.throw(_("Cannot edit a standard VAR_0. Please duplicate and create a new report"))
if self.is_standard == "Yes" and frappe.session.user!="Administrator":
frappe.throw(_("Only Administrator can save a standard VAR_0. Please rename and save."))
if self.report_type == "Report Builder":
self.update_report_json()
def FUNC_5(self):
self.set_doctype_roles()
def FUNC_6(self):
self.export_doc()
def FUNC_7(self):
if (self.is_standard == 'Yes'
and not cint(getattr(frappe.local.conf, 'developer_mode', 0))
and not frappe.flags.in_patch):
frappe.throw(_("You are not VAR_17 to delete Standard Report"))
delete_custom_role('report', self.name)
def FUNC_8(self):
return [d.as_dict(no_default_fields = True) for d in self.columns]
@frappe.whitelist()
def FUNC_9(self):
if not self.get('roles') and self.is_standard == 'No':
VAR_5 = frappe.get_meta(self.ref_doctype)
if not VAR_5.istable:
VAR_35 = [{'role': d.role} for d in VAR_5.permissions if d.permlevel==0]
self.set('roles', VAR_35)
def FUNC_10(self):
from frappe.utils import has_common
VAR_17 = [d.role for d in frappe.get_all("Has Role", fields=["role"],
VAR_6={"parent": self.name})]
VAR_18 = get_custom_allowed_roles('report', self.name)
VAR_17.extend(VAR_18)
if not VAR_17:
return True
if has_common(frappe.get_roles(), VAR_17):
return True
def FUNC_11(self):
if not self.json:
self.json = '{}'
def FUNC_12(self):
if frappe.flags.in_import:
return
if self.is_standard == 'Yes' and (frappe.local.conf.get('developer_mode') or 0) == 1:
export_to_files(record_list=[['Report', self.name]],
record_module=self.module, create_init=True)
self.create_report_py()
def FUNC_13(self):
if self.report_type == "Script Report":
make_boilerplate("controller.py", self, {"name": self.name})
make_boilerplate("controller.js", self, {"name": self.name})
def FUNC_14(self, VAR_6):
if not self.query:
frappe.throw(_("Must specify a Query to run"), title=_('Report Document Error'))
if not self.query.lower().startswith("select"):
frappe.throw(_("Query must be a SELECT"), title=_('Report Document Error'))
VAR_15 = [list(t) for t in frappe.db.sql(self.query, VAR_6, debug=True)]
VAR_13 = self.get_columns() or [cstr(c[0]) for c in frappe.db.get_description()]
return [VAR_13, VAR_15]
def FUNC_15(self, VAR_6):
VAR_19 = 30
VAR_20 = []
VAR_21 = datetime.datetime.now()
if self.is_standard == 'Yes':
VAR_20 = self.execute_module(VAR_6)
else:
VAR_20 = self.execute_script(VAR_6)
VAR_22 = (datetime.datetime.now() - VAR_21).total_seconds()
if VAR_22 > VAR_19 and not self.prepared_report:
self.db_set('prepared_report', 1)
frappe.cache().hset('report_execution_time', self.name, VAR_22)
return VAR_20
def FUNC_16(self, VAR_6):
module = self.module or frappe.db.get_value("DocType", self.ref_doctype, "module")
VAR_23 = FUNC_1(VAR_1, self.name) + ".execute"
return frappe.get_attr(VAR_23)(frappe._dict(VAR_6))
def FUNC_17(self, VAR_6):
VAR_24 = {"filters": frappe._dict(VAR_6), 'data':None, 'result':None}
safe_exec(self.report_script, None, VAR_24)
if VAR_24['data']:
return VAR_24['data']
else:
return self.get_columns(), VAR_24['result']
def FUNC_18(self, VAR_6=None, VAR_7=None, VAR_8=None, VAR_9=False, VAR_10=False):
if self.report_type in ('Query Report', 'Script Report', 'Custom Report'):
VAR_13, VAR_15 = self.run_query_report(VAR_6, VAR_8, VAR_10)
else:
VAR_13, VAR_15 = self.run_standard_report(VAR_6, VAR_7, VAR_8)
if VAR_9:
VAR_15 = self.build_data_dict(VAR_15, VAR_13)
return VAR_13, VAR_15
def FUNC_19(self, VAR_6, VAR_8, VAR_10=False):
VAR_13, VAR_15 = [], []
VAR_25 = frappe.desk.query_report.run(self.name,
VAR_6=filters, VAR_8=user, VAR_10=ignore_prepared_report)
for d in VAR_25.get('columns'):
if isinstance(d, dict):
VAR_36 = frappe._dict(d)
if not VAR_36.fieldname:
VAR_36.fieldname = VAR_36.label
VAR_13.append(VAR_36)
else:
VAR_37, VAR_38 = "Data", None
VAR_11 = d.split(':')
if len(VAR_11) > 1:
if VAR_11[1]:
VAR_37, VAR_38 = VAR_11[1], None
if VAR_37 and '/' in VAR_37:
fieldtype, VAR_38 = VAR_37.split('/')
VAR_13.append(frappe._dict(VAR_32=VAR_11[0], VAR_37=fieldtype, fieldname=VAR_11[0], VAR_38=options))
VAR_15 += VAR_25.get('result')
return VAR_13, VAR_15
def FUNC_20(self, VAR_6, VAR_7, VAR_8):
VAR_12 = json.loads(self.json)
VAR_13 = self.get_standard_report_columns(VAR_12)
VAR_15 = []
VAR_26, VAR_27, VAR_14 = self.get_standard_report_order_by(VAR_12)
VAR_28 = frappe.get_list(self.ref_doctype,
fields = [
FUNC_2(VAR_14, c[1]) if c[0] == '_aggregate_column' and VAR_14
else CLASS_0._format([c[1], c[0]]) for c in VAR_13
],
VAR_6 = self.get_standard_report_filters(VAR_12, VAR_6),
VAR_26 = order_by,
VAR_27 = group_by,
as_list = True,
VAR_7 = limit,
VAR_8 = user)
VAR_13 = self.build_standard_report_columns(VAR_13, VAR_14)
VAR_15 = result + [list(d) for d in VAR_28]
if VAR_12.get('add_totals_row'):
VAR_15 = append_totals_row(VAR_15)
return VAR_13, VAR_15
@staticmethod
def FUNC_21(VAR_11):
return '`tab{0}`.`{1}`'.format(*VAR_11)
def FUNC_22(self, VAR_12):
if VAR_12.get('fields'):
VAR_13 = VAR_12.get('fields')
elif VAR_12.get('columns'):
VAR_13 = VAR_12.get('columns')
elif VAR_12.get('fields'):
VAR_13 = VAR_12.get('fields')
else:
VAR_13 = [['name', self.ref_doctype]]
for df in frappe.get_meta(self.ref_doctype).fields:
if df.in_list_view:
VAR_13.append([df.fieldname, self.ref_doctype])
return VAR_13
def FUNC_23(self, VAR_12, VAR_6):
VAR_29 = VAR_12.get('filters') or []
if VAR_6:
for key, value in iteritems(VAR_6):
VAR_39, VAR_40 = '=', value
if isinstance(value, (list, tuple)):
VAR_39, VAR_40 = value
VAR_29.append([key, VAR_39, VAR_40])
return VAR_29
def FUNC_24(self, VAR_12):
VAR_14 = None
if VAR_12.get('sort_by'):
VAR_26 = CLASS_0._format(VAR_12.get('sort_by').split('.')) + ' ' + VAR_12.get('sort_order')
elif VAR_12.get('order_by'):
VAR_26 = VAR_12.get('order_by')
else:
VAR_26 = CLASS_0._format([self.ref_doctype, 'modified']) + ' desc'
if VAR_12.get('sort_by_next'):
VAR_26 += ', ' + CLASS_0._format(VAR_12.get('sort_by_next').split('.')) + ' ' + VAR_12.get('sort_order_next')
VAR_27 = None
if VAR_12.get('group_by'):
VAR_14 = frappe._dict(VAR_12['group_by'])
VAR_27 = VAR_14['group_by']
VAR_26 = '_aggregate_column desc'
return VAR_26, VAR_27, VAR_14
def FUNC_25(self, VAR_13, VAR_14):
VAR_30 = []
for (fieldname, VAR_4) in VAR_13:
VAR_5 = frappe.get_meta(VAR_4)
if VAR_5.get_field(fieldname):
VAR_41 = VAR_5.get_field(fieldname)
else:
if fieldname == '_aggregate_column':
VAR_32 = FUNC_3(VAR_14, VAR_5)
else:
VAR_32 = VAR_5.get_label(fieldname)
VAR_41 = frappe._dict(fieldname=fieldname, VAR_32=label)
if fieldname == "name":
VAR_41.fieldtype = "Link"
VAR_41.options = VAR_4
VAR_30.append(VAR_41)
return VAR_30
def FUNC_26(self, VAR_15, VAR_13):
VAR_25 = []
for row in VAR_15:
if isinstance(row, (list, tuple)):
VAR_42 = frappe._dict()
for VAR_43, val in enumerate(row):
VAR_42[VAR_13[VAR_43].get('fieldname')] = val
elif isinstance(row, dict):
VAR_42 = frappe._dict(row)
VAR_25.append(VAR_42)
return VAR_25
@frappe.whitelist()
def FUNC_27(self, VAR_16):
self.db_set("disabled", cint(VAR_16))
@frappe.whitelist()
def FUNC_0(VAR_0):
return frappe.db.get_value('Report',
VAR_0, 'disable_prepared_report') or 0
def FUNC_1(VAR_1, VAR_2):
return frappe.local.module_app[scrub(VAR_1)] + "." + scrub(VAR_1) \
+ ".report." + scrub(VAR_2) + "." + scrub(VAR_2)
def FUNC_2(VAR_3, VAR_4):
if VAR_3['aggregate_function'] == 'count':
VAR_31 = 'count(*) as _aggregate_column'
else:
VAR_31 = '{0}(`tab{1}`.{2}) as _aggregate_column'.format(
VAR_3.aggregate_function,
VAR_4,
VAR_3.aggregate_on
)
return VAR_31
def FUNC_3(VAR_3, VAR_5):
if VAR_3['aggregate_function'] == 'count':
VAR_32 = 'Count'
else:
VAR_33 = {
'avg': 'Average',
'sum': 'Sum'
}
VAR_34 = VAR_5.get_label(VAR_3.aggregate_on)
VAR_32 = _('{function} of {fieldlabel}').format(
function=VAR_33[VAR_3.aggregate_function],
fieldlabel = VAR_34
)
return VAR_32
| [
1,
2,
3,
18,
19,
25,
30,
32,
35,
38,
41,
44,
47,
50,
57,
60,
67,
71,
74,
77,
80,
83,
87,
91,
95,
97,
102,
106,
109,
112,
114,
116,
119,
121,
122,
127,
128,
132,
134,
136,
138,
142,
144,
151,
157,
160,
162,
167,
182,
184,
186,
188,
194,
206,
208,
210,
213,
215,
218,
220,
233,
235,
238,
245,
247,
252,
257,
260,
266,
268,
271,
274,
282,
284,
285,
289,
292,
301,
304,
306,
310,
315,
319,
329,
331,
346,
22,
69
] | [
1,
2,
3,
18,
19,
25,
30,
32,
35,
38,
41,
44,
47,
50,
57,
60,
68,
72,
75,
78,
81,
84,
88,
92,
96,
98,
103,
107,
110,
113,
115,
117,
120,
122,
123,
128,
129,
133,
135,
137,
139,
143,
145,
152,
158,
161,
163,
168,
183,
185,
187,
189,
195,
207,
209,
211,
214,
216,
219,
221,
234,
236,
239,
246,
248,
253,
258,
261,
267,
269,
272,
275,
283,
285,
286,
290,
293,
302,
305,
307,
311,
316,
320,
330,
332,
347,
22,
70
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
from twisted.internet import defer
from synapse.rest.client.v1 import presence
from synapse.types import UserID
from tests import unittest
class PresenceTestCase(unittest.HomeserverTestCase):
""" Tests presence REST API. """
user_id = "@sid:red"
user = UserID.from_string(user_id)
servlets = [presence.register_servlets]
def make_homeserver(self, reactor, clock):
presence_handler = Mock()
presence_handler.set_state.return_value = defer.succeed(None)
hs = self.setup_test_homeserver(
"red",
http_client=None,
federation_client=Mock(),
presence_handler=presence_handler,
)
return hs
def test_put_presence(self):
"""
PUT to the status endpoint with use_presence enabled will call
set_state on the presence handler.
"""
self.hs.config.use_presence = True
body = {"presence": "here", "status_msg": "beep boop"}
request, channel = self.make_request(
"PUT", "/presence/%s/status" % (self.user_id,), body
)
self.assertEqual(channel.code, 200)
self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 1)
def test_put_presence_disabled(self):
"""
PUT to the status endpoint with use_presence disabled will NOT call
set_state on the presence handler.
"""
self.hs.config.use_presence = False
body = {"presence": "here", "status_msg": "beep boop"}
request, channel = self.make_request(
"PUT", "/presence/%s/status" % (self.user_id,), body
)
self.assertEqual(channel.code, 200)
self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 0)
| # -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
from twisted.internet import defer
from synapse.rest.client.v1 import presence
from synapse.types import UserID
from tests import unittest
class PresenceTestCase(unittest.HomeserverTestCase):
""" Tests presence REST API. """
user_id = "@sid:red"
user = UserID.from_string(user_id)
servlets = [presence.register_servlets]
def make_homeserver(self, reactor, clock):
presence_handler = Mock()
presence_handler.set_state.return_value = defer.succeed(None)
hs = self.setup_test_homeserver(
"red",
federation_http_client=None,
federation_client=Mock(),
presence_handler=presence_handler,
)
return hs
def test_put_presence(self):
"""
PUT to the status endpoint with use_presence enabled will call
set_state on the presence handler.
"""
self.hs.config.use_presence = True
body = {"presence": "here", "status_msg": "beep boop"}
request, channel = self.make_request(
"PUT", "/presence/%s/status" % (self.user_id,), body
)
self.assertEqual(channel.code, 200)
self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 1)
def test_put_presence_disabled(self):
"""
PUT to the status endpoint with use_presence disabled will NOT call
set_state on the presence handler.
"""
self.hs.config.use_presence = False
body = {"presence": "here", "status_msg": "beep boop"}
request, channel = self.make_request(
"PUT", "/presence/%s/status" % (self.user_id,), body
)
self.assertEqual(channel.code, 200)
self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 0)
| open_redirect | {
"code": [
" http_client=None,"
],
"line_no": [
41
]
} | {
"code": [
" federation_http_client=None,"
],
"line_no": [
41
]
} |
from mock import Mock
from twisted.internet import defer
from synapse.rest.client.v1 import presence
from synapse.types import UserID
from tests import unittest
class CLASS_0(unittest.HomeserverTestCase):
VAR_0 = "@sid:red"
VAR_1 = UserID.from_string(VAR_0)
VAR_2 = [presence.register_servlets]
def FUNC_0(self, VAR_3, VAR_4):
VAR_5 = Mock()
VAR_5.set_state.return_value = defer.succeed(None)
VAR_6 = self.setup_test_homeserver(
"red",
http_client=None,
federation_client=Mock(),
VAR_5=presence_handler,
)
return VAR_6
def FUNC_1(self):
self.hs.config.use_presence = True
VAR_7 = {"presence": "here", "status_msg": "beep boop"}
VAR_8, VAR_9 = self.make_request(
"PUT", "/presence/%s/status" % (self.user_id,), VAR_7
)
self.assertEqual(VAR_9.code, 200)
self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 1)
def FUNC_2(self):
self.hs.config.use_presence = False
VAR_7 = {"presence": "here", "status_msg": "beep boop"}
VAR_8, VAR_9 = self.make_request(
"PUT", "/presence/%s/status" % (self.user_id,), VAR_7
)
self.assertEqual(VAR_9.code, 200)
self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 0)
|
from mock import Mock
from twisted.internet import defer
from synapse.rest.client.v1 import presence
from synapse.types import UserID
from tests import unittest
class CLASS_0(unittest.HomeserverTestCase):
VAR_0 = "@sid:red"
VAR_1 = UserID.from_string(VAR_0)
VAR_2 = [presence.register_servlets]
def FUNC_0(self, VAR_3, VAR_4):
VAR_5 = Mock()
VAR_5.set_state.return_value = defer.succeed(None)
VAR_6 = self.setup_test_homeserver(
"red",
federation_http_client=None,
federation_client=Mock(),
VAR_5=presence_handler,
)
return VAR_6
def FUNC_1(self):
self.hs.config.use_presence = True
VAR_7 = {"presence": "here", "status_msg": "beep boop"}
VAR_8, VAR_9 = self.make_request(
"PUT", "/presence/%s/status" % (self.user_id,), VAR_7
)
self.assertEqual(VAR_9.code, 200)
self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 1)
def FUNC_2(self):
self.hs.config.use_presence = False
VAR_7 = {"presence": "here", "status_msg": "beep boop"}
VAR_8, VAR_9 = self.make_request(
"PUT", "/presence/%s/status" % (self.user_id,), VAR_7
)
self.assertEqual(VAR_9.code, 200)
self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 0)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
19,
22,
24,
25,
28,
30,
33,
35,
38,
45,
47,
54,
59,
62,
69,
74,
77,
27,
49,
50,
51,
52,
64,
65,
66,
67
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
19,
22,
24,
25,
28,
30,
33,
35,
38,
45,
47,
54,
59,
62,
69,
74,
77,
27,
49,
50,
51,
52,
64,
65,
66,
67
] |
1CWE-79
| from warcio.timeutils import timestamp_to_datetime, timestamp_to_sec
from warcio.timeutils import timestamp_now
from pywb.utils.loaders import load
from six.moves.urllib.parse import urlsplit, quote
from jinja2 import Environment, TemplateNotFound, contextfunction
from jinja2 import FileSystemLoader, PackageLoader, ChoiceLoader
from babel.support import Translations
from webassets.ext.jinja2 import AssetsExtension
from webassets.loaders import YAMLLoader
from webassets.env import Resolver
from pkg_resources import resource_filename
import os
try:
import ujson as json
except ImportError: # pragma: no cover
import json
# ============================================================================
class RelEnvironment(Environment):
"""Override join_path() to enable relative template paths."""
def join_path(self, template, parent):
return os.path.join(os.path.dirname(parent), template)
# ============================================================================
class JinjaEnv(object):
"""Pywb JinjaEnv class that provides utility functions used by the templates,
configured template loaders and template paths, and contains the actual Jinja
env used by each template."""
def __init__(self, paths=None,
packages=None,
assets_path=None,
globals=None,
overlay=None,
extensions=None,
env_template_params_key='pywb.template_params',
env_template_dir_key='pywb.templates_dir'):
"""Construct a new JinjaEnv.
:param list[str] paths: List of paths to search for templates
:param list[str] packages: List of assets package names
:param str assets_path: Path to a yaml file containing assets
:param dict[str, str] globals: Dictionary of additional globals available during template rendering
:param overlay:
:param list extensions: List of webassets extension classes
:param str env_template_params_key: The full pywb package key for the template params
:param str env_template_dir_key: The full pywb package key for the template directory
"""
if paths is None:
paths = ['templates', '.', '/']
if packages is None:
packages = ['pywb']
self._init_filters()
loader = ChoiceLoader(self._make_loaders(paths, packages))
self.env_template_params_key = env_template_params_key
self.env_template_dir_key = env_template_dir_key
extensions = extensions or []
if assets_path:
extensions.append(AssetsExtension)
if overlay:
jinja_env = overlay.jinja_env.overlay(loader=loader,
trim_blocks=True,
extensions=extensions)
else:
jinja_env = RelEnvironment(loader=loader,
trim_blocks=True,
extensions=extensions)
jinja_env.filters.update(self.filters)
if globals:
jinja_env.globals.update(globals)
self.jinja_env = jinja_env
# init assets
if assets_path:
assets_loader = YAMLLoader(load(assets_path))
assets_env = assets_loader.load_environment()
assets_env.resolver = PkgResResolver()
jinja_env.assets_environment = assets_env
self.default_locale = ''
def _make_loaders(self, paths, packages):
"""Initialize the template loaders based on the supplied paths and packages.
:param list[str] paths: List of paths to search for templates
:param list[str] packages: List of assets package names
:return: A list of loaders to be used for loading the template assets
:rtype: list[FileSystemLoader|PackageLoader]
"""
loaders = []
# add loaders for paths
for path in paths:
loaders.append(FileSystemLoader(path))
# add loaders for all specified packages
for package in packages:
loaders.append(PackageLoader(package))
return loaders
def init_loc(self, locales_root_dir, locales, loc_map, default_locale):
locales = locales or []
locales_root_dir = locales_root_dir or os.path.join('i18n', 'translations')
default_locale = default_locale or 'en'
self.default_locale = default_locale
if locales_root_dir:
for loc in locales:
loc_map[loc] = Translations.load(locales_root_dir, [loc, default_locale])
#jinja_env.jinja_env.install_gettext_translations(translations)
def get_translate(context):
loc = context.get('env', {}).get('pywb_lang', default_locale)
return loc_map.get(loc)
def override_func(jinja_env, name):
@contextfunction
def get_override(context, text):
translate = get_translate(context)
if not translate:
return text
func = getattr(translate, name)
return func(text)
jinja_env.globals[name] = get_override
# standard gettext() translation function
override_func(self.jinja_env, 'gettext')
# single/plural form translation function
override_func(self.jinja_env, 'ngettext')
# Special _Q() function to return %-encoded text, necessary for use
# with text in banner
@contextfunction
def quote_gettext(context, text):
translate = get_translate(context)
if not translate:
return text
text = translate.gettext(text)
return quote(text, safe='/: ')
self.jinja_env.globals['locales'] = list(loc_map.keys())
self.jinja_env.globals['_Q'] = quote_gettext
self.jinja_env.globals['default_locale'] = default_locale
@contextfunction
def switch_locale(context, locale):
environ = context.get('env')
curr_loc = environ.get('pywb_lang', '')
request_uri = environ.get('REQUEST_URI', environ.get('PATH_INFO'))
if curr_loc:
return request_uri.replace(curr_loc, locale, 1)
app_prefix = environ.get('pywb.app_prefix', '')
if app_prefix and request_uri.startswith(app_prefix):
request_uri = request_uri.replace(app_prefix, '')
return app_prefix + '/' + locale + request_uri
@contextfunction
def get_locale_prefixes(context):
environ = context.get('env')
locale_prefixes = {}
orig_prefix = environ.get('pywb.app_prefix', '')
coll = environ.get('SCRIPT_NAME', '')
if orig_prefix:
coll = coll[len(orig_prefix):]
curr_loc = environ.get('pywb_lang', '')
if curr_loc:
coll = coll[len(curr_loc) + 1:]
for locale in loc_map.keys():
locale_prefixes[locale] = orig_prefix + '/' + locale + coll + '/'
return locale_prefixes
self.jinja_env.globals['switch_locale'] = switch_locale
self.jinja_env.globals['get_locale_prefixes'] = get_locale_prefixes
def template_filter(self, param=None):
"""Returns a decorator that adds the wrapped function to dictionary of template filters.
The wrapped function is keyed by either the supplied param (if supplied)
or by the wrapped functions name.
:param param: Optional name to use instead of the name of the function to be wrapped
:return: A decorator to wrap a template filter function
:rtype: callable
"""
def deco(func):
name = param or func.__name__
self.filters[name] = func
return func
return deco
def _init_filters(self):
"""Initialize the default pywb provided Jninja filters available during template rendering"""
self.filters = {}
@self.template_filter()
def format_ts(value, format_='%a, %b %d %Y %H:%M:%S'):
"""Formats the supplied timestamp using format_
:param str value: The timestamp to be formatted
:param str format_: The format string
:return: The correctly formatted timestamp as determined by format_
:rtype: str
"""
if format_ == '%s':
return timestamp_to_sec(value)
else:
value = timestamp_to_datetime(value)
return value.strftime(format_)
@self.template_filter('urlsplit')
def get_urlsplit(url):
"""Splits the supplied URL
:param str url: The url to be split
:return: The split url
:rtype: urllib.parse.SplitResult
"""
split = urlsplit(url)
return split
@self.template_filter()
def tojson(obj):
"""Converts the supplied object/array/any to a JSON string if it can be JSONified
:param any obj: The value to be converted to a JSON string
:return: The JSON string representation of the supplied value
:rtype: str
"""
return json.dumps(obj)
@self.template_filter()
def tobool(bool_val):
"""Converts a python boolean to a JS "true" or "false" string
:param any obj: A value to be evaluated as a boolean
:return: The string "true" or "false" to be inserted into JS
"""
return 'true' if bool_val else 'false'
# ============================================================================
class BaseInsertView(object):
"""Base class of all template views used by Pywb"""
def __init__(self, jenv, insert_file, banner_view=None):
"""Create a new BaseInsertView.
:param JinjaEnv jenv: The instance of pywb.rewrite.templateview.JinjaEnv to be used
:param str insert_file: The name of the template file
:param BaseInsertView banner_view: The banner_view property of pywb.apps.RewriterApp
"""
self.jenv = jenv
self.insert_file = insert_file
self.banner_view = banner_view
def render_to_string(self, env, **kwargs):
"""Render this template.
:param dict env: The WSGI environment associated with the request causing this template to be rendered
:param any kwargs: The keyword arguments to be supplied to the Jninja template render method
:return: The rendered template
:rtype: str
"""
template = None
template_path = env.get(self.jenv.env_template_dir_key)
if template_path:
# jinja paths are not os paths, always use '/' as separator
# https://github.com/pallets/jinja/issues/411
template_path = template_path + '/' + self.insert_file
try:
template = self.jenv.jinja_env.get_template(template_path)
except TemplateNotFound as te:
pass
if not template:
template = self.jenv.jinja_env.get_template(self.insert_file)
params = env.get(self.jenv.env_template_params_key)
if params:
kwargs.update(params)
kwargs['env'] = env
kwargs['static_prefix'] = env.get('pywb.host_prefix', '') + env.get('pywb.app_prefix', '') + '/static'
return template.render(**kwargs)
# ============================================================================
class HeadInsertView(BaseInsertView):
    """The template view class associated with rendering the HTML inserted
    into the head of the pages replayed (WB Insert)."""

    def create_insert_func(self, wb_url,
                           wb_prefix,
                           host_prefix,
                           top_url,
                           env,
                           is_framed,
                           coll='',
                           include_ts=True,
                           **kwargs):
        """Create the function used to render the header insert template for the current request.

        :param rewrite.wburl.WbUrl wb_url: The WbUrl for the request this template is being rendered for
        :param str wb_prefix: The URL prefix pywb is serving the content using (e.g. http://localhost:8080/live/)
        :param str host_prefix: The host URL prefix pywb is running on (e.g. http://localhost:8080)
        :param str top_url: The full URL for this request (e.g. http://localhost:8080/live/http://example.com)
        :param dict env: The WSGI environment dictionary for this request
        :param bool is_framed: Is pywb or a specific collection running in framed mode
        :param str coll: The name of the collection this request is associated with
        :param bool include_ts: Should a timestamp be included in the rendered template
        :param kwargs: Additional keyword arguments to be supplied to the Jinja template render method
        :return: A function to be used to render the header insert for the request this template is being rendered for
        :rtype: callable
        """
        params = kwargs
        params['host_prefix'] = host_prefix
        params['wb_prefix'] = wb_prefix
        params['wb_url'] = wb_url
        params['top_url'] = top_url
        params['coll'] = coll
        params['is_framed'] = is_framed

        def make_head_insert(rule, cdx):
            # per-capture values are filled in from the cdx record at replay time
            params['wombat_ts'] = cdx['timestamp'] if include_ts else ''
            params['wombat_sec'] = timestamp_to_sec(cdx['timestamp'])
            params['is_live'] = cdx.get('is_live')

            if self.banner_view:
                banner_html = self.banner_view.render_to_string(env, cdx=cdx, **params)
                params['banner_html'] = banner_html

            return self.render_to_string(env, cdx=cdx, **params)

        return make_head_insert
# ============================================================================
class TopFrameView(BaseInsertView):
    """The template view class associated with rendering the replay iframe."""

    def get_top_frame(self, wb_url,
                      wb_prefix,
                      host_prefix,
                      env,
                      frame_mod,
                      replay_mod,
                      coll='',
                      extra_params=None):
        """Render the top frame insert for a replayed page.

        :param rewrite.wburl.WbUrl wb_url: The WbUrl for the request this template is being rendered for
        :param str wb_prefix: The URL prefix pywb is serving the content using (e.g. http://localhost:8080/live/)
        :param str host_prefix: The host URL prefix pywb is running on (e.g. http://localhost:8080)
        :param dict env: The WSGI environment dictionary for the request this template is being rendered for
        :param str frame_mod: The modifier to be used for framing (e.g. if_)
        :param str replay_mod: The modifier to be used in the URL of the page being replayed (e.g. mp_)
        :param str coll: The name of the collection this template is being rendered for
        :param dict extra_params: Additional parameters to be supplied to the Jinja template render method
        :return: The frame insert string
        :rtype: str
        """
        embed_url = wb_url.to_str(mod=replay_mod)

        # fall back to the current time when the requested url has no timestamp
        if wb_url.timestamp:
            timestamp = wb_url.timestamp
        else:
            timestamp = timestamp_now()

        is_proxy = 'wsgiprox.proxy_host' in env

        params = {'host_prefix': host_prefix,
                  'wb_prefix': wb_prefix,
                  'wb_url': wb_url,
                  'coll': coll,
                  'options': {'frame_mod': frame_mod,
                              'replay_mod': replay_mod},
                  'embed_url': embed_url,
                  'is_proxy': is_proxy,
                  'timestamp': timestamp,
                  'url': wb_url.get_url()
                  }

        if extra_params:
            params.update(extra_params)

        if self.banner_view:
            banner_html = self.banner_view.render_to_string(env, **params)
            params['banner_html'] = banner_html

        return self.render_to_string(env, **params)
# ============================================================================
class PkgResResolver(Resolver):
    """Class for resolving pywb package resources when installed via pypi or setup.py."""

    def get_pkg_path(self, item):
        """Get the package path for the supplied resource path.

        :param str item: A resource's full package path
        :return: The netloc and path from the item's package path, or None if item is not a 'pkg' URL string
        :rtype: tuple[str, str]
        """
        if not isinstance(item, str):
            return None

        parts = urlsplit(item)
        if parts.scheme == 'pkg' and parts.netloc:
            return (parts.netloc, parts.path)

        return None

    def resolve_source(self, ctx, item):
        # resolve 'pkg://' style items through pkg_resources first,
        # falling back to the default webassets resolution
        pkg = self.get_pkg_path(item)
        if pkg:
            filename = resource_filename(pkg[0], pkg[1])
            if filename:
                return filename

        return super(PkgResResolver, self).resolve_source(ctx, item)
| from warcio.timeutils import timestamp_to_datetime, timestamp_to_sec
from warcio.timeutils import timestamp_now
from pywb.utils.loaders import load
from six.moves.urllib.parse import urlsplit, quote
from jinja2 import Environment, TemplateNotFound, contextfunction, select_autoescape
from jinja2 import FileSystemLoader, PackageLoader, ChoiceLoader
from babel.support import Translations
from webassets.ext.jinja2 import AssetsExtension
from webassets.loaders import YAMLLoader
from webassets.env import Resolver
from pkg_resources import resource_filename
import os
try:
import ujson as json
except ImportError: # pragma: no cover
import json
# ============================================================================
class RelEnvironment(Environment):
    """Override join_path() to enable relative template paths."""

    def join_path(self, template, parent):
        # resolve the template name relative to the directory of its parent template
        return os.path.join(os.path.dirname(parent), template)
# ============================================================================
class JinjaEnv(object):
    """Pywb JinjaEnv class that provides utility functions used by the templates,
    configured template loaders and template paths, and contains the actual Jinja
    env used by each template."""

    def __init__(self, paths=None,
                 packages=None,
                 assets_path=None,
                 globals=None,
                 overlay=None,
                 extensions=None,
                 env_template_params_key='pywb.template_params',
                 env_template_dir_key='pywb.templates_dir'):
        """Construct a new JinjaEnv.

        :param list[str] paths: List of paths to search for templates
        :param list[str] packages: List of assets package names
        :param str assets_path: Path to a yaml file containing assets
        :param dict[str, str] globals: Dictionary of additional globals available during template rendering
        :param overlay: Optional JinjaEnv whose jinja env this one overlays
        :param list extensions: List of webassets extension classes
        :param str env_template_params_key: The full pywb package key for the template params
        :param str env_template_dir_key: The full pywb package key for the template directory
        """
        if paths is None:
            paths = ['templates', '.', '/']

        if packages is None:
            packages = ['pywb']

        self._init_filters()

        loader = ChoiceLoader(self._make_loaders(paths, packages))

        self.env_template_params_key = env_template_params_key
        self.env_template_dir_key = env_template_dir_key

        extensions = extensions or []

        if assets_path:
            extensions.append(AssetsExtension)

        # autoescape is enabled (via select_autoescape) to prevent XSS from
        # untrusted values interpolated into HTML templates
        if overlay:
            jinja_env = overlay.jinja_env.overlay(loader=loader,
                                                 autoescape=select_autoescape(),
                                                 trim_blocks=True,
                                                 extensions=extensions)
        else:
            jinja_env = RelEnvironment(loader=loader,
                                       autoescape=select_autoescape(),
                                       trim_blocks=True,
                                       extensions=extensions)

        jinja_env.filters.update(self.filters)

        if globals:
            jinja_env.globals.update(globals)

        self.jinja_env = jinja_env

        # init assets
        if assets_path:
            assets_loader = YAMLLoader(load(assets_path))
            assets_env = assets_loader.load_environment()
            assets_env.resolver = PkgResResolver()
            jinja_env.assets_environment = assets_env

        self.default_locale = ''

    def _make_loaders(self, paths, packages):
        """Initialize the template loaders based on the supplied paths and packages.

        :param list[str] paths: List of paths to search for templates
        :param list[str] packages: List of assets package names
        :return: A list of loaders to be used for loading the template assets
        :rtype: list[FileSystemLoader|PackageLoader]
        """
        loaders = []
        # add loaders for paths
        for path in paths:
            loaders.append(FileSystemLoader(path))

        # add loaders for all specified packages
        for package in packages:
            loaders.append(PackageLoader(package))

        return loaders

    def init_loc(self, locales_root_dir, locales, loc_map, default_locale):
        """Initialize i18n support: load translations and install the
        locale-related template globals (gettext, ngettext, _Q, etc.)."""
        locales = locales or []
        locales_root_dir = locales_root_dir or os.path.join('i18n', 'translations')
        default_locale = default_locale or 'en'
        self.default_locale = default_locale

        if locales_root_dir:
            for loc in locales:
                loc_map[loc] = Translations.load(locales_root_dir, [loc, default_locale])
                #jinja_env.jinja_env.install_gettext_translations(translations)

        def get_translate(context):
            # select the translations for the request's language, if any
            loc = context.get('env', {}).get('pywb_lang', default_locale)
            return loc_map.get(loc)

        def override_func(jinja_env, name):
            @contextfunction
            def get_override(context, text):
                translate = get_translate(context)
                if not translate:
                    return text

                func = getattr(translate, name)
                return func(text)

            jinja_env.globals[name] = get_override

        # standard gettext() translation function
        override_func(self.jinja_env, 'gettext')

        # single/plural form translation function
        override_func(self.jinja_env, 'ngettext')

        # Special _Q() function to return %-encoded text, necessary for use
        # with text in banner
        @contextfunction
        def quote_gettext(context, text):
            translate = get_translate(context)
            if not translate:
                return text

            text = translate.gettext(text)
            return quote(text, safe='/: ')

        self.jinja_env.globals['locales'] = list(loc_map.keys())
        self.jinja_env.globals['_Q'] = quote_gettext
        self.jinja_env.globals['default_locale'] = default_locale

        @contextfunction
        def switch_locale(context, locale):
            # rewrite the current request uri to point at the given locale
            environ = context.get('env')
            curr_loc = environ.get('pywb_lang', '')

            request_uri = environ.get('REQUEST_URI', environ.get('PATH_INFO'))

            if curr_loc:
                return request_uri.replace(curr_loc, locale, 1)

            app_prefix = environ.get('pywb.app_prefix', '')

            if app_prefix and request_uri.startswith(app_prefix):
                request_uri = request_uri.replace(app_prefix, '')

            return app_prefix + '/' + locale + request_uri

        @contextfunction
        def get_locale_prefixes(context):
            # compute, per configured locale, the url prefix for the current coll
            environ = context.get('env')
            locale_prefixes = {}

            orig_prefix = environ.get('pywb.app_prefix', '')
            coll = environ.get('SCRIPT_NAME', '')

            if orig_prefix:
                coll = coll[len(orig_prefix):]

            curr_loc = environ.get('pywb_lang', '')
            if curr_loc:
                # strip the leading '/<locale>' segment
                coll = coll[len(curr_loc) + 1:]

            for locale in loc_map.keys():
                locale_prefixes[locale] = orig_prefix + '/' + locale + coll + '/'

            return locale_prefixes

        self.jinja_env.globals['switch_locale'] = switch_locale
        self.jinja_env.globals['get_locale_prefixes'] = get_locale_prefixes

    def template_filter(self, param=None):
        """Returns a decorator that adds the wrapped function to dictionary of template filters.

        The wrapped function is keyed by either the supplied param (if supplied)
        or by the wrapped function's name.

        :param param: Optional name to use instead of the name of the function to be wrapped
        :return: A decorator to wrap a template filter function
        :rtype: callable
        """
        def deco(func):
            name = param or func.__name__
            self.filters[name] = func
            return func

        return deco

    def _init_filters(self):
        """Initialize the default pywb provided Jinja filters available during template rendering."""
        self.filters = {}

        @self.template_filter()
        def format_ts(value, format_='%a, %b %d %Y %H:%M:%S'):
            """Formats the supplied timestamp using format_.

            :param str value: The timestamp to be formatted
            :param str format_: The format string
            :return: The correctly formatted timestamp as determined by format_
            :rtype: str
            """
            if format_ == '%s':
                return timestamp_to_sec(value)
            else:
                value = timestamp_to_datetime(value)
                return value.strftime(format_)

        @self.template_filter('urlsplit')
        def get_urlsplit(url):
            """Splits the supplied URL.

            :param str url: The url to be split
            :return: The split url
            :rtype: urllib.parse.SplitResult
            """
            split = urlsplit(url)
            return split

        @self.template_filter()
        def tojson(obj):
            """Converts the supplied object/array/any to a JSON string if it can be JSONified.

            :param any obj: The value to be converted to a JSON string
            :return: The JSON string representation of the supplied value
            :rtype: str
            """
            return json.dumps(obj)

        @self.template_filter()
        def tobool(bool_val):
            """Converts a python boolean to a JS "true" or "false" string.

            :param any bool_val: A value to be evaluated as a boolean
            :return: The string "true" or "false" to be inserted into JS
            :rtype: str
            """
            return 'true' if bool_val else 'false'
# ============================================================================
class BaseInsertView(object):
    """Base class of all template views used by Pywb."""

    def __init__(self, jenv, insert_file, banner_view=None):
        """Create a new BaseInsertView.

        :param JinjaEnv jenv: The instance of pywb.rewrite.templateview.JinjaEnv to be used
        :param str insert_file: The name of the template file
        :param BaseInsertView banner_view: The banner_view property of pywb.apps.RewriterApp
        """
        self.jenv = jenv
        self.insert_file = insert_file
        self.banner_view = banner_view

    def render_to_string(self, env, **kwargs):
        """Render this template.

        :param dict env: The WSGI environment associated with the request causing this template to be rendered
        :param any kwargs: The keyword arguments to be supplied to the Jinja template render method
        :return: The rendered template
        :rtype: str
        """
        template = None
        template_path = env.get(self.jenv.env_template_dir_key)

        if template_path:
            # jinja paths are not os paths, always use '/' as separator
            # https://github.com/pallets/jinja/issues/411
            template_path = template_path + '/' + self.insert_file

            try:
                template = self.jenv.jinja_env.get_template(template_path)
            except TemplateNotFound:
                # per-request template dir did not provide the file;
                # fall back to the default template lookup below
                pass

        if not template:
            template = self.jenv.jinja_env.get_template(self.insert_file)

        params = env.get(self.jenv.env_template_params_key)
        if params:
            # env-supplied params take precedence over caller kwargs
            kwargs.update(params)

        kwargs['env'] = env
        kwargs['static_prefix'] = env.get('pywb.host_prefix', '') + env.get('pywb.app_prefix', '') + '/static'

        return template.render(**kwargs)
# ============================================================================
class HeadInsertView(BaseInsertView):
    """The template view class associated with rendering the HTML inserted
    into the head of the pages replayed (WB Insert)."""

    def create_insert_func(self, wb_url,
                           wb_prefix,
                           host_prefix,
                           top_url,
                           env,
                           is_framed,
                           coll='',
                           include_ts=True,
                           **kwargs):
        """Create the function used to render the header insert template for the current request.

        :param rewrite.wburl.WbUrl wb_url: The WbUrl for the request this template is being rendered for
        :param str wb_prefix: The URL prefix pywb is serving the content using (e.g. http://localhost:8080/live/)
        :param str host_prefix: The host URL prefix pywb is running on (e.g. http://localhost:8080)
        :param str top_url: The full URL for this request (e.g. http://localhost:8080/live/http://example.com)
        :param dict env: The WSGI environment dictionary for this request
        :param bool is_framed: Is pywb or a specific collection running in framed mode
        :param str coll: The name of the collection this request is associated with
        :param bool include_ts: Should a timestamp be included in the rendered template
        :param kwargs: Additional keyword arguments to be supplied to the Jinja template render method
        :return: A function to be used to render the header insert for the request this template is being rendered for
        :rtype: callable
        """
        params = kwargs
        params['host_prefix'] = host_prefix
        params['wb_prefix'] = wb_prefix
        params['wb_url'] = wb_url
        params['top_url'] = top_url
        params['coll'] = coll
        params['is_framed'] = is_framed

        def make_head_insert(rule, cdx):
            # per-capture values are filled in from the cdx record at replay time
            params['wombat_ts'] = cdx['timestamp'] if include_ts else ''
            params['wombat_sec'] = timestamp_to_sec(cdx['timestamp'])
            params['is_live'] = cdx.get('is_live')

            if self.banner_view:
                banner_html = self.banner_view.render_to_string(env, cdx=cdx, **params)
                params['banner_html'] = banner_html

            return self.render_to_string(env, cdx=cdx, **params)

        return make_head_insert
# ============================================================================
class TopFrameView(BaseInsertView):
    """The template view class associated with rendering the replay iframe."""

    def get_top_frame(self, wb_url,
                      wb_prefix,
                      host_prefix,
                      env,
                      frame_mod,
                      replay_mod,
                      coll='',
                      extra_params=None):
        """Render the top frame insert for a replayed page.

        :param rewrite.wburl.WbUrl wb_url: The WbUrl for the request this template is being rendered for
        :param str wb_prefix: The URL prefix pywb is serving the content using (e.g. http://localhost:8080/live/)
        :param str host_prefix: The host URL prefix pywb is running on (e.g. http://localhost:8080)
        :param dict env: The WSGI environment dictionary for the request this template is being rendered for
        :param str frame_mod: The modifier to be used for framing (e.g. if_)
        :param str replay_mod: The modifier to be used in the URL of the page being replayed (e.g. mp_)
        :param str coll: The name of the collection this template is being rendered for
        :param dict extra_params: Additional parameters to be supplied to the Jinja template render method
        :return: The frame insert string
        :rtype: str
        """
        embed_url = wb_url.to_str(mod=replay_mod)

        # fall back to the current time when the requested url has no timestamp
        if wb_url.timestamp:
            timestamp = wb_url.timestamp
        else:
            timestamp = timestamp_now()

        is_proxy = 'wsgiprox.proxy_host' in env

        params = {'host_prefix': host_prefix,
                  'wb_prefix': wb_prefix,
                  'wb_url': wb_url,
                  'coll': coll,
                  'options': {'frame_mod': frame_mod,
                              'replay_mod': replay_mod},
                  'embed_url': embed_url,
                  'is_proxy': is_proxy,
                  'timestamp': timestamp,
                  'url': wb_url.get_url()
                  }

        if extra_params:
            params.update(extra_params)

        if self.banner_view:
            banner_html = self.banner_view.render_to_string(env, **params)
            params['banner_html'] = banner_html

        return self.render_to_string(env, **params)
# ============================================================================
class PkgResResolver(Resolver):
    """Class for resolving pywb package resources when installed via pypi or setup.py."""

    def get_pkg_path(self, item):
        """Get the package path for the supplied resource path.

        :param str item: A resource's full package path
        :return: The netloc and path from the item's package path, or None if item is not a 'pkg' URL string
        :rtype: tuple[str, str]
        """
        if not isinstance(item, str):
            return None

        parts = urlsplit(item)
        if parts.scheme == 'pkg' and parts.netloc:
            return (parts.netloc, parts.path)

        return None

    def resolve_source(self, ctx, item):
        # resolve 'pkg://' style items through pkg_resources first,
        # falling back to the default webassets resolution
        pkg = self.get_pkg_path(item)
        if pkg:
            filename = resource_filename(pkg[0], pkg[1])
            if filename:
                return filename

        return super(PkgResResolver, self).resolve_source(ctx, item)
| xss | {
"code": [
"from jinja2 import Environment, TemplateNotFound, contextfunction"
],
"line_no": [
8
]
} | {
"code": [
"from jinja2 import Environment, TemplateNotFound, contextfunction, select_autoescape",
" autoescape=select_autoescape(),",
" autoescape=select_autoescape(),"
],
"line_no": [
8,
80,
85
]
} | from warcio.timeutils import .timestamp_to_datetime, timestamp_to_sec
from warcio.timeutils import .timestamp_now
from pywb.utils.loaders import load
from six.moves.urllib.parse import .urlsplit, quote
from jinja2 import Environment, TemplateNotFound, contextfunction
from jinja2 import FileSystemLoader, PackageLoader, ChoiceLoader
from babel.support import Translations
from webassets.ext.jinja2 import AssetsExtension
from webassets.loaders import YAMLLoader
from webassets.env import Resolver
from pkg_resources import resource_filename
import os
try:
import ujson as json
except ImportError: # pragma: no cover
import json
class CLASS_0(Environment):
def FUNC_0(self, VAR_0, VAR_1):
return os.path.join(os.path.dirname(VAR_1), VAR_0)
class CLASS_1(object):
def __init__(self, VAR_2=None,
VAR_3=None,
VAR_4=None,
VAR_5=None,
VAR_6=None,
VAR_7=None,
VAR_8='pywb.template_params',
VAR_9='pywb.templates_dir'):
if VAR_2 is None:
VAR_2 = ['templates', '.', '/']
if VAR_3 is None:
VAR_3 = ['pywb']
self._init_filters()
VAR_32 = ChoiceLoader(self._make_loaders(VAR_2, VAR_3))
self.env_template_params_key = VAR_8
self.env_template_dir_key = VAR_9
VAR_7 = extensions or []
if VAR_4:
VAR_7.append(AssetsExtension)
if VAR_6:
VAR_35 = VAR_6.jinja_env.overlay(VAR_32=loader,
trim_blocks=True,
VAR_7=extensions)
else:
VAR_35 = CLASS_0(VAR_32=loader,
trim_blocks=True,
VAR_7=extensions)
VAR_35.filters.update(self.filters)
if VAR_5:
VAR_35.globals.update(VAR_5)
self.jinja_env = VAR_35
if VAR_4:
VAR_53 = YAMLLoader(load(VAR_4))
VAR_54 = VAR_53.load_environment()
VAR_54.resolver = CLASS_5()
VAR_35.assets_environment = VAR_54
self.default_locale = ''
def FUNC_1(self, VAR_2, VAR_3):
VAR_33 = []
for path in VAR_2:
VAR_33.append(FileSystemLoader(path))
for package in VAR_3:
VAR_33.append(PackageLoader(package))
return VAR_33
def FUNC_2(self, VAR_10, VAR_11, VAR_12, VAR_13):
VAR_11 = locales or []
VAR_10 = locales_root_dir or os.path.join('i18n', 'translations')
VAR_13 = default_locale or 'en'
self.default_locale = VAR_13
if VAR_10:
for VAR_55 in VAR_11:
VAR_12[VAR_55] = Translations.load(VAR_10, [VAR_55, VAR_13])
def FUNC_10(VAR_34):
VAR_55 = VAR_34.get('env', {}).get('pywb_lang', VAR_13)
return VAR_12.get(VAR_55)
def FUNC_11(VAR_35, VAR_36):
@contextfunction
def FUNC_21(VAR_34, VAR_37):
VAR_56 = FUNC_10(VAR_34)
if not VAR_56:
return VAR_37
VAR_39 = getattr(VAR_56, VAR_36)
return VAR_39(VAR_37)
VAR_35.globals[VAR_36] = FUNC_21
FUNC_11(self.jinja_env, 'gettext')
FUNC_11(self.jinja_env, 'ngettext')
@contextfunction
def FUNC_12(VAR_34, VAR_37):
VAR_56 = FUNC_10(VAR_34)
if not VAR_56:
return VAR_37
VAR_37 = VAR_56.gettext(VAR_37)
return quote(VAR_37, safe='/: ')
self.jinja_env.globals['locales'] = list(VAR_12.keys())
self.jinja_env.globals['_Q'] = FUNC_12
self.jinja_env.globals['default_locale'] = VAR_13
@contextfunction
def FUNC_13(VAR_34, VAR_38):
VAR_57 = VAR_34.get('env')
VAR_58 = VAR_57.get('pywb_lang', '')
VAR_59 = VAR_57.get('REQUEST_URI', VAR_57.get('PATH_INFO'))
if VAR_58:
return VAR_59.replace(VAR_58, VAR_38, 1)
VAR_60 = VAR_57.get('pywb.app_prefix', '')
if VAR_60 and VAR_59.startswith(VAR_60):
VAR_59 = VAR_59.replace(VAR_60, '')
return VAR_60 + '/' + VAR_38 + VAR_59
@contextfunction
def FUNC_14(VAR_34):
VAR_57 = VAR_34.get('env')
VAR_61 = {}
VAR_62 = VAR_57.get('pywb.app_prefix', '')
VAR_25 = VAR_57.get('SCRIPT_NAME', '')
if VAR_62:
VAR_25 = coll[len(VAR_62):]
VAR_58 = VAR_57.get('pywb_lang', '')
if VAR_58:
VAR_25 = coll[len(VAR_58) + 1:]
for VAR_38 in VAR_12.keys():
VAR_61[VAR_38] = VAR_62 + '/' + VAR_38 + VAR_25 + '/'
return VAR_61
self.jinja_env.globals['switch_locale'] = FUNC_13
self.jinja_env.globals['get_locale_prefixes'] = FUNC_14
def FUNC_3(self, VAR_14=None):
def FUNC_15(VAR_39):
VAR_36 = VAR_14 or VAR_39.__name__
self.filters[VAR_36] = VAR_39
return VAR_39
return FUNC_15
def FUNC_4(self):
self.filters = {}
@self.template_filter()
def FUNC_16(VAR_40, VAR_41='%a, %b %d %Y %H:%M:%S'):
if VAR_41 == '%s':
return timestamp_to_sec(VAR_40)
else:
VAR_40 = timestamp_to_datetime(VAR_40)
return VAR_40.strftime(VAR_41)
@self.template_filter('urlsplit')
def FUNC_17(VAR_42):
VAR_63 = urlsplit(VAR_42)
return VAR_63
@self.template_filter()
def FUNC_18(VAR_43):
return json.dumps(VAR_43)
@self.template_filter()
def FUNC_19(VAR_44):
return 'true' if VAR_44 else 'false'
class CLASS_2(object):
def __init__(self, VAR_15, VAR_16, VAR_17=None):
self.jenv = VAR_15
self.insert_file = VAR_16
self.banner_view = VAR_17
def FUNC_5(self, VAR_18, **VAR_19):
VAR_0 = None
VAR_45 = VAR_18.get(self.jenv.env_template_dir_key)
if VAR_45:
VAR_45 = template_path + '/' + self.insert_file
try:
VAR_0 = self.jenv.jinja_env.get_template(VAR_45)
except TemplateNotFound as te:
pass
if not VAR_0:
VAR_0 = self.jenv.jinja_env.get_template(self.insert_file)
VAR_46 = VAR_18.get(self.jenv.env_template_params_key)
if VAR_46:
VAR_19.update(VAR_46)
VAR_19['env'] = VAR_18
VAR_19['static_prefix'] = VAR_18.get('pywb.host_prefix', '') + VAR_18.get('pywb.app_prefix', '') + '/static'
return VAR_0.render(**VAR_19)
class CLASS_3(CLASS_2):
def FUNC_6(self, VAR_20,
VAR_21,
VAR_22,
VAR_23,
VAR_18,
VAR_24,
VAR_25='',
VAR_26=True,
**VAR_19):
VAR_46 = VAR_19
VAR_46['host_prefix'] = VAR_22
VAR_46['wb_prefix'] = VAR_21
VAR_46['wb_url'] = VAR_20
VAR_46['top_url'] = VAR_23
VAR_46['coll'] = VAR_25
VAR_46['is_framed'] = VAR_24
def FUNC_20(VAR_47, VAR_48):
VAR_46['wombat_ts'] = VAR_48['timestamp'] if VAR_26 else ''
VAR_46['wombat_sec'] = timestamp_to_sec(VAR_48['timestamp'])
VAR_46['is_live'] = VAR_48.get('is_live')
if self.banner_view:
VAR_65 = self.banner_view.render_to_string(VAR_18, VAR_48=cdx, **VAR_46)
params['banner_html'] = VAR_65
return self.render_to_string(VAR_18, VAR_48=cdx, **VAR_46)
return FUNC_20
class CLASS_4(CLASS_2):
def FUNC_7(self, VAR_20,
VAR_21,
VAR_22,
VAR_18,
VAR_27,
VAR_28,
VAR_25='',
VAR_29=None):
VAR_49 = VAR_20.to_str(mod=VAR_28)
if VAR_20.timestamp:
VAR_64 = VAR_20.timestamp
else:
VAR_64 = timestamp_now()
VAR_50 = 'wsgiprox.proxy_host' in VAR_18
VAR_46 = {'host_prefix': VAR_22,
'wb_prefix': VAR_21,
'wb_url': VAR_20,
'coll': VAR_25,
'options': {'frame_mod': VAR_27,
'replay_mod': VAR_28},
'embed_url': VAR_49,
'is_proxy': VAR_50,
'timestamp': VAR_64,
'url': VAR_20.get_url()
}
if VAR_29:
VAR_46.update(VAR_29)
if self.banner_view:
VAR_65 = self.banner_view.render_to_string(VAR_18, **VAR_46)
VAR_46['banner_html'] = VAR_65
return self.render_to_string(VAR_18, **VAR_46)
class CLASS_5(Resolver):
def FUNC_8(self, VAR_30):
if not isinstance(VAR_30, str):
return None
VAR_51 = urlsplit(VAR_30)
if VAR_51.scheme == 'pkg' and VAR_51.netloc:
return (VAR_51.netloc, VAR_51.path)
return None
def FUNC_9(self, VAR_31, VAR_30):
VAR_52 = self.get_pkg_path(VAR_30)
if VAR_52:
VAR_66 = resource_filename(VAR_52[0], VAR_52[1])
if VAR_66:
return VAR_66
return super(CLASS_5, self).resolve_source(VAR_31, VAR_30)
| from warcio.timeutils import .timestamp_to_datetime, timestamp_to_sec
from warcio.timeutils import .timestamp_now
from pywb.utils.loaders import load
from six.moves.urllib.parse import .urlsplit, quote
from jinja2 import Environment, TemplateNotFound, contextfunction, select_autoescape
from jinja2 import FileSystemLoader, PackageLoader, ChoiceLoader
from babel.support import Translations
from webassets.ext.jinja2 import AssetsExtension
from webassets.loaders import YAMLLoader
from webassets.env import Resolver
from pkg_resources import resource_filename
import os
try:
import ujson as json
except ImportError: # pragma: no cover
import json
class CLASS_0(Environment):
def FUNC_0(self, VAR_0, VAR_1):
return os.path.join(os.path.dirname(VAR_1), VAR_0)
class CLASS_1(object):
def __init__(self, VAR_2=None,
VAR_3=None,
VAR_4=None,
VAR_5=None,
VAR_6=None,
VAR_7=None,
VAR_8='pywb.template_params',
VAR_9='pywb.templates_dir'):
if VAR_2 is None:
VAR_2 = ['templates', '.', '/']
if VAR_3 is None:
VAR_3 = ['pywb']
self._init_filters()
VAR_32 = ChoiceLoader(self._make_loaders(VAR_2, VAR_3))
self.env_template_params_key = VAR_8
self.env_template_dir_key = VAR_9
VAR_7 = extensions or []
if VAR_4:
VAR_7.append(AssetsExtension)
if VAR_6:
VAR_35 = VAR_6.jinja_env.overlay(VAR_32=loader,
autoescape=select_autoescape(),
trim_blocks=True,
VAR_7=extensions)
else:
VAR_35 = CLASS_0(VAR_32=loader,
autoescape=select_autoescape(),
trim_blocks=True,
VAR_7=extensions)
VAR_35.filters.update(self.filters)
if VAR_5:
VAR_35.globals.update(VAR_5)
self.jinja_env = VAR_35
if VAR_4:
VAR_53 = YAMLLoader(load(VAR_4))
VAR_54 = VAR_53.load_environment()
VAR_54.resolver = CLASS_5()
VAR_35.assets_environment = VAR_54
self.default_locale = ''
def FUNC_1(self, VAR_2, VAR_3):
VAR_33 = []
for path in VAR_2:
VAR_33.append(FileSystemLoader(path))
for package in VAR_3:
VAR_33.append(PackageLoader(package))
return VAR_33
def FUNC_2(self, VAR_10, VAR_11, VAR_12, VAR_13):
VAR_11 = locales or []
VAR_10 = locales_root_dir or os.path.join('i18n', 'translations')
VAR_13 = default_locale or 'en'
self.default_locale = VAR_13
if VAR_10:
for VAR_55 in VAR_11:
VAR_12[VAR_55] = Translations.load(VAR_10, [VAR_55, VAR_13])
def FUNC_10(VAR_34):
VAR_55 = VAR_34.get('env', {}).get('pywb_lang', VAR_13)
return VAR_12.get(VAR_55)
def FUNC_11(VAR_35, VAR_36):
@contextfunction
def FUNC_21(VAR_34, VAR_37):
VAR_56 = FUNC_10(VAR_34)
if not VAR_56:
return VAR_37
VAR_39 = getattr(VAR_56, VAR_36)
return VAR_39(VAR_37)
VAR_35.globals[VAR_36] = FUNC_21
FUNC_11(self.jinja_env, 'gettext')
FUNC_11(self.jinja_env, 'ngettext')
@contextfunction
def FUNC_12(VAR_34, VAR_37):
VAR_56 = FUNC_10(VAR_34)
if not VAR_56:
return VAR_37
VAR_37 = VAR_56.gettext(VAR_37)
return quote(VAR_37, safe='/: ')
self.jinja_env.globals['locales'] = list(VAR_12.keys())
self.jinja_env.globals['_Q'] = FUNC_12
self.jinja_env.globals['default_locale'] = VAR_13
@contextfunction
def FUNC_13(VAR_34, VAR_38):
VAR_57 = VAR_34.get('env')
VAR_58 = VAR_57.get('pywb_lang', '')
VAR_59 = VAR_57.get('REQUEST_URI', VAR_57.get('PATH_INFO'))
if VAR_58:
return VAR_59.replace(VAR_58, VAR_38, 1)
VAR_60 = VAR_57.get('pywb.app_prefix', '')
if VAR_60 and VAR_59.startswith(VAR_60):
VAR_59 = VAR_59.replace(VAR_60, '')
return VAR_60 + '/' + VAR_38 + VAR_59
@contextfunction
def FUNC_14(VAR_34):
VAR_57 = VAR_34.get('env')
VAR_61 = {}
VAR_62 = VAR_57.get('pywb.app_prefix', '')
VAR_25 = VAR_57.get('SCRIPT_NAME', '')
if VAR_62:
VAR_25 = coll[len(VAR_62):]
VAR_58 = VAR_57.get('pywb_lang', '')
if VAR_58:
VAR_25 = coll[len(VAR_58) + 1:]
for VAR_38 in VAR_12.keys():
VAR_61[VAR_38] = VAR_62 + '/' + VAR_38 + VAR_25 + '/'
return VAR_61
self.jinja_env.globals['switch_locale'] = FUNC_13
self.jinja_env.globals['get_locale_prefixes'] = FUNC_14
def FUNC_3(self, VAR_14=None):
def FUNC_15(VAR_39):
VAR_36 = VAR_14 or VAR_39.__name__
self.filters[VAR_36] = VAR_39
return VAR_39
return FUNC_15
def FUNC_4(self):
self.filters = {}
@self.template_filter()
def FUNC_16(VAR_40, VAR_41='%a, %b %d %Y %H:%M:%S'):
if VAR_41 == '%s':
return timestamp_to_sec(VAR_40)
else:
VAR_40 = timestamp_to_datetime(VAR_40)
return VAR_40.strftime(VAR_41)
@self.template_filter('urlsplit')
def FUNC_17(VAR_42):
VAR_63 = urlsplit(VAR_42)
return VAR_63
@self.template_filter()
def FUNC_18(VAR_43):
return json.dumps(VAR_43)
@self.template_filter()
def FUNC_19(VAR_44):
return 'true' if VAR_44 else 'false'
class CLASS_2(object):
def __init__(self, VAR_15, VAR_16, VAR_17=None):
self.jenv = VAR_15
self.insert_file = VAR_16
self.banner_view = VAR_17
def FUNC_5(self, VAR_18, **VAR_19):
VAR_0 = None
VAR_45 = VAR_18.get(self.jenv.env_template_dir_key)
if VAR_45:
VAR_45 = template_path + '/' + self.insert_file
try:
VAR_0 = self.jenv.jinja_env.get_template(VAR_45)
except TemplateNotFound as te:
pass
if not VAR_0:
VAR_0 = self.jenv.jinja_env.get_template(self.insert_file)
VAR_46 = VAR_18.get(self.jenv.env_template_params_key)
if VAR_46:
VAR_19.update(VAR_46)
VAR_19['env'] = VAR_18
VAR_19['static_prefix'] = VAR_18.get('pywb.host_prefix', '') + VAR_18.get('pywb.app_prefix', '') + '/static'
return VAR_0.render(**VAR_19)
class CLASS_3(CLASS_2):
def FUNC_6(self, VAR_20,
VAR_21,
VAR_22,
VAR_23,
VAR_18,
VAR_24,
VAR_25='',
VAR_26=True,
**VAR_19):
VAR_46 = VAR_19
VAR_46['host_prefix'] = VAR_22
VAR_46['wb_prefix'] = VAR_21
VAR_46['wb_url'] = VAR_20
VAR_46['top_url'] = VAR_23
VAR_46['coll'] = VAR_25
VAR_46['is_framed'] = VAR_24
def FUNC_20(VAR_47, VAR_48):
VAR_46['wombat_ts'] = VAR_48['timestamp'] if VAR_26 else ''
VAR_46['wombat_sec'] = timestamp_to_sec(VAR_48['timestamp'])
VAR_46['is_live'] = VAR_48.get('is_live')
if self.banner_view:
VAR_65 = self.banner_view.render_to_string(VAR_18, VAR_48=cdx, **VAR_46)
params['banner_html'] = VAR_65
return self.render_to_string(VAR_18, VAR_48=cdx, **VAR_46)
return FUNC_20
class CLASS_4(CLASS_2):
def FUNC_7(self, VAR_20,
VAR_21,
VAR_22,
VAR_18,
VAR_27,
VAR_28,
VAR_25='',
VAR_29=None):
VAR_49 = VAR_20.to_str(mod=VAR_28)
if VAR_20.timestamp:
VAR_64 = VAR_20.timestamp
else:
VAR_64 = timestamp_now()
VAR_50 = 'wsgiprox.proxy_host' in VAR_18
VAR_46 = {'host_prefix': VAR_22,
'wb_prefix': VAR_21,
'wb_url': VAR_20,
'coll': VAR_25,
'options': {'frame_mod': VAR_27,
'replay_mod': VAR_28},
'embed_url': VAR_49,
'is_proxy': VAR_50,
'timestamp': VAR_64,
'url': VAR_20.get_url()
}
if VAR_29:
VAR_46.update(VAR_29)
if self.banner_view:
VAR_65 = self.banner_view.render_to_string(VAR_18, **VAR_46)
VAR_46['banner_html'] = VAR_65
return self.render_to_string(VAR_18, **VAR_46)
class CLASS_5(Resolver):
def FUNC_8(self, VAR_30):
if not isinstance(VAR_30, str):
return None
VAR_51 = urlsplit(VAR_30)
if VAR_51.scheme == 'pkg' and VAR_51.netloc:
return (VAR_51.netloc, VAR_51.path)
return None
def FUNC_9(self, VAR_31, VAR_30):
VAR_52 = self.get_pkg_path(VAR_30)
if VAR_52:
VAR_66 = resource_filename(VAR_52[0], VAR_52[1])
if VAR_66:
return VAR_66
return super(CLASS_5, self).resolve_source(VAR_31, VAR_30)
| [
3,
5,
7,
10,
12,
16,
18,
20,
25,
26,
27,
32,
33,
34,
39,
49,
59,
62,
65,
67,
69,
72,
74,
77,
86,
88,
91,
93,
94,
100,
102,
105,
112,
115,
116,
119,
121,
127,
131,
132,
136,
143,
146,
148,
149,
151,
152,
154,
155,
156,
162,
165,
169,
174,
176,
179,
181,
184,
186,
191,
194,
197,
201,
204,
206,
209,
212,
215,
224,
226,
230,
234,
245,
249,
256,
260,
266,
273,
275,
276,
277,
280,
283,
291,
294,
302,
304,
305,
307,
312,
315,
319,
322,
323,
325,
326,
327,
331,
342,
362,
367,
371,
373,
375,
376,
377,
380,
401,
403,
408,
410,
415,
418,
424,
427,
431,
433,
434,
435,
438,
441,
448,
452,
454,
461,
463,
464,
465,
29,
36,
37,
38,
279,
329,
330,
379,
437,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
104,
105,
106,
107,
108,
109,
110,
211,
212,
213,
214,
215,
216,
217,
218,
219,
228,
282,
283,
284,
285,
286,
287,
293,
294,
295,
296,
297,
298,
299,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
354,
389,
390,
391,
392,
393,
394,
395,
396,
397,
398,
399,
400,
440,
441,
442,
443,
444,
445,
233,
234,
235,
236,
237,
238,
239,
248,
249,
250,
251,
252,
253,
259,
260,
261,
262,
263,
264,
269,
270,
271,
272
] | [
3,
5,
7,
10,
12,
16,
18,
20,
25,
26,
27,
32,
33,
34,
39,
49,
59,
62,
65,
67,
69,
72,
74,
77,
88,
90,
93,
95,
96,
102,
104,
107,
114,
117,
118,
121,
123,
129,
133,
134,
138,
145,
148,
150,
151,
153,
154,
156,
157,
158,
164,
167,
171,
176,
178,
181,
183,
186,
188,
193,
196,
199,
203,
206,
208,
211,
214,
217,
226,
228,
232,
236,
247,
251,
258,
262,
268,
275,
277,
278,
279,
282,
285,
293,
296,
304,
306,
307,
309,
314,
317,
321,
324,
325,
327,
328,
329,
333,
344,
364,
369,
373,
375,
377,
378,
379,
382,
403,
405,
410,
412,
417,
420,
426,
429,
433,
435,
436,
437,
440,
443,
450,
454,
456,
463,
465,
466,
467,
29,
36,
37,
38,
281,
331,
332,
381,
439,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
106,
107,
108,
109,
110,
111,
112,
213,
214,
215,
216,
217,
218,
219,
220,
221,
230,
284,
285,
286,
287,
288,
289,
295,
296,
297,
298,
299,
300,
301,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
391,
392,
393,
394,
395,
396,
397,
398,
399,
400,
401,
402,
442,
443,
444,
445,
446,
447,
235,
236,
237,
238,
239,
240,
241,
250,
251,
252,
253,
254,
255,
261,
262,
263,
264,
265,
266,
271,
272,
273,
274
] |
2CWE-601
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from django.contrib.auth.views import redirect_to_login
from django.utils import timezone
from django.core.exceptions import PermissionDenied
from djconfig import config
from ...core import utils
from ...core.utils.paginator import yt_paginate
from .models import CommentPoll, CommentPollChoice, CommentPollVote
from .forms import PollVoteManyForm
@login_required
@require_POST
def close_or_open(request, pk, close=True):
# todo: moderators should be able to close it
poll = get_object_or_404(
CommentPoll,
pk=pk,
comment__user=request.user
)
if close:
close_at = timezone.now()
else:
close_at = None
(CommentPoll.objects
.filter(pk=poll.pk)
.update(close_at=close_at))
return redirect(request.GET.get('next', poll.get_absolute_url()))
@require_POST
def vote(request, pk):
# TODO: check if user has access to this topic/poll
poll = get_object_or_404(
CommentPoll.objects.unremoved(),
pk=pk
)
if not request.user.is_authenticated:
return redirect_to_login(next=poll.get_absolute_url())
form = PollVoteManyForm(user=request.user, poll=poll, data=request.POST)
if form.is_valid():
CommentPollChoice.decrease_vote_count(poll=poll, voter=request.user)
form.save_m2m()
CommentPollChoice.increase_vote_count(poll=poll, voter=request.user)
return redirect(request.POST.get('next', poll.get_absolute_url()))
messages.error(request, utils.render_form_errors(form))
return redirect(request.POST.get('next', poll.get_absolute_url()))
@login_required
def voters(request, pk):
# TODO: check if user has access to this topic/poll
choice = get_object_or_404(
CommentPollChoice.objects
.unremoved()
.select_related('poll'),
pk=pk
)
if not choice.poll.can_show_results:
raise PermissionDenied
choice_votes = (
CommentPollVote.objects
.unremoved()
.for_choice(choice=choice)
.select_related('voter__st'))
choice_votes = yt_paginate(
choice_votes,
per_page=config.topics_per_page,
page_number=request.GET.get('page', 1)
)
context = {
'choice': choice,
'votes': choice_votes
}
return render(request, 'spirit/comment/poll/voters.html', context)
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from django.contrib.auth.views import redirect_to_login
from django.utils import timezone
from django.core.exceptions import PermissionDenied
from djconfig import config
from spirit.core.utils.http import safe_redirect
from spirit.core import utils
from spirit.core.utils.paginator import yt_paginate
from .models import CommentPoll, CommentPollChoice, CommentPollVote
from .forms import PollVoteManyForm
@login_required
@require_POST
def close_or_open(request, pk, close=True):
# todo: moderators should be able to close it
poll = get_object_or_404(
CommentPoll,
pk=pk,
comment__user=request.user
)
if close:
close_at = timezone.now()
else:
close_at = None
(CommentPoll.objects
.filter(pk=poll.pk)
.update(close_at=close_at))
return safe_redirect(request, 'next', poll.get_absolute_url())
@require_POST
def vote(request, pk):
# TODO: check if user has access to this topic/poll
poll = get_object_or_404(
CommentPoll.objects.unremoved(),
pk=pk
)
if not request.user.is_authenticated:
return redirect_to_login(next=poll.get_absolute_url())
form = PollVoteManyForm(user=request.user, poll=poll, data=request.POST)
if form.is_valid():
CommentPollChoice.decrease_vote_count(poll=poll, voter=request.user)
form.save_m2m()
CommentPollChoice.increase_vote_count(poll=poll, voter=request.user)
return safe_redirect(request, 'next', poll.get_absolute_url(), method='POST')
messages.error(request, utils.render_form_errors(form))
return safe_redirect(request, 'next', poll.get_absolute_url(), method='POST')
@login_required
def voters(request, pk):
# TODO: check if user has access to this topic/poll
choice = get_object_or_404(
CommentPollChoice.objects
.unremoved()
.select_related('poll'),
pk=pk
)
if not choice.poll.can_show_results:
raise PermissionDenied
choice_votes = (
CommentPollVote.objects
.unremoved()
.for_choice(choice=choice)
.select_related('voter__st'))
choice_votes = yt_paginate(
choice_votes,
per_page=config.topics_per_page,
page_number=request.GET.get('page', 1)
)
context = {
'choice': choice,
'votes': choice_votes
}
return render(request, 'spirit/comment/poll/voters.html', context)
| open_redirect | {
"code": [
"from django.shortcuts import render, redirect, get_object_or_404",
"from ...core import utils",
"from ...core.utils.paginator import yt_paginate",
" return redirect(request.GET.get('next', poll.get_absolute_url()))",
" return redirect(request.POST.get('next', poll.get_absolute_url()))",
" return redirect(request.POST.get('next', poll.get_absolute_url()))"
],
"line_no": [
4,
13,
14,
38,
58,
61
]
} | {
"code": [
"from django.shortcuts import render, get_object_or_404",
"from spirit.core.utils.http import safe_redirect",
"from spirit.core import utils",
"from spirit.core.utils.paginator import yt_paginate",
" return safe_redirect(request, 'next', poll.get_absolute_url(), method='POST')"
],
"line_no": [
4,
13,
14,
15,
59
]
} |
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from django.contrib.auth.views import redirect_to_login
from django.utils import timezone
from django.core.exceptions import PermissionDenied
from djconfig import config
from ...core import utils
from ...core.utils.paginator import yt_paginate
from .models import CommentPoll, CommentPollChoice, CommentPollVote
from .forms import PollVoteManyForm
@login_required
@require_POST
def FUNC_0(VAR_0, VAR_1, VAR_2=True):
VAR_3 = get_object_or_404(
CommentPoll,
VAR_1=pk,
comment__user=VAR_0.user
)
if VAR_2:
VAR_8 = timezone.now()
else:
VAR_8 = None
(CommentPoll.objects
.filter(VAR_1=VAR_3.pk)
.update(VAR_8=close_at))
return redirect(VAR_0.GET.get('next', VAR_3.get_absolute_url()))
@require_POST
def FUNC_1(VAR_0, VAR_1):
VAR_3 = get_object_or_404(
CommentPoll.objects.unremoved(),
VAR_1=pk
)
if not VAR_0.user.is_authenticated:
return redirect_to_login(next=VAR_3.get_absolute_url())
VAR_4 = PollVoteManyForm(user=VAR_0.user, VAR_3=poll, data=VAR_0.POST)
if VAR_4.is_valid():
CommentPollChoice.decrease_vote_count(VAR_3=poll, voter=VAR_0.user)
VAR_4.save_m2m()
CommentPollChoice.increase_vote_count(VAR_3=poll, voter=VAR_0.user)
return redirect(VAR_0.POST.get('next', VAR_3.get_absolute_url()))
messages.error(VAR_0, utils.render_form_errors(VAR_4))
return redirect(VAR_0.POST.get('next', VAR_3.get_absolute_url()))
@login_required
def FUNC_2(VAR_0, VAR_1):
VAR_5 = get_object_or_404(
CommentPollChoice.objects
.unremoved()
.select_related('poll'),
VAR_1=pk
)
if not VAR_5.poll.can_show_results:
raise PermissionDenied
VAR_6 = (
CommentPollVote.objects
.unremoved()
.for_choice(VAR_5=choice)
.select_related('voter__st'))
VAR_6 = yt_paginate(
VAR_6,
per_page=config.topics_per_page,
page_number=VAR_0.GET.get('page', 1)
)
VAR_7 = {
'choice': VAR_5,
'votes': VAR_6
}
return render(VAR_0, 'spirit/comment/VAR_3/FUNC_2.html', VAR_7)
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from django.contrib.auth.views import redirect_to_login
from django.utils import timezone
from django.core.exceptions import PermissionDenied
from djconfig import config
from spirit.core.utils.http import safe_redirect
from spirit.core import utils
from spirit.core.utils.paginator import yt_paginate
from .models import CommentPoll, CommentPollChoice, CommentPollVote
from .forms import PollVoteManyForm
@login_required
@require_POST
def FUNC_0(VAR_0, VAR_1, VAR_2=True):
VAR_3 = get_object_or_404(
CommentPoll,
VAR_1=pk,
comment__user=VAR_0.user
)
if VAR_2:
VAR_8 = timezone.now()
else:
VAR_8 = None
(CommentPoll.objects
.filter(VAR_1=VAR_3.pk)
.update(VAR_8=close_at))
return safe_redirect(VAR_0, 'next', VAR_3.get_absolute_url())
@require_POST
def FUNC_1(VAR_0, VAR_1):
VAR_3 = get_object_or_404(
CommentPoll.objects.unremoved(),
VAR_1=pk
)
if not VAR_0.user.is_authenticated:
return redirect_to_login(next=VAR_3.get_absolute_url())
VAR_4 = PollVoteManyForm(user=VAR_0.user, VAR_3=poll, data=VAR_0.POST)
if VAR_4.is_valid():
CommentPollChoice.decrease_vote_count(VAR_3=poll, voter=VAR_0.user)
VAR_4.save_m2m()
CommentPollChoice.increase_vote_count(VAR_3=poll, voter=VAR_0.user)
return safe_redirect(VAR_0, 'next', VAR_3.get_absolute_url(), method='POST')
messages.error(VAR_0, utils.render_form_errors(VAR_4))
return safe_redirect(VAR_0, 'next', VAR_3.get_absolute_url(), method='POST')
@login_required
def FUNC_2(VAR_0, VAR_1):
VAR_5 = get_object_or_404(
CommentPollChoice.objects
.unremoved()
.select_related('poll'),
VAR_1=pk
)
if not VAR_5.poll.can_show_results:
raise PermissionDenied
VAR_6 = (
CommentPollVote.objects
.unremoved()
.for_choice(VAR_5=choice)
.select_related('voter__st'))
VAR_6 = yt_paginate(
VAR_6,
per_page=config.topics_per_page,
page_number=VAR_0.GET.get('page', 1)
)
VAR_7 = {
'choice': VAR_5,
'votes': VAR_6
}
return render(VAR_0, 'spirit/comment/VAR_3/FUNC_2.html', VAR_7)
| [
1,
2,
10,
12,
17,
18,
22,
28,
33,
37,
39,
40,
43,
48,
51,
53,
59,
62,
63,
66,
73,
76,
82,
88,
93,
95
] | [
1,
2,
10,
12,
18,
19,
23,
29,
34,
38,
40,
41,
44,
49,
52,
54,
60,
63,
64,
67,
74,
77,
83,
89,
94,
96
] |
1CWE-79
| import pytest
from bs4 import BeautifulSoup
from django_unicorn.components.unicorn_template_response import get_root_element
def test_get_root_element():
expected = "<div>test</div>"
component_html = "<div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_with_comment():
expected = "<div>test</div>"
component_html = "<!-- some comment --><div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_with_blank_string():
expected = "<div>test</div>"
component_html = "\n<div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_no_element():
expected = "<div>test</div>"
component_html = "\n"
soup = BeautifulSoup(component_html, features="html.parser")
with pytest.raises(Exception):
actual = get_root_element(soup)
assert str(actual) == expected
| import pytest
from bs4 import BeautifulSoup
from django_unicorn.components.unicorn_template_response import (
UnicornTemplateResponse,
get_root_element,
)
def test_get_root_element():
expected = "<div>test</div>"
component_html = "<div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_with_comment():
expected = "<div>test</div>"
component_html = "<!-- some comment --><div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_with_blank_string():
expected = "<div>test</div>"
component_html = "\n<div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_no_element():
expected = "<div>test</div>"
component_html = "\n"
soup = BeautifulSoup(component_html, features="html.parser")
with pytest.raises(Exception):
actual = get_root_element(soup)
assert str(actual) == expected
def test_desoupify():
html = "<div><a><style>@keyframes x{}</style><a style="animation-name:x" onanimationend="alert(1)"></a>!\n</div>\n\n<script type=\"application/javascript\">\n window.addEventListener('DOMContentLoaded', (event) => {\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\n });\n</script>"
expected = "<div><a><style>@keyframes x{}</style><a style=\"animation-name:x\" onanimationend=\"alert(1)\"></a>!\n</div>\n<script type=\"application/javascript\">\n window.addEventListener('DOMContentLoaded', (event) => {\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\n });\n</script>"
soup = BeautifulSoup(html, "html.parser")
actual = UnicornTemplateResponse._desoupify(soup)
assert expected == actual
| xss | {
"code": [
"from django_unicorn.components.unicorn_template_response import get_root_element"
],
"line_no": [
4
]
} | {
"code": [
"from django_unicorn.components.unicorn_template_response import (",
" get_root_element,",
")",
"def test_desoupify():",
" html = \"<div><a><style>@keyframes x{}</style><a style="animation-name:x" onanimationend="alert(1)"></a>!\\n</div>\\n\\n<script type=\\\"application/javascript\\\">\\n window.addEventListener('DOMContentLoaded', (event) => {\\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\\n });\\n</script>\"",
" expected = \"<div><a><style>@keyframes x{}</style><a style=\\\"animation-name:x\\\" onanimationend=\\\"alert(1)\\\"></a>!\\n</div>\\n<script type=\\\"application/javascript\\\">\\n window.addEventListener('DOMContentLoaded', (event) => {\\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\\n });\\n</script>\"",
" soup = BeautifulSoup(html, \"html.parser\")",
" actual = UnicornTemplateResponse._desoupify(soup)",
" assert expected == actual"
],
"line_no": [
4,
6,
7,
52,
53,
54,
56,
58,
60
]
} | import pytest
from bs4 import BeautifulSoup
from django_unicorn.components.unicorn_template_response import get_root_element
def FUNC_0():
VAR_0 = "<div>test</div>"
VAR_1 = "<div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_1():
VAR_0 = "<div>test</div>"
VAR_1 = "<!-- some comment --><div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_2():
VAR_0 = "<div>test</div>"
VAR_1 = "\n<div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_3():
VAR_0 = "<div>test</div>"
VAR_1 = "\n"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
with pytest.raises(Exception):
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
| import pytest
from bs4 import BeautifulSoup
from django_unicorn.components.unicorn_template_response import (
UnicornTemplateResponse,
get_root_element,
)
def FUNC_0():
VAR_0 = "<div>test</div>"
VAR_1 = "<div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_1():
VAR_0 = "<div>test</div>"
VAR_1 = "<!-- some comment --><div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_2():
VAR_0 = "<div>test</div>"
VAR_1 = "\n<div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_3():
VAR_0 = "<div>test</div>"
VAR_1 = "\n"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
with pytest.raises(Exception):
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_4():
VAR_4 = "<div><a><style>@keyframes x{}</style><a style="animation-name:x" onanimationend="alert(1)"></a>!\n</div>\n\n<script type=\"application/javascript\">\n window.addEventListener('DOMContentLoaded', (event) => {\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\n });\n</script>"
VAR_0 = "<div><a><style>@keyframes x{}</style><a style=\"animation-name:x\" onanimationend=\"alert(1)\"></a>!\n</div>\n<script type=\"application/javascript\">\n window.addEventListener('DOMContentLoaded', (event) => {\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\n });\n</script>"
VAR_2 = BeautifulSoup(VAR_4, "html.parser")
VAR_3 = UnicornTemplateResponse._desoupify(VAR_2)
assert VAR_0 == VAR_3
| [
3,
5,
6,
9,
13,
15,
16,
19,
23,
25,
26,
29,
33,
35,
36,
39,
42,
45,
47
] | [
3,
8,
9,
12,
16,
18,
19,
22,
26,
28,
29,
32,
36,
38,
39,
42,
45,
48,
50,
51,
55,
57,
59,
61
] |
1CWE-79
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#**
#
#########
# trape #
#########
#
# trape depends of this file
# For full copyright information this visit: https://github.com/boxug/trape
#
# Copyright 2017 by boxug / <hey@boxug.com>
#**
import urllib2
from flask import Flask, render_template, session, request, json
from core.trape import Trape
from core.db import Database
# Main parts, to generate relationships among others
trape = Trape()
app = Flask(__name__, template_folder='../templates', static_folder='../static')
# call database
db = Database()
# preview header tool in console
trape.header()
@app.route("/" + trape.stats_path)
def index():
return render_template("/login.html")
@app.route("/logout")
def logout():
return render_template("/login.html")
@app.route("/login", methods=["POST"])
def login():
id = request.form['id']
if id == trape.stats_key:
return json.dumps({'status':'OK', 'path' : trape.home_path, 'victim_path' : trape.victim_path, 'url_to_clone' : trape.url_to_clone, 'app_port' : trape.app_port, 'date_start' : trape.date_start, 'user_ip' : '127.0.0.1'});
else:
return json.dumps({'status':'NOPE', 'path' : '/'});
@app.route("/get_data", methods=["POST"])
def home_get_dat():
d = db.sentences_stats('get_data')
n = db.sentences_stats('all_networks')
('clean_online')
rows = db.sentences_stats('get_clicks')
c = rows[0][0]
rows = db.sentences_stats('get_sessions')
s = rows[0][0]
rows = db.sentences_stats('get_online')
o = rows[0][0]
return json.dumps({'status' : 'OK', 'd' : d, 'n' : n, 'c' : c, 's' : s, 'o' : o});
@app.route("/get_preview", methods=["POST"])
def home_get_preview():
vId = request.form['vId']
d = db.sentences_stats('get_preview', vId)
n = db.sentences_stats('id_networks', vId)
return json.dumps({'status' : 'OK', 'vId' : vId, 'd' : d, 'n' : n});
@app.route("/get_title", methods=["POST"])
def home_get_title():
opener = urllib2.build_opener()
html = opener.open(trape.url_to_clone).read()
html = html[html.find('<title>') + 7 : html.find('</title>')]
return json.dumps({'status' : 'OK', 'title' : html});
@app.route("/get_requests", methods=["POST"])
def home_get_requests():
d = db.sentences_stats('get_requests')
return json.dumps({'status' : 'OK', 'd' : d}); | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#**
#
#########
# trape #
#########
#
# trape depends of this file
# For full copyright information this visit: https://github.com/boxug/trape
#
# Copyright 2017 by boxug / <hey@boxug.com>
#**
import urllib2
from flask import Flask, render_template, session, request, json
from core.trape import Trape
from core.db import Database
# Main parts, to generate relationships among others
trape = Trape()
app = Flask(__name__, template_folder='../templates', static_folder='../static')
# call database
db = Database()
# preview header tool in console
trape.header()
@app.route("/" + trape.stats_path)
def index():
return render_template("/login.html")
@app.route("/logout")
def logout():
return render_template("/login.html")
@app.route("/login", methods=["POST"])
def login():
id = request.form['id']
if id == trape.stats_key:
return json.dumps({'status':'OK', 'path' : trape.home_path, 'victim_path' : trape.victim_path, 'url_to_clone' : trape.url_to_clone, 'app_port' : trape.app_port, 'date_start' : trape.date_start, 'user_ip' : '127.0.0.1'});
else:
return json.dumps({'status':'NOPE', 'path' : '/'});
@app.route("/get_data", methods=["POST"])
def home_get_dat():
d = db.sentences_stats('get_data')
n = db.sentences_stats('all_networks')
rows = db.sentences_stats('get_clicks')
c = rows[0][0]
rows = db.sentences_stats('get_sessions')
s = rows[0][0]
vId = ('online', )
rows = db.sentences_stats('get_online', vId)
o = rows[0][0]
return json.dumps({'status' : 'OK', 'd' : d, 'n' : n, 'c' : c, 's' : s, 'o' : o});
@app.route("/get_preview", methods=["POST"])
def home_get_preview():
vId = request.form['vId']
t = (vId,)
d = db.sentences_stats('get_preview', t)
n = db.sentences_stats('id_networks', t)
return json.dumps({'status' : 'OK', 'vId' : vId, 'd' : d, 'n' : n});
@app.route("/get_title", methods=["POST"])
def home_get_title():
opener = urllib2.build_opener()
html = opener.open(trape.url_to_clone).read()
html = html[html.find('<title>') + 7 : html.find('</title>')]
return json.dumps({'status' : 'OK', 'title' : html});
@app.route("/get_requests", methods=["POST"])
def home_get_requests():
d = db.sentences_stats('get_requests')
return json.dumps({'status' : 'OK', 'd' : d}); | xss | {
"code": [
" ('clean_online')",
" rows = db.sentences_stats('get_online')",
" d = db.sentences_stats('get_preview', vId)",
" n = db.sentences_stats('id_networks', vId)"
],
"line_no": [
50,
55,
63,
64
]
} | {
"code": [
" vId = ('online', )",
" rows = db.sentences_stats('get_online', vId)",
" t = (vId,)",
" d = db.sentences_stats('get_preview', t)",
" n = db.sentences_stats('id_networks', t)"
],
"line_no": [
54,
55,
63,
64,
65
]
} |
import urllib2
from flask import Flask, render_template, session, request, json
from core.trape import Trape
from core.db import Database
VAR_0 = Trape()
VAR_1 = Flask(__name__, template_folder='../templates', static_folder='../static')
VAR_2 = Database()
VAR_0.header()
@VAR_1.route("/" + VAR_0.stats_path)
def FUNC_0():
return render_template("/FUNC_2.html")
@VAR_1.route("/logout")
def FUNC_1():
return render_template("/FUNC_2.html")
@VAR_1.route("/login", methods=["POST"])
def FUNC_2():
VAR_3 = request.form['id']
if VAR_3 == VAR_0.stats_key:
return json.dumps({'status':'OK', 'path' : VAR_0.home_path, 'victim_path' : VAR_0.victim_path, 'url_to_clone' : VAR_0.url_to_clone, 'app_port' : VAR_0.app_port, 'date_start' : VAR_0.date_start, 'user_ip' : '127.0.0.1'});
else:
return json.dumps({'status':'NOPE', 'path' : '/'});
@VAR_1.route("/get_data", methods=["POST"])
def FUNC_3():
VAR_4 = VAR_2.sentences_stats('get_data')
VAR_5 = VAR_2.sentences_stats('all_networks')
('clean_online')
VAR_6 = VAR_2.sentences_stats('get_clicks')
VAR_7 = VAR_6[0][0]
VAR_6 = VAR_2.sentences_stats('get_sessions')
VAR_8 = VAR_6[0][0]
VAR_6 = VAR_2.sentences_stats('get_online')
VAR_9 = VAR_6[0][0]
return json.dumps({'status' : 'OK', 'd' : VAR_4, 'n' : VAR_5, 'c' : VAR_7, 's' : VAR_8, 'o' : VAR_9});
@VAR_1.route("/get_preview", methods=["POST"])
def FUNC_4():
VAR_10 = request.form['vId']
VAR_4 = VAR_2.sentences_stats('get_preview', VAR_10)
VAR_5 = VAR_2.sentences_stats('id_networks', VAR_10)
return json.dumps({'status' : 'OK', 'vId' : VAR_10, 'd' : VAR_4, 'n' : VAR_5});
@VAR_1.route("/get_title", methods=["POST"])
def FUNC_5():
VAR_11 = urllib2.build_opener()
VAR_12 = VAR_11.open(VAR_0.url_to_clone).read()
VAR_12 = html[VAR_12.find('<title>') + 7 : VAR_12.find('</title>')]
return json.dumps({'status' : 'OK', 'title' : VAR_12});
@VAR_1.route("/get_requests", methods=["POST"])
def FUNC_6():
VAR_4 = VAR_2.sentences_stats('get_requests')
return json.dumps({'status' : 'OK', 'd' : VAR_4}); |
import urllib2
from flask import Flask, render_template, session, request, json
from core.trape import Trape
from core.db import Database
VAR_0 = Trape()
VAR_1 = Flask(__name__, template_folder='../templates', static_folder='../static')
VAR_2 = Database()
VAR_0.header()
@VAR_1.route("/" + VAR_0.stats_path)
def FUNC_0():
return render_template("/FUNC_2.html")
@VAR_1.route("/logout")
def FUNC_1():
return render_template("/FUNC_2.html")
@VAR_1.route("/login", methods=["POST"])
def FUNC_2():
VAR_3 = request.form['id']
if VAR_3 == VAR_0.stats_key:
return json.dumps({'status':'OK', 'path' : VAR_0.home_path, 'victim_path' : VAR_0.victim_path, 'url_to_clone' : VAR_0.url_to_clone, 'app_port' : VAR_0.app_port, 'date_start' : VAR_0.date_start, 'user_ip' : '127.0.0.1'});
else:
return json.dumps({'status':'NOPE', 'path' : '/'});
@VAR_1.route("/get_data", methods=["POST"])
def FUNC_3():
VAR_4 = VAR_2.sentences_stats('get_data')
VAR_5 = VAR_2.sentences_stats('all_networks')
VAR_6 = VAR_2.sentences_stats('get_clicks')
VAR_7 = VAR_6[0][0]
VAR_6 = VAR_2.sentences_stats('get_sessions')
VAR_8 = VAR_6[0][0]
VAR_9 = ('online', )
VAR_6 = VAR_2.sentences_stats('get_online', VAR_9)
VAR_10 = VAR_6[0][0]
return json.dumps({'status' : 'OK', 'd' : VAR_4, 'n' : VAR_5, 'c' : VAR_7, 's' : VAR_8, 'o' : VAR_10});
@VAR_1.route("/get_preview", methods=["POST"])
def FUNC_4():
VAR_9 = request.form['vId']
VAR_11 = (VAR_9,)
VAR_4 = VAR_2.sentences_stats('get_preview', VAR_11)
VAR_5 = VAR_2.sentences_stats('id_networks', VAR_11)
return json.dumps({'status' : 'OK', 'vId' : VAR_9, 'd' : VAR_4, 'n' : VAR_5});
@VAR_1.route("/get_title", methods=["POST"])
def FUNC_5():
VAR_12 = urllib2.build_opener()
VAR_13 = VAR_12.open(VAR_0.url_to_clone).read()
VAR_13 = html[VAR_13.find('<title>') + 7 : VAR_13.find('</title>')]
return json.dumps({'status' : 'OK', 'title' : VAR_13});
@VAR_1.route("/get_requests", methods=["POST"])
def FUNC_6():
VAR_4 = VAR_2.sentences_stats('get_requests')
return json.dumps({'status' : 'OK', 'd' : VAR_4}); | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
18,
19,
22,
23,
25,
26,
28,
32,
36,
44,
49,
57,
59,
66,
73,
77
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
18,
19,
22,
23,
25,
26,
28,
32,
36,
44,
49,
57,
59,
67,
74,
78
] |
0CWE-22
| # pylint: disable=R1732
import io
import os
from ..InputSource import InputSource
from ..messages import *
from .main import scriptPath
from .status import splitStatus
class DataFileRequester:
def __init__(self, type=None, fallback=None):
self.type = type
if self.type not in ("readonly", "latest"):
raise Exception(f"Bad value for DataFileRequester.type, got '{type}'.")
# fallback is another requester, used if the main one fails.
self.fallback = fallback
def fetch(self, *segs, **kwargs):
str = kwargs.get("str", False)
okayToFail = kwargs.get("okayToFail", False)
fileType = kwargs.get("type", self.type)
location = self._buildPath(segs=segs, fileType=fileType)
try:
if str:
with open(location, encoding="utf-8") as fh:
return fh.read()
else:
return open(location, encoding="utf-8")
except OSError:
if self.fallback:
try:
return self.fallback.fetch(*segs, str=str, okayToFail=okayToFail)
except OSError:
return self._fail(location, str, okayToFail)
return self._fail(location, str, okayToFail)
def walkFiles(self, *segs, **kwargs):
fileType = kwargs.get("type", self.type)
for _, _, files in os.walk(self._buildPath(segs, fileType=fileType)):
yield from files
def _buildPath(self, segs, fileType=None):
if fileType is None:
fileType = self.type
if fileType == "readonly":
return scriptPath("spec-data", "readonly", *segs)
else:
return scriptPath("spec-data", *segs)
def _fail(self, location, str, okayToFail):
if okayToFail:
if str:
return ""
else:
return io.StringIO("")
raise OSError(f"Couldn't find file '{location}'")
defaultRequester = DataFileRequester(
type="latest", fallback=DataFileRequester(type="readonly")
)
def retrieveBoilerplateFile(doc, name, group=None, status=None, error=True):
# Looks in three or four locations, in order:
# the folder the spec source is in, the group's boilerplate folder, the megagroup's boilerplate folder, and the generic boilerplate folder.
# In each location, it first looks for the file specialized on status, and then for the generic file.
# Filenames must be of the format NAME.include or NAME-STATUS.include
if group is None and doc.md.group is not None:
group = doc.md.group.lower()
if status is None:
if doc.md.status is not None:
status = doc.md.status
elif doc.md.rawStatus is not None:
status = doc.md.rawStatus
megaGroup, status = splitStatus(status)
searchLocally = doc.md.localBoilerplate[name]
def boilerplatePath(*segs):
return scriptPath("boilerplate", *segs)
statusFile = f"{name}-{status}.include"
genericFile = f"{name}.include"
sources = []
if searchLocally:
sources.append(doc.inputSource.relative(statusFile)) # Can be None.
sources.append(doc.inputSource.relative(genericFile))
else:
for f in (statusFile, genericFile):
if doc.inputSource.cheaplyExists(f):
warn(
(
"Found {0} next to the specification without a matching\n"
+ "Local Boilerplate: {1} yes\n"
+ "in the metadata. This include won't be found when building via a URL."
).format(f, name)
)
# We should remove this after giving specs time to react to the warning:
sources.append(doc.inputSource.relative(f))
if group:
sources.append(InputSource(boilerplatePath(group, statusFile)))
sources.append(InputSource(boilerplatePath(group, genericFile)))
if megaGroup:
sources.append(InputSource(boilerplatePath(megaGroup, statusFile)))
sources.append(InputSource(boilerplatePath(megaGroup, genericFile)))
sources.append(InputSource(boilerplatePath(statusFile)))
sources.append(InputSource(boilerplatePath(genericFile)))
# Watch all the possible sources, not just the one that got used, because if
# an earlier one appears, we want to rebuild.
doc.recordDependencies(*sources)
for source in sources:
if source is not None:
try:
return source.read().content
except OSError:
# That input doesn't exist.
pass
else:
if error:
die(
"Couldn't find an appropriate include file for the {0} inclusion, given group='{1}' and status='{2}'.",
name,
group,
status,
)
return ""
| # pylint: disable=R1732
import io
import os
from ..InputSource import InputSource
from ..messages import *
from .main import scriptPath
from .status import splitStatus
class DataFileRequester:
def __init__(self, type=None, fallback=None):
self.type = type
if self.type not in ("readonly", "latest"):
raise Exception(f"Bad value for DataFileRequester.type, got '{type}'.")
# fallback is another requester, used if the main one fails.
self.fallback = fallback
def fetch(self, *segs, **kwargs):
str = kwargs.get("str", False)
okayToFail = kwargs.get("okayToFail", False)
fileType = kwargs.get("type", self.type)
location = self._buildPath(segs=segs, fileType=fileType)
try:
if str:
with open(location, encoding="utf-8") as fh:
return fh.read()
else:
return open(location, encoding="utf-8")
except OSError:
if self.fallback:
try:
return self.fallback.fetch(*segs, str=str, okayToFail=okayToFail)
except OSError:
return self._fail(location, str, okayToFail)
return self._fail(location, str, okayToFail)
def walkFiles(self, *segs, **kwargs):
fileType = kwargs.get("type", self.type)
for _, _, files in os.walk(self._buildPath(segs, fileType=fileType)):
yield from files
def _buildPath(self, segs, fileType=None):
if fileType is None:
fileType = self.type
if fileType == "readonly":
return scriptPath("spec-data", "readonly", *segs)
else:
return scriptPath("spec-data", *segs)
def _fail(self, location, str, okayToFail):
if okayToFail:
if str:
return ""
else:
return io.StringIO("")
raise OSError(f"Couldn't find file '{location}'")
defaultRequester = DataFileRequester(
type="latest", fallback=DataFileRequester(type="readonly")
)
def retrieveBoilerplateFile(doc, name, group=None, status=None, error=True, allowLocal=True):
# Looks in three or four locations, in order:
# the folder the spec source is in, the group's boilerplate folder, the megagroup's boilerplate folder, and the generic boilerplate folder.
# In each location, it first looks for the file specialized on status, and then for the generic file.
# Filenames must be of the format NAME.include or NAME-STATUS.include
if group is None and doc.md.group is not None:
group = doc.md.group.lower()
if status is None:
if doc.md.status is not None:
status = doc.md.status
elif doc.md.rawStatus is not None:
status = doc.md.rawStatus
megaGroup, status = splitStatus(status)
searchLocally = allowLocal and doc.md.localBoilerplate[name]
def boilerplatePath(*segs):
return scriptPath("boilerplate", *segs)
statusFile = f"{name}-{status}.include"
genericFile = f"{name}.include"
sources = []
if searchLocally:
sources.append(doc.inputSource.relative(statusFile)) # Can be None.
sources.append(doc.inputSource.relative(genericFile))
else:
for f in (statusFile, genericFile):
if doc.inputSource.cheaplyExists(f):
warn(
(
"Found {0} next to the specification without a matching\n"
+ "Local Boilerplate: {1} yes\n"
+ "in the metadata. This include won't be found when building via a URL."
).format(f, name)
)
# We should remove this after giving specs time to react to the warning:
sources.append(doc.inputSource.relative(f))
if group:
sources.append(InputSource(boilerplatePath(group, statusFile), chroot=False))
sources.append(InputSource(boilerplatePath(group, genericFile), chroot=False))
if megaGroup:
sources.append(InputSource(boilerplatePath(megaGroup, statusFile), chroot=False))
sources.append(InputSource(boilerplatePath(megaGroup, genericFile), chroot=False))
sources.append(InputSource(boilerplatePath(statusFile), chroot=False))
sources.append(InputSource(boilerplatePath(genericFile), chroot=False))
# Watch all the possible sources, not just the one that got used, because if
# an earlier one appears, we want to rebuild.
doc.recordDependencies(*sources)
for source in sources:
if source is not None:
try:
return source.read().content
except OSError:
# That input doesn't exist.
pass
else:
if error:
die(
"Couldn't find an appropriate include file for the {0} inclusion, given group='{1}' and status='{2}'.",
name,
group,
status,
)
return ""
| path_disclosure | {
"code": [
"def retrieveBoilerplateFile(doc, name, group=None, status=None, error=True):",
" searchLocally = doc.md.localBoilerplate[name]",
" sources.append(InputSource(boilerplatePath(group, statusFile)))",
" sources.append(InputSource(boilerplatePath(group, genericFile)))",
" sources.append(InputSource(boilerplatePath(megaGroup, statusFile)))",
" sources.append(InputSource(boilerplatePath(megaGroup, genericFile)))",
" sources.append(InputSource(boilerplatePath(statusFile)))",
" sources.append(InputSource(boilerplatePath(genericFile)))"
],
"line_no": [
66,
80,
104,
105,
107,
108,
109,
110
]
} | {
"code": [
"def retrieveBoilerplateFile(doc, name, group=None, status=None, error=True, allowLocal=True):",
" searchLocally = allowLocal and doc.md.localBoilerplate[name]",
" sources.append(InputSource(boilerplatePath(group, statusFile), chroot=False))",
" sources.append(InputSource(boilerplatePath(group, genericFile), chroot=False))",
" sources.append(InputSource(boilerplatePath(megaGroup, statusFile), chroot=False))",
" sources.append(InputSource(boilerplatePath(megaGroup, genericFile), chroot=False))",
" sources.append(InputSource(boilerplatePath(statusFile), chroot=False))",
" sources.append(InputSource(boilerplatePath(genericFile), chroot=False))"
],
"line_no": [
66,
80,
104,
105,
107,
108,
109,
110
]
} |
import io
import os
from ..InputSource import InputSource
from ..messages import *
from .main import scriptPath
from .status import splitStatus
class CLASS_0:
def __init__(self, VAR_6=None, VAR_7=None):
self.type = VAR_6
if self.type not in ("readonly", "latest"):
raise Exception(f"Bad value for CLASS_0.type, got '{VAR_6}'.")
self.fallback = VAR_7
def FUNC_1(self, *VAR_8, **VAR_9):
VAR_12 = VAR_9.get("str", False)
VAR_13 = VAR_9.get("okayToFail", False)
VAR_10 = VAR_9.get("type", self.type)
VAR_11 = self._buildPath(VAR_8=segs, VAR_10=fileType)
try:
if VAR_12:
with open(VAR_11, encoding="utf-8") as fh:
return fh.read()
else:
return open(VAR_11, encoding="utf-8")
except OSError:
if self.fallback:
try:
return self.fallback.fetch(*VAR_8, VAR_12=str, VAR_13=okayToFail)
except OSError:
return self._fail(VAR_11, VAR_12, VAR_13)
return self._fail(VAR_11, VAR_12, VAR_13)
def FUNC_2(self, *VAR_8, **VAR_9):
VAR_10 = VAR_9.get("type", self.type)
for _, _, files in os.walk(self._buildPath(VAR_8, VAR_10=fileType)):
yield from files
def FUNC_3(self, VAR_8, VAR_10=None):
if VAR_10 is None:
VAR_10 = self.type
if VAR_10 == "readonly":
return scriptPath("spec-data", "readonly", *VAR_8)
else:
return scriptPath("spec-data", *VAR_8)
def FUNC_4(self, VAR_11, VAR_12, VAR_13):
if VAR_13:
if VAR_12:
return ""
else:
return io.StringIO("")
raise OSError(f"Couldn't find file '{VAR_11}'")
VAR_0 = CLASS_0(
VAR_6="latest", VAR_7=CLASS_0(VAR_6="readonly")
)
def FUNC_0(VAR_1, VAR_2, VAR_3=None, VAR_4=None, VAR_5=True):
if VAR_3 is None and VAR_1.md.group is not None:
VAR_3 = VAR_1.md.group.lower()
if VAR_4 is None:
if VAR_1.md.status is not None:
VAR_4 = VAR_1.md.status
elif VAR_1.md.rawStatus is not None:
VAR_4 = VAR_1.md.rawStatus
VAR_14, VAR_4 = splitStatus(VAR_4)
VAR_15 = VAR_1.md.localBoilerplate[VAR_2]
def FUNC_5(*VAR_8):
return scriptPath("boilerplate", *VAR_8)
VAR_16 = f"{VAR_2}-{VAR_4}.include"
VAR_17 = f"{VAR_2}.include"
VAR_18 = []
if VAR_15:
VAR_18.append(VAR_1.inputSource.relative(VAR_16)) # Can be None.
VAR_18.append(VAR_1.inputSource.relative(VAR_17))
else:
for f in (VAR_16, VAR_17):
if VAR_1.inputSource.cheaplyExists(f):
warn(
(
"Found {0} next to the specification without a matching\n"
+ "Local Boilerplate: {1} yes\n"
+ "in the metadata. This include won't be found when building via a URL."
).format(f, VAR_2)
)
VAR_18.append(VAR_1.inputSource.relative(f))
if VAR_3:
VAR_18.append(InputSource(FUNC_5(VAR_3, VAR_16)))
VAR_18.append(InputSource(FUNC_5(VAR_3, VAR_17)))
if VAR_14:
VAR_18.append(InputSource(FUNC_5(VAR_14, VAR_16)))
VAR_18.append(InputSource(FUNC_5(VAR_14, VAR_17)))
VAR_18.append(InputSource(FUNC_5(VAR_16)))
VAR_18.append(InputSource(FUNC_5(VAR_17)))
VAR_1.recordDependencies(*VAR_18)
for source in VAR_18:
if source is not None:
try:
return source.read().content
except OSError:
pass
else:
if VAR_5:
die(
"Couldn't find an appropriate include file for the {0} inclusion, given VAR_3='{1}' and VAR_4='{2}'.",
VAR_2,
VAR_3,
VAR_4,
)
return ""
|
import io
import os
from ..InputSource import InputSource
from ..messages import *
from .main import scriptPath
from .status import splitStatus
class CLASS_0:
def __init__(self, VAR_7=None, VAR_8=None):
self.type = VAR_7
if self.type not in ("readonly", "latest"):
raise Exception(f"Bad value for CLASS_0.type, got '{VAR_7}'.")
self.fallback = VAR_8
def FUNC_1(self, *VAR_9, **VAR_10):
VAR_13 = VAR_10.get("str", False)
VAR_14 = VAR_10.get("okayToFail", False)
VAR_11 = VAR_10.get("type", self.type)
VAR_12 = self._buildPath(VAR_9=segs, VAR_11=fileType)
try:
if VAR_13:
with open(VAR_12, encoding="utf-8") as fh:
return fh.read()
else:
return open(VAR_12, encoding="utf-8")
except OSError:
if self.fallback:
try:
return self.fallback.fetch(*VAR_9, VAR_13=str, VAR_14=okayToFail)
except OSError:
return self._fail(VAR_12, VAR_13, VAR_14)
return self._fail(VAR_12, VAR_13, VAR_14)
def FUNC_2(self, *VAR_9, **VAR_10):
VAR_11 = VAR_10.get("type", self.type)
for _, _, files in os.walk(self._buildPath(VAR_9, VAR_11=fileType)):
yield from files
def FUNC_3(self, VAR_9, VAR_11=None):
if VAR_11 is None:
VAR_11 = self.type
if VAR_11 == "readonly":
return scriptPath("spec-data", "readonly", *VAR_9)
else:
return scriptPath("spec-data", *VAR_9)
def FUNC_4(self, VAR_12, VAR_13, VAR_14):
if VAR_14:
if VAR_13:
return ""
else:
return io.StringIO("")
raise OSError(f"Couldn't find file '{VAR_12}'")
VAR_0 = CLASS_0(
VAR_7="latest", VAR_8=CLASS_0(VAR_7="readonly")
)
def FUNC_0(VAR_1, VAR_2, VAR_3=None, VAR_4=None, VAR_5=True, VAR_6=True):
if VAR_3 is None and VAR_1.md.group is not None:
VAR_3 = VAR_1.md.group.lower()
if VAR_4 is None:
if VAR_1.md.status is not None:
VAR_4 = VAR_1.md.status
elif VAR_1.md.rawStatus is not None:
VAR_4 = VAR_1.md.rawStatus
VAR_15, VAR_4 = splitStatus(VAR_4)
VAR_16 = VAR_6 and VAR_1.md.localBoilerplate[VAR_2]
def FUNC_5(*VAR_9):
return scriptPath("boilerplate", *VAR_9)
VAR_17 = f"{VAR_2}-{VAR_4}.include"
VAR_18 = f"{VAR_2}.include"
VAR_19 = []
if VAR_16:
VAR_19.append(VAR_1.inputSource.relative(VAR_17)) # Can be None.
VAR_19.append(VAR_1.inputSource.relative(VAR_18))
else:
for f in (VAR_17, VAR_18):
if VAR_1.inputSource.cheaplyExists(f):
warn(
(
"Found {0} next to the specification without a matching\n"
+ "Local Boilerplate: {1} yes\n"
+ "in the metadata. This include won't be found when building via a URL."
).format(f, VAR_2)
)
VAR_19.append(VAR_1.inputSource.relative(f))
if VAR_3:
VAR_19.append(InputSource(FUNC_5(VAR_3, VAR_17), chroot=False))
VAR_19.append(InputSource(FUNC_5(VAR_3, VAR_18), chroot=False))
if VAR_15:
VAR_19.append(InputSource(FUNC_5(VAR_15, VAR_17), chroot=False))
VAR_19.append(InputSource(FUNC_5(VAR_15, VAR_18), chroot=False))
VAR_19.append(InputSource(FUNC_5(VAR_17), chroot=False))
VAR_19.append(InputSource(FUNC_5(VAR_18), chroot=False))
VAR_1.recordDependencies(*VAR_19)
for source in VAR_19:
if source is not None:
try:
return source.read().content
except OSError:
pass
else:
if VAR_5:
die(
"Couldn't find an appropriate include file for the {0} inclusion, given VAR_3='{1}' and VAR_4='{2}'.",
VAR_2,
VAR_3,
VAR_4,
)
return ""
| [
1,
2,
5,
10,
11,
17,
19,
38,
43,
51,
59,
60,
64,
65,
67,
68,
69,
70,
79,
81,
84,
101,
111,
112,
113,
115,
121,
132
] | [
1,
2,
5,
10,
11,
17,
19,
38,
43,
51,
59,
60,
64,
65,
67,
68,
69,
70,
79,
81,
84,
101,
111,
112,
113,
115,
121,
132
] |
0CWE-22
| # Copyright 2018-2022 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import json
import mimetypes
import os
import threading
from typing import Any, Dict, Optional, Type, Union
import tornado.web
from streamlit.scriptrunner import get_script_run_ctx
import streamlit.server.routes
from streamlit import type_util
from streamlit.elements.form import current_form_id
from streamlit import util
from streamlit.errors import StreamlitAPIException
from streamlit.logger import get_logger
from streamlit.proto.Components_pb2 import SpecialArg, ArrowTable as ArrowTableProto
from streamlit.proto.Element_pb2 import Element
from streamlit.state import NoValue, register_widget
from streamlit.type_util import to_bytes
LOGGER = get_logger(__name__)
class MarshallComponentException(StreamlitAPIException):
"""Class for exceptions generated during custom component marshalling."""
pass
class CustomComponent:
"""A Custom Component declaration."""
def __init__(
self,
name: str,
path: Optional[str] = None,
url: Optional[str] = None,
):
if (path is None and url is None) or (path is not None and url is not None):
raise StreamlitAPIException(
"Either 'path' or 'url' must be set, but not both."
)
self.name = name
self.path = path
self.url = url
def __repr__(self) -> str:
return util.repr_(self)
@property
def abspath(self) -> Optional[str]:
"""The absolute path that the component is served from."""
if self.path is None:
return None
return os.path.abspath(self.path)
def __call__(
self,
*args,
default: Any = None,
key: Optional[str] = None,
**kwargs,
) -> Any:
"""An alias for create_instance."""
return self.create_instance(*args, default=default, key=key, **kwargs)
def create_instance(
self,
*args,
default: Any = None,
key: Optional[str] = None,
**kwargs,
) -> Any:
"""Create a new instance of the component.
Parameters
----------
*args
Must be empty; all args must be named. (This parameter exists to
enforce correct use of the function.)
default: any or None
The default return value for the component. This is returned when
the component's frontend hasn't yet specified a value with
`setComponentValue`.
key: str or None
If not None, this is the user key we use to generate the
component's "widget ID".
**kwargs
Keyword args to pass to the component.
Returns
-------
any or None
The component's widget value.
"""
if len(args) > 0:
raise MarshallComponentException(f"Argument '{args[0]}' needs a label")
try:
import pyarrow
from streamlit.components.v1 import component_arrow
except ImportError:
raise StreamlitAPIException(
"""To use Custom Components in Streamlit, you need to install
PyArrow. To do so locally:
`pip install pyarrow`
And if you're using Streamlit Cloud, add "pyarrow" to your requirements.txt."""
)
# In addition to the custom kwargs passed to the component, we also
# send the special 'default' and 'key' params to the component
# frontend.
all_args = dict(kwargs, **{"default": default, "key": key})
json_args = {}
special_args = []
for arg_name, arg_val in all_args.items():
if type_util.is_bytes_like(arg_val):
bytes_arg = SpecialArg()
bytes_arg.key = arg_name
bytes_arg.bytes = to_bytes(arg_val)
special_args.append(bytes_arg)
elif type_util.is_dataframe_like(arg_val):
dataframe_arg = SpecialArg()
dataframe_arg.key = arg_name
component_arrow.marshall(dataframe_arg.arrow_dataframe.data, arg_val)
special_args.append(dataframe_arg)
else:
json_args[arg_name] = arg_val
try:
serialized_json_args = json.dumps(json_args)
except BaseException as e:
raise MarshallComponentException(
"Could not convert component args to JSON", e
)
def marshall_component(dg, element: Element) -> Union[Any, Type[NoValue]]:
element.component_instance.component_name = self.name
element.component_instance.form_id = current_form_id(dg)
if self.url is not None:
element.component_instance.url = self.url
# Normally, a widget's element_hash (which determines
# its identity across multiple runs of an app) is computed
# by hashing the entirety of its protobuf. This means that,
# if any of the arguments to the widget are changed, Streamlit
# considers it a new widget instance and it loses its previous
# state.
#
# However! If a *component* has a `key` argument, then the
# component's hash identity is determined by entirely by
# `component_name + url + key`. This means that, when `key`
# exists, the component will maintain its identity even when its
# other arguments change, and the component's iframe won't be
# remounted on the frontend.
#
# So: if `key` is None, we marshall the element's arguments
# *before* computing its widget_ui_value (which creates its hash).
# If `key` is not None, we marshall the arguments *after*.
def marshall_element_args():
element.component_instance.json_args = serialized_json_args
element.component_instance.special_args.extend(special_args)
if key is None:
marshall_element_args()
def deserialize_component(ui_value, widget_id=""):
# ui_value is an object from json, an ArrowTable proto, or a bytearray
return ui_value
ctx = get_script_run_ctx()
component_state = register_widget(
element_type="component_instance",
element_proto=element.component_instance,
user_key=key,
widget_func_name=self.name,
deserializer=deserialize_component,
serializer=lambda x: x,
ctx=ctx,
)
widget_value = component_state.value
if key is not None:
marshall_element_args()
if widget_value is None:
widget_value = default
elif isinstance(widget_value, ArrowTableProto):
widget_value = component_arrow.arrow_proto_to_dataframe(widget_value)
# widget_value will be either None or whatever the component's most
# recent setWidgetValue value is. We coerce None -> NoValue,
# because that's what DeltaGenerator._enqueue expects.
return widget_value if widget_value is not None else NoValue
# We currently only support writing to st._main, but this will change
# when we settle on an improved API in a post-layout world.
dg = streamlit._main
element = Element()
return_value = marshall_component(dg, element)
result = dg._enqueue(
"component_instance", element.component_instance, return_value
)
return result
def __eq__(self, other) -> bool:
"""Equality operator."""
return (
isinstance(other, CustomComponent)
and self.name == other.name
and self.path == other.path
and self.url == other.url
)
def __ne__(self, other) -> bool:
"""Inequality operator."""
return not self == other
def __str__(self) -> str:
return f"'{self.name}': {self.path if self.path is not None else self.url}"
def declare_component(
name: str,
path: Optional[str] = None,
url: Optional[str] = None,
) -> CustomComponent:
"""Create and register a custom component.
Parameters
----------
name: str
A short, descriptive name for the component. Like, "slider".
path: str or None
The path to serve the component's frontend files from. Either
`path` or `url` must be specified, but not both.
url: str or None
The URL that the component is served from. Either `path` or `url`
must be specified, but not both.
Returns
-------
CustomComponent
A CustomComponent that can be called like a function.
Calling the component will create a new instance of the component
in the Streamlit app.
"""
# Get our stack frame.
current_frame = inspect.currentframe()
assert current_frame is not None
# Get the stack frame of our calling function.
caller_frame = current_frame.f_back
assert caller_frame is not None
# Get the caller's module name. `__name__` gives us the module's
# fully-qualified name, which includes its package.
module = inspect.getmodule(caller_frame)
assert module is not None
module_name = module.__name__
# If the caller was the main module that was executed (that is, if the
# user executed `python my_component.py`), then this name will be
# "__main__" instead of the actual package name. In this case, we use
# the main module's filename, sans `.py` extension, as the component name.
if module_name == "__main__":
file_path = inspect.getfile(caller_frame)
filename = os.path.basename(file_path)
module_name, _ = os.path.splitext(filename)
# Build the component name.
component_name = f"{module_name}.{name}"
# Create our component object, and register it.
component = CustomComponent(name=component_name, path=path, url=url)
ComponentRegistry.instance().register_component(component)
return component
class ComponentRequestHandler(tornado.web.RequestHandler):
def initialize(self, registry: "ComponentRegistry"):
self._registry = registry
def get(self, path: str) -> None:
parts = path.split("/")
component_name = parts[0]
component_root = self._registry.get_component_path(component_name)
if component_root is None:
self.write("not found")
self.set_status(404)
return
filename = "/".join(parts[1:])
abspath = os.path.join(component_root, filename)
LOGGER.debug("ComponentRequestHandler: GET: %s -> %s", path, abspath)
try:
with open(abspath, "rb") as file:
contents = file.read()
except (OSError) as e:
LOGGER.error(f"ComponentRequestHandler: GET {path} read error", exc_info=e)
self.write("read error")
self.set_status(404)
return
self.write(contents)
self.set_header("Content-Type", self.get_content_type(abspath))
self.set_extra_headers(path)
def set_extra_headers(self, path) -> None:
"""Disable cache for HTML files.
Other assets like JS and CSS are suffixed with their hash, so they can
be cached indefinitely.
"""
is_index_url = len(path) == 0
if is_index_url or path.endswith(".html"):
self.set_header("Cache-Control", "no-cache")
else:
self.set_header("Cache-Control", "public")
def set_default_headers(self) -> None:
if streamlit.server.routes.allow_cross_origin_requests():
self.set_header("Access-Control-Allow-Origin", "*")
def options(self) -> None:
"""/OPTIONS handler for preflight CORS checks."""
self.set_status(204)
self.finish()
@staticmethod
def get_content_type(abspath) -> str:
"""Returns the ``Content-Type`` header to be used for this request.
From tornado.web.StaticFileHandler.
"""
mime_type, encoding = mimetypes.guess_type(abspath)
# per RFC 6713, use the appropriate type for a gzip compressed file
if encoding == "gzip":
return "application/gzip"
# As of 2015-07-21 there is no bzip2 encoding defined at
# http://www.iana.org/assignments/media-types/media-types.xhtml
# So for that (and any other encoding), use octet-stream.
elif encoding is not None:
return "application/octet-stream"
elif mime_type is not None:
return mime_type
# if mime_type not detected, use application/octet-stream
else:
return "application/octet-stream"
@staticmethod
def get_url(file_id: str) -> str:
"""Return the URL for a component file with the given ID."""
return "components/{}".format(file_id)
class ComponentRegistry:
_instance_lock: threading.Lock = threading.Lock()
_instance: Optional["ComponentRegistry"] = None
@classmethod
def instance(cls) -> "ComponentRegistry":
"""Returns the singleton ComponentRegistry"""
# We use a double-checked locking optimization to avoid the overhead
# of acquiring the lock in the common case:
# https://en.wikipedia.org/wiki/Double-checked_locking
if cls._instance is None:
with cls._instance_lock:
if cls._instance is None:
cls._instance = ComponentRegistry()
return cls._instance
def __init__(self):
self._components = {} # type: Dict[str, CustomComponent]
self._lock = threading.Lock()
def __repr__(self) -> str:
return util.repr_(self)
def register_component(self, component: CustomComponent) -> None:
"""Register a CustomComponent.
Parameters
----------
component : CustomComponent
The component to register.
"""
# Validate the component's path
abspath = component.abspath
if abspath is not None and not os.path.isdir(abspath):
raise StreamlitAPIException(f"No such component directory: '{abspath}'")
with self._lock:
existing = self._components.get(component.name)
self._components[component.name] = component
if existing is not None and component != existing:
LOGGER.warning(
"%s overriding previously-registered %s",
component,
existing,
)
LOGGER.debug("Registered component %s", component)
def get_component_path(self, name: str) -> Optional[str]:
"""Return the filesystem path for the component with the given name.
If no such component is registered, or if the component exists but is
being served from a URL, return None instead.
"""
component = self._components.get(name, None)
return component.abspath if component is not None else None
| # Copyright 2018-2022 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import json
import mimetypes
import os
import threading
from typing import Any, Dict, Optional, Type, Union
import tornado.web
from streamlit.scriptrunner import get_script_run_ctx
import streamlit.server.routes
from streamlit import type_util
from streamlit.elements.form import current_form_id
from streamlit import util
from streamlit.errors import StreamlitAPIException
from streamlit.logger import get_logger
from streamlit.proto.Components_pb2 import SpecialArg, ArrowTable as ArrowTableProto
from streamlit.proto.Element_pb2 import Element
from streamlit.state import NoValue, register_widget
from streamlit.type_util import to_bytes
LOGGER = get_logger(__name__)
class MarshallComponentException(StreamlitAPIException):
"""Class for exceptions generated during custom component marshalling."""
pass
class CustomComponent:
"""A Custom Component declaration."""
def __init__(
self,
name: str,
path: Optional[str] = None,
url: Optional[str] = None,
):
if (path is None and url is None) or (path is not None and url is not None):
raise StreamlitAPIException(
"Either 'path' or 'url' must be set, but not both."
)
self.name = name
self.path = path
self.url = url
def __repr__(self) -> str:
return util.repr_(self)
@property
def abspath(self) -> Optional[str]:
"""The absolute path that the component is served from."""
if self.path is None:
return None
return os.path.abspath(self.path)
def __call__(
self,
*args,
default: Any = None,
key: Optional[str] = None,
**kwargs,
) -> Any:
"""An alias for create_instance."""
return self.create_instance(*args, default=default, key=key, **kwargs)
def create_instance(
self,
*args,
default: Any = None,
key: Optional[str] = None,
**kwargs,
) -> Any:
"""Create a new instance of the component.
Parameters
----------
*args
Must be empty; all args must be named. (This parameter exists to
enforce correct use of the function.)
default: any or None
The default return value for the component. This is returned when
the component's frontend hasn't yet specified a value with
`setComponentValue`.
key: str or None
If not None, this is the user key we use to generate the
component's "widget ID".
**kwargs
Keyword args to pass to the component.
Returns
-------
any or None
The component's widget value.
"""
if len(args) > 0:
raise MarshallComponentException(f"Argument '{args[0]}' needs a label")
try:
import pyarrow
from streamlit.components.v1 import component_arrow
except ImportError:
raise StreamlitAPIException(
"""To use Custom Components in Streamlit, you need to install
PyArrow. To do so locally:
`pip install pyarrow`
And if you're using Streamlit Cloud, add "pyarrow" to your requirements.txt."""
)
# In addition to the custom kwargs passed to the component, we also
# send the special 'default' and 'key' params to the component
# frontend.
all_args = dict(kwargs, **{"default": default, "key": key})
json_args = {}
special_args = []
for arg_name, arg_val in all_args.items():
if type_util.is_bytes_like(arg_val):
bytes_arg = SpecialArg()
bytes_arg.key = arg_name
bytes_arg.bytes = to_bytes(arg_val)
special_args.append(bytes_arg)
elif type_util.is_dataframe_like(arg_val):
dataframe_arg = SpecialArg()
dataframe_arg.key = arg_name
component_arrow.marshall(dataframe_arg.arrow_dataframe.data, arg_val)
special_args.append(dataframe_arg)
else:
json_args[arg_name] = arg_val
try:
serialized_json_args = json.dumps(json_args)
except BaseException as e:
raise MarshallComponentException(
"Could not convert component args to JSON", e
)
def marshall_component(dg, element: Element) -> Union[Any, Type[NoValue]]:
element.component_instance.component_name = self.name
element.component_instance.form_id = current_form_id(dg)
if self.url is not None:
element.component_instance.url = self.url
# Normally, a widget's element_hash (which determines
# its identity across multiple runs of an app) is computed
# by hashing the entirety of its protobuf. This means that,
# if any of the arguments to the widget are changed, Streamlit
# considers it a new widget instance and it loses its previous
# state.
#
# However! If a *component* has a `key` argument, then the
# component's hash identity is determined by entirely by
# `component_name + url + key`. This means that, when `key`
# exists, the component will maintain its identity even when its
# other arguments change, and the component's iframe won't be
# remounted on the frontend.
#
# So: if `key` is None, we marshall the element's arguments
# *before* computing its widget_ui_value (which creates its hash).
# If `key` is not None, we marshall the arguments *after*.
def marshall_element_args():
element.component_instance.json_args = serialized_json_args
element.component_instance.special_args.extend(special_args)
if key is None:
marshall_element_args()
def deserialize_component(ui_value, widget_id=""):
# ui_value is an object from json, an ArrowTable proto, or a bytearray
return ui_value
ctx = get_script_run_ctx()
component_state = register_widget(
element_type="component_instance",
element_proto=element.component_instance,
user_key=key,
widget_func_name=self.name,
deserializer=deserialize_component,
serializer=lambda x: x,
ctx=ctx,
)
widget_value = component_state.value
if key is not None:
marshall_element_args()
if widget_value is None:
widget_value = default
elif isinstance(widget_value, ArrowTableProto):
widget_value = component_arrow.arrow_proto_to_dataframe(widget_value)
# widget_value will be either None or whatever the component's most
# recent setWidgetValue value is. We coerce None -> NoValue,
# because that's what DeltaGenerator._enqueue expects.
return widget_value if widget_value is not None else NoValue
# We currently only support writing to st._main, but this will change
# when we settle on an improved API in a post-layout world.
dg = streamlit._main
element = Element()
return_value = marshall_component(dg, element)
result = dg._enqueue(
"component_instance", element.component_instance, return_value
)
return result
def __eq__(self, other) -> bool:
"""Equality operator."""
return (
isinstance(other, CustomComponent)
and self.name == other.name
and self.path == other.path
and self.url == other.url
)
def __ne__(self, other) -> bool:
"""Inequality operator."""
return not self == other
def __str__(self) -> str:
return f"'{self.name}': {self.path if self.path is not None else self.url}"
def declare_component(
name: str,
path: Optional[str] = None,
url: Optional[str] = None,
) -> CustomComponent:
"""Create and register a custom component.
Parameters
----------
name: str
A short, descriptive name for the component. Like, "slider".
path: str or None
The path to serve the component's frontend files from. Either
`path` or `url` must be specified, but not both.
url: str or None
The URL that the component is served from. Either `path` or `url`
must be specified, but not both.
Returns
-------
CustomComponent
A CustomComponent that can be called like a function.
Calling the component will create a new instance of the component
in the Streamlit app.
"""
# Get our stack frame.
current_frame = inspect.currentframe()
assert current_frame is not None
# Get the stack frame of our calling function.
caller_frame = current_frame.f_back
assert caller_frame is not None
# Get the caller's module name. `__name__` gives us the module's
# fully-qualified name, which includes its package.
module = inspect.getmodule(caller_frame)
assert module is not None
module_name = module.__name__
# If the caller was the main module that was executed (that is, if the
# user executed `python my_component.py`), then this name will be
# "__main__" instead of the actual package name. In this case, we use
# the main module's filename, sans `.py` extension, as the component name.
if module_name == "__main__":
file_path = inspect.getfile(caller_frame)
filename = os.path.basename(file_path)
module_name, _ = os.path.splitext(filename)
# Build the component name.
component_name = f"{module_name}.{name}"
# Create our component object, and register it.
component = CustomComponent(name=component_name, path=path, url=url)
ComponentRegistry.instance().register_component(component)
return component
class ComponentRequestHandler(tornado.web.RequestHandler):
def initialize(self, registry: "ComponentRegistry"):
self._registry = registry
def get(self, path: str) -> None:
parts = path.split("/")
component_name = parts[0]
component_root = self._registry.get_component_path(component_name)
if component_root is None:
self.write("not found")
self.set_status(404)
return
# follow symlinks to get an accurate normalized path
component_root = os.path.realpath(component_root)
filename = "/".join(parts[1:])
abspath = os.path.realpath(os.path.join(component_root, filename))
# Do NOT expose anything outside of the component root.
if os.path.commonprefix([component_root, abspath]) != component_root:
self.write("forbidden")
self.set_status(403)
return
LOGGER.debug("ComponentRequestHandler: GET: %s -> %s", path, abspath)
try:
with open(abspath, "rb") as file:
contents = file.read()
except (OSError) as e:
LOGGER.error(f"ComponentRequestHandler: GET {path} read error", exc_info=e)
self.write("read error")
self.set_status(404)
return
self.write(contents)
self.set_header("Content-Type", self.get_content_type(abspath))
self.set_extra_headers(path)
def set_extra_headers(self, path) -> None:
"""Disable cache for HTML files.
Other assets like JS and CSS are suffixed with their hash, so they can
be cached indefinitely.
"""
is_index_url = len(path) == 0
if is_index_url or path.endswith(".html"):
self.set_header("Cache-Control", "no-cache")
else:
self.set_header("Cache-Control", "public")
def set_default_headers(self) -> None:
if streamlit.server.routes.allow_cross_origin_requests():
self.set_header("Access-Control-Allow-Origin", "*")
def options(self) -> None:
"""/OPTIONS handler for preflight CORS checks."""
self.set_status(204)
self.finish()
@staticmethod
def get_content_type(abspath) -> str:
"""Returns the ``Content-Type`` header to be used for this request.
From tornado.web.StaticFileHandler.
"""
mime_type, encoding = mimetypes.guess_type(abspath)
# per RFC 6713, use the appropriate type for a gzip compressed file
if encoding == "gzip":
return "application/gzip"
# As of 2015-07-21 there is no bzip2 encoding defined at
# http://www.iana.org/assignments/media-types/media-types.xhtml
# So for that (and any other encoding), use octet-stream.
elif encoding is not None:
return "application/octet-stream"
elif mime_type is not None:
return mime_type
# if mime_type not detected, use application/octet-stream
else:
return "application/octet-stream"
@staticmethod
def get_url(file_id: str) -> str:
"""Return the URL for a component file with the given ID."""
return "components/{}".format(file_id)
class ComponentRegistry:
_instance_lock: threading.Lock = threading.Lock()
_instance: Optional["ComponentRegistry"] = None
@classmethod
def instance(cls) -> "ComponentRegistry":
"""Returns the singleton ComponentRegistry"""
# We use a double-checked locking optimization to avoid the overhead
# of acquiring the lock in the common case:
# https://en.wikipedia.org/wiki/Double-checked_locking
if cls._instance is None:
with cls._instance_lock:
if cls._instance is None:
cls._instance = ComponentRegistry()
return cls._instance
def __init__(self):
self._components = {} # type: Dict[str, CustomComponent]
self._lock = threading.Lock()
def __repr__(self) -> str:
return util.repr_(self)
def register_component(self, component: CustomComponent) -> None:
"""Register a CustomComponent.
Parameters
----------
component : CustomComponent
The component to register.
"""
# Validate the component's path
abspath = component.abspath
if abspath is not None and not os.path.isdir(abspath):
raise StreamlitAPIException(f"No such component directory: '{abspath}'")
with self._lock:
existing = self._components.get(component.name)
self._components[component.name] = component
if existing is not None and component != existing:
LOGGER.warning(
"%s overriding previously-registered %s",
component,
existing,
)
LOGGER.debug("Registered component %s", component)
def get_component_path(self, name: str) -> Optional[str]:
"""Return the filesystem path for the component with the given name.
If no such component is registered, or if the component exists but is
being served from a URL, return None instead.
"""
component = self._components.get(name, None)
return component.abspath if component is not None else None
| path_disclosure | {
"code": [
" abspath = os.path.join(component_root, filename)"
],
"line_no": [
320
]
} | {
"code": [
" component_root = os.path.realpath(component_root)",
" abspath = os.path.realpath(os.path.join(component_root, filename))",
" if os.path.commonprefix([component_root, abspath]) != component_root:",
" self.write(\"forbidden\")",
" self.set_status(403)",
" return"
],
"line_no": [
320,
322,
325,
326,
327,
328
]
} |
import inspect
import json
import mimetypes
import os
import threading
from typing import Any, Dict, Optional, Type, Union
import tornado.web
from streamlit.scriptrunner import .get_script_run_ctx
import streamlit.server.routes
from streamlit import type_util
from streamlit.elements.form import current_form_id
from streamlit import util
from streamlit.errors import StreamlitAPIException
from streamlit.logger import .get_logger
from streamlit.proto.Components_pb2 import SpecialArg, ArrowTable as ArrowTableProto
from streamlit.proto.Element_pb2 import Element
from streamlit.state import NoValue, register_widget
from streamlit.type_util import to_bytes
VAR_0 = get_logger(__name__)
class CLASS_0(StreamlitAPIException):
pass
class CLASS_1:
def __init__(
self,
VAR_1: str,
VAR_2: Optional[str] = None,
VAR_3: Optional[str] = None,
):
if (VAR_2 is None and VAR_3 is None) or (VAR_2 is not None and VAR_3 is not None):
raise StreamlitAPIException(
"Either 'path' or 'url' must be set, but not both."
)
self.name = VAR_1
self.path = VAR_2
self.url = VAR_3
def __repr__(self) -> str:
return util.repr_(self)
@property
def VAR_16(self) -> Optional[str]:
if self.path is None:
return None
return os.path.abspath(self.path)
def __call__(
self,
*VAR_4,
VAR_5: Any = None,
VAR_6: Optional[str] = None,
**VAR_7,
) -> Any:
return self.create_instance(*VAR_4, VAR_5=default, VAR_6=key, **VAR_7)
def FUNC_2(
self,
*VAR_4,
VAR_5: Any = None,
VAR_6: Optional[str] = None,
**VAR_7,
) -> Any:
if len(VAR_4) > 0:
raise CLASS_0(f"Argument '{VAR_4[0]}' needs a label")
try:
import pyarrow
from streamlit.components.v1 import .component_arrow
except ImportError:
raise StreamlitAPIException(
"""To use Custom Components in Streamlit, you need to install
PyArrow. To do so locally:
`pip install pyarrow`
And if you're using Streamlit Cloud, add "pyarrow" to your requirements.txt."""
)
VAR_19 = dict(VAR_7, **{"default": VAR_5, "key": VAR_6})
VAR_20 = {}
VAR_21 = []
for VAR_44, arg_val in VAR_19.items():
if type_util.is_bytes_like(arg_val):
VAR_41 = SpecialArg()
VAR_41.key = VAR_44
VAR_41.bytes = to_bytes(arg_val)
VAR_21.append(VAR_41)
elif type_util.is_dataframe_like(arg_val):
VAR_43 = SpecialArg()
VAR_43.key = VAR_44
component_arrow.marshall(VAR_43.arrow_dataframe.data, arg_val)
VAR_21.append(VAR_43)
else:
VAR_20[VAR_44] = arg_val
try:
VAR_34 = json.dumps(VAR_20)
except BaseException as e:
raise CLASS_0(
"Could not convert VAR_14 VAR_4 to JSON", e
)
def FUNC_13(VAR_22, VAR_23: Element) -> Union[Any, Type[NoValue]]:
VAR_23.component_instance.component_name = self.name
VAR_23.component_instance.form_id = current_form_id(VAR_22)
if self.url is not None:
VAR_23.component_instance.url = self.url
def FUNC_14():
VAR_23.component_instance.json_args = VAR_34
VAR_23.component_instance.special_args.extend(VAR_21)
if VAR_6 is None:
FUNC_14()
def FUNC_15(VAR_35, VAR_36=""):
return VAR_35
VAR_37 = get_script_run_ctx()
VAR_38 = register_widget(
element_type="component_instance",
element_proto=VAR_23.component_instance,
user_key=VAR_6,
widget_func_name=self.name,
deserializer=FUNC_15,
serializer=lambda x: x,
VAR_37=ctx,
)
VAR_39 = VAR_38.value
if VAR_6 is not None:
FUNC_14()
if VAR_39 is None:
VAR_39 = VAR_5
elif isinstance(VAR_39, ArrowTableProto):
VAR_39 = component_arrow.arrow_proto_to_dataframe(VAR_39)
return VAR_39 if VAR_39 is not None else NoValue
VAR_22 = streamlit._main
VAR_23 = Element()
VAR_24 = FUNC_13(VAR_22, VAR_23)
VAR_25 = VAR_22._enqueue(
"component_instance", VAR_23.component_instance, VAR_24
)
return VAR_25
def __eq__(self, VAR_8) -> bool:
return (
isinstance(VAR_8, CLASS_1)
and self.name == VAR_8.name
and self.path == VAR_8.path
and self.url == VAR_8.url
)
def __ne__(self, VAR_8) -> bool:
return not self == VAR_8
def __str__(self) -> str:
return f"'{self.name}': {self.path if self.path is not None else self.url}"
def FUNC_0(
VAR_1: str,
VAR_2: Optional[str] = None,
VAR_3: Optional[str] = None,
) -> CLASS_1:
VAR_9 = inspect.currentframe()
assert VAR_9 is not None
VAR_10 = VAR_9.f_back
assert VAR_10 is not None
VAR_11 = inspect.getmodule(VAR_10)
assert VAR_11 is not None
VAR_12 = VAR_11.__name__
if VAR_12 == "__main__":
VAR_26 = inspect.getfile(VAR_10)
VAR_27 = os.path.basename(VAR_26)
VAR_12, VAR_28 = os.path.splitext(VAR_27)
VAR_13 = f"{VAR_12}.{VAR_1}"
VAR_14 = CLASS_1(VAR_1=VAR_13, VAR_2=path, VAR_3=url)
CLASS_3.instance().register_component(VAR_14)
return VAR_14
class CLASS_2(tornado.web.RequestHandler):
def FUNC_3(self, VAR_15: "ComponentRegistry"):
self._registry = VAR_15
def FUNC_4(self, VAR_2: str) -> None:
VAR_29 = VAR_2.split("/")
VAR_13 = VAR_29[0]
VAR_30 = self._registry.get_component_path(VAR_13)
if VAR_30 is None:
self.write("not found")
self.set_status(404)
return
VAR_27 = "/".join(VAR_29[1:])
VAR_16 = os.path.join(VAR_30, VAR_27)
VAR_0.debug("ComponentRequestHandler: GET: %s -> %s", VAR_2, VAR_16)
try:
with open(VAR_16, "rb") as file:
VAR_42 = file.read()
except (OSError) as e:
VAR_0.error(f"ComponentRequestHandler: GET {VAR_2} read error", exc_info=e)
self.write("read error")
self.set_status(404)
return
self.write(VAR_42)
self.set_header("Content-Type", self.get_content_type(VAR_16))
self.set_extra_headers(VAR_2)
def FUNC_5(self, VAR_2) -> None:
VAR_31 = len(VAR_2) == 0
if VAR_31 or VAR_2.endswith(".html"):
self.set_header("Cache-Control", "no-cache")
else:
self.set_header("Cache-Control", "public")
def FUNC_6(self) -> None:
if streamlit.server.routes.allow_cross_origin_requests():
self.set_header("Access-Control-Allow-Origin", "*")
def FUNC_7(self) -> None:
self.set_status(204)
self.finish()
@staticmethod
def FUNC_8(VAR_16) -> str:
VAR_32, VAR_33 = mimetypes.guess_type(VAR_16)
if VAR_33 == "gzip":
return "application/gzip"
elif VAR_33 is not None:
return "application/octet-stream"
elif VAR_32 is not None:
return VAR_32
else:
return "application/octet-stream"
@staticmethod
def FUNC_9(VAR_17: str) -> str:
return "components/{}".format(VAR_17)
class CLASS_3:
_instance_lock: threading.Lock = threading.Lock()
_instance: Optional["ComponentRegistry"] = None
@classmethod
def FUNC_10(VAR_18) -> "ComponentRegistry":
if VAR_18._instance is None:
with VAR_18._instance_lock:
if VAR_18._instance is None:
VAR_18._instance = CLASS_3()
return VAR_18._instance
def __init__(self):
self._components = {} # type: Dict[str, CLASS_1]
self._lock = threading.Lock()
def __repr__(self) -> str:
return util.repr_(self)
def FUNC_11(self, VAR_14: CLASS_1) -> None:
VAR_16 = VAR_14.abspath
if VAR_16 is not None and not os.path.isdir(VAR_16):
raise StreamlitAPIException(f"No such VAR_14 directory: '{VAR_16}'")
with self._lock:
VAR_40 = self._components.get(VAR_14.name)
self._components[VAR_14.name] = VAR_14
if VAR_40 is not None and VAR_14 != VAR_40:
VAR_0.warning(
"%s overriding previously-registered %s",
VAR_14,
VAR_40,
)
VAR_0.debug("Registered VAR_14 %s", VAR_14)
def FUNC_12(self, VAR_1: str) -> Optional[str]:
VAR_14 = self._components.get(VAR_1, None)
return VAR_14.abspath if VAR_14 is not None else None
|
import inspect
import json
import mimetypes
import os
import threading
from typing import Any, Dict, Optional, Type, Union
import tornado.web
from streamlit.scriptrunner import .get_script_run_ctx
import streamlit.server.routes
from streamlit import type_util
from streamlit.elements.form import current_form_id
from streamlit import util
from streamlit.errors import StreamlitAPIException
from streamlit.logger import .get_logger
from streamlit.proto.Components_pb2 import SpecialArg, ArrowTable as ArrowTableProto
from streamlit.proto.Element_pb2 import Element
from streamlit.state import NoValue, register_widget
from streamlit.type_util import to_bytes
VAR_0 = get_logger(__name__)
class CLASS_0(StreamlitAPIException):
pass
class CLASS_1:
def __init__(
self,
VAR_1: str,
VAR_2: Optional[str] = None,
VAR_3: Optional[str] = None,
):
if (VAR_2 is None and VAR_3 is None) or (VAR_2 is not None and VAR_3 is not None):
raise StreamlitAPIException(
"Either 'path' or 'url' must be set, but not both."
)
self.name = VAR_1
self.path = VAR_2
self.url = VAR_3
def __repr__(self) -> str:
return util.repr_(self)
@property
def VAR_16(self) -> Optional[str]:
if self.path is None:
return None
return os.path.abspath(self.path)
def __call__(
self,
*VAR_4,
VAR_5: Any = None,
VAR_6: Optional[str] = None,
**VAR_7,
) -> Any:
return self.create_instance(*VAR_4, VAR_5=default, VAR_6=key, **VAR_7)
def FUNC_2(
self,
*VAR_4,
VAR_5: Any = None,
VAR_6: Optional[str] = None,
**VAR_7,
) -> Any:
if len(VAR_4) > 0:
raise CLASS_0(f"Argument '{VAR_4[0]}' needs a label")
try:
import pyarrow
from streamlit.components.v1 import .component_arrow
except ImportError:
raise StreamlitAPIException(
"""To use Custom Components in Streamlit, you need to install
PyArrow. To do so locally:
`pip install pyarrow`
And if you're using Streamlit Cloud, add "pyarrow" to your requirements.txt."""
)
VAR_19 = dict(VAR_7, **{"default": VAR_5, "key": VAR_6})
VAR_20 = {}
VAR_21 = []
for VAR_44, arg_val in VAR_19.items():
if type_util.is_bytes_like(arg_val):
VAR_41 = SpecialArg()
VAR_41.key = VAR_44
VAR_41.bytes = to_bytes(arg_val)
VAR_21.append(VAR_41)
elif type_util.is_dataframe_like(arg_val):
VAR_43 = SpecialArg()
VAR_43.key = VAR_44
component_arrow.marshall(VAR_43.arrow_dataframe.data, arg_val)
VAR_21.append(VAR_43)
else:
VAR_20[VAR_44] = arg_val
try:
VAR_34 = json.dumps(VAR_20)
except BaseException as e:
raise CLASS_0(
"Could not convert VAR_14 VAR_4 to JSON", e
)
def FUNC_13(VAR_22, VAR_23: Element) -> Union[Any, Type[NoValue]]:
VAR_23.component_instance.component_name = self.name
VAR_23.component_instance.form_id = current_form_id(VAR_22)
if self.url is not None:
VAR_23.component_instance.url = self.url
def FUNC_14():
VAR_23.component_instance.json_args = VAR_34
VAR_23.component_instance.special_args.extend(VAR_21)
if VAR_6 is None:
FUNC_14()
def FUNC_15(VAR_35, VAR_36=""):
return VAR_35
VAR_37 = get_script_run_ctx()
VAR_38 = register_widget(
element_type="component_instance",
element_proto=VAR_23.component_instance,
user_key=VAR_6,
widget_func_name=self.name,
deserializer=FUNC_15,
serializer=lambda x: x,
VAR_37=ctx,
)
VAR_39 = VAR_38.value
if VAR_6 is not None:
FUNC_14()
if VAR_39 is None:
VAR_39 = VAR_5
elif isinstance(VAR_39, ArrowTableProto):
VAR_39 = component_arrow.arrow_proto_to_dataframe(VAR_39)
return VAR_39 if VAR_39 is not None else NoValue
VAR_22 = streamlit._main
VAR_23 = Element()
VAR_24 = FUNC_13(VAR_22, VAR_23)
VAR_25 = VAR_22._enqueue(
"component_instance", VAR_23.component_instance, VAR_24
)
return VAR_25
def __eq__(self, VAR_8) -> bool:
return (
isinstance(VAR_8, CLASS_1)
and self.name == VAR_8.name
and self.path == VAR_8.path
and self.url == VAR_8.url
)
def __ne__(self, VAR_8) -> bool:
return not self == VAR_8
def __str__(self) -> str:
return f"'{self.name}': {self.path if self.path is not None else self.url}"
def FUNC_0(
VAR_1: str,
VAR_2: Optional[str] = None,
VAR_3: Optional[str] = None,
) -> CLASS_1:
VAR_9 = inspect.currentframe()
assert VAR_9 is not None
VAR_10 = VAR_9.f_back
assert VAR_10 is not None
VAR_11 = inspect.getmodule(VAR_10)
assert VAR_11 is not None
VAR_12 = VAR_11.__name__
if VAR_12 == "__main__":
VAR_26 = inspect.getfile(VAR_10)
VAR_27 = os.path.basename(VAR_26)
VAR_12, VAR_28 = os.path.splitext(VAR_27)
VAR_13 = f"{VAR_12}.{VAR_1}"
VAR_14 = CLASS_1(VAR_1=VAR_13, VAR_2=path, VAR_3=url)
CLASS_3.instance().register_component(VAR_14)
return VAR_14
class CLASS_2(tornado.web.RequestHandler):
def FUNC_3(self, VAR_15: "ComponentRegistry"):
self._registry = VAR_15
def FUNC_4(self, VAR_2: str) -> None:
VAR_29 = VAR_2.split("/")
VAR_13 = VAR_29[0]
VAR_30 = self._registry.get_component_path(VAR_13)
if VAR_30 is None:
self.write("not found")
self.set_status(404)
return
VAR_30 = os.path.realpath(VAR_30)
VAR_27 = "/".join(VAR_29[1:])
VAR_16 = os.path.realpath(os.path.join(VAR_30, VAR_27))
if os.path.commonprefix([VAR_30, VAR_16]) != VAR_30:
self.write("forbidden")
self.set_status(403)
return
VAR_0.debug("ComponentRequestHandler: GET: %s -> %s", VAR_2, VAR_16)
try:
with open(VAR_16, "rb") as file:
VAR_42 = file.read()
except (OSError) as e:
VAR_0.error(f"ComponentRequestHandler: GET {VAR_2} read error", exc_info=e)
self.write("read error")
self.set_status(404)
return
self.write(VAR_42)
self.set_header("Content-Type", self.get_content_type(VAR_16))
self.set_extra_headers(VAR_2)
def FUNC_5(self, VAR_2) -> None:
VAR_31 = len(VAR_2) == 0
if VAR_31 or VAR_2.endswith(".html"):
self.set_header("Cache-Control", "no-cache")
else:
self.set_header("Cache-Control", "public")
def FUNC_6(self) -> None:
if streamlit.server.routes.allow_cross_origin_requests():
self.set_header("Access-Control-Allow-Origin", "*")
def FUNC_7(self) -> None:
self.set_status(204)
self.finish()
@staticmethod
def FUNC_8(VAR_16) -> str:
VAR_32, VAR_33 = mimetypes.guess_type(VAR_16)
if VAR_33 == "gzip":
return "application/gzip"
elif VAR_33 is not None:
return "application/octet-stream"
elif VAR_32 is not None:
return VAR_32
else:
return "application/octet-stream"
@staticmethod
def FUNC_9(VAR_17: str) -> str:
return "components/{}".format(VAR_17)
class CLASS_3:
_instance_lock: threading.Lock = threading.Lock()
_instance: Optional["ComponentRegistry"] = None
@classmethod
def FUNC_10(VAR_18) -> "ComponentRegistry":
if VAR_18._instance is None:
with VAR_18._instance_lock:
if VAR_18._instance is None:
VAR_18._instance = CLASS_3()
return VAR_18._instance
def __init__(self):
self._components = {} # type: Dict[str, CLASS_1]
self._lock = threading.Lock()
def __repr__(self) -> str:
return util.repr_(self)
def FUNC_11(self, VAR_14: CLASS_1) -> None:
VAR_16 = VAR_14.abspath
if VAR_16 is not None and not os.path.isdir(VAR_16):
raise StreamlitAPIException(f"No such VAR_14 directory: '{VAR_16}'")
with self._lock:
VAR_40 = self._components.get(VAR_14.name)
self._components[VAR_14.name] = VAR_14
if VAR_40 is not None and VAR_14 != VAR_40:
VAR_0.warning(
"%s overriding previously-registered %s",
VAR_14,
VAR_40,
)
VAR_0.debug("Registered VAR_14 %s", VAR_14)
def FUNC_12(self, VAR_1: str) -> Optional[str]:
VAR_14 = self._components.get(VAR_1, None)
return VAR_14.abspath if VAR_14 is not None else None
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
21,
24,
35,
37,
38,
41,
43,
44,
47,
58,
62,
65,
72,
82,
91,
106,
111,
115,
123,
125,
128,
129,
130,
131,
133,
149,
156,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
184,
187,
189,
191,
203,
206,
211,
212,
213,
214,
216,
217,
218,
220,
226,
228,
237,
241,
244,
245,
252,
263,
270,
272,
273,
276,
277,
280,
281,
282,
286,
287,
288,
289,
290,
295,
296,
298,
299,
302,
304,
305,
309,
318,
321,
323,
332,
335,
337,
340,
345,
350,
354,
359,
366,
369,
370,
371,
376,
379,
384,
385,
389,
393,
394,
395,
401,
405,
408,
411,
417,
418,
422,
426,
433,
435,
438,
444,
40,
46,
251,
252,
253,
254,
255,
256,
257,
258,
259,
260,
261,
262,
263,
264,
265,
266,
267,
268,
269,
270,
271,
68,
80,
90,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100,
101,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
112,
230,
239,
339,
340,
341,
342,
343,
356,
362,
363,
364,
382,
392,
410,
411,
412,
413,
414,
415,
416,
437,
438,
439,
440,
441
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
21,
24,
35,
37,
38,
41,
43,
44,
47,
58,
62,
65,
72,
82,
91,
106,
111,
115,
123,
125,
128,
129,
130,
131,
133,
149,
156,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
184,
187,
189,
191,
203,
206,
211,
212,
213,
214,
216,
217,
218,
220,
226,
228,
237,
241,
244,
245,
252,
263,
270,
272,
273,
276,
277,
280,
281,
282,
286,
287,
288,
289,
290,
295,
296,
298,
299,
302,
304,
305,
309,
318,
319,
323,
324,
329,
331,
340,
343,
345,
348,
353,
358,
362,
367,
374,
377,
378,
379,
384,
387,
392,
393,
397,
401,
402,
403,
409,
413,
416,
419,
425,
426,
430,
434,
441,
443,
446,
452,
40,
46,
251,
252,
253,
254,
255,
256,
257,
258,
259,
260,
261,
262,
263,
264,
265,
266,
267,
268,
269,
270,
271,
68,
80,
90,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100,
101,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
112,
230,
239,
347,
348,
349,
350,
351,
364,
370,
371,
372,
390,
400,
418,
419,
420,
421,
422,
423,
424,
445,
446,
447,
448,
449
] |
2CWE-601
| # Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from security_monkey import app, db
from flask_wtf.csrf import generate_csrf
from security_monkey.auth.models import RBACRole
from security_monkey.decorators import crossdomain
from flask_restful import fields, marshal, Resource, reqparse
from flask_login import current_user
ORIGINS = [
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('WEB_PORT')),
# Adding this next one so you can also access the dart UI by prepending /static to the path.
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('API_PORT')),
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('NGINX_PORT')),
'https://{}:80'.format(app.config.get('FQDN'))
]
##### Marshal Datastructures #####
# Used by RevisionGet, RevisionList, ItemList
REVISION_FIELDS = {
'id': fields.Integer,
'date_created': fields.String,
'date_last_ephemeral_change': fields.String,
'active': fields.Boolean,
'item_id': fields.Integer
}
# Used by RevisionList, ItemGet, ItemList
ITEM_FIELDS = {
'id': fields.Integer,
'region': fields.String,
'name': fields.String
}
# Used by ItemList, Justify
AUDIT_FIELDS = {
'id': fields.Integer,
'score': fields.Integer,
'issue': fields.String,
'notes': fields.String,
'justified': fields.Boolean,
'justification': fields.String,
'justified_date': fields.String,
'item_id': fields.Integer
}
## Single Use Marshal Objects ##
# SINGLE USE - RevisionGet
REVISION_COMMENT_FIELDS = {
'id': fields.Integer,
'revision_id': fields.Integer,
'date_created': fields.String,
'text': fields.String
}
# SINGLE USE - ItemGet
ITEM_COMMENT_FIELDS = {
'id': fields.Integer,
'date_created': fields.String,
'text': fields.String,
'item_id': fields.Integer
}
# SINGLE USE - UserSettings
USER_SETTINGS_FIELDS = {
# 'id': fields.Integer,
'daily_audit_email': fields.Boolean,
'change_reports': fields.String
}
# SINGLE USE - AccountGet
ACCOUNT_FIELDS = {
'id': fields.Integer,
'name': fields.String,
'identifier': fields.String,
'notes': fields.String,
'active': fields.Boolean,
'third_party': fields.Boolean,
'account_type': fields.String
}
USER_FIELDS = {
'id': fields.Integer,
'active': fields.Boolean,
'email': fields.String,
'role': fields.String,
'confirmed_at': fields.String,
'daily_audit_email': fields.Boolean,
'change_reports': fields.String,
'last_login_at': fields.String,
'current_login_at': fields.String,
'login_count': fields.Integer,
'last_login_ip': fields.String,
'current_login_ip': fields.String
}
ROLE_FIELDS = {
'id': fields.Integer,
'name': fields.String,
'description': fields.String,
}
WHITELIST_FIELDS = {
'id': fields.Integer,
'name': fields.String,
'notes': fields.String,
'cidr': fields.String
}
IGNORELIST_FIELDS = {
'id': fields.Integer,
'prefix': fields.String,
'notes': fields.String,
}
AUDITORSETTING_FIELDS = {
'id': fields.Integer,
'disabled': fields.Boolean,
'issue_text': fields.String
}
ITEM_LINK_FIELDS = {
'id': fields.Integer,
'name': fields.String
}
class AuthenticatedService(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(AuthenticatedService, self).__init__()
self.auth_dict = dict()
if current_user.is_authenticated():
roles_marshal = []
for role in current_user.roles:
roles_marshal.append(marshal(role.__dict__, ROLE_FIELDS))
roles_marshal.append({"name": current_user.role})
for role in RBACRole.roles[current_user.role].get_parents():
roles_marshal.append({"name": role.name})
self.auth_dict = {
"authenticated": True,
"user": current_user.email,
"roles": roles_marshal
}
else:
if app.config.get('FRONTED_BY_NGINX'):
url = "https://{}:{}{}".format(app.config.get('FQDN'), app.config.get('NGINX_PORT'), '/login')
else:
url = "http://{}:{}{}".format(app.config.get('FQDN'), app.config.get('API_PORT'), '/login')
self.auth_dict = {
"authenticated": False,
"user": None,
"url": url
}
@app.after_request
@crossdomain(allowed_origins=ORIGINS)
def after(response):
response.set_cookie('XSRF-COOKIE', generate_csrf())
return response
| # Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from security_monkey import app, db
from flask_wtf.csrf import generate_csrf
from security_monkey.auth.models import RBACRole
from security_monkey.decorators import crossdomain
from flask_restful import fields, marshal, Resource, reqparse
from flask_login import current_user
ORIGINS = [
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('WEB_PORT')),
# Adding this next one so you can also access the dart UI by prepending /static to the path.
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('API_PORT')),
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('NGINX_PORT')),
'https://{}:80'.format(app.config.get('FQDN'))
]
##### Marshal Datastructures #####
# Used by RevisionGet, RevisionList, ItemList
REVISION_FIELDS = {
'id': fields.Integer,
'date_created': fields.String,
'date_last_ephemeral_change': fields.String,
'active': fields.Boolean,
'item_id': fields.Integer
}
# Used by RevisionList, ItemGet, ItemList
ITEM_FIELDS = {
'id': fields.Integer,
'region': fields.String,
'name': fields.String
}
# Used by ItemList, Justify
AUDIT_FIELDS = {
'id': fields.Integer,
'score': fields.Integer,
'issue': fields.String,
'notes': fields.String,
'justified': fields.Boolean,
'justification': fields.String,
'justified_date': fields.String,
'item_id': fields.Integer
}
## Single Use Marshal Objects ##
# SINGLE USE - RevisionGet
REVISION_COMMENT_FIELDS = {
'id': fields.Integer,
'revision_id': fields.Integer,
'date_created': fields.String,
'text': fields.String
}
# SINGLE USE - ItemGet
ITEM_COMMENT_FIELDS = {
'id': fields.Integer,
'date_created': fields.String,
'text': fields.String,
'item_id': fields.Integer
}
# SINGLE USE - UserSettings
USER_SETTINGS_FIELDS = {
# 'id': fields.Integer,
'daily_audit_email': fields.Boolean,
'change_reports': fields.String
}
# SINGLE USE - AccountGet
ACCOUNT_FIELDS = {
'id': fields.Integer,
'name': fields.String,
'identifier': fields.String,
'notes': fields.String,
'active': fields.Boolean,
'third_party': fields.Boolean,
'account_type': fields.String
}
USER_FIELDS = {
'id': fields.Integer,
'active': fields.Boolean,
'email': fields.String,
'role': fields.String,
'confirmed_at': fields.String,
'daily_audit_email': fields.Boolean,
'change_reports': fields.String,
'last_login_at': fields.String,
'current_login_at': fields.String,
'login_count': fields.Integer,
'last_login_ip': fields.String,
'current_login_ip': fields.String
}
ROLE_FIELDS = {
'id': fields.Integer,
'name': fields.String,
'description': fields.String,
}
WHITELIST_FIELDS = {
'id': fields.Integer,
'name': fields.String,
'notes': fields.String,
'cidr': fields.String
}
IGNORELIST_FIELDS = {
'id': fields.Integer,
'prefix': fields.String,
'notes': fields.String,
}
AUDITORSETTING_FIELDS = {
'id': fields.Integer,
'disabled': fields.Boolean,
'issue_text': fields.String
}
ITEM_LINK_FIELDS = {
'id': fields.Integer,
'name': fields.String
}
class AuthenticatedService(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(AuthenticatedService, self).__init__()
self.auth_dict = dict()
if current_user.is_authenticated:
roles_marshal = []
for role in current_user.roles:
roles_marshal.append(marshal(role.__dict__, ROLE_FIELDS))
roles_marshal.append({"name": current_user.role})
for role in RBACRole.roles[current_user.role].get_parents():
roles_marshal.append({"name": role.name})
self.auth_dict = {
"authenticated": True,
"user": current_user.email,
"roles": roles_marshal
}
else:
if app.config.get('FRONTED_BY_NGINX'):
url = "https://{}:{}{}".format(app.config.get('FQDN'), app.config.get('NGINX_PORT'), '/login')
else:
url = "http://{}:{}{}".format(app.config.get('FQDN'), app.config.get('API_PORT'), '/login')
self.auth_dict = {
"authenticated": False,
"user": None,
"url": url
}
@app.after_request
@crossdomain(allowed_origins=ORIGINS)
def after(response):
response.set_cookie('XSRF-COOKIE', generate_csrf())
return response
| open_redirect | {
"code": [
" if current_user.is_authenticated():"
],
"line_no": [
147
]
} | {
"code": [
" if current_user.is_authenticated:"
],
"line_no": [
147
]
} |
from security_monkey import app, db
from flask_wtf.csrf import generate_csrf
from security_monkey.auth.models import RBACRole
from security_monkey.decorators import crossdomain
from flask_restful import fields, marshal, Resource, reqparse
from flask_login import current_user
VAR_0 = [
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('WEB_PORT')),
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('API_PORT')),
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('NGINX_PORT')),
'https://{}:80'.format(app.config.get('FQDN'))
]
VAR_1 = {
'id': fields.Integer,
'date_created': fields.String,
'date_last_ephemeral_change': fields.String,
'active': fields.Boolean,
'item_id': fields.Integer
}
VAR_2 = {
'id': fields.Integer,
'region': fields.String,
'name': fields.String
}
VAR_3 = {
'id': fields.Integer,
'score': fields.Integer,
'issue': fields.String,
'notes': fields.String,
'justified': fields.Boolean,
'justification': fields.String,
'justified_date': fields.String,
'item_id': fields.Integer
}
VAR_4 = {
'id': fields.Integer,
'revision_id': fields.Integer,
'date_created': fields.String,
'text': fields.String
}
VAR_5 = {
'id': fields.Integer,
'date_created': fields.String,
'text': fields.String,
'item_id': fields.Integer
}
VAR_6 = {
'daily_audit_email': fields.Boolean,
'change_reports': fields.String
}
VAR_7 = {
'id': fields.Integer,
'name': fields.String,
'identifier': fields.String,
'notes': fields.String,
'active': fields.Boolean,
'third_party': fields.Boolean,
'account_type': fields.String
}
VAR_8 = {
'id': fields.Integer,
'active': fields.Boolean,
'email': fields.String,
'role': fields.String,
'confirmed_at': fields.String,
'daily_audit_email': fields.Boolean,
'change_reports': fields.String,
'last_login_at': fields.String,
'current_login_at': fields.String,
'login_count': fields.Integer,
'last_login_ip': fields.String,
'current_login_ip': fields.String
}
VAR_9 = {
'id': fields.Integer,
'name': fields.String,
'description': fields.String,
}
VAR_10 = {
'id': fields.Integer,
'name': fields.String,
'notes': fields.String,
'cidr': fields.String
}
VAR_11 = {
'id': fields.Integer,
'prefix': fields.String,
'notes': fields.String,
}
VAR_12 = {
'id': fields.Integer,
'disabled': fields.Boolean,
'issue_text': fields.String
}
VAR_13 = {
'id': fields.Integer,
'name': fields.String
}
class CLASS_0(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(CLASS_0, self).__init__()
self.auth_dict = dict()
if current_user.is_authenticated():
VAR_15 = []
for role in current_user.roles:
VAR_15.append(marshal(role.__dict__, VAR_9))
VAR_15.append({"name": current_user.role})
for role in RBACRole.roles[current_user.role].get_parents():
VAR_15.append({"name": role.name})
self.auth_dict = {
"authenticated": True,
"user": current_user.email,
"roles": VAR_15
}
else:
if app.config.get('FRONTED_BY_NGINX'):
VAR_16 = "https://{}:{}{}".format(app.config.get('FQDN'), app.config.get('NGINX_PORT'), '/login')
else:
VAR_16 = "http://{}:{}{}".format(app.config.get('FQDN'), app.config.get('API_PORT'), '/login')
self.auth_dict = {
"authenticated": False,
"user": None,
"url": VAR_16
}
@app.after_request
@crossdomain(allowed_origins=VAR_0)
def FUNC_0(VAR_14):
response.set_cookie('XSRF-COOKIE', generate_csrf())
return VAR_14
|
from security_monkey import app, db
from flask_wtf.csrf import generate_csrf
from security_monkey.auth.models import RBACRole
from security_monkey.decorators import crossdomain
from flask_restful import fields, marshal, Resource, reqparse
from flask_login import current_user
VAR_0 = [
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('WEB_PORT')),
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('API_PORT')),
'https://{}:{}'.format(app.config.get('FQDN'), app.config.get('NGINX_PORT')),
'https://{}:80'.format(app.config.get('FQDN'))
]
VAR_1 = {
'id': fields.Integer,
'date_created': fields.String,
'date_last_ephemeral_change': fields.String,
'active': fields.Boolean,
'item_id': fields.Integer
}
VAR_2 = {
'id': fields.Integer,
'region': fields.String,
'name': fields.String
}
VAR_3 = {
'id': fields.Integer,
'score': fields.Integer,
'issue': fields.String,
'notes': fields.String,
'justified': fields.Boolean,
'justification': fields.String,
'justified_date': fields.String,
'item_id': fields.Integer
}
VAR_4 = {
'id': fields.Integer,
'revision_id': fields.Integer,
'date_created': fields.String,
'text': fields.String
}
VAR_5 = {
'id': fields.Integer,
'date_created': fields.String,
'text': fields.String,
'item_id': fields.Integer
}
VAR_6 = {
'daily_audit_email': fields.Boolean,
'change_reports': fields.String
}
VAR_7 = {
'id': fields.Integer,
'name': fields.String,
'identifier': fields.String,
'notes': fields.String,
'active': fields.Boolean,
'third_party': fields.Boolean,
'account_type': fields.String
}
VAR_8 = {
'id': fields.Integer,
'active': fields.Boolean,
'email': fields.String,
'role': fields.String,
'confirmed_at': fields.String,
'daily_audit_email': fields.Boolean,
'change_reports': fields.String,
'last_login_at': fields.String,
'current_login_at': fields.String,
'login_count': fields.Integer,
'last_login_ip': fields.String,
'current_login_ip': fields.String
}
VAR_9 = {
'id': fields.Integer,
'name': fields.String,
'description': fields.String,
}
VAR_10 = {
'id': fields.Integer,
'name': fields.String,
'notes': fields.String,
'cidr': fields.String
}
VAR_11 = {
'id': fields.Integer,
'prefix': fields.String,
'notes': fields.String,
}
VAR_12 = {
'id': fields.Integer,
'disabled': fields.Boolean,
'issue_text': fields.String
}
VAR_13 = {
'id': fields.Integer,
'name': fields.String
}
class CLASS_0(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(CLASS_0, self).__init__()
self.auth_dict = dict()
if current_user.is_authenticated:
VAR_15 = []
for role in current_user.roles:
VAR_15.append(marshal(role.__dict__, VAR_9))
VAR_15.append({"name": current_user.role})
for role in RBACRole.roles[current_user.role].get_parents():
VAR_15.append({"name": role.name})
self.auth_dict = {
"authenticated": True,
"user": current_user.email,
"roles": VAR_15
}
else:
if app.config.get('FRONTED_BY_NGINX'):
VAR_16 = "https://{}:{}{}".format(app.config.get('FQDN'), app.config.get('NGINX_PORT'), '/login')
else:
VAR_16 = "http://{}:{}{}".format(app.config.get('FQDN'), app.config.get('API_PORT'), '/login')
self.auth_dict = {
"authenticated": False,
"user": None,
"url": VAR_16
}
@app.after_request
@crossdomain(allowed_origins=VAR_0)
def FUNC_0(VAR_14):
response.set_cookie('XSRF-COOKIE', generate_csrf())
return VAR_14
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
19,
22,
25,
30,
31,
32,
33,
41,
42,
48,
49,
60,
61,
62,
63,
70,
71,
78,
79,
81,
85,
86,
96,
111,
117,
124,
130,
136,
141,
151,
153,
156,
172,
173,
179
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
19,
22,
25,
30,
31,
32,
33,
41,
42,
48,
49,
60,
61,
62,
63,
70,
71,
78,
79,
81,
85,
86,
96,
111,
117,
124,
130,
136,
141,
151,
153,
156,
172,
173,
179
] |
1CWE-79
| """
django-helpdesk - A Django powered ticket tracker for small enterprise.
(c) Copyright 2008 Jutda. All Rights Reserved. See LICENSE for details.
models.py - Model (and hence database) definitions. This is the core of the
helpdesk structure.
"""
from django.contrib.auth.models import Permission
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import models
from django.conf import settings
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _, ugettext
from io import StringIO
import re
import os
import mimetypes
import datetime
from django.utils.safestring import mark_safe
from markdown import markdown
from markdown.extensions import Extension
import uuid
from helpdesk import settings as helpdesk_settings
from .validators import validate_file_extension
from .templated_email import send_templated_mail
def format_time_spent(time_spent):
    """Format a ``datetime.timedelta`` as ``"HHh:MMm"``.

    Returns an empty string for ``None`` or zero-length durations.

    Uses ``total_seconds()`` rather than ``timedelta.seconds``: the
    latter wraps at 24 hours, so durations of a day or more were
    silently truncated by the previous implementation.
    """
    if time_spent:
        total_seconds = int(time_spent.total_seconds())
        time_spent = "{0:02d}h:{1:02d}m".format(
            total_seconds // 3600,
            total_seconds % 3600 // 60
        )
    else:
        time_spent = ""
    return time_spent
class EscapeHtml(Extension):
    """Markdown extension that disables raw HTML passthrough.

    Deleting the ``html_block`` preprocessor and the ``html`` inline
    pattern forces literal HTML in user input to be rendered as plain
    text, mitigating stored XSS in rendered markdown.
    """

    def extendMarkdown(self, md, md_globals):
        del md.preprocessors['html_block']
        del md.inlinePatterns['html']
def get_markdown(text):
    """Render *text* as sanitized markdown HTML (marked safe for templates).

    Markdown link destinations whose URL scheme is not listed in
    ``helpdesk_settings.ALLOWED_URL_SCHEMES`` have the scheme stripped
    (e.g. ``[x](javascript:alert(1))`` becomes ``[x](alert(1))``), to
    block script-injection via link URLs. Raw HTML is neutralized by the
    ``EscapeHtml`` extension.
    """
    if not text:
        return ""
    # Matches "...(<scheme>:<rest>)" — group 2 is the candidate URL scheme.
    pattern = fr'([\[\s\S\]]*?)\(([\s\S]*?):([\[\s\S\]]*?)\)'
    # Regex check
    # NOTE(review): re.match here is case-sensitive while the search/sub
    # below use re.IGNORECASE — confirm the inconsistency is intentional.
    if re.match(pattern, text):
        # get get value of group regex
        scheme = re.search(pattern, text, re.IGNORECASE).group(2)
        # scheme check
        if scheme in helpdesk_settings.ALLOWED_URL_SCHEMES:
            # Scheme allowed: keep the link intact.
            replacement = '\\1(\\2:\\3)'
        else:
            # Scheme not allowed: drop it, keeping only the remainder.
            replacement = '\\1(\\3)'
        text = re.sub(pattern, replacement, text, flags=re.IGNORECASE)
    return mark_safe(
        markdown(
            text,
            extensions=[
                EscapeHtml(), 'markdown.extensions.nl2br',
                'markdown.extensions.fenced_code'
            ]
        )
    )
class Queue(models.Model):
    """
    A queue is a collection of tickets into what would generally be business
    areas or departments.
    For example, a company may have a queue for each Product they provide, or
    a queue for each of Accounts, Pre-Sales, and Support.
    """

    title = models.CharField(
        _('Title'),
        max_length=100,
    )
    slug = models.SlugField(
        _('Slug'),
        max_length=50,
        unique=True,
        help_text=_('This slug is used when building ticket ID\'s. Once set, '
                    'try not to change it or e-mailing may get messy.'),
    )
    email_address = models.EmailField(
        _('E-Mail Address'),
        blank=True,
        null=True,
        help_text=_('All outgoing e-mails for this queue will use this e-mail '
                    'address. If you use IMAP or POP3, this should be the e-mail '
                    'address for that mailbox.'),
    )
    locale = models.CharField(
        _('Locale'),
        max_length=10,
        blank=True,
        null=True,
        help_text=_('Locale of this queue. All correspondence in this '
                    'queue will be in this language.'),
    )
    allow_public_submission = models.BooleanField(
        _('Allow Public Submission?'),
        blank=True,
        default=False,
        help_text=_('Should this queue be listed on the public submission form?'),
    )
    allow_email_submission = models.BooleanField(
        _('Allow E-Mail Submission?'),
        blank=True,
        default=False,
        help_text=_('Do you want to poll the e-mail box below for new '
                    'tickets?'),
    )
    escalate_days = models.IntegerField(
        _('Escalation Days'),
        blank=True,
        null=True,
        help_text=_('For tickets which are not held, how often do you wish to '
                    'increase their priority? Set to 0 for no escalation.'),
    )
    new_ticket_cc = models.CharField(
        _('New Ticket CC Address'),
        blank=True,
        null=True,
        max_length=200,
        help_text=_('If an e-mail address is entered here, then it will '
                    'receive notification of all new tickets created for this queue. '
                    'Enter a comma between multiple e-mail addresses.'),
    )
    updated_ticket_cc = models.CharField(
        _('Updated Ticket CC Address'),
        blank=True,
        null=True,
        max_length=200,
        help_text=_('If an e-mail address is entered here, then it will '
                    'receive notification of all activity (new tickets, closed '
                    'tickets, updates, reassignments, etc) for this queue. Separate '
                    'multiple addresses with a comma.'),
    )
    enable_notifications_on_email_events = models.BooleanField(
        _('Notify contacts when email updates arrive'),
        blank=True,
        default=False,
        help_text=_('When an email arrives to either create a ticket or to '
                    'interact with an existing discussion. Should email notifications be sent ? '
                    'Note: the new_ticket_cc and updated_ticket_cc work independently of this feature'),
    )
    # --- Mailbox polling configuration (POP3 / IMAP / local directory) ---
    email_box_type = models.CharField(
        _('E-Mail Box Type'),
        max_length=5,
        choices=(('pop3', _('POP 3')), ('imap', _('IMAP')), ('local', _('Local Directory'))),
        blank=True,
        null=True,
        help_text=_('E-Mail server type for creating tickets automatically '
                    'from a mailbox - both POP3 and IMAP are supported, as well as '
                    'reading from a local directory.'),
    )
    email_box_host = models.CharField(
        _('E-Mail Hostname'),
        max_length=200,
        blank=True,
        null=True,
        help_text=_('Your e-mail server address - either the domain name or '
                    'IP address. May be "localhost".'),
    )
    email_box_port = models.IntegerField(
        _('E-Mail Port'),
        blank=True,
        null=True,
        help_text=_('Port number to use for accessing e-mail. Default for '
                    'POP3 is "110", and for IMAP is "143". This may differ on some '
                    'servers. Leave it blank to use the defaults.'),
    )
    email_box_ssl = models.BooleanField(
        _('Use SSL for E-Mail?'),
        blank=True,
        default=False,
        help_text=_('Whether to use SSL for IMAP or POP3 - the default ports '
                    'when using SSL are 993 for IMAP and 995 for POP3.'),
    )
    email_box_user = models.CharField(
        _('E-Mail Username'),
        max_length=200,
        blank=True,
        null=True,
        help_text=_('Username for accessing this mailbox.'),
    )
    email_box_pass = models.CharField(
        _('E-Mail Password'),
        max_length=200,
        blank=True,
        null=True,
        help_text=_('Password for the above username'),
    )
    email_box_imap_folder = models.CharField(
        _('IMAP Folder'),
        max_length=100,
        blank=True,
        null=True,
        help_text=_('If using IMAP, what folder do you wish to fetch messages '
                    'from? This allows you to use one IMAP account for multiple '
                    'queues, by filtering messages on your IMAP server into separate '
                    'folders. Default: INBOX.'),
    )
    email_box_local_dir = models.CharField(
        _('E-Mail Local Directory'),
        max_length=200,
        blank=True,
        null=True,
        help_text=_('If using a local directory, what directory path do you '
                    'wish to poll for new email? '
                    'Example: /var/lib/mail/helpdesk/'),
    )
    permission_name = models.CharField(
        _('Django auth permission name'),
        max_length=72,  # based on prepare_permission_name() pre-pending chars to slug
        blank=True,
        null=True,
        editable=False,
        help_text=_('Name used in the django.contrib.auth permission system'),
    )
    email_box_interval = models.IntegerField(
        _('E-Mail Check Interval'),
        help_text=_('How often do you wish to check this mailbox? (in Minutes)'),
        blank=True,
        null=True,
        # NOTE(review): string default for an IntegerField — confirm '5' vs 5.
        default='5',
    )
    email_box_last_check = models.DateTimeField(
        blank=True,
        null=True,
        editable=False,
        # This is updated by management/commands/get_mail.py.
    )
    socks_proxy_type = models.CharField(
        _('Socks Proxy Type'),
        max_length=8,
        choices=(('socks4', _('SOCKS4')), ('socks5', _('SOCKS5'))),
        blank=True,
        null=True,
        help_text=_('SOCKS4 or SOCKS5 allows you to proxy your connections through a SOCKS server.'),
    )
    socks_proxy_host = models.GenericIPAddressField(
        _('Socks Proxy Host'),
        blank=True,
        null=True,
        help_text=_('Socks proxy IP address. Default: 127.0.0.1'),
    )
    socks_proxy_port = models.IntegerField(
        _('Socks Proxy Port'),
        blank=True,
        null=True,
        help_text=_('Socks proxy port number. Default: 9150 (default TOR port)'),
    )
    logging_type = models.CharField(
        _('Logging Type'),
        max_length=5,
        choices=(
            ('none', _('None')),
            ('debug', _('Debug')),
            ('info', _('Information')),
            ('warn', _('Warning')),
            ('error', _('Error')),
            ('crit', _('Critical'))
        ),
        blank=True,
        null=True,
        help_text=_('Set the default logging level. All messages at that '
                    'level or above will be logged to the directory set '
                    'below. If no level is set, logging will be disabled.'),
    )
    logging_dir = models.CharField(
        _('Logging Directory'),
        max_length=200,
        blank=True,
        null=True,
        help_text=_('If logging is enabled, what directory should we use to '
                    'store log files for this queue? '
                    'The standard logging mechanims are used if no directory is set'),
    )
    default_owner = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        related_name='default_owner',
        blank=True,
        null=True,
        verbose_name=_('Default owner'),
    )
    dedicated_time = models.DurationField(
        help_text=_("Time to be spent on this Queue in total"),
        blank=True, null=True
    )

    def __str__(self):
        return "%s" % self.title

    class Meta:
        ordering = ('title',)
        verbose_name = _('Queue')
        verbose_name_plural = _('Queues')

    def _from_address(self):
        """
        Short property to provide a sender address in SMTP format,
        eg 'Name <email>'. We do this so we can put a simple error message
        in the sender name field, so hopefully the admin can see and fix it.
        """
        if not self.email_address:
            # must check if given in format "Foo <foo@example.com>"
            default_email = re.match(".*<(?P<email>.*@*.)>", settings.DEFAULT_FROM_EMAIL)
            if default_email is not None:
                # already in the right format, so just include it here
                return u'NO QUEUE EMAIL ADDRESS DEFINED %s' % settings.DEFAULT_FROM_EMAIL
            else:
                return u'NO QUEUE EMAIL ADDRESS DEFINED <%s>' % settings.DEFAULT_FROM_EMAIL
        else:
            return u'%s <%s>' % (self.title, self.email_address)
    from_address = property(_from_address)

    @property
    def time_spent(self):
        """Return back total time spent on the ticket. This is calculated value
        based on total sum from all FollowUps
        """
        # NOTE(review): despite the docstring, this sums time_spent over every
        # ticket in this queue (self.ticket_set), not a single ticket.
        total = datetime.timedelta(0)
        for val in self.ticket_set.all():
            if val.time_spent:
                total = total + val.time_spent
        return total

    @property
    def time_spent_formated(self):
        # "HHh:MMm" string form of time_spent; see format_time_spent().
        return format_time_spent(self.time_spent)

    def prepare_permission_name(self):
        """Prepare internally the codename for the permission and store it in permission_name.
        :return: The codename that can be used to create a new Permission object.
        """
        # Prepare the permission associated to this Queue
        basename = "queue_access_%s" % self.slug
        self.permission_name = "helpdesk.%s" % basename
        return basename

    def save(self, *args, **kwargs):
        # Fill in sensible defaults for mailbox/proxy settings, then — for
        # brand-new queues only — create the per-queue auth Permission.
        if self.email_box_type == 'imap' and not self.email_box_imap_folder:
            self.email_box_imap_folder = 'INBOX'
        if self.socks_proxy_type:
            if not self.socks_proxy_host:
                self.socks_proxy_host = '127.0.0.1'
            if not self.socks_proxy_port:
                self.socks_proxy_port = 9150
        else:
            self.socks_proxy_host = None
            self.socks_proxy_port = None
        if not self.email_box_port:
            # Standard ports by protocol and SSL setting.
            if self.email_box_type == 'imap' and self.email_box_ssl:
                self.email_box_port = 993
            elif self.email_box_type == 'imap' and not self.email_box_ssl:
                self.email_box_port = 143
            elif self.email_box_type == 'pop3' and self.email_box_ssl:
                self.email_box_port = 995
            elif self.email_box_type == 'pop3' and not self.email_box_ssl:
                self.email_box_port = 110
        if not self.id:
            # Prepare the permission codename and the permission
            # (even if they are not needed with the current configuration)
            basename = self.prepare_permission_name()
            Permission.objects.create(
                name=_("Permission for queue: ") + self.title,
                content_type=ContentType.objects.get_for_model(self.__class__),
                codename=basename,
            )
        super(Queue, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        permission_name = self.permission_name
        super(Queue, self).delete(*args, **kwargs)
        # once the Queue is safely deleted, remove the permission (if exists)
        if permission_name:
            try:
                # [9:] strips the "helpdesk." prefix added by
                # prepare_permission_name() to recover the raw codename.
                p = Permission.objects.get(codename=permission_name[9:])
                p.delete()
            except ObjectDoesNotExist:
                pass
def mk_secret():
    """Generate a fresh random UUID4 string, used as a per-ticket secret key."""
    return "{}".format(uuid.uuid4())
class Ticket(models.Model):
    """
    To allow a ticket to be entered as quickly as possible, only the
    bare minimum fields are required. These basically allow us to
    sort and manage the ticket. The user can always go back and
    enter more information later.
    A good example of this is when a customer is on the phone, and
    you want to give them a ticket ID as quickly as possible. You can
    enter some basic info, save the ticket, give the customer the ID
    and get off the phone, then add in further detail at a later time
    (once the customer is not on the line).
    Note that assigned_to is optional - unassigned tickets are displayed on
    the dashboard to prompt users to take ownership of them.
    """

    # Lifecycle states for a ticket.
    OPEN_STATUS = 1
    REOPENED_STATUS = 2
    RESOLVED_STATUS = 3
    CLOSED_STATUS = 4
    DUPLICATE_STATUS = 5

    STATUS_CHOICES = (
        (OPEN_STATUS, _('Open')),
        (REOPENED_STATUS, _('Reopened')),
        (RESOLVED_STATUS, _('Resolved')),
        (CLOSED_STATUS, _('Closed')),
        (DUPLICATE_STATUS, _('Duplicate')),
    )

    # 1 is the most urgent, 5 the least.
    PRIORITY_CHOICES = (
        (1, _('1. Critical')),
        (2, _('2. High')),
        (3, _('3. Normal')),
        (4, _('4. Low')),
        (5, _('5. Very Low')),
    )

    title = models.CharField(
        _('Title'),
        max_length=200,
    )
    queue = models.ForeignKey(
        Queue,
        on_delete=models.CASCADE,
        verbose_name=_('Queue'),
    )
    created = models.DateTimeField(
        _('Created'),
        blank=True,
        help_text=_('Date this ticket was first created'),
    )
    modified = models.DateTimeField(
        _('Modified'),
        blank=True,
        help_text=_('Date this ticket was most recently changed.'),
    )
    submitter_email = models.EmailField(
        _('Submitter E-Mail'),
        blank=True,
        null=True,
        help_text=_('The submitter will receive an email for all public '
                    'follow-ups left for this task.'),
    )
    assigned_to = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name='assigned_to',
        blank=True,
        null=True,
        verbose_name=_('Assigned to'),
    )
    status = models.IntegerField(
        _('Status'),
        choices=STATUS_CHOICES,
        default=OPEN_STATUS,
    )
    on_hold = models.BooleanField(
        _('On Hold'),
        blank=True,
        default=False,
        help_text=_('If a ticket is on hold, it will not automatically be escalated.'),
    )
    description = models.TextField(
        _('Description'),
        blank=True,
        null=True,
        help_text=_('The content of the customers query.'),
    )
    resolution = models.TextField(
        _('Resolution'),
        blank=True,
        null=True,
        help_text=_('The resolution provided to the customer by our staff.'),
    )
    priority = models.IntegerField(
        _('Priority'),
        choices=PRIORITY_CHOICES,
        default=3,
        # NOTE(review): Django expects a boolean for ``blank`` — confirm
        # whether ``blank=3`` was meant to be ``blank=True``.
        blank=3,
        help_text=_('1 = Highest Priority, 5 = Low Priority'),
    )
    due_date = models.DateTimeField(
        _('Due on'),
        blank=True,
        null=True,
    )
    last_escalation = models.DateTimeField(
        blank=True,
        null=True,
        editable=False,
        help_text=_('The date this ticket was last escalated - updated '
                    'automatically by management/commands/escalate_tickets.py.'),
    )
    # Random UUID4 string used to authorize non-logged-in access to the
    # public ticket view; see _get_ticket_url below.
    secret_key = models.CharField(
        _("Secret key needed for viewing/editing ticket by non-logged in users"),
        max_length=36,
        default=mk_secret,
    )
    kbitem = models.ForeignKey(
        "KBItem",
        blank=True,
        null=True,
        on_delete=models.CASCADE,
        verbose_name=_('Knowledge base item the user was viewing when they created this ticket.'),
    )
    merged_to = models.ForeignKey(
        'self',
        verbose_name=_('merged to'),
        related_name='merged_tickets',
        on_delete=models.CASCADE,
        null=True,
        blank=True
    )

    @property
    def time_spent(self):
        """Return back total time spent on the ticket. This is calculated value
        based on total sum from all FollowUps
        """
        total = datetime.timedelta(0)
        for val in self.followup_set.all():
            if val.time_spent:
                total = total + val.time_spent
        return total

    @property
    def time_spent_formated(self):
        # "HHh:MMm" string form of time_spent; see format_time_spent().
        return format_time_spent(self.time_spent)

    def send(self, roles, dont_send_to=None, **kwargs):
        """
        Send notifications to everyone interested in this ticket.
        The the roles argument is a dictionary mapping from roles to (template, context) pairs.
        If a role is not present in the dictionary, users of that type will not receive the notification.
        The following roles exist:
        - 'submitter'
        - 'new_ticket_cc'
        - 'ticket_cc'
        - 'assigned_to'
        Here is an example roles dictionary:
        {
        'submitter': (template_name, context),
        'assigned_to': (template_name2, context),
        }
        **kwargs are passed to send_templated_mail defined in templated_email.py
        returns the set of email addresses the notification was delivered to.
        """
        recipients = set()
        if dont_send_to is not None:
            recipients.update(dont_send_to)
        # The queue's own address never receives notifications (would loop).
        recipients.add(self.queue.email_address)

        def should_receive(email):
            return email and email not in recipients

        def send(role, recipient):
            # Deliver at most once per address, and only for requested roles.
            if recipient and recipient not in recipients and role in roles:
                template, context = roles[role]
                send_templated_mail(template, context, recipient, sender=self.queue.from_address, **kwargs)
                recipients.add(recipient)

        send('submitter', self.submitter_email)
        send('ticket_cc', self.queue.updated_ticket_cc)
        send('new_ticket_cc', self.queue.new_ticket_cc)
        if self.assigned_to:
            send('assigned_to', self.assigned_to.email)
        if self.queue.enable_notifications_on_email_events:
            for cc in self.ticketcc_set.all():
                send('ticket_cc', cc.email_address)
        return recipients

    def _get_assigned_to(self):
        """ Custom property to allow us to easily print 'Unassigned' if a
        ticket has no owner, or the users name if it's assigned. If the user
        has a full name configured, we use that, otherwise their username. """
        if not self.assigned_to:
            return _('Unassigned')
        else:
            if self.assigned_to.get_full_name():
                return self.assigned_to.get_full_name()
            else:
                return self.assigned_to.get_username()
    get_assigned_to = property(_get_assigned_to)

    def _get_ticket(self):
        """ A user-friendly ticket ID, which is a combination of ticket ID
        and queue slug. This is generally used in e-mail subjects. """
        return u"[%s]" % self.ticket_for_url
    ticket = property(_get_ticket)

    def _get_ticket_for_url(self):
        """ A URL-friendly ticket ID, used in links. """
        return u"%s-%s" % (self.queue.slug, self.id)
    ticket_for_url = property(_get_ticket_for_url)

    def _get_priority_css_class(self):
        """
        Return the boostrap class corresponding to the priority.
        """
        if self.priority == 2:
            return "warning"
        elif self.priority == 1:
            return "danger"
        elif self.priority == 5:
            return "success"
        else:
            return ""
    get_priority_css_class = property(_get_priority_css_class)

    def _get_status(self):
        """
        Displays the ticket status, with an "On Hold" message if needed.
        """
        held_msg = ''
        if self.on_hold:
            held_msg = _(' - On Hold')
        dep_msg = ''
        if not self.can_be_resolved:
            dep_msg = _(' - Open dependencies')
        return u'%s%s%s' % (self.get_status_display(), held_msg, dep_msg)
    get_status = property(_get_status)

    def _get_ticket_url(self):
        """
        Returns a publicly-viewable URL for this ticket, used when giving
        a URL to the submitter of a ticket.
        """
        from django.contrib.sites.models import Site
        from django.core.exceptions import ImproperlyConfigured
        from django.urls import reverse
        try:
            site = Site.objects.get_current()
        except ImproperlyConfigured:
            site = Site(domain='configure-django-sites.com')
        if helpdesk_settings.HELPDESK_USE_HTTPS_IN_EMAIL_LINK:
            protocol = 'https'
        else:
            protocol = 'http'
        # secret_key authorizes the anonymous submitter to view the ticket.
        return u"%s://%s%s?ticket=%s&email=%s&key=%s" % (
            protocol,
            site.domain,
            reverse('helpdesk:public_view'),
            self.ticket_for_url,
            self.submitter_email,
            self.secret_key
        )
    ticket_url = property(_get_ticket_url)

    def _get_staff_url(self):
        """
        Returns a staff-only URL for this ticket, used when giving a URL to
        a staff member (in emails etc)
        """
        from django.contrib.sites.models import Site
        from django.core.exceptions import ImproperlyConfigured
        from django.urls import reverse
        try:
            site = Site.objects.get_current()
        except ImproperlyConfigured:
            site = Site(domain='configure-django-sites.com')
        if helpdesk_settings.HELPDESK_USE_HTTPS_IN_EMAIL_LINK:
            protocol = 'https'
        else:
            protocol = 'http'
        return u"%s://%s%s" % (
            protocol,
            site.domain,
            reverse('helpdesk:view',
                    args=[self.id])
        )
    staff_url = property(_get_staff_url)

    def _can_be_resolved(self):
        """
        Returns a boolean.
        True = any dependencies are resolved
        False = There are non-resolved dependencies
        """
        OPEN_STATUSES = (Ticket.OPEN_STATUS, Ticket.REOPENED_STATUS)
        return TicketDependency.objects.filter(ticket=self).filter(
            depends_on__status__in=OPEN_STATUSES).count() == 0
    can_be_resolved = property(_can_be_resolved)

    def get_submitter_userprofile(self):
        # Best-effort lookup of the submitter's User record by email;
        # returns None when missing or ambiguous.
        User = get_user_model()
        try:
            return User.objects.get(email=self.submitter_email)
        except (User.DoesNotExist, User.MultipleObjectsReturned):
            return None

    class Meta:
        get_latest_by = "created"
        ordering = ('id',)
        verbose_name = _('Ticket')
        verbose_name_plural = _('Tickets')

    def __str__(self):
        return '%s %s' % (self.id, self.title)

    def get_absolute_url(self):
        from django.urls import reverse
        return reverse('helpdesk:view', args=(self.id,))

    def save(self, *args, **kwargs):
        if not self.id:
            # This is a new ticket as no ID yet exists.
            self.created = timezone.now()
        if not self.priority:
            self.priority = 3
        self.modified = timezone.now()
        # Truncate over-long titles to fit the 200-char field.
        if len(self.title) > 200:
            self.title = self.title[:197] + "..."
        super(Ticket, self).save(*args, **kwargs)

    @staticmethod
    def queue_and_id_from_query(query):
        # Apply the opposite logic here compared to self._get_ticket_for_url
        # Ensure that queues with '-' in them will work
        parts = query.split('-')
        queue = '-'.join(parts[0:-1])
        return queue, parts[-1]

    def get_markdown(self):
        return get_markdown(self.description)

    @property
    def get_resolution_markdown(self):
        return get_markdown(self.resolution)

    def add_email_to_ticketcc_if_not_in(self, email=None, user=None, ticketcc=None):
        """
        Check that given email/user_email/ticketcc_email is not already present on the ticket
        (submitter email, assigned to, or in ticket CCs) and add it to a new ticket CC,
        or move the given one
        :param str email:
        :param User user:
        :param TicketCC ticketcc:
        :rtype: TicketCC|None
        """
        if ticketcc:
            email = ticketcc.display
        elif user:
            if user.email:
                email = user.email
            else:
                # Ignore if user has no email address
                return
        elif not email:
            raise ValueError('You must provide at least one parameter to get the email from')
        # Prepare all emails already into the ticket
        ticket_emails = [x.display for x in self.ticketcc_set.all()]
        if self.submitter_email:
            ticket_emails.append(self.submitter_email)
        if self.assigned_to and self.assigned_to.email:
            ticket_emails.append(self.assigned_to.email)
        # Check that email is not already part of the ticket
        if email not in ticket_emails:
            if ticketcc:
                # Re-home an existing TicketCC onto this ticket.
                ticketcc.ticket = self
                ticketcc.save(update_fields=['ticket'])
            elif user:
                ticketcc = self.ticketcc_set.create(user=user)
            else:
                ticketcc = self.ticketcc_set.create(email=email)
            return ticketcc
class FollowUpManager(models.Manager):
    """Manager helpers for filtering follow-ups by visibility."""
    def private_followups(self):
        # Staff-only notes, never shown or emailed to the submitter.
        return self.filter(public=False)
    def public_followups(self):
        # Follow-ups visible to the submitter as well as staff.
        return self.filter(public=True)
class FollowUp(models.Model):
    """
    A FollowUp is a comment and/or change to a ticket. We keep a simple
    title, the comment entered by the user, and the new status of a ticket
    to enable easy flagging of details on the view-ticket page.
    The title is automatically generated at save-time, based on what action
    the user took.
    Tickets that aren't public are never shown to or e-mailed to the submitter,
    although all staff can see them.
    """

    ticket = models.ForeignKey(
        Ticket,
        on_delete=models.CASCADE,
        verbose_name=_('Ticket'),
    )
    date = models.DateTimeField(
        _('Date'),
        default=timezone.now
    )
    title = models.CharField(
        _('Title'),
        max_length=200,
        blank=True,
        null=True,
    )
    comment = models.TextField(
        _('Comment'),
        blank=True,
        null=True,
    )
    public = models.BooleanField(
        _('Public'),
        blank=True,
        default=False,
        help_text=_(
            'Public tickets are viewable by the submitter and all '
            'staff, but non-public tickets can only be seen by staff.'
        ),
    )
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        verbose_name=_('User'),
    )
    new_status = models.IntegerField(
        _('New Status'),
        choices=Ticket.STATUS_CHOICES,
        blank=True,
        null=True,
        help_text=_('If the status was changed, what was it changed to?'),
    )
    message_id = models.CharField(
        _('E-Mail ID'),
        max_length=256,
        blank=True,
        null=True,
        help_text=_("The Message ID of the submitter's email."),
        editable=False,
    )
    objects = FollowUpManager()
    time_spent = models.DurationField(
        help_text=_("Time spent on this follow up"),
        blank=True, null=True
    )

    class Meta:
        ordering = ('date',)
        verbose_name = _('Follow-up')
        verbose_name_plural = _('Follow-ups')

    def __str__(self):
        return '%s' % self.title

    def get_absolute_url(self):
        # Anchor link to this follow-up on the parent ticket's page.
        return u"%s#followup%s" % (self.ticket.get_absolute_url(), self.id)

    def save(self, *args, **kwargs):
        # Touch the parent ticket's "modified" timestamp whenever a
        # follow-up is saved.
        t = self.ticket
        t.modified = timezone.now()
        t.save()
        super(FollowUp, self).save(*args, **kwargs)

    def get_markdown(self):
        return get_markdown(self.comment)

    @property
    def time_spent_formated(self):
        # "HHh:MMm" string form of time_spent; see format_time_spent().
        return format_time_spent(self.time_spent)
class TicketChange(models.Model):
    """
    For each FollowUp, any changes to the parent ticket (eg Title, Priority,
    etc) are tracked here for display purposes.
    """

    followup = models.ForeignKey(
        FollowUp,
        on_delete=models.CASCADE,
        verbose_name=_('Follow-up'),
    )
    field = models.CharField(
        _('Field'),
        max_length=100,
    )
    old_value = models.TextField(
        _('Old Value'),
        blank=True,
        null=True,
    )
    new_value = models.TextField(
        _('New Value'),
        blank=True,
        null=True,
    )

    def __str__(self):
        # Human-readable summary, e.g. 'priority changed from "3" to "1"'.
        out = '%s ' % self.field
        if not self.new_value:
            out += ugettext('removed')
        elif not self.old_value:
            out += ugettext('set to %s') % self.new_value
        else:
            out += ugettext('changed from "%(old_value)s" to "%(new_value)s"') % {
                'old_value': self.old_value,
                'new_value': self.new_value
            }
        return out

    class Meta:
        verbose_name = _('Ticket change')
        verbose_name_plural = _('Ticket changes')
def attachment_path(instance, filename):
    """Delegate upload-path construction to the model instance.

    Django's ``FileField(upload_to=...)`` needs a module-level callable;
    this simply forwards to ``instance.attachment_path``.
    """
    build_path = instance.attachment_path
    return build_path(filename)
class Attachment(models.Model):
    """
    Represents a file attached to a follow-up. This could come from an e-mail
    attachment, or it could be uploaded via the web interface.

    Abstract base class: concrete subclasses must override attachment_path().
    """
    file = models.FileField(
        _('File'),
        upload_to=attachment_path,
        max_length=1000,
        validators=[validate_file_extension]
    )
    filename = models.CharField(
        _('Filename'),
        blank=True,
        max_length=1000,
    )
    mime_type = models.CharField(
        _('MIME Type'),
        blank=True,
        max_length=255,
    )
    size = models.IntegerField(
        _('Size'),
        blank=True,
        help_text=_('Size of this file in bytes'),
    )
    def __str__(self):
        return '%s' % self.filename
    def save(self, *args, **kwargs):
        """Fill in size, filename and MIME type from the file when unset."""
        if not self.size:
            self.size = self.get_size()
        if not self.filename:
            self.filename = self.get_filename()
        if not self.mime_type:
            # Guess from the filename; fall back to a generic binary type.
            self.mime_type = \
                mimetypes.guess_type(self.filename, strict=False)[0] or \
                'application/octet-stream'
        return super(Attachment, self).save(*args, **kwargs)
    def get_filename(self):
        return str(self.file)
    def get_size(self):
        return self.file.file.size
    def attachment_path(self, filename):
        """Provide a file path that will help prevent files being overwritten, by
        putting attachments in a folder off attachments for ticket/followup_id/.
        """
        # BUG FIX: the original used `assert NotImplementedError(...)`, which
        # always passes (the exception instance is truthy) and silently fell
        # through returning None. Raise so subclasses are forced to override.
        raise NotImplementedError(
            "This method is to be implemented by Attachment classes"
        )
    class Meta:
        ordering = ('filename',)
        verbose_name = _('Attachment')
        verbose_name_plural = _('Attachments')
        abstract = True
class FollowUpAttachment(Attachment):
    """An attachment belonging to a FollowUp, stored per-ticket/follow-up."""
    followup = models.ForeignKey(
        FollowUp,
        on_delete=models.CASCADE,
        verbose_name=_('Follow-up'),
    )
    def attachment_path(self, filename):
        """Return the upload path (relative to MEDIA_ROOT) for this attachment.

        SECURITY FIX (CWE-22): the client-supplied filename is reduced to its
        basename before being joined into the storage path, so a crafted name
        such as '../../../etc/passwd' cannot escape the attachment directory.
        """
        os.umask(0)
        # Strip any directory components from the user-controlled filename.
        filename = os.path.basename(filename)
        path = 'helpdesk/attachments/{ticket_for_url}-{secret_key}/{id_}'.format(
            ticket_for_url=self.followup.ticket.ticket_for_url,
            secret_key=self.followup.ticket.secret_key,
            id_=self.followup.id)
        att_path = os.path.join(settings.MEDIA_ROOT, path)
        if settings.DEFAULT_FILE_STORAGE == "django.core.files.storage.FileSystemStorage":
            if not os.path.exists(att_path):
                # NOTE(review): 0o777 with umask(0) creates world-writable
                # directories — confirm this is intentional for deployments.
                os.makedirs(att_path, 0o777)
        return os.path.join(path, filename)
class KBIAttachment(Attachment):
    """An attachment belonging to a knowledge-base item."""
    kbitem = models.ForeignKey(
        "KBItem",
        on_delete=models.CASCADE,
        verbose_name=_('Knowledge base item'),
    )
    def attachment_path(self, filename):
        """Return the upload path (relative to MEDIA_ROOT) for this attachment.

        SECURITY FIX (CWE-22): the client-supplied filename is reduced to its
        basename before being joined into the storage path, so a crafted name
        containing '../' sequences cannot escape the attachment directory.
        """
        os.umask(0)
        # Strip any directory components from the user-controlled filename.
        filename = os.path.basename(filename)
        path = 'helpdesk/attachments/kb/{category}/{kbi}'.format(
            category=self.kbitem.category,
            kbi=self.kbitem.id)
        att_path = os.path.join(settings.MEDIA_ROOT, path)
        if settings.DEFAULT_FILE_STORAGE == "django.core.files.storage.FileSystemStorage":
            if not os.path.exists(att_path):
                # NOTE(review): 0o777 with umask(0) creates world-writable
                # directories — confirm this is intentional for deployments.
                os.makedirs(att_path, 0o777)
        return os.path.join(path, filename)
class PreSetReply(models.Model):
    """
    We can allow the admin to define a number of pre-set replies, used to
    simplify the sending of updates and resolutions. These are basically Django
    templates with a limited context - however if you wanted to get crafty it would
    be easy to write a reply that displays ALL updates in hierarchical order etc
    with use of for loops over {{ ticket.followup_set.all }} and friends.
    When replying to a ticket, the user can select any reply set for the current
    queue, and the body text is fetched via AJAX.
    """
    class Meta:
        ordering = ('name',)
        verbose_name = _('Pre-set reply')
        verbose_name_plural = _('Pre-set replies')
    # Queues this reply is limited to; empty means available on all queues.
    queues = models.ManyToManyField(
        Queue,
        blank=True,
        help_text=_('Leave blank to allow this reply to be used for all '
                    'queues, or select those queues you wish to limit this reply to.'),
    )
    name = models.CharField(
        _('Name'),
        max_length=100,
        help_text=_('Only used to assist users with selecting a reply - not '
                    'shown to the user.'),
    )
    # Django template source rendered with a limited context (see docstring).
    body = models.TextField(
        _('Body'),
        help_text=_('Context available: {{ ticket }} - ticket object (eg '
                    '{{ ticket.title }}); {{ queue }} - The queue; and {{ user }} '
                    '- the current user.'),
    )
    def __str__(self):
        return '%s' % self.name
class EscalationExclusion(models.Model):
    """
    An 'EscalationExclusion' lets us define a date on which escalation should
    not happen, for example a weekend or public holiday.
    You may also have a queue that is only used on one day per week.
    To create these on a regular basis, check out the README file for an
    example cronjob that runs 'create_escalation_exclusions.py'.
    """
    # Queues this exclusion applies to; empty means all queues.
    queues = models.ManyToManyField(
        Queue,
        blank=True,
        help_text=_('Leave blank for this exclusion to be applied to all queues, '
                    'or select those queues you wish to exclude with this entry.'),
    )
    name = models.CharField(
        _('Name'),
        max_length=100,
    )
    # The single calendar date on which escalation is suppressed.
    date = models.DateField(
        _('Date'),
        help_text=_('Date on which escalation should not happen'),
    )
    def __str__(self):
        return '%s' % self.name
    class Meta:
        verbose_name = _('Escalation exclusion')
        verbose_name_plural = _('Escalation exclusions')
class EmailTemplate(models.Model):
    """
    Since these are more likely to be changed than other templates, we store
    them in the database.
    This means that an admin can change email templates without having to have
    access to the filesystem.
    """
    # Key used by send_templated_mail() to look the template up.
    template_name = models.CharField(
        _('Template Name'),
        max_length=100,
    )
    subject = models.CharField(
        _('Subject'),
        max_length=100,
        help_text=_('This will be prefixed with "[ticket.ticket] ticket.title"'
                    '. We recommend something simple such as "(Updated") or "(Closed)"'
                    ' - the same context is available as in plain_text, below.'),
    )
    heading = models.CharField(
        _('Heading'),
        max_length=100,
        help_text=_('In HTML e-mails, this will be the heading at the top of '
                    'the email - the same context is available as in plain_text, '
                    'below.'),
    )
    # Body used for the text/plain alternative of outgoing mail.
    plain_text = models.TextField(
        _('Plain Text'),
        help_text=_('The context available to you includes {{ ticket }}, '
                    '{{ queue }}, and depending on the time of the call: '
                    '{{ resolution }} or {{ comment }}.'),
    )
    # Body used for the text/html alternative of outgoing mail.
    html = models.TextField(
        _('HTML'),
        help_text=_('The same context is available here as in plain_text, above.'),
    )
    locale = models.CharField(
        _('Locale'),
        max_length=10,
        blank=True,
        null=True,
        help_text=_('Locale of this template.'),
    )
    def __str__(self):
        return '%s' % self.template_name
    class Meta:
        ordering = ('template_name', 'locale')
        verbose_name = _('e-mail template')
        verbose_name_plural = _('e-mail templates')
class KBCategory(models.Model):
    """
    Lets help users help themselves: the Knowledge Base is a categorised
    listing of questions & answers.
    """
    name = models.CharField(
        _('Name of the category'),
        max_length=100,
    )
    title = models.CharField(
        _('Title on knowledgebase page'),
        max_length=100,
    )
    # URL identifier; used by get_absolute_url() below.
    slug = models.SlugField(
        _('Slug'),
    )
    description = models.TextField(
        _('Description'),
    )
    # Queue pre-selected when a ticket is created from this category's page.
    queue = models.ForeignKey(
        Queue,
        blank=True,
        null=True,
        on_delete=models.CASCADE,
        verbose_name=_('Default queue when creating a ticket after viewing this category.'),
    )
    public = models.BooleanField(
        default=True,
        verbose_name=_("Is KBCategory publicly visible?")
    )
    def __str__(self):
        return '%s' % self.name
    class Meta:
        ordering = ('title',)
        verbose_name = _('Knowledge base category')
        verbose_name_plural = _('Knowledge base categories')
    def get_absolute_url(self):
        from django.urls import reverse
        return reverse('helpdesk:kb_category', kwargs={'slug': self.slug})
class KBItem(models.Model):
    """
    An item within the knowledgebase. Very straightforward question/answer
    style system.
    """
    # Users who voted positively / negatively on this item.
    voted_by = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        related_name='votes',
    )
    downvoted_by = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        related_name='downvotes',
    )
    category = models.ForeignKey(
        KBCategory,
        on_delete=models.CASCADE,
        verbose_name=_('Category'),
    )
    title = models.CharField(
        _('Title'),
        max_length=100,
    )
    question = models.TextField(
        _('Question'),
    )
    answer = models.TextField(
        _('Answer'),
    )
    # Total vote count; 'recommendations' below counts only positive votes.
    votes = models.IntegerField(
        _('Votes'),
        help_text=_('Total number of votes cast for this item'),
        default=0,
    )
    recommendations = models.IntegerField(
        _('Positive Votes'),
        help_text=_('Number of votes for this item which were POSITIVE.'),
        default=0,
    )
    # Defaulted to now() in save() when left unset.
    last_updated = models.DateTimeField(
        _('Last Updated'),
        help_text=_('The date on which this question was most recently changed.'),
        blank=True,
    )
    team = models.ForeignKey(
        helpdesk_settings.HELPDESK_TEAMS_MODEL,
        on_delete=models.CASCADE,
        verbose_name=_('Team'),
        blank=True,
        null=True,
    )
    order = models.PositiveIntegerField(
        _('Order'),
        blank=True,
        null=True,
    )
    enabled = models.BooleanField(
        _('Enabled to display to users'),
        default=True,
    )
    def save(self, *args, **kwargs):
        # Stamp last_updated on first save if the caller did not supply one.
        if not self.last_updated:
            self.last_updated = timezone.now()
        return super(KBItem, self).save(*args, **kwargs)
    def get_team(self):
        return helpdesk_settings.HELPDESK_KBITEM_TEAM_GETTER(self)
    def _score(self):
        """ Return a score out of 10 or Unrated if no votes """
        if self.votes > 0:
            return (self.recommendations / self.votes) * 10
        else:
            return _('Unrated')
    score = property(_score)
    def __str__(self):
        return '%s: %s' % (self.category.title, self.title)
    class Meta:
        ordering = ('order', 'title',)
        verbose_name = _('Knowledge base item')
        verbose_name_plural = _('Knowledge base items')
    def get_absolute_url(self):
        from django.urls import reverse
        # Link to the category page pre-filtered to this item.
        return str(reverse('helpdesk:kb_category', args=(self.category.slug,))) + "?kbitem=" + str(self.pk)
    def query_url(self):
        from django.urls import reverse
        # Ticket list filtered to tickets attached to this KB item.
        return str(reverse('helpdesk:list')) + "?kbitem=" + str(self.pk)
    def num_open_tickets(self):
        # Status 1/2 are OPEN_STATUS/REOPENED_STATUS on Ticket.
        return Ticket.objects.filter(kbitem=self, status__in=(1, 2)).count()
    def unassigned_tickets(self):
        return Ticket.objects.filter(kbitem=self, status__in=(1, 2), assigned_to__isnull=True)
    def get_markdown(self):
        return get_markdown(self.answer)
class SavedSearch(models.Model):
    """
    Allow a user to save a ticket search, eg their filtering and sorting
    options, and optionally share it with other users. This lets people
    easily create a set of commonly-used filters, such as:
    * My tickets waiting on me
    * My tickets waiting on submitter
    * My tickets in 'Priority Support' queue with priority of 1
    * All tickets containing the word 'billing'.
    etc...
    """
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        verbose_name=_('User'),
    )
    title = models.CharField(
        _('Query Name'),
        max_length=100,
        help_text=_('User-provided name for this query'),
    )
    shared = models.BooleanField(
        _('Shared With Other Users?'),
        blank=True,
        default=False,
        help_text=_('Should other users see this query?'),
    )
    # Serialized query object; see help_text — treat with care when editing.
    query = models.TextField(
        _('Search Query'),
        help_text=_('Pickled query object. Be wary changing this.'),
    )
    def __str__(self):
        # Shared searches are marked with a trailing '(*)'.
        if self.shared:
            return '%s (*)' % self.title
        else:
            return '%s' % self.title
    class Meta:
        verbose_name = _('Saved search')
        verbose_name_plural = _('Saved searches')
def get_default_setting(setting):
    """Look up one default value from helpdesk's DEFAULT_USER_SETTINGS dict."""
    from helpdesk.settings import DEFAULT_USER_SETTINGS
    defaults = DEFAULT_USER_SETTINGS
    return defaults[setting]
def login_view_ticketlist_default():
    """Default for UserSettings.login_view_ticketlist."""
    key = 'login_view_ticketlist'
    return get_default_setting(key)
def email_on_ticket_change_default():
    """Default for UserSettings.email_on_ticket_change."""
    key = 'email_on_ticket_change'
    return get_default_setting(key)
def email_on_ticket_assign_default():
    """Default for UserSettings.email_on_ticket_assign."""
    key = 'email_on_ticket_assign'
    return get_default_setting(key)
def tickets_per_page_default():
    """Default for UserSettings.tickets_per_page."""
    key = 'tickets_per_page'
    return get_default_setting(key)
def use_email_as_submitter_default():
    """Default for UserSettings.use_email_as_submitter."""
    key = 'use_email_as_submitter'
    return get_default_setting(key)
class UserSettings(models.Model):
    """
    A bunch of user-specific settings that we want to be able to define, such
    as notification preferences and other things that should probably be
    configurable.
    """
    PAGE_SIZES = ((10, '10'), (25, '25'), (50, '50'), (100, '100'))
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="usersettings_helpdesk")
    # Legacy storage format kept only for backwards compatibility.
    settings_pickled = models.TextField(
        _('DEPRECATED! Settings Dictionary DEPRECATED!'),
        help_text=_('DEPRECATED! This is a base64-encoded representation of a pickled Python dictionary. '
                    'Do not change this field via the admin.'),
        blank=True,
        null=True,
    )
    # Each boolean/int below defaults via the module-level *_default helpers,
    # which read helpdesk.settings.DEFAULT_USER_SETTINGS.
    login_view_ticketlist = models.BooleanField(
        verbose_name=_('Show Ticket List on Login?'),
        help_text=_('Display the ticket list upon login? Otherwise, the dashboard is shown.'),
        default=login_view_ticketlist_default,
    )
    email_on_ticket_change = models.BooleanField(
        verbose_name=_('E-mail me on ticket change?'),
        help_text=_(
            'If you\'re the ticket owner and the ticket is changed via the web by somebody else,'
            'do you want to receive an e-mail?'
        ),
        default=email_on_ticket_change_default,
    )
    email_on_ticket_assign = models.BooleanField(
        verbose_name=_('E-mail me when assigned a ticket?'),
        help_text=_('If you are assigned a ticket via the web, do you want to receive an e-mail?'),
        default=email_on_ticket_assign_default,
    )
    tickets_per_page = models.IntegerField(
        verbose_name=_('Number of tickets to show per page'),
        help_text=_('How many tickets do you want to see on the Ticket List page?'),
        default=tickets_per_page_default,
        choices=PAGE_SIZES,
    )
    use_email_as_submitter = models.BooleanField(
        verbose_name=_('Use my e-mail address when submitting tickets?'),
        help_text=_('When you submit a ticket, do you want to automatically '
                    'use your e-mail address as the submitter address? You '
                    'can type a different e-mail address when entering the '
                    'ticket if needed, this option only changes the default.'),
        default=use_email_as_submitter_default,
    )
    def __str__(self):
        return 'Preferences for %s' % self.user
    class Meta:
        verbose_name = _('User Setting')
        verbose_name_plural = _('User Settings')
def create_usersettings(sender, instance, created, **kwargs):
    """
    Helper function to create UserSettings instances as
    required, eg when we first create the UserSettings database
    table via 'syncdb' or when we save a new user.
    If we end up with users with no UserSettings, then we get horrible
    'DoesNotExist: UserSettings matching query does not exist.' errors.
    """
    # Only react to freshly-created users; updates are ignored.
    if not created:
        return
    UserSettings.objects.create(user=instance)
# Ensure every newly-created user automatically receives a UserSettings row.
models.signals.post_save.connect(create_usersettings, sender=settings.AUTH_USER_MODEL)
class IgnoreEmail(models.Model):
    """
    This model lets us easily ignore e-mails from certain senders when
    processing IMAP and POP3 mailboxes, eg mails from postmaster or from
    known trouble-makers.
    """
    class Meta:
        verbose_name = _('Ignored e-mail address')
        verbose_name_plural = _('Ignored e-mail addresses')
    # Queues this rule applies to; empty means all queues.
    queues = models.ManyToManyField(
        Queue,
        blank=True,
        help_text=_('Leave blank for this e-mail to be ignored on all queues, '
                    'or select those queues you wish to ignore this e-mail for.'),
    )
    name = models.CharField(
        _('Name'),
        max_length=100,
    )
    # Auto-filled to now() in save() when unset; not editable in the admin.
    date = models.DateField(
        _('Date'),
        help_text=_('Date on which this e-mail address was added'),
        blank=True,
        editable=False
    )
    # May contain '*' wildcards for either the local part or the domain.
    email_address = models.CharField(
        _('E-Mail Address'),
        max_length=150,
        help_text=_('Enter a full e-mail address, or portions with '
                    'wildcards, eg *@domain.com or postmaster@*.'),
    )
    keep_in_mailbox = models.BooleanField(
        _('Save Emails in Mailbox?'),
        blank=True,
        default=False,
        help_text=_('Do you want to save emails from this address in the mailbox? '
                    'If this is unticked, emails from this address will be deleted.'),
    )
    def __str__(self):
        return '%s' % self.name
    def save(self, *args, **kwargs):
        if not self.date:
            self.date = timezone.now()
        return super(IgnoreEmail, self).save(*args, **kwargs)
    def queue_list(self):
        """Return a list of the queues this IgnoreEmail applies to.
        If this IgnoreEmail applies to ALL queues, return '*'.
        """
        queues = self.queues.all().order_by('title')
        if len(queues) == 0:
            return '*'
        else:
            return ', '.join([str(q) for q in queues])
    def test(self, email):
        """
        Possible situations:
        1. Username & Domain both match
        2. Username is wildcard, domain matches
        3. Username matches, domain is wildcard
        4. username & domain are both wildcards
        5. Other (no match)
        1-4 return True, 5 returns False.
        """
        # NOTE(review): both addresses are assumed to contain exactly one
        # '@'; a stored address without one would raise IndexError here.
        own_parts = self.email_address.split("@")
        email_parts = email.split("@")
        # 'and' binds tighter than 'or': each clause below is one of the
        # four match situations from the docstring.
        if self.email_address == email or \
                own_parts[0] == "*" and own_parts[1] == email_parts[1] or \
                own_parts[1] == "*" and own_parts[0] == email_parts[0] or \
                own_parts[0] == "*" and own_parts[1] == "*":
            return True
        else:
            return False
class TicketCC(models.Model):
    """
    Often, there are people who wish to follow a ticket who aren't the
    person who originally submitted it. This model provides a way for those
    people to follow a ticket.
    In this circumstance, a 'person' could be either an e-mail address or
    an existing system user.
    """
    ticket = models.ForeignKey(
        Ticket,
        on_delete=models.CASCADE,
        verbose_name=_('Ticket'),
    )
    # Exactly one of 'user' or 'email' is expected to identify the follower;
    # clean() rejects a user without an e-mail address.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        help_text=_('User who wishes to receive updates for this ticket.'),
        verbose_name=_('User'),
    )
    email = models.EmailField(
        _('E-Mail Address'),
        blank=True,
        null=True,
        help_text=_('For non-user followers, enter their e-mail address'),
    )
    can_view = models.BooleanField(
        _('Can View Ticket?'),
        blank=True,
        default=False,
        help_text=_('Can this CC login to view the ticket details?'),
    )
    can_update = models.BooleanField(
        _('Can Update Ticket?'),
        blank=True,
        default=False,
        help_text=_('Can this CC login and update the ticket?'),
    )
    def _email_address(self):
        # Prefer the linked user's address; fall back to the raw email field.
        if self.user and self.user.email is not None:
            return self.user.email
        else:
            return self.email
    email_address = property(_email_address)
    def _display(self):
        # Prefer the user object for display; fall back to the raw email.
        if self.user:
            return self.user
        else:
            return self.email
    display = property(_display)
    def __str__(self):
        return '%s for %s' % (self.display, self.ticket.title)
    def clean(self):
        if self.user and not self.user.email:
            raise ValidationError('User has no email address')
class CustomFieldManager(models.Manager):
    """Manager returning custom fields sorted by their 'ordering' column."""

    def get_queryset(self):
        base_queryset = super(CustomFieldManager, self).get_queryset()
        return base_queryset.order_by('ordering')
class CustomField(models.Model):
    """
    Definitions for custom fields that are glued onto each ticket.
    """
    name = models.SlugField(
        _('Field Name'),
        help_text=_('As used in the database and behind the scenes. '
                    'Must be unique and consist of only lowercase letters with no punctuation.'),
        unique=True,
    )
    label = models.CharField(
        _('Label'),
        max_length=30,
        help_text=_('The display label for this field'),
    )
    help_text = models.TextField(
        _('Help Text'),
        help_text=_('Shown to the user when editing the ticket'),
        blank=True,
        null=True
    )
    # Supported data types for the generated form widget / validation.
    DATA_TYPE_CHOICES = (
        ('varchar', _('Character (single line)')),
        ('text', _('Text (multi-line)')),
        ('integer', _('Integer')),
        ('decimal', _('Decimal')),
        ('list', _('List')),
        ('boolean', _('Boolean (checkbox yes/no)')),
        ('date', _('Date')),
        ('time', _('Time')),
        ('datetime', _('Date & Time')),
        ('email', _('E-Mail Address')),
        ('url', _('URL')),
        ('ipaddress', _('IP Address')),
        ('slug', _('Slug')),
    )
    data_type = models.CharField(
        _('Data Type'),
        max_length=100,
        help_text=_('Allows you to restrict the data entered into this field'),
        choices=DATA_TYPE_CHOICES,
    )
    max_length = models.IntegerField(
        _('Maximum Length (characters)'),
        blank=True,
        null=True,
    )
    decimal_places = models.IntegerField(
        _('Decimal Places'),
        help_text=_('Only used for decimal fields'),
        blank=True,
        null=True,
    )
    empty_selection_list = models.BooleanField(
        _('Add empty first choice to List?'),
        default=False,
        help_text=_('Only for List: adds an empty first entry to the choices list, '
                    'which enforces that the user makes an active choice.'),
    )
    # Newline-separated options; parsed by _choices_as_array below.
    list_values = models.TextField(
        _('List Values'),
        help_text=_('For list fields only. Enter one option per line.'),
        blank=True,
        null=True,
    )
    ordering = models.IntegerField(
        _('Ordering'),
        help_text=_('Lower numbers are displayed first; higher numbers are listed later'),
        blank=True,
        null=True,
    )
    def _choices_as_array(self):
        # Turn the newline-separated list_values text into [[value, label]]
        # pairs (value and label are identical, stripped of whitespace).
        valuebuffer = StringIO(self.list_values)
        choices = [[item.strip(), item.strip()] for item in valuebuffer.readlines()]
        valuebuffer.close()
        return choices
    choices_as_array = property(_choices_as_array)
    required = models.BooleanField(
        _('Required?'),
        help_text=_('Does the user have to enter a value for this field?'),
        default=False,
    )
    staff_only = models.BooleanField(
        _('Staff Only?'),
        help_text=_('If this is ticked, then the public submission form '
                    'will NOT show this field'),
        default=False,
    )
    objects = CustomFieldManager()
    def __str__(self):
        return '%s' % self.name
    class Meta:
        verbose_name = _('Custom field')
        verbose_name_plural = _('Custom fields')
class TicketCustomFieldValue(models.Model):
    # Stores one custom-field value for one ticket; (ticket, field) is unique.
    ticket = models.ForeignKey(
        Ticket,
        on_delete=models.CASCADE,
        verbose_name=_('Ticket'),
    )
    field = models.ForeignKey(
        CustomField,
        on_delete=models.CASCADE,
        verbose_name=_('Field'),
    )
    # Value is stored as text regardless of the CustomField's data_type.
    value = models.TextField(blank=True, null=True)
    def __str__(self):
        return '%s / %s' % (self.ticket, self.field)
    class Meta:
        unique_together = (('ticket', 'field'),)
        verbose_name = _('Ticket custom field value')
        verbose_name_plural = _('Ticket custom field values')
class TicketDependency(models.Model):
    """
    The ticket identified by `ticket` cannot be resolved until the ticket in `depends_on` has been resolved.
    To help enforce this, a helper function `can_be_resolved` on each Ticket instance checks that
    these have all been resolved.
    """
    class Meta:
        unique_together = (('ticket', 'depends_on'),)
        verbose_name = _('Ticket dependency')
        verbose_name_plural = _('Ticket dependencies')
    # The blocked ticket.
    ticket = models.ForeignKey(
        Ticket,
        on_delete=models.CASCADE,
        verbose_name=_('Ticket'),
        related_name='ticketdependency',
    )
    # The ticket that must be resolved first.
    depends_on = models.ForeignKey(
        Ticket,
        on_delete=models.CASCADE,
        verbose_name=_('Depends On Ticket'),
        related_name='depends_on',
    )
    def __str__(self):
        return '%s / %s' % (self.ticket, self.depends_on)
| """
django-helpdesk - A Django powered ticket tracker for small enterprise.
(c) Copyright 2008 Jutda. All Rights Reserved. See LICENSE for details.
models.py - Model (and hence database) definitions. This is the core of the
helpdesk structure.
"""
from django.contrib.auth.models import Permission
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import models
from django.conf import settings
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _, ugettext
from io import StringIO
import re
import os
import mimetypes
import datetime
from django.utils.safestring import mark_safe
from markdown import markdown
from markdown.extensions import Extension
import uuid
from helpdesk import settings as helpdesk_settings
from .validators import validate_file_extension
from .templated_email import send_templated_mail
def format_time_spent(time_spent):
    """Render a timedelta as 'HHh:MMm'; falsy input yields ''.

    Note: only the timedelta's .seconds component is used, so whole days
    are not folded into the hour count (matches historical behaviour).
    """
    if not time_spent:
        return ""
    secs = time_spent.seconds
    hours = secs // 3600
    minutes = secs % 3600 // 60
    return "{0:02d}h:{1:02d}m".format(hours, minutes)
class EscapeHtml(Extension):
    """Markdown extension that disables raw-HTML pass-through.

    Removing these processors means user-supplied HTML in ticket text is
    rendered as literal text instead of markup (XSS mitigation).
    """

    def extendMarkdown(self, md, md_globals):
        del md.preprocessors['html_block']
        del md.inlinePatterns['html']
def get_markdown(text):
    """Render user-supplied text to HTML via markdown, with raw HTML escaped
    and markdown-style links restricted to ALLOWED_URL_SCHEMES.

    Returns '' for falsy input. The returned string is marked safe because
    the EscapeHtml extension neutralises raw HTML in the input.
    """
    if not text:
        return ""
    # Matches markdown-style links: '[label](scheme:rest)'.
    pattern = fr'([\[\s\S\]]*?)\(([\s\S]*?):([\s\S]*?)\)'
    if re.match(pattern, text):
        # Extract the scheme (group 2) from the first link found.
        scheme = re.search(pattern, text, re.IGNORECASE).group(2)
        # Keep the scheme only when it is allow-listed; otherwise drop it.
        # NOTE(review): only the FIRST match's scheme is inspected, but the
        # chosen replacement is applied to ALL matches via re.sub — confirm
        # mixed-scheme inputs are handled as intended.
        if scheme in helpdesk_settings.ALLOWED_URL_SCHEMES:
            replacement = '\\1(\\2:\\3)'
        else:
            replacement = '\\1(\\3)'
        text = re.sub(pattern, replacement, text, flags=re.IGNORECASE)
    return mark_safe(
        markdown(
            text,
            extensions=[
                EscapeHtml(), 'markdown.extensions.nl2br',
                'markdown.extensions.fenced_code'
            ]
        )
    )
class Queue(models.Model):
"""
A queue is a collection of tickets into what would generally be business
areas or departments.
For example, a company may have a queue for each Product they provide, or
a queue for each of Accounts, Pre-Sales, and Support.
"""
title = models.CharField(
_('Title'),
max_length=100,
)
slug = models.SlugField(
_('Slug'),
max_length=50,
unique=True,
help_text=_('This slug is used when building ticket ID\'s. Once set, '
'try not to change it or e-mailing may get messy.'),
)
email_address = models.EmailField(
_('E-Mail Address'),
blank=True,
null=True,
help_text=_('All outgoing e-mails for this queue will use this e-mail '
'address. If you use IMAP or POP3, this should be the e-mail '
'address for that mailbox.'),
)
locale = models.CharField(
_('Locale'),
max_length=10,
blank=True,
null=True,
help_text=_('Locale of this queue. All correspondence in this '
'queue will be in this language.'),
)
allow_public_submission = models.BooleanField(
_('Allow Public Submission?'),
blank=True,
default=False,
help_text=_('Should this queue be listed on the public submission form?'),
)
allow_email_submission = models.BooleanField(
_('Allow E-Mail Submission?'),
blank=True,
default=False,
help_text=_('Do you want to poll the e-mail box below for new '
'tickets?'),
)
escalate_days = models.IntegerField(
_('Escalation Days'),
blank=True,
null=True,
help_text=_('For tickets which are not held, how often do you wish to '
'increase their priority? Set to 0 for no escalation.'),
)
new_ticket_cc = models.CharField(
_('New Ticket CC Address'),
blank=True,
null=True,
max_length=200,
help_text=_('If an e-mail address is entered here, then it will '
'receive notification of all new tickets created for this queue. '
'Enter a comma between multiple e-mail addresses.'),
)
updated_ticket_cc = models.CharField(
_('Updated Ticket CC Address'),
blank=True,
null=True,
max_length=200,
help_text=_('If an e-mail address is entered here, then it will '
'receive notification of all activity (new tickets, closed '
'tickets, updates, reassignments, etc) for this queue. Separate '
'multiple addresses with a comma.'),
)
enable_notifications_on_email_events = models.BooleanField(
_('Notify contacts when email updates arrive'),
blank=True,
default=False,
help_text=_('When an email arrives to either create a ticket or to '
'interact with an existing discussion. Should email notifications be sent ? '
'Note: the new_ticket_cc and updated_ticket_cc work independently of this feature'),
)
email_box_type = models.CharField(
_('E-Mail Box Type'),
max_length=5,
choices=(('pop3', _('POP 3')), ('imap', _('IMAP')), ('local', _('Local Directory'))),
blank=True,
null=True,
help_text=_('E-Mail server type for creating tickets automatically '
'from a mailbox - both POP3 and IMAP are supported, as well as '
'reading from a local directory.'),
)
email_box_host = models.CharField(
_('E-Mail Hostname'),
max_length=200,
blank=True,
null=True,
help_text=_('Your e-mail server address - either the domain name or '
'IP address. May be "localhost".'),
)
email_box_port = models.IntegerField(
_('E-Mail Port'),
blank=True,
null=True,
help_text=_('Port number to use for accessing e-mail. Default for '
'POP3 is "110", and for IMAP is "143". This may differ on some '
'servers. Leave it blank to use the defaults.'),
)
email_box_ssl = models.BooleanField(
_('Use SSL for E-Mail?'),
blank=True,
default=False,
help_text=_('Whether to use SSL for IMAP or POP3 - the default ports '
'when using SSL are 993 for IMAP and 995 for POP3.'),
)
email_box_user = models.CharField(
_('E-Mail Username'),
max_length=200,
blank=True,
null=True,
help_text=_('Username for accessing this mailbox.'),
)
email_box_pass = models.CharField(
_('E-Mail Password'),
max_length=200,
blank=True,
null=True,
help_text=_('Password for the above username'),
)
email_box_imap_folder = models.CharField(
_('IMAP Folder'),
max_length=100,
blank=True,
null=True,
help_text=_('If using IMAP, what folder do you wish to fetch messages '
'from? This allows you to use one IMAP account for multiple '
'queues, by filtering messages on your IMAP server into separate '
'folders. Default: INBOX.'),
)
email_box_local_dir = models.CharField(
_('E-Mail Local Directory'),
max_length=200,
blank=True,
null=True,
help_text=_('If using a local directory, what directory path do you '
'wish to poll for new email? '
'Example: /var/lib/mail/helpdesk/'),
)
permission_name = models.CharField(
_('Django auth permission name'),
max_length=72, # based on prepare_permission_name() pre-pending chars to slug
blank=True,
null=True,
editable=False,
help_text=_('Name used in the django.contrib.auth permission system'),
)
email_box_interval = models.IntegerField(
_('E-Mail Check Interval'),
help_text=_('How often do you wish to check this mailbox? (in Minutes)'),
blank=True,
null=True,
default='5',
)
email_box_last_check = models.DateTimeField(
blank=True,
null=True,
editable=False,
# This is updated by management/commands/get_mail.py.
)
socks_proxy_type = models.CharField(
_('Socks Proxy Type'),
max_length=8,
choices=(('socks4', _('SOCKS4')), ('socks5', _('SOCKS5'))),
blank=True,
null=True,
help_text=_('SOCKS4 or SOCKS5 allows you to proxy your connections through a SOCKS server.'),
)
socks_proxy_host = models.GenericIPAddressField(
_('Socks Proxy Host'),
blank=True,
null=True,
help_text=_('Socks proxy IP address. Default: 127.0.0.1'),
)
socks_proxy_port = models.IntegerField(
_('Socks Proxy Port'),
blank=True,
null=True,
help_text=_('Socks proxy port number. Default: 9150 (default TOR port)'),
)
logging_type = models.CharField(
_('Logging Type'),
max_length=5,
choices=(
('none', _('None')),
('debug', _('Debug')),
('info', _('Information')),
('warn', _('Warning')),
('error', _('Error')),
('crit', _('Critical'))
),
blank=True,
null=True,
help_text=_('Set the default logging level. All messages at that '
'level or above will be logged to the directory set '
'below. If no level is set, logging will be disabled.'),
)
logging_dir = models.CharField(
_('Logging Directory'),
max_length=200,
blank=True,
null=True,
help_text=_('If logging is enabled, what directory should we use to '
'store log files for this queue? '
'The standard logging mechanims are used if no directory is set'),
)
default_owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.SET_NULL,
related_name='default_owner',
blank=True,
null=True,
verbose_name=_('Default owner'),
)
dedicated_time = models.DurationField(
help_text=_("Time to be spent on this Queue in total"),
blank=True, null=True
)
def __str__(self):
return "%s" % self.title
class Meta:
ordering = ('title',)
verbose_name = _('Queue')
verbose_name_plural = _('Queues')
def _from_address(self):
"""
Short property to provide a sender address in SMTP format,
eg 'Name <email>'. We do this so we can put a simple error message
in the sender name field, so hopefully the admin can see and fix it.
"""
if not self.email_address:
# must check if given in format "Foo <foo@example.com>"
default_email = re.match(".*<(?P<email>.*@*.)>", settings.DEFAULT_FROM_EMAIL)
if default_email is not None:
# already in the right format, so just include it here
return u'NO QUEUE EMAIL ADDRESS DEFINED %s' % settings.DEFAULT_FROM_EMAIL
else:
return u'NO QUEUE EMAIL ADDRESS DEFINED <%s>' % settings.DEFAULT_FROM_EMAIL
else:
return u'%s <%s>' % (self.title, self.email_address)
from_address = property(_from_address)
@property
def time_spent(self):
"""Return back total time spent on the ticket. This is calculated value
based on total sum from all FollowUps
"""
total = datetime.timedelta(0)
for val in self.ticket_set.all():
if val.time_spent:
total = total + val.time_spent
return total
@property
def time_spent_formated(self):
return format_time_spent(self.time_spent)
def prepare_permission_name(self):
"""Prepare internally the codename for the permission and store it in permission_name.
:return: The codename that can be used to create a new Permission object.
"""
# Prepare the permission associated to this Queue
basename = "queue_access_%s" % self.slug
self.permission_name = "helpdesk.%s" % basename
return basename
def save(self, *args, **kwargs):
if self.email_box_type == 'imap' and not self.email_box_imap_folder:
self.email_box_imap_folder = 'INBOX'
if self.socks_proxy_type:
if not self.socks_proxy_host:
self.socks_proxy_host = '127.0.0.1'
if not self.socks_proxy_port:
self.socks_proxy_port = 9150
else:
self.socks_proxy_host = None
self.socks_proxy_port = None
if not self.email_box_port:
if self.email_box_type == 'imap' and self.email_box_ssl:
self.email_box_port = 993
elif self.email_box_type == 'imap' and not self.email_box_ssl:
self.email_box_port = 143
elif self.email_box_type == 'pop3' and self.email_box_ssl:
self.email_box_port = 995
elif self.email_box_type == 'pop3' and not self.email_box_ssl:
self.email_box_port = 110
if not self.id:
# Prepare the permission codename and the permission
# (even if they are not needed with the current configuration)
basename = self.prepare_permission_name()
Permission.objects.create(
name=_("Permission for queue: ") + self.title,
content_type=ContentType.objects.get_for_model(self.__class__),
codename=basename,
)
super(Queue, self).save(*args, **kwargs)
    def delete(self, *args, **kwargs):
        """Delete the queue, then remove its auto-created access Permission.

        ``permission_name`` is stored as "helpdesk.<codename>"; the [9:]
        slice strips the 9-character "helpdesk." prefix to recover the
        bare codename the Permission row was created with.
        """
        permission_name = self.permission_name
        super(Queue, self).delete(*args, **kwargs)
        # once the Queue is safely deleted, remove the permission (if exists)
        if permission_name:
            try:
                p = Permission.objects.get(codename=permission_name[9:])
                p.delete()
            except ObjectDoesNotExist:
                pass
def mk_secret():
    """Return a fresh random secret key: a UUID4 in its canonical
    36-character hyphenated string form."""
    return "{}".format(uuid.uuid4())
class Ticket(models.Model):
    """
    To allow a ticket to be entered as quickly as possible, only the
    bare minimum fields are required. These basically allow us to
    sort and manage the ticket. The user can always go back and
    enter more information later.
    A good example of this is when a customer is on the phone, and
    you want to give them a ticket ID as quickly as possible. You can
    enter some basic info, save the ticket, give the customer the ID
    and get off the phone, then add in further detail at a later time
    (once the customer is not on the line).
    Note that assigned_to is optional - unassigned tickets are displayed on
    the dashboard to prompt users to take ownership of them.
    """
    # Workflow states stored in the 'status' column.
    OPEN_STATUS = 1
    REOPENED_STATUS = 2
    RESOLVED_STATUS = 3
    CLOSED_STATUS = 4
    DUPLICATE_STATUS = 5
    STATUS_CHOICES = (
        (OPEN_STATUS, _('Open')),
        (REOPENED_STATUS, _('Reopened')),
        (RESOLVED_STATUS, _('Resolved')),
        (CLOSED_STATUS, _('Closed')),
        (DUPLICATE_STATUS, _('Duplicate')),
    )
    # 1 is most urgent, 5 least; 3 is the default.
    PRIORITY_CHOICES = (
        (1, _('1. Critical')),
        (2, _('2. High')),
        (3, _('3. Normal')),
        (4, _('4. Low')),
        (5, _('5. Very Low')),
    )
    title = models.CharField(
        _('Title'),
        max_length=200,
    )
    queue = models.ForeignKey(
        Queue,
        on_delete=models.CASCADE,
        verbose_name=_('Queue'),
    )
    created = models.DateTimeField(
        _('Created'),
        blank=True,
        help_text=_('Date this ticket was first created'),
    )
    modified = models.DateTimeField(
        _('Modified'),
        blank=True,
        help_text=_('Date this ticket was most recently changed.'),
    )
    submitter_email = models.EmailField(
        _('Submitter E-Mail'),
        blank=True,
        null=True,
        help_text=_('The submitter will receive an email for all public '
                    'follow-ups left for this task.'),
    )
    assigned_to = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name='assigned_to',
        blank=True,
        null=True,
        verbose_name=_('Assigned to'),
    )
    status = models.IntegerField(
        _('Status'),
        choices=STATUS_CHOICES,
        default=OPEN_STATUS,
    )
    on_hold = models.BooleanField(
        _('On Hold'),
        blank=True,
        default=False,
        help_text=_('If a ticket is on hold, it will not automatically be escalated.'),
    )
    description = models.TextField(
        _('Description'),
        blank=True,
        null=True,
        help_text=_('The content of the customers query.'),
    )
    resolution = models.TextField(
        _('Resolution'),
        blank=True,
        null=True,
        help_text=_('The resolution provided to the customer by our staff.'),
    )
    # NOTE(review): blank=3 looks like a typo for blank=True; 3 is truthy so
    # it behaves the same, but confirm before changing (would touch migrations).
    priority = models.IntegerField(
        _('Priority'),
        choices=PRIORITY_CHOICES,
        default=3,
        blank=3,
        help_text=_('1 = Highest Priority, 5 = Low Priority'),
    )
    due_date = models.DateTimeField(
        _('Due on'),
        blank=True,
        null=True,
    )
    last_escalation = models.DateTimeField(
        blank=True,
        null=True,
        editable=False,
        help_text=_('The date this ticket was last escalated - updated '
                    'automatically by management/commands/escalate_tickets.py.'),
    )
    # Random UUID string included in public ticket URLs (see _get_ticket_url)
    # so anonymous submitters can view their own ticket.
    secret_key = models.CharField(
        _("Secret key needed for viewing/editing ticket by non-logged in users"),
        max_length=36,
        default=mk_secret,
    )
    kbitem = models.ForeignKey(
        "KBItem",
        blank=True,
        null=True,
        on_delete=models.CASCADE,
        verbose_name=_('Knowledge base item the user was viewing when they created this ticket.'),
    )
    # Self-reference: set when this ticket has been merged into another one.
    merged_to = models.ForeignKey(
        'self',
        verbose_name=_('merged to'),
        related_name='merged_tickets',
        on_delete=models.CASCADE,
        null=True,
        blank=True
    )
    @property
    def time_spent(self):
        """Return back total time spent on the ticket. This is calculated value
        based on total sum from all FollowUps
        """
        total = datetime.timedelta(0)
        for val in self.followup_set.all():
            if val.time_spent:
                total = total + val.time_spent
        return total
    @property
    def time_spent_formated(self):
        """Human-readable form of ``time_spent``."""
        return format_time_spent(self.time_spent)
    def send(self, roles, dont_send_to=None, **kwargs):
        """
        Send notifications to everyone interested in this ticket.
        The the roles argument is a dictionary mapping from roles to (template, context) pairs.
        If a role is not present in the dictionary, users of that type will not receive the notification.
        The following roles exist:
        - 'submitter'
        - 'new_ticket_cc'
        - 'ticket_cc'
        - 'assigned_to'
        Here is an example roles dictionary:
        {
            'submitter': (template_name, context),
            'assigned_to': (template_name2, context),
        }
        **kwargs are passed to send_templated_mail defined in templated_email.py
        returns the set of email addresses the notification was delivered to.
        """
        recipients = set()
        if dont_send_to is not None:
            recipients.update(dont_send_to)
        # Never notify the queue's own inbox (would loop mail back in).
        recipients.add(self.queue.email_address)
        def should_receive(email):
            return email and email not in recipients
        def send(role, recipient):
            # Deliver at most once per address, and only for requested roles.
            if recipient and recipient not in recipients and role in roles:
                template, context = roles[role]
                send_templated_mail(template, context, recipient, sender=self.queue.from_address, **kwargs)
                recipients.add(recipient)
        send('submitter', self.submitter_email)
        send('ticket_cc', self.queue.updated_ticket_cc)
        send('new_ticket_cc', self.queue.new_ticket_cc)
        if self.assigned_to:
            send('assigned_to', self.assigned_to.email)
        if self.queue.enable_notifications_on_email_events:
            for cc in self.ticketcc_set.all():
                send('ticket_cc', cc.email_address)
        return recipients
    def _get_assigned_to(self):
        """ Custom property to allow us to easily print 'Unassigned' if a
        ticket has no owner, or the users name if it's assigned. If the user
        has a full name configured, we use that, otherwise their username. """
        if not self.assigned_to:
            return _('Unassigned')
        else:
            if self.assigned_to.get_full_name():
                return self.assigned_to.get_full_name()
            else:
                return self.assigned_to.get_username()
    get_assigned_to = property(_get_assigned_to)
    def _get_ticket(self):
        """ A user-friendly ticket ID, which is a combination of ticket ID
        and queue slug. This is generally used in e-mail subjects. """
        return u"[%s]" % self.ticket_for_url
    ticket = property(_get_ticket)
    def _get_ticket_for_url(self):
        """ A URL-friendly ticket ID, used in links. """
        return u"%s-%s" % (self.queue.slug, self.id)
    ticket_for_url = property(_get_ticket_for_url)
    def _get_priority_css_class(self):
        """
        Return the boostrap class corresponding to the priority.
        """
        # 1=danger (critical), 2=warning (high), 5=success (very low),
        # 3/4 get no special styling.
        if self.priority == 2:
            return "warning"
        elif self.priority == 1:
            return "danger"
        elif self.priority == 5:
            return "success"
        else:
            return ""
    get_priority_css_class = property(_get_priority_css_class)
    def _get_status(self):
        """
        Displays the ticket status, with an "On Hold" message if needed.
        """
        held_msg = ''
        if self.on_hold:
            held_msg = _(' - On Hold')
        dep_msg = ''
        if not self.can_be_resolved:
            dep_msg = _(' - Open dependencies')
        return u'%s%s%s' % (self.get_status_display(), held_msg, dep_msg)
    get_status = property(_get_status)
    def _get_ticket_url(self):
        """
        Returns a publicly-viewable URL for this ticket, used when giving
        a URL to the submitter of a ticket.
        """
        from django.contrib.sites.models import Site
        from django.core.exceptions import ImproperlyConfigured
        from django.urls import reverse
        try:
            site = Site.objects.get_current()
        except ImproperlyConfigured:
            # Sites framework not configured; fall back to a placeholder domain.
            site = Site(domain='configure-django-sites.com')
        if helpdesk_settings.HELPDESK_USE_HTTPS_IN_EMAIL_LINK:
            protocol = 'https'
        else:
            protocol = 'http'
        return u"%s://%s%s?ticket=%s&email=%s&key=%s" % (
            protocol,
            site.domain,
            reverse('helpdesk:public_view'),
            self.ticket_for_url,
            self.submitter_email,
            self.secret_key
        )
    ticket_url = property(_get_ticket_url)
    def _get_staff_url(self):
        """
        Returns a staff-only URL for this ticket, used when giving a URL to
        a staff member (in emails etc)
        """
        from django.contrib.sites.models import Site
        from django.core.exceptions import ImproperlyConfigured
        from django.urls import reverse
        try:
            site = Site.objects.get_current()
        except ImproperlyConfigured:
            site = Site(domain='configure-django-sites.com')
        if helpdesk_settings.HELPDESK_USE_HTTPS_IN_EMAIL_LINK:
            protocol = 'https'
        else:
            protocol = 'http'
        return u"%s://%s%s" % (
            protocol,
            site.domain,
            reverse('helpdesk:view',
                    args=[self.id])
        )
    staff_url = property(_get_staff_url)
    def _can_be_resolved(self):
        """
        Returns a boolean.
        True = any dependencies are resolved
        False = There are non-resolved dependencies
        """
        OPEN_STATUSES = (Ticket.OPEN_STATUS, Ticket.REOPENED_STATUS)
        return TicketDependency.objects.filter(ticket=self).filter(
            depends_on__status__in=OPEN_STATUSES).count() == 0
    can_be_resolved = property(_can_be_resolved)
    def get_submitter_userprofile(self):
        """Return the User matching submitter_email, or None if there is
        no unique match."""
        User = get_user_model()
        try:
            return User.objects.get(email=self.submitter_email)
        except (User.DoesNotExist, User.MultipleObjectsReturned):
            return None
    class Meta:
        get_latest_by = "created"
        ordering = ('id',)
        verbose_name = _('Ticket')
        verbose_name_plural = _('Tickets')
    def __str__(self):
        return '%s %s' % (self.id, self.title)
    def get_absolute_url(self):
        from django.urls import reverse
        return reverse('helpdesk:view', args=(self.id,))
    def save(self, *args, **kwargs):
        """Stamp created/modified timestamps, default the priority, and
        truncate over-long titles before persisting."""
        if not self.id:
            # This is a new ticket as no ID yet exists.
            self.created = timezone.now()
        if not self.priority:
            self.priority = 3
        self.modified = timezone.now()
        # Keep within the 200-char column; 197 chars + "..." = 200.
        if len(self.title) > 200:
            self.title = self.title[:197] + "..."
        super(Ticket, self).save(*args, **kwargs)
    @staticmethod
    def queue_and_id_from_query(query):
        # Apply the opposite logic here compared to self._get_ticket_for_url
        # Ensure that queues with '-' in them will work
        parts = query.split('-')
        queue = '-'.join(parts[0:-1])
        return queue, parts[-1]
    def get_markdown(self):
        return get_markdown(self.description)
    @property
    def get_resolution_markdown(self):
        return get_markdown(self.resolution)
    def add_email_to_ticketcc_if_not_in(self, email=None, user=None, ticketcc=None):
        """
        Check that given email/user_email/ticketcc_email is not already present on the ticket
        (submitter email, assigned to, or in ticket CCs) and add it to a new ticket CC,
        or move the given one
        :param str email:
        :param User user:
        :param TicketCC ticketcc:
        :rtype: TicketCC|None
        """
        if ticketcc:
            email = ticketcc.display
        elif user:
            if user.email:
                email = user.email
            else:
                # Ignore if user has no email address
                return
        elif not email:
            raise ValueError('You must provide at least one parameter to get the email from')
        # Prepare all emails already into the ticket
        ticket_emails = [x.display for x in self.ticketcc_set.all()]
        if self.submitter_email:
            ticket_emails.append(self.submitter_email)
        if self.assigned_to and self.assigned_to.email:
            ticket_emails.append(self.assigned_to.email)
        # Check that email is not already part of the ticket
        if email not in ticket_emails:
            if ticketcc:
                # Re-parent an existing CC entry onto this ticket.
                ticketcc.ticket = self
                ticketcc.save(update_fields=['ticket'])
            elif user:
                ticketcc = self.ticketcc_set.create(user=user)
            else:
                ticketcc = self.ticketcc_set.create(email=email)
            return ticketcc
class FollowUpManager(models.Manager):
    """Manager helpers for filtering follow-ups by visibility."""
    def private_followups(self):
        # Follow-ups visible to staff only.
        return self.filter(public=False)
    def public_followups(self):
        # Follow-ups visible to the submitter as well as staff.
        return self.filter(public=True)
class FollowUp(models.Model):
    """
    A FollowUp is a comment and/or change to a ticket. We keep a simple
    title, the comment entered by the user, and the new status of a ticket
    to enable easy flagging of details on the view-ticket page.
    The title is automatically generated at save-time, based on what action
    the user took.
    Tickets that aren't public are never shown to or e-mailed to the submitter,
    although all staff can see them.
    """
    ticket = models.ForeignKey(
        Ticket,
        on_delete=models.CASCADE,
        verbose_name=_('Ticket'),
    )
    date = models.DateTimeField(
        _('Date'),
        default=timezone.now
    )
    title = models.CharField(
        _('Title'),
        max_length=200,
        blank=True,
        null=True,
    )
    comment = models.TextField(
        _('Comment'),
        blank=True,
        null=True,
    )
    public = models.BooleanField(
        _('Public'),
        blank=True,
        default=False,
        help_text=_(
            'Public tickets are viewable by the submitter and all '
            'staff, but non-public tickets can only be seen by staff.'
        ),
    )
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        verbose_name=_('User'),
    )
    new_status = models.IntegerField(
        _('New Status'),
        choices=Ticket.STATUS_CHOICES,
        blank=True,
        null=True,
        help_text=_('If the status was changed, what was it changed to?'),
    )
    # Message-ID header of the originating email, used for threading/dedup.
    message_id = models.CharField(
        _('E-Mail ID'),
        max_length=256,
        blank=True,
        null=True,
        help_text=_("The Message ID of the submitter's email."),
        editable=False,
    )
    objects = FollowUpManager()
    time_spent = models.DurationField(
        help_text=_("Time spent on this follow up"),
        blank=True, null=True
    )
    class Meta:
        ordering = ('date',)
        verbose_name = _('Follow-up')
        verbose_name_plural = _('Follow-ups')
    def __str__(self):
        return '%s' % self.title
    def get_absolute_url(self):
        # Anchor to this follow-up on the ticket page.
        return u"%s#followup%s" % (self.ticket.get_absolute_url(), self.id)
    def save(self, *args, **kwargs):
        """Saving a follow-up also bumps the parent ticket's 'modified' stamp."""
        t = self.ticket
        t.modified = timezone.now()
        t.save()
        super(FollowUp, self).save(*args, **kwargs)
    def get_markdown(self):
        return get_markdown(self.comment)
    @property
    def time_spent_formated(self):
        """Human-readable form of ``time_spent``."""
        return format_time_spent(self.time_spent)
class TicketChange(models.Model):
    """
    For each FollowUp, any changes to the parent ticket (eg Title, Priority,
    etc) are tracked here for display purposes.
    """
    followup = models.ForeignKey(
        FollowUp,
        on_delete=models.CASCADE,
        verbose_name=_('Follow-up'),
    )
    field = models.CharField(
        _('Field'),
        max_length=100,
    )
    old_value = models.TextField(
        _('Old Value'),
        blank=True,
        null=True,
    )
    new_value = models.TextField(
        _('New Value'),
        blank=True,
        null=True,
    )
    def __str__(self):
        # Render as "field removed" / "field set to X" / "field changed from A to B".
        out = '%s ' % self.field
        if not self.new_value:
            out += ugettext('removed')
        elif not self.old_value:
            out += ugettext('set to %s') % self.new_value
        else:
            out += ugettext('changed from "%(old_value)s" to "%(new_value)s"') % {
                'old_value': self.old_value,
                'new_value': self.new_value
            }
        return out
    class Meta:
        verbose_name = _('Ticket change')
        verbose_name_plural = _('Ticket changes')
def attachment_path(instance, filename):
    """Module-level bridge for ``FileField(upload_to=...)``.

    Django's upload_to callable cannot be a bound method reference at
    field-definition time, so this forwards to the model instance's own
    ``attachment_path`` implementation.
    """
    handler = instance.attachment_path
    return handler(filename)
class Attachment(models.Model):
    """
    Represents a file attached to a follow-up. This could come from an e-mail
    attachment, or it could be uploaded via the web interface.

    Abstract base class; concrete subclasses must implement
    ``attachment_path`` to decide where their files are stored.
    """
    file = models.FileField(
        _('File'),
        upload_to=attachment_path,
        max_length=1000,
        validators=[validate_file_extension]
    )
    filename = models.CharField(
        _('Filename'),
        blank=True,
        max_length=1000,
    )
    mime_type = models.CharField(
        _('MIME Type'),
        blank=True,
        max_length=255,
    )
    size = models.IntegerField(
        _('Size'),
        blank=True,
        help_text=_('Size of this file in bytes'),
    )
    def __str__(self):
        return '%s' % self.filename
    def save(self, *args, **kwargs):
        """Derive size, filename and MIME type from the file when not set."""
        if not self.size:
            self.size = self.get_size()
        if not self.filename:
            self.filename = self.get_filename()
        if not self.mime_type:
            # Guess from the filename; fall back to a generic binary type.
            self.mime_type = \
                mimetypes.guess_type(self.filename, strict=False)[0] or \
                'application/octet-stream'
        return super(Attachment, self).save(*args, **kwargs)
    def get_filename(self):
        return str(self.file)
    def get_size(self):
        return self.file.file.size
    def attachment_path(self, filename):
        """Provide a file path that will help prevent files being overwritten, by
        putting attachments in a folder off attachments for ticket/followup_id/.
        """
        # BUGFIX: was `assert NotImplementedError(...)`, which asserts a
        # (truthy) exception instance and therefore never fired — subclasses
        # missing an override would silently return None. Raise instead.
        raise NotImplementedError(
            "This method is to be implemented by Attachment classes"
        )
    class Meta:
        ordering = ('filename',)
        verbose_name = _('Attachment')
        verbose_name_plural = _('Attachments')
        abstract = True
class FollowUpAttachment(Attachment):
    """Attachment belonging to a ticket follow-up."""
    followup = models.ForeignKey(
        FollowUp,
        on_delete=models.CASCADE,
        verbose_name=_('Follow-up'),
    )
    def attachment_path(self, filename):
        """Build the storage path for this attachment.

        Files live under ``helpdesk/attachments/<ticket>-<secret>/<followup_id>/``.
        SECURITY (CWE-22): the uploaded filename is reduced to its base name
        before joining, so a crafted name like '../../x' cannot escape the
        attachment directory.
        """
        os.umask(0)
        path = 'helpdesk/attachments/{ticket_for_url}-{secret_key}/{id_}'.format(
            ticket_for_url=self.followup.ticket.ticket_for_url,
            secret_key=self.followup.ticket.secret_key,
            id_=self.followup.id)
        att_path = os.path.join(settings.MEDIA_ROOT, path)
        if settings.DEFAULT_FILE_STORAGE == "django.core.files.storage.FileSystemStorage":
            if not os.path.exists(att_path):
                # NOTE(review): 0o777 makes the directory world-writable —
                # consider tightening, but left as-is to preserve behavior.
                os.makedirs(att_path, 0o777)
        # Strip any directory components from the client-supplied name.
        return os.path.join(path, os.path.basename(filename))
class KBIAttachment(Attachment):
    """Attachment belonging to a knowledge-base item."""
    kbitem = models.ForeignKey(
        "KBItem",
        on_delete=models.CASCADE,
        verbose_name=_('Knowledge base item'),
    )
    def attachment_path(self, filename):
        """Build the storage path for this attachment.

        Files live under ``helpdesk/attachments/kb/<category>/<kbitem_id>/``.
        SECURITY (CWE-22): the uploaded filename is reduced to its base name
        before joining, so a crafted name like '../../x' cannot escape the
        attachment directory.
        """
        os.umask(0)
        path = 'helpdesk/attachments/kb/{category}/{kbi}'.format(
            category=self.kbitem.category,
            kbi=self.kbitem.id)
        att_path = os.path.join(settings.MEDIA_ROOT, path)
        if settings.DEFAULT_FILE_STORAGE == "django.core.files.storage.FileSystemStorage":
            if not os.path.exists(att_path):
                # NOTE(review): 0o777 makes the directory world-writable —
                # consider tightening, but left as-is to preserve behavior.
                os.makedirs(att_path, 0o777)
        # Strip any directory components from the client-supplied name.
        return os.path.join(path, os.path.basename(filename))
class PreSetReply(models.Model):
    """
    We can allow the admin to define a number of pre-set replies, used to
    simplify the sending of updates and resolutions. These are basically Django
    templates with a limited context - however if you wanted to get crafy it would
    be easy to write a reply that displays ALL updates in hierarchical order etc
    with use of for loops over {{ ticket.followup_set.all }} and friends.
    When replying to a ticket, the user can select any reply set for the current
    queue, and the body text is fetched via AJAX.
    """
    class Meta:
        ordering = ('name',)
        verbose_name = _('Pre-set reply')
        verbose_name_plural = _('Pre-set replies')
    # Empty M2M means the reply is available for every queue.
    queues = models.ManyToManyField(
        Queue,
        blank=True,
        help_text=_('Leave blank to allow this reply to be used for all '
                    'queues, or select those queues you wish to limit this reply to.'),
    )
    name = models.CharField(
        _('Name'),
        max_length=100,
        help_text=_('Only used to assist users with selecting a reply - not '
                    'shown to the user.'),
    )
    body = models.TextField(
        _('Body'),
        help_text=_('Context available: {{ ticket }} - ticket object (eg '
                    '{{ ticket.title }}); {{ queue }} - The queue; and {{ user }} '
                    '- the current user.'),
    )
    def __str__(self):
        return '%s' % self.name
class EscalationExclusion(models.Model):
    """
    An 'EscalationExclusion' lets us define a date on which escalation should
    not happen, for example a weekend or public holiday.
    You may also have a queue that is only used on one day per week.
    To create these on a regular basis, check out the README file for an
    example cronjob that runs 'create_escalation_exclusions.py'.
    """
    # Empty M2M means the exclusion applies to every queue.
    queues = models.ManyToManyField(
        Queue,
        blank=True,
        help_text=_('Leave blank for this exclusion to be applied to all queues, '
                    'or select those queues you wish to exclude with this entry.'),
    )
    name = models.CharField(
        _('Name'),
        max_length=100,
    )
    date = models.DateField(
        _('Date'),
        help_text=_('Date on which escalation should not happen'),
    )
    def __str__(self):
        return '%s' % self.name
    class Meta:
        verbose_name = _('Escalation exclusion')
        verbose_name_plural = _('Escalation exclusions')
class EmailTemplate(models.Model):
    """
    Since these are more likely to be changed than other templates, we store
    them in the database.
    This means that an admin can change email templates without having to have
    access to the filesystem.
    """
    template_name = models.CharField(
        _('Template Name'),
        max_length=100,
    )
    subject = models.CharField(
        _('Subject'),
        max_length=100,
        help_text=_('This will be prefixed with "[ticket.ticket] ticket.title"'
                    '. We recommend something simple such as "(Updated") or "(Closed)"'
                    ' - the same context is available as in plain_text, below.'),
    )
    heading = models.CharField(
        _('Heading'),
        max_length=100,
        help_text=_('In HTML e-mails, this will be the heading at the top of '
                    'the email - the same context is available as in plain_text, '
                    'below.'),
    )
    plain_text = models.TextField(
        _('Plain Text'),
        help_text=_('The context available to you includes {{ ticket }}, '
                    '{{ queue }}, and depending on the time of the call: '
                    '{{ resolution }} or {{ comment }}.'),
    )
    html = models.TextField(
        _('HTML'),
        help_text=_('The same context is available here as in plain_text, above.'),
    )
    # Allows per-language variants of the same template_name.
    locale = models.CharField(
        _('Locale'),
        max_length=10,
        blank=True,
        null=True,
        help_text=_('Locale of this template.'),
    )
    def __str__(self):
        return '%s' % self.template_name
    class Meta:
        ordering = ('template_name', 'locale')
        verbose_name = _('e-mail template')
        verbose_name_plural = _('e-mail templates')
class KBCategory(models.Model):
    """
    Lets help users help themselves: the Knowledge Base is a categorised
    listing of questions & answers.
    """
    name = models.CharField(
        _('Name of the category'),
        max_length=100,
    )
    title = models.CharField(
        _('Title on knowledgebase page'),
        max_length=100,
    )
    slug = models.SlugField(
        _('Slug'),
    )
    description = models.TextField(
        _('Description'),
    )
    # Tickets created while viewing this category default to this queue.
    queue = models.ForeignKey(
        Queue,
        blank=True,
        null=True,
        on_delete=models.CASCADE,
        verbose_name=_('Default queue when creating a ticket after viewing this category.'),
    )
    public = models.BooleanField(
        default=True,
        verbose_name=_("Is KBCategory publicly visible?")
    )
    def __str__(self):
        return '%s' % self.name
    class Meta:
        ordering = ('title',)
        verbose_name = _('Knowledge base category')
        verbose_name_plural = _('Knowledge base categories')
    def get_absolute_url(self):
        from django.urls import reverse
        return reverse('helpdesk:kb_category', kwargs={'slug': self.slug})
class KBItem(models.Model):
    """
    An item within the knowledgebase. Very straightforward question/answer
    style system.
    """
    voted_by = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        related_name='votes',
    )
    downvoted_by = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        related_name='downvotes',
    )
    category = models.ForeignKey(
        KBCategory,
        on_delete=models.CASCADE,
        verbose_name=_('Category'),
    )
    title = models.CharField(
        _('Title'),
        max_length=100,
    )
    question = models.TextField(
        _('Question'),
    )
    answer = models.TextField(
        _('Answer'),
    )
    votes = models.IntegerField(
        _('Votes'),
        help_text=_('Total number of votes cast for this item'),
        default=0,
    )
    recommendations = models.IntegerField(
        _('Positive Votes'),
        help_text=_('Number of votes for this item which were POSITIVE.'),
        default=0,
    )
    last_updated = models.DateTimeField(
        _('Last Updated'),
        help_text=_('The date on which this question was most recently changed.'),
        blank=True,
    )
    team = models.ForeignKey(
        helpdesk_settings.HELPDESK_TEAMS_MODEL,
        on_delete=models.CASCADE,
        verbose_name=_('Team'),
        blank=True,
        null=True,
    )
    order = models.PositiveIntegerField(
        _('Order'),
        blank=True,
        null=True,
    )
    enabled = models.BooleanField(
        _('Enabled to display to users'),
        default=True,
    )
    def save(self, *args, **kwargs):
        # Default last_updated on first save; later saves keep the caller's value.
        if not self.last_updated:
            self.last_updated = timezone.now()
        return super(KBItem, self).save(*args, **kwargs)
    def get_team(self):
        return helpdesk_settings.HELPDESK_KBITEM_TEAM_GETTER(self)
    def _score(self):
        """ Return a score out of 10 or Unrated if no votes """
        # NOTE: returns a float when voted, a localized string otherwise —
        # callers must handle both types.
        if self.votes > 0:
            return (self.recommendations / self.votes) * 10
        else:
            return _('Unrated')
    score = property(_score)
    def __str__(self):
        return '%s: %s' % (self.category.title, self.title)
    class Meta:
        ordering = ('order', 'title',)
        verbose_name = _('Knowledge base item')
        verbose_name_plural = _('Knowledge base items')
    def get_absolute_url(self):
        from django.urls import reverse
        return str(reverse('helpdesk:kb_category', args=(self.category.slug,))) + "?kbitem=" + str(self.pk)
    def query_url(self):
        # Ticket-list view pre-filtered to tickets created from this item.
        from django.urls import reverse
        return str(reverse('helpdesk:list')) + "?kbitem=" + str(self.pk)
    def num_open_tickets(self):
        # Statuses 1 and 2 are OPEN and REOPENED.
        return Ticket.objects.filter(kbitem=self, status__in=(1, 2)).count()
    def unassigned_tickets(self):
        return Ticket.objects.filter(kbitem=self, status__in=(1, 2), assigned_to__isnull=True)
    def get_markdown(self):
        return get_markdown(self.answer)
class SavedSearch(models.Model):
    """
    Allow a user to save a ticket search, eg their filtering and sorting
    options, and optionally share it with other users. This lets people
    easily create a set of commonly-used filters, such as:
    * My tickets waiting on me
    * My tickets waiting on submitter
    * My tickets in 'Priority Support' queue with priority of 1
    * All tickets containing the word 'billing'.
    etc...
    """
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        verbose_name=_('User'),
    )
    title = models.CharField(
        _('Query Name'),
        max_length=100,
        help_text=_('User-provided name for this query'),
    )
    shared = models.BooleanField(
        _('Shared With Other Users?'),
        blank=True,
        default=False,
        help_text=_('Should other users see this query?'),
    )
    # NOTE(review): stores a pickled query — wherever this is deserialized,
    # ensure the value cannot be attacker-controlled (pickle is unsafe on
    # untrusted data). Confirm against the loading code.
    query = models.TextField(
        _('Search Query'),
        help_text=_('Pickled query object. Be wary changing this.'),
    )
    def __str__(self):
        # "(*)" marks searches shared with other users.
        if self.shared:
            return '%s (*)' % self.title
        else:
            return '%s' % self.title
    class Meta:
        verbose_name = _('Saved search')
        verbose_name_plural = _('Saved searches')
def get_default_setting(setting):
    """Return the default value for one key of helpdesk's DEFAULT_USER_SETTINGS."""
    from helpdesk.settings import DEFAULT_USER_SETTINGS
    return DEFAULT_USER_SETTINGS[setting]
def login_view_ticketlist_default():
    """Default value callable for UserSettings.login_view_ticketlist."""
    return get_default_setting('login_view_ticketlist')
def email_on_ticket_change_default():
    """Default value callable for UserSettings.email_on_ticket_change."""
    return get_default_setting('email_on_ticket_change')
def email_on_ticket_assign_default():
    """Default value callable for UserSettings.email_on_ticket_assign."""
    return get_default_setting('email_on_ticket_assign')
def tickets_per_page_default():
    """Default value callable for UserSettings.tickets_per_page."""
    return get_default_setting('tickets_per_page')
def use_email_as_submitter_default():
    """Default value callable for UserSettings.use_email_as_submitter."""
    return get_default_setting('use_email_as_submitter')
class UserSettings(models.Model):
    """
    A bunch of user-specific settings that we want to be able to define, such
    as notification preferences and other things that should probably be
    configurable.
    """
    PAGE_SIZES = ((10, '10'), (25, '25'), (50, '50'), (100, '100'))
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="usersettings_helpdesk")
    # Legacy storage kept only for backwards compatibility; do not edit.
    settings_pickled = models.TextField(
        _('DEPRECATED! Settings Dictionary DEPRECATED!'),
        help_text=_('DEPRECATED! This is a base64-encoded representation of a pickled Python dictionary. '
                    'Do not change this field via the admin.'),
        blank=True,
        null=True,
    )
    login_view_ticketlist = models.BooleanField(
        verbose_name=_('Show Ticket List on Login?'),
        help_text=_('Display the ticket list upon login? Otherwise, the dashboard is shown.'),
        default=login_view_ticketlist_default,
    )
    email_on_ticket_change = models.BooleanField(
        verbose_name=_('E-mail me on ticket change?'),
        help_text=_(
            'If you\'re the ticket owner and the ticket is changed via the web by somebody else,'
            'do you want to receive an e-mail?'
        ),
        default=email_on_ticket_change_default,
    )
    email_on_ticket_assign = models.BooleanField(
        verbose_name=_('E-mail me when assigned a ticket?'),
        help_text=_('If you are assigned a ticket via the web, do you want to receive an e-mail?'),
        default=email_on_ticket_assign_default,
    )
    tickets_per_page = models.IntegerField(
        verbose_name=_('Number of tickets to show per page'),
        help_text=_('How many tickets do you want to see on the Ticket List page?'),
        default=tickets_per_page_default,
        choices=PAGE_SIZES,
    )
    use_email_as_submitter = models.BooleanField(
        verbose_name=_('Use my e-mail address when submitting tickets?'),
        help_text=_('When you submit a ticket, do you want to automatically '
                    'use your e-mail address as the submitter address? You '
                    'can type a different e-mail address when entering the '
                    'ticket if needed, this option only changes the default.'),
        default=use_email_as_submitter_default,
    )
    def __str__(self):
        return 'Preferences for %s' % self.user
    class Meta:
        verbose_name = _('User Setting')
        verbose_name_plural = _('User Settings')
def create_usersettings(sender, instance, created, **kwargs):
    """
    Helper function to create UserSettings instances as
    required, eg when we first create the UserSettings database
    table via 'syncdb' or when we save a new user.
    If we end up with users with no UserSettings, then we get horrible
    'DoesNotExist: UserSettings matching query does not exist.' errors.

    post_save signal receiver: 'instance' is the saved user, 'created'
    is True only on first save.
    """
    if created:
        UserSettings.objects.create(user=instance)
# Ensure every newly created user automatically gets a UserSettings row.
models.signals.post_save.connect(create_usersettings, sender=settings.AUTH_USER_MODEL)
class IgnoreEmail(models.Model):
    """
    This model lets us easily ignore e-mails from certain senders when
    processing IMAP and POP3 mailboxes, eg mails from postmaster or from
    known trouble-makers.
    """
    class Meta:
        verbose_name = _('Ignored e-mail address')
        verbose_name_plural = _('Ignored e-mail addresses')
    # Empty M2M means the ignore rule applies to every queue.
    queues = models.ManyToManyField(
        Queue,
        blank=True,
        help_text=_('Leave blank for this e-mail to be ignored on all queues, '
                    'or select those queues you wish to ignore this e-mail for.'),
    )
    name = models.CharField(
        _('Name'),
        max_length=100,
    )
    date = models.DateField(
        _('Date'),
        help_text=_('Date on which this e-mail address was added'),
        blank=True,
        editable=False
    )
    email_address = models.CharField(
        _('E-Mail Address'),
        max_length=150,
        help_text=_('Enter a full e-mail address, or portions with '
                    'wildcards, eg *@domain.com or postmaster@*.'),
    )
    keep_in_mailbox = models.BooleanField(
        _('Save Emails in Mailbox?'),
        blank=True,
        default=False,
        help_text=_('Do you want to save emails from this address in the mailbox? '
                    'If this is unticked, emails from this address will be deleted.'),
    )
    def __str__(self):
        return '%s' % self.name
    def save(self, *args, **kwargs):
        # Stamp the (non-editable) date on first save.
        if not self.date:
            self.date = timezone.now()
        return super(IgnoreEmail, self).save(*args, **kwargs)
    def queue_list(self):
        """Return a list of the queues this IgnoreEmail applies to.
        If this IgnoreEmail applies to ALL queues, return '*'.
        """
        queues = self.queues.all().order_by('title')
        if len(queues) == 0:
            return '*'
        else:
            return ', '.join([str(q) for q in queues])
    def test(self, email):
        """
        Possible situations:
        1. Username & Domain both match
        2. Username is wildcard, domain matches
        3. Username matches, domain is wildcard
        4. username & domain are both wildcards
        5. Other (no match)
        1-4 return True, 5 returns False.
        """
        # NOTE(review): assumes both self.email_address and the incoming
        # 'email' contain exactly one '@'; a malformed address would raise
        # IndexError on the [1] lookup — confirm inputs are validated upstream.
        own_parts = self.email_address.split("@")
        email_parts = email.split("@")
        if self.email_address == email or \
                own_parts[0] == "*" and own_parts[1] == email_parts[1] or \
                own_parts[1] == "*" and own_parts[0] == email_parts[0] or \
                own_parts[0] == "*" and own_parts[1] == "*":
            return True
        else:
            return False
class TicketCC(models.Model):
    """
    Often, there are people who wish to follow a ticket who aren't the
    person who originally submitted it. This model provides a way for those
    people to follow a ticket.
    In this circumstance, a 'person' could be either an e-mail address or
    an existing system user.
    """
    ticket = models.ForeignKey(
        Ticket,
        on_delete=models.CASCADE,
        verbose_name=_('Ticket'),
    )
    # Exactly one of 'user' or 'email' identifies the follower.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        help_text=_('User who wishes to receive updates for this ticket.'),
        verbose_name=_('User'),
    )
    email = models.EmailField(
        _('E-Mail Address'),
        blank=True,
        null=True,
        help_text=_('For non-user followers, enter their e-mail address'),
    )
    can_view = models.BooleanField(
        _('Can View Ticket?'),
        blank=True,
        default=False,
        help_text=_('Can this CC login to view the ticket details?'),
    )
    can_update = models.BooleanField(
        _('Can Update Ticket?'),
        blank=True,
        default=False,
        help_text=_('Can this CC login and update the ticket?'),
    )
    def _email_address(self):
        # Prefer the linked user's address; fall back to the raw email field.
        if self.user and self.user.email is not None:
            return self.user.email
        else:
            return self.email
    email_address = property(_email_address)
    def _display(self):
        # Display the User object when linked, otherwise the plain address.
        if self.user:
            return self.user
        else:
            return self.email
    display = property(_display)
    def __str__(self):
        return '%s for %s' % (self.display, self.ticket.title)
    def clean(self):
        if self.user and not self.user.email:
            raise ValidationError('User has no email address')
class CustomFieldManager(models.Manager):
    """Manager that always returns custom fields in their display order."""
    def get_queryset(self):
        # Sort by the admin-defined 'ordering' integer by default.
        return super(CustomFieldManager, self).get_queryset().order_by('ordering')
class CustomField(models.Model):
    """
    Definitions for custom fields that are glued onto each ticket.
    """
    # Machine name used behind the scenes as the value's key.
    name = models.SlugField(
        _('Field Name'),
        help_text=_('As used in the database and behind the scenes. '
                    'Must be unique and consist of only lowercase letters with no punctuation.'),
        unique=True,
    )
    label = models.CharField(
        _('Label'),
        max_length=30,
        help_text=_('The display label for this field'),
    )
    help_text = models.TextField(
        _('Help Text'),
        help_text=_('Shown to the user when editing the ticket'),
        blank=True,
        null=True
    )
    # Supported value types for a custom field.
    DATA_TYPE_CHOICES = (
        ('varchar', _('Character (single line)')),
        ('text', _('Text (multi-line)')),
        ('integer', _('Integer')),
        ('decimal', _('Decimal')),
        ('list', _('List')),
        ('boolean', _('Boolean (checkbox yes/no)')),
        ('date', _('Date')),
        ('time', _('Time')),
        ('datetime', _('Date & Time')),
        ('email', _('E-Mail Address')),
        ('url', _('URL')),
        ('ipaddress', _('IP Address')),
        ('slug', _('Slug')),
    )
    data_type = models.CharField(
        _('Data Type'),
        max_length=100,
        help_text=_('Allows you to restrict the data entered into this field'),
        choices=DATA_TYPE_CHOICES,
    )
    max_length = models.IntegerField(
        _('Maximum Length (characters)'),
        blank=True,
        null=True,
    )
    decimal_places = models.IntegerField(
        _('Decimal Places'),
        help_text=_('Only used for decimal fields'),
        blank=True,
        null=True,
    )
    empty_selection_list = models.BooleanField(
        _('Add empty first choice to List?'),
        default=False,
        help_text=_('Only for List: adds an empty first entry to the choices list, '
                    'which enforces that the user makes an active choice.'),
    )
    # One option per line; parsed by _choices_as_array below.
    list_values = models.TextField(
        _('List Values'),
        help_text=_('For list fields only. Enter one option per line.'),
        blank=True,
        null=True,
    )
    ordering = models.IntegerField(
        _('Ordering'),
        help_text=_('Lower numbers are displayed first; higher numbers are listed later'),
        blank=True,
        null=True,
    )
    def _choices_as_array(self):
        # Parse list_values into [[value, label], ...] pairs (value == label).
        valuebuffer = StringIO(self.list_values)
        choices = [[item.strip(), item.strip()] for item in valuebuffer.readlines()]
        valuebuffer.close()
        return choices
    choices_as_array = property(_choices_as_array)
    required = models.BooleanField(
        _('Required?'),
        help_text=_('Does the user have to enter a value for this field?'),
        default=False,
    )
    staff_only = models.BooleanField(
        _('Staff Only?'),
        help_text=_('If this is ticked, then the public submission form '
                    'will NOT show this field'),
        default=False,
    )
    # Default manager sorts fields by `ordering` (see CustomFieldManager).
    objects = CustomFieldManager()
    def __str__(self):
        return '%s' % self.name
    class Meta:
        verbose_name = _('Custom field')
        verbose_name_plural = _('Custom fields')
class TicketCustomFieldValue(models.Model):
    """One ticket's value for one custom field (unique per ticket/field pair)."""
    ticket = models.ForeignKey(
        Ticket,
        on_delete=models.CASCADE,
        verbose_name=_('Ticket'),
    )
    field = models.ForeignKey(
        CustomField,
        on_delete=models.CASCADE,
        verbose_name=_('Field'),
    )
    # Raw stored value; interpretation depends on field.data_type.
    value = models.TextField(blank=True, null=True)
    def __str__(self):
        return '%s / %s' % (self.ticket, self.field)
    class Meta:
        unique_together = (('ticket', 'field'),)
        verbose_name = _('Ticket custom field value')
        verbose_name_plural = _('Ticket custom field values')
class TicketDependency(models.Model):
    """
    The ticket identified by `ticket` cannot be resolved until the ticket in `depends_on` has been resolved.
    To help enforce this, a helper function `can_be_resolved` on each Ticket instance checks that
    these have all been resolved.
    """
    class Meta:
        unique_together = (('ticket', 'depends_on'),)
        verbose_name = _('Ticket dependency')
        verbose_name_plural = _('Ticket dependencies')
    # The blocked ticket.
    ticket = models.ForeignKey(
        Ticket,
        on_delete=models.CASCADE,
        verbose_name=_('Ticket'),
        related_name='ticketdependency',
    )
    # The blocking ticket; must be resolved before `ticket` can be.
    depends_on = models.ForeignKey(
        Ticket,
        on_delete=models.CASCADE,
        verbose_name=_('Depends On Ticket'),
        related_name='depends_on',
    )
    def __str__(self):
        return '%s / %s' % (self.ticket, self.depends_on)
| xss | {
"code": [
" pattern = fr'([\\[\\s\\S\\]]*?)\\(([\\s\\S]*?):([\\[\\s\\S\\]]*?)\\)'"
],
"line_no": [
59
]
} | {
"code": [
" pattern = fr'([\\[\\s\\S\\]]*?)\\(([\\s\\S]*?):([\\s\\S]*?)\\)'"
],
"line_no": [
59
]
} |
# Standard library
import datetime
import mimetypes
import os
import re
import uuid
from io import StringIO

# Third-party
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import models
from django.utils import timezone
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _, ugettext
from markdown import markdown
from markdown.extensions import Extension

# Local
from helpdesk import settings as helpdesk_settings
from .templated_email import send_templated_mail
from .validators import validate_file_extension
def FUNC_0(VAR_0):
    """Render a duration as ``HHh:MMm`` (e.g. ``02h:05m``), or '' when falsy.

    Only the ``seconds`` component of the timedelta is used, so whole days
    in the value are ignored (matches the historical behaviour).
    """
    if not VAR_0:
        return ""
    hours, remainder = divmod(VAR_0.seconds, 3600)
    minutes = remainder // 60
    return "{0:02d}h:{1:02d}m".format(hours, minutes)
class CLASS_0(Extension):
    """Markdown extension that disables raw-HTML passthrough.

    Deleting these processors stops raw HTML in user-supplied text from
    reaching the rendered output (XSS mitigation for ticket text).
    """
    def FUNC_11(self, VAR_8, VAR_9):
        # Remove the block-level and inline raw-HTML handlers from the parser.
        del VAR_8.preprocessors['html_block']
        del VAR_8.inlinePatterns['html']
def FUNC_1(VAR_1):
    """Render untrusted user text to HTML via Markdown, with URL-scheme filtering.

    Markdown-style links whose scheme is not listed in
    ``helpdesk_settings.ALLOWED_URL_SCHEMES`` have the scheme stripped from the
    link target, and raw HTML is disabled via CLASS_0, limiting XSS through
    user-supplied ticket text.

    Returns "" for falsy input, otherwise a ``mark_safe`` HTML string.
    """
    if not VAR_1:
        return ""
    # Match "text(scheme:rest)"-style markdown links. SECURITY FIX: the third
    # group must be the plain any-character match ([\s\S]*?); the previous
    # bracketed class here let crafted link bodies bypass the scheme filter.
    VAR_10 = fr'([\[\s\S\]]*?)\(([\s\S]*?):([\s\S]*?)\)'
    if re.match(VAR_10, VAR_1):
        # NOTE(review): only the first link's scheme is inspected, yet the
        # substitution rewrites every match -- confirm this is intended.
        VAR_135 = re.search(VAR_10, VAR_1, re.IGNORECASE).group(2)
        if VAR_135 in helpdesk_settings.ALLOWED_URL_SCHEMES:
            VAR_160 = '\\1(\\2:\\3)'  # allowed scheme: keep link intact
        else:
            VAR_160 = '\\1(\\3)'      # disallowed scheme: drop it
        VAR_1 = re.sub(VAR_10, VAR_160, VAR_1, flags=re.IGNORECASE)
    return mark_safe(
        markdown(
            VAR_1,
            extensions=[
                CLASS_0(), 'markdown.extensions.nl2br',
                'markdown.extensions.fenced_code'
            ]
        )
    )
class CLASS_1(models.Model):
VAR_11 = models.CharField(
_('Title'),
VAR_125=100,
)
VAR_12 = models.SlugField(
_('Slug'),
VAR_125=50,
unique=True,
VAR_122=_('This VAR_12 is used when building VAR_65 ID\'s. Once set, '
'try not to change it or e-mailing may get messy.'),
)
VAR_13 = models.EmailField(
_('E-Mail Address'),
blank=True,
null=True,
VAR_122=_('All outgoing e-mails for this VAR_48 will use this e-mail '
'address. If you use IMAP or POP3, this should be the e-mail '
'address for that mailbox.'),
)
VAR_14 = models.CharField(
_('Locale'),
VAR_125=10,
blank=True,
null=True,
VAR_122=_('Locale of this VAR_48. All correspondence in this '
'queue will be in this language.'),
)
VAR_15 = models.BooleanField(
_('Allow Public Submission?'),
blank=True,
default=False,
VAR_122=_('Should this VAR_48 be listed on the VAR_78 submission form?'),
)
VAR_16 = models.BooleanField(
_('Allow E-Mail Submission?'),
blank=True,
default=False,
VAR_122=_('Do you want to poll the e-mail box below for new '
'tickets?'),
)
VAR_17 = models.IntegerField(
_('Escalation Days'),
blank=True,
null=True,
VAR_122=_('For tickets which are not held, how often do you wish to '
'increase their VAR_56? Set to 0 for no escalation.'),
)
VAR_18 = models.CharField(
_('New CLASS_2 CC Address'),
blank=True,
null=True,
VAR_125=200,
VAR_122=_('If an e-mail address is entered here, then it will '
'receive notification of all new tickets VAR_6 for this VAR_48. '
'Enter a comma between multiple e-mail addresses.'),
)
VAR_19 = models.CharField(
_('Updated CLASS_2 CC Address'),
blank=True,
null=True,
VAR_125=200,
VAR_122=_('If an e-mail address is entered here, then it will '
'receive notification of all activity (new tickets, closed '
'tickets, updates, reassignments, etc) for this VAR_48. Separate '
'multiple addresses with a comma.'),
)
VAR_20 = models.BooleanField(
_('Notify contacts when VAR_73 updates arrive'),
blank=True,
default=False,
VAR_122=_('When an VAR_73 arrives to either create a VAR_65 or to '
'interact with an existing discussion. Should VAR_73 notifications be sent ? '
'Note: the VAR_18 and VAR_19 work independently of this feature'),
)
VAR_21 = models.CharField(
_('E-Mail Box Type'),
VAR_125=5,
VAR_158=(('pop3', _('POP 3')), ('imap', _('IMAP')), ('local', _('Local Directory'))),
blank=True,
null=True,
VAR_122=_('E-Mail server type for creating tickets automatically '
'from a mailbox - both POP3 and IMAP are supported, as well as '
'reading from a local directory.'),
)
VAR_22 = models.CharField(
_('E-Mail Hostname'),
VAR_125=200,
blank=True,
null=True,
VAR_122=_('Your e-mail server address - either the domain VAR_90 or '
'IP address. May be "localhost".'),
)
VAR_23 = models.IntegerField(
_('E-Mail Port'),
blank=True,
null=True,
VAR_122=_('Port number to use for accessing e-mail. Default for '
'POP3 is "110", and for IMAP is "143". This may differ on some '
'servers. Leave it blank to use the defaults.'),
)
VAR_24 = models.BooleanField(
_('Use SSL for E-Mail?'),
blank=True,
default=False,
VAR_122=_('Whether to use SSL for IMAP or POP3 - the default ports '
'when using SSL are 993 for IMAP and 995 for POP3.'),
)
VAR_25 = models.CharField(
_('E-Mail Username'),
VAR_125=200,
blank=True,
null=True,
VAR_122=_('Username for accessing this mailbox.'),
)
VAR_26 = models.CharField(
_('E-Mail Password'),
VAR_125=200,
blank=True,
null=True,
VAR_122=_('Password for the above username'),
)
VAR_27 = models.CharField(
_('IMAP Folder'),
VAR_125=100,
blank=True,
null=True,
VAR_122=_('If using IMAP, what folder do you wish to fetch messages '
'from? This allows you to use one IMAP account for multiple '
'queues, by filtering messages on your IMAP server into separate '
'folders. Default: INBOX.'),
)
VAR_28 = models.CharField(
_('E-Mail Local Directory'),
VAR_125=200,
blank=True,
null=True,
VAR_122=_('If using a local directory, what directory VAR_153 do you '
'wish to poll for new VAR_73? '
'Example: /var/lib/mail/helpdesk/'),
)
VAR_29 = models.CharField(
_('Django auth permission name'),
VAR_125=72, # based on FUNC_15() pre-pending chars to VAR_12
blank=True,
null=True,
editable=False,
VAR_122=_('Name used in the django.contrib.auth permission system'),
)
VAR_30 = models.IntegerField(
_('E-Mail Check Interval'),
VAR_122=_('How often do you wish to check this mailbox? (in Minutes)'),
blank=True,
null=True,
default='5',
)
VAR_31 = models.DateTimeField(
blank=True,
null=True,
editable=False,
)
VAR_32 = models.CharField(
_('Socks Proxy Type'),
VAR_125=8,
VAR_158=(('socks4', _('SOCKS4')), ('socks5', _('SOCKS5'))),
blank=True,
null=True,
VAR_122=_('SOCKS4 or SOCKS5 allows you to proxy your connections through a SOCKS server.'),
)
VAR_33 = models.GenericIPAddressField(
_('Socks Proxy Host'),
blank=True,
null=True,
VAR_122=_('Socks proxy IP address. Default: 127.0.0.1'),
)
VAR_34 = models.IntegerField(
_('Socks Proxy Port'),
blank=True,
null=True,
VAR_122=_('Socks proxy port number. Default: 9150 (default TOR port)'),
)
VAR_35 = models.CharField(
_('Logging Type'),
VAR_125=5,
VAR_158=(
('none', _('None')),
('debug', _('Debug')),
('info', _('Information')),
('warn', _('Warning')),
('error', _('Error')),
('crit', _('Critical'))
),
blank=True,
null=True,
VAR_122=_('Set the default logging level. All messages at that '
'level or above will be logged to the directory set '
'below. If no level is set, logging will be disabled.'),
)
VAR_36 = models.CharField(
_('Logging Directory'),
VAR_125=200,
blank=True,
null=True,
VAR_122=_('If logging is VAR_107, what directory should we use to '
'store log files for this VAR_48? '
'The standard logging mechanims are used if no directory is set'),
)
VAR_37 = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.SET_NULL,
related_name='default_owner',
blank=True,
null=True,
VAR_136=_('Default owner'),
)
VAR_38 = models.DurationField(
VAR_122=_("Time to be spent on this CLASS_1 in total"),
blank=True, null=True
)
def __str__(self):
return "%s" % self.title
class CLASS_22:
VAR_129 = ('title',)
VAR_136 = _('Queue')
VAR_137 = _('Queues')
def FUNC_12(self):
if not self.email_address:
VAR_161 = re.match(".*<(?P<VAR_73>.*@*.)>", settings.DEFAULT_FROM_EMAIL)
if VAR_161 is not None:
return u'NO QUEUE EMAIL ADDRESS DEFINED %s' % settings.DEFAULT_FROM_EMAIL
else:
return u'NO QUEUE EMAIL ADDRESS DEFINED <%s>' % settings.DEFAULT_FROM_EMAIL
else:
return u'%s <%s>' % (self.title, self.email_address)
VAR_39 = property(FUNC_12)
@property
def VAR_0(self):
VAR_138 = datetime.timedelta(0)
for val in self.ticket_set.all():
if val.time_spent:
VAR_138 = VAR_138 + val.time_spent
return VAR_138
@property
def FUNC_14(self):
return FUNC_0(self.time_spent)
def FUNC_15(self):
VAR_139 = "queue_access_%s" % self.slug
self.permission_name = "helpdesk.%s" % VAR_139
return VAR_139
def FUNC_16(self, *VAR_40, **VAR_7):
if self.email_box_type == 'imap' and not self.email_box_imap_folder:
self.email_box_imap_folder = 'INBOX'
if self.socks_proxy_type:
if not self.socks_proxy_host:
self.socks_proxy_host = '127.0.0.1'
if not self.socks_proxy_port:
self.socks_proxy_port = 9150
else:
self.socks_proxy_host = None
self.socks_proxy_port = None
if not self.email_box_port:
if self.email_box_type == 'imap' and self.email_box_ssl:
self.email_box_port = 993
elif self.email_box_type == 'imap' and not self.email_box_ssl:
self.email_box_port = 143
elif self.email_box_type == 'pop3' and self.email_box_ssl:
self.email_box_port = 995
elif self.email_box_type == 'pop3' and not self.email_box_ssl:
self.email_box_port = 110
if not self.id:
VAR_139 = self.prepare_permission_name()
Permission.objects.create(
VAR_90=_("Permission for VAR_48: ") + self.title,
content_type=ContentType.objects.get_for_model(self.__class__),
codename=VAR_139,
)
super(CLASS_1, self).save(*VAR_40, **VAR_7)
def FUNC_17(self, *VAR_40, **VAR_7):
VAR_29 = self.permission_name
super(CLASS_1, self).delete(*VAR_40, **VAR_7)
if VAR_29:
try:
VAR_164 = Permission.objects.get(codename=VAR_29[9:])
VAR_164.delete()
except ObjectDoesNotExist:
pass
def FUNC_2():
return str(uuid.uuid4())
class CLASS_2(models.Model):
VAR_41 = 1
VAR_42 = 2
VAR_43 = 3
VAR_44 = 4
VAR_45 = 5
VAR_46 = (
(VAR_41, _('Open')),
(VAR_42, _('Reopened')),
(VAR_43, _('Resolved')),
(VAR_44, _('Closed')),
(VAR_45, _('Duplicate')),
)
VAR_47 = (
(1, _('1. Critical')),
(2, _('2. High')),
(3, _('3. Normal')),
(4, _('4. Low')),
(5, _('5. Very Low')),
)
VAR_11 = models.CharField(
_('Title'),
VAR_125=200,
)
VAR_48 = models.ForeignKey(
CLASS_1,
on_delete=models.CASCADE,
VAR_136=_('Queue'),
)
VAR_6 = models.DateTimeField(
_('Created'),
blank=True,
VAR_122=_('Date this VAR_65 was first created'),
)
VAR_49 = models.DateTimeField(
_('Modified'),
blank=True,
VAR_122=_('Date this VAR_65 was most recently changed.'),
)
VAR_50 = models.EmailField(
_('Submitter E-Mail'),
blank=True,
null=True,
VAR_122=_('The submitter will receive an VAR_73 for all VAR_78 '
'follow-ups left for this task.'),
)
VAR_51 = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name='assigned_to',
blank=True,
null=True,
VAR_136=_('Assigned to'),
)
VAR_52 = models.IntegerField(
_('Status'),
VAR_158=VAR_46,
default=VAR_41,
)
VAR_53 = models.BooleanField(
_('On Hold'),
blank=True,
default=False,
VAR_122=_('If a VAR_65 is on hold, it will not automatically be escalated.'),
)
VAR_54 = models.TextField(
_('Description'),
blank=True,
null=True,
VAR_122=_('The content of the customers VAR_72.'),
)
VAR_55 = models.TextField(
_('Resolution'),
blank=True,
null=True,
VAR_122=_('The VAR_55 provided to the customer by our staff.'),
)
VAR_56 = models.IntegerField(
_('Priority'),
VAR_158=VAR_47,
default=3,
blank=3,
VAR_122=_('1 = Highest Priority, 5 = Low Priority'),
)
VAR_57 = models.DateTimeField(
_('Due on'),
blank=True,
null=True,
)
VAR_58 = models.DateTimeField(
blank=True,
null=True,
editable=False,
VAR_122=_('The VAR_76 this VAR_65 was last escalated - updated '
'automatically by management/commands/escalate_tickets.py.'),
)
VAR_59 = models.CharField(
_("Secret key needed for viewing/editing VAR_65 by non-logged in users"),
VAR_125=36,
default=FUNC_2,
)
VAR_60 = models.ForeignKey(
"KBItem",
blank=True,
null=True,
on_delete=models.CASCADE,
VAR_136=_('Knowledge base item the VAR_74 was viewing when they VAR_6 this VAR_65.'),
)
VAR_61 = models.ForeignKey(
'self',
VAR_136=_('merged to'),
related_name='merged_tickets',
on_delete=models.CASCADE,
null=True,
blank=True
)
@property
def VAR_0(self):
VAR_138 = datetime.timedelta(0)
for val in self.followup_set.all():
if val.time_spent:
VAR_138 = VAR_138 + val.time_spent
return VAR_138
@property
def FUNC_14(self):
return FUNC_0(self.time_spent)
def FUNC_18(self, VAR_62, VAR_63=None, **VAR_7):
VAR_140 = set()
if VAR_63 is not None:
VAR_140.update(VAR_63)
VAR_140.add(self.queue.email_address)
def FUNC_48(VAR_73):
return VAR_73 and VAR_73 not in VAR_140
def FUNC_18(VAR_141, VAR_142):
if VAR_142 and VAR_142 not in VAR_140 and VAR_141 in VAR_62:
VAR_165, VAR_166 = VAR_62[VAR_141]
send_templated_mail(VAR_165, VAR_166, VAR_142, VAR_5=self.queue.from_address, **VAR_7)
VAR_140.add(VAR_142)
FUNC_18('submitter', self.submitter_email)
FUNC_18('ticket_cc', self.queue.updated_ticket_cc)
FUNC_18('new_ticket_cc', self.queue.new_ticket_cc)
if self.assigned_to:
FUNC_18('assigned_to', self.assigned_to.email)
if self.queue.enable_notifications_on_email_events:
for cc in self.ticketcc_set.all():
FUNC_18('ticket_cc', cc.email_address)
return VAR_140
def FUNC_19(self):
if not self.assigned_to:
return _('Unassigned')
else:
if self.assigned_to.get_full_name():
return self.assigned_to.get_full_name()
else:
return self.assigned_to.get_username()
VAR_64 = property(FUNC_19)
def FUNC_20(self):
return u"[%s]" % self.ticket_for_url
VAR_65 = property(FUNC_20)
def FUNC_21(self):
return u"%s-%s" % (self.queue.slug, self.id)
VAR_66 = property(FUNC_21)
def FUNC_22(self):
if self.priority == 2:
return "warning"
elif self.priority == 1:
return "danger"
elif self.priority == 5:
return "success"
else:
return ""
VAR_67 = property(FUNC_22)
def FUNC_23(self):
VAR_143 = ''
if self.on_hold:
VAR_143 = _(' - On Hold')
VAR_144 = ''
if not self.can_be_resolved:
VAR_144 = _(' - Open dependencies')
return u'%s%s%s' % (self.get_status_display(), VAR_143, VAR_144)
VAR_68 = property(FUNC_23)
def FUNC_24(self):
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from django.urls import reverse
try:
VAR_162 = Site.objects.get_current()
except ImproperlyConfigured:
VAR_162 = Site(domain='configure-django-sites.com')
if helpdesk_settings.HELPDESK_USE_HTTPS_IN_EMAIL_LINK:
VAR_163 = 'https'
else:
VAR_163 = 'http'
return u"%s://%s%s?VAR_65=%s&VAR_73=%s&key=%s" % (
VAR_163,
VAR_162.domain,
reverse('helpdesk:public_view'),
self.ticket_for_url,
self.submitter_email,
self.secret_key
)
VAR_69 = property(FUNC_24)
def FUNC_25(self):
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from django.urls import reverse
try:
VAR_162 = Site.objects.get_current()
except ImproperlyConfigured:
VAR_162 = Site(domain='configure-django-sites.com')
if helpdesk_settings.HELPDESK_USE_HTTPS_IN_EMAIL_LINK:
VAR_163 = 'https'
else:
VAR_163 = 'http'
return u"%s://%s%s" % (
VAR_163,
VAR_162.domain,
reverse('helpdesk:view',
VAR_40=[self.id])
)
VAR_70 = property(FUNC_25)
def FUNC_26(self):
VAR_145 = (CLASS_2.OPEN_STATUS, CLASS_2.REOPENED_STATUS)
return CLASS_21.objects.filter(VAR_65=self).filter(
depends_on__status__in=VAR_145).count() == 0
VAR_71 = property(FUNC_26)
def FUNC_27(self):
VAR_146 = get_user_model()
try:
return VAR_146.objects.get(VAR_73=self.submitter_email)
except (VAR_146.DoesNotExist, VAR_146.MultipleObjectsReturned):
return None
class CLASS_22:
VAR_147 = "created"
VAR_129 = ('id',)
VAR_136 = _('Ticket')
VAR_137 = _('Tickets')
def __str__(self):
return '%s %s' % (self.id, self.title)
def FUNC_28(self):
from django.urls import reverse
return reverse('helpdesk:view', VAR_40=(self.id,))
def FUNC_16(self, *VAR_40, **VAR_7):
if not self.id:
self.created = timezone.now()
if not self.priority:
self.priority = 3
self.modified = timezone.now()
if len(self.title) > 200:
self.title = self.title[:197] + "..."
super(CLASS_2, self).save(*VAR_40, **VAR_7)
@staticmethod
def FUNC_29(VAR_72):
VAR_148 = VAR_72.split('-')
VAR_48 = '-'.join(VAR_148[0:-1])
return VAR_48, VAR_148[-1]
def FUNC_1(self):
return FUNC_1(self.description)
@property
def FUNC_30(self):
return FUNC_1(self.resolution)
def FUNC_31(self, VAR_73=None, VAR_74=None, VAR_75=None):
if VAR_75:
VAR_73 = VAR_75.display
elif VAR_74:
if VAR_74.email:
VAR_73 = VAR_74.email
else:
return
elif not VAR_73:
raise ValueError('You must provide at least one parameter to get the VAR_73 from')
VAR_149 = [x.display for x in self.ticketcc_set.all()]
if self.submitter_email:
VAR_149.append(self.submitter_email)
if self.assigned_to and self.assigned_to.email:
VAR_149.append(self.assigned_to.email)
if VAR_73 not in VAR_149:
if VAR_75:
VAR_75.ticket = self
VAR_75.save(update_fields=['ticket'])
elif VAR_74:
VAR_75 = self.ticketcc_set.create(VAR_74=user)
else:
VAR_75 = self.ticketcc_set.create(VAR_73=email)
return VAR_75
class CLASS_3(models.Manager):
    """Manager splitting follow-ups by their public/private flag (VAR_78)."""
    def FUNC_32(self):
        # Follow-ups with the flag unset (staff-only view).
        return self.filter(VAR_78=False)
    def FUNC_33(self):
        # Follow-ups with the flag set (visible to the submitter).
        return self.filter(VAR_78=True)
class CLASS_4(models.Model):
VAR_65 = models.ForeignKey(
CLASS_2,
on_delete=models.CASCADE,
VAR_136=_('Ticket'),
)
VAR_76 = models.DateTimeField(
_('Date'),
default=timezone.now
)
VAR_11 = models.CharField(
_('Title'),
VAR_125=200,
blank=True,
null=True,
)
VAR_77 = models.TextField(
_('Comment'),
blank=True,
null=True,
)
VAR_78 = models.BooleanField(
_('Public'),
blank=True,
default=False,
VAR_122=_(
'Public tickets are viewable by the submitter and all '
'staff, but non-VAR_78 tickets can only be seen by staff.'
),
)
VAR_74 = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
blank=True,
null=True,
VAR_136=_('User'),
)
VAR_79 = models.IntegerField(
_('New Status'),
VAR_158=CLASS_2.STATUS_CHOICES,
blank=True,
null=True,
VAR_122=_('If the VAR_52 was changed, what was it changed to?'),
)
VAR_80 = models.CharField(
_('E-Mail ID'),
VAR_125=256,
blank=True,
null=True,
VAR_122=_("The Message ID of the submitter's VAR_73."),
editable=False,
)
VAR_81 = CLASS_3()
VAR_0 = models.DurationField(
VAR_122=_("Time spent on this follow up"),
blank=True, null=True
)
class CLASS_22:
VAR_129 = ('date',)
VAR_136 = _('Follow-up')
VAR_137 = _('Follow-ups')
def __str__(self):
return '%s' % self.title
def FUNC_28(self):
return u"%s#VAR_82%s" % (self.ticket.get_absolute_url(), self.id)
def FUNC_16(self, *VAR_40, **VAR_7):
VAR_150 = self.ticket
VAR_150.modified = timezone.now()
VAR_150.save()
super(CLASS_4, self).save(*VAR_40, **VAR_7)
def FUNC_1(self):
return FUNC_1(self.comment)
@property
def FUNC_14(self):
return FUNC_0(self.time_spent)
class CLASS_5(models.Model):
VAR_82 = models.ForeignKey(
CLASS_4,
on_delete=models.CASCADE,
VAR_136=_('Follow-up'),
)
VAR_83 = models.CharField(
_('Field'),
VAR_125=100,
)
VAR_84 = models.TextField(
_('Old Value'),
blank=True,
null=True,
)
VAR_85 = models.TextField(
_('New Value'),
blank=True,
null=True,
)
def __str__(self):
VAR_151 = '%s ' % self.field
if not self.new_value:
VAR_151 += ugettext('removed')
elif not self.old_value:
VAR_151 += ugettext('set to %s') % self.new_value
else:
VAR_151 += ugettext('changed from "%(VAR_84)s" to "%(VAR_85)s"') % {
'old_value': self.old_value,
'new_value': self.new_value
}
return VAR_151
class CLASS_22:
VAR_136 = _('Ticket change')
VAR_137 = _('Ticket changes')
def FUNC_3(VAR_2, VAR_3):
    # Module-level hook for FileField(upload_to=...): delegates to the
    # instance's own attachment-path implementation (see CLASS_7/CLASS_8).
    return VAR_2.attachment_path(VAR_3)
class CLASS_6(models.Model):
    """Abstract base for stored attachments: the file plus cached metadata
    (filename, MIME type, size) filled in automatically on save."""
    VAR_86 = models.FileField(
        _('File'),
        upload_to=FUNC_3,
        VAR_125=1000,
        validators=[validate_file_extension]
    )
    VAR_3 = models.CharField(
        _('Filename'),
        blank=True,
        VAR_125=1000,
    )
    VAR_87 = models.CharField(
        _('MIME Type'),
        blank=True,
        VAR_125=255,
    )
    VAR_88 = models.IntegerField(
        _('Size'),
        blank=True,
        VAR_122=_('Size of this VAR_86 in bytes'),
    )
    def __str__(self):
        return '%s' % self.filename
    def FUNC_16(self, *VAR_40, **VAR_7):
        # Lazily derive metadata from the uploaded file before saving.
        if not self.size:
            self.size = self.get_size()
        if not self.filename:
            self.filename = self.get_filename()
        if not self.mime_type:
            self.mime_type = \
                mimetypes.guess_type(self.filename, strict=False)[0] or \
                'application/octet-stream'
        return super(CLASS_6, self).save(*VAR_40, **VAR_7)
    def FUNC_34(self):
        return str(self.file)
    def FUNC_35(self):
        return self.file.file.size
    def FUNC_3(self, VAR_3):
        # BUG FIX: was `assert NotImplementedError(...)`, which always passes
        # (an exception instance is truthy) and is stripped entirely under -O.
        # Subclasses must override this to supply the upload path.
        raise NotImplementedError(
            "This method is to be implemented by CLASS_6 classes"
        )
    class CLASS_22:
        VAR_129 = ('filename',)
        VAR_136 = _('Attachment')
        VAR_137 = _('Attachments')
        VAR_152 = True
class CLASS_7(CLASS_6):
    """Attachment bound to a follow-up; stored under the ticket's directory."""
    VAR_82 = models.ForeignKey(
        CLASS_4,
        on_delete=models.CASCADE,
        VAR_136=_('Follow-up'),
    )
    def FUNC_3(self, VAR_3):
        # NOTE(review): umask(0) plus mode 0o777 below creates world-writable
        # directories -- confirm this permissiveness is intended.
        os.umask(0)
        VAR_153 = 'helpdesk/attachments/{VAR_66}-{VAR_59}/{id_}'.format(
            VAR_66=self.followup.ticket.ticket_for_url,
            VAR_59=self.followup.ticket.secret_key,
            id_=self.followup.id)
        VAR_154 = os.path.join(settings.MEDIA_ROOT, VAR_153)
        # Directories are only pre-created for local filesystem storage.
        if settings.DEFAULT_FILE_STORAGE == "django.core.files.storage.FileSystemStorage":
            if not os.path.exists(VAR_154):
                os.makedirs(VAR_154, 0o777)
        # NOTE(review): VAR_3 (the supplied filename) is joined without
        # sanitisation here; traversal is only prevented if the storage layer
        # normalises the name -- verify (CWE-22).
        return os.path.join(VAR_153, VAR_3)
class CLASS_8(CLASS_6):
    """Attachment bound to a knowledge-base item."""
    VAR_60 = models.ForeignKey(
        "KBItem",
        on_delete=models.CASCADE,
        VAR_136=_('Knowledge base item'),
    )
    def FUNC_3(self, VAR_3):
        # NOTE(review): umask(0) plus mode 0o777 below creates world-writable
        # directories -- confirm this permissiveness is intended.
        os.umask(0)
        VAR_153 = 'helpdesk/attachments/kb/{VAR_99}/{kbi}'.format(
            VAR_99=self.kbitem.category,
            kbi=self.kbitem.id)
        VAR_154 = os.path.join(settings.MEDIA_ROOT, VAR_153)
        # Directories are only pre-created for local filesystem storage.
        if settings.DEFAULT_FILE_STORAGE == "django.core.files.storage.FileSystemStorage":
            if not os.path.exists(VAR_154):
                os.makedirs(VAR_154, 0o777)
        # NOTE(review): VAR_3 is joined without sanitisation; traversal is only
        # prevented if the storage layer normalises the name -- verify (CWE-22).
        return os.path.join(VAR_153, VAR_3)
class CLASS_9(models.Model):
class CLASS_22:
VAR_129 = ('name',)
VAR_136 = _('Pre-set reply')
VAR_137 = _('Pre-set replies')
VAR_89 = models.ManyToManyField(
CLASS_1,
blank=True,
VAR_122=_('Leave blank to allow this reply to be used for all '
'queues, or select those VAR_89 you wish to limit this reply to.'),
)
VAR_90 = models.CharField(
_('Name'),
VAR_125=100,
VAR_122=_('Only used to assist users with selecting a reply - not '
'shown to the VAR_74.'),
)
VAR_91 = models.TextField(
_('Body'),
VAR_122=_('Context available: {{ VAR_65 }} - VAR_65 object (eg '
'{{ VAR_65.title }}); {{ VAR_48 }} - The VAR_48; and {{ VAR_74 }} '
'- the current VAR_74.'),
)
def __str__(self):
return '%s' % self.name
class CLASS_10(models.Model):
VAR_89 = models.ManyToManyField(
CLASS_1,
blank=True,
VAR_122=_('Leave blank for this exclusion to be applied to all VAR_89, '
'or select those VAR_89 you wish to exclude with this entry.'),
)
VAR_90 = models.CharField(
_('Name'),
VAR_125=100,
)
VAR_76 = models.DateField(
_('Date'),
VAR_122=_('Date on which escalation should not happen'),
)
def __str__(self):
return '%s' % self.name
class CLASS_22:
VAR_136 = _('Escalation exclusion')
VAR_137 = _('Escalation exclusions')
class CLASS_11(models.Model):
VAR_92 = models.CharField(
_('Template Name'),
VAR_125=100,
)
VAR_93 = models.CharField(
_('Subject'),
VAR_125=100,
VAR_122=_('This will be prefixed with "[VAR_65.ticket] VAR_65.title"'
'. We recommend something simple such as "(Updated") or "(Closed)"'
' - the same VAR_166 is available as in VAR_95, below.'),
)
VAR_94 = models.CharField(
_('Heading'),
VAR_125=100,
VAR_122=_('In HTML e-mails, this will be the VAR_94 at the top of '
'the VAR_73 - the same VAR_166 is available as in VAR_95, '
'below.'),
)
VAR_95 = models.TextField(
_('Plain Text'),
VAR_122=_('The VAR_166 available to you includes {{ VAR_65 }}, '
'{{ VAR_48 }}, and depending on the time of the call: '
'{{ VAR_55 }} or {{ VAR_77 }}.'),
)
VAR_96 = models.TextField(
_('HTML'),
VAR_122=_('The same VAR_166 is available here as in VAR_95, above.'),
)
VAR_14 = models.CharField(
_('Locale'),
VAR_125=10,
blank=True,
null=True,
VAR_122=_('Locale of this VAR_165.'),
)
def __str__(self):
return '%s' % self.template_name
class CLASS_22:
VAR_129 = ('template_name', 'locale')
VAR_136 = _('e-mail template')
VAR_137 = _('e-mail templates')
class CLASS_12(models.Model):
VAR_90 = models.CharField(
_('Name of the category'),
VAR_125=100,
)
VAR_11 = models.CharField(
_('Title on knowledgebase page'),
VAR_125=100,
)
VAR_12 = models.SlugField(
_('Slug'),
)
VAR_54 = models.TextField(
_('Description'),
)
VAR_48 = models.ForeignKey(
CLASS_1,
blank=True,
null=True,
on_delete=models.CASCADE,
VAR_136=_('Default VAR_48 when creating a VAR_65 after viewing this VAR_99.'),
)
VAR_78 = models.BooleanField(
default=True,
VAR_136=_("Is CLASS_12 publicly visible?")
)
def __str__(self):
return '%s' % self.name
class CLASS_22:
VAR_129 = ('title',)
VAR_136 = _('Knowledge base category')
VAR_137 = _('Knowledge base categories')
def FUNC_28(self):
from django.urls import reverse
return reverse('helpdesk:kb_category', VAR_7={'slug': self.slug})
class CLASS_13(models.Model):
VAR_97 = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='votes',
)
VAR_98 = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='downvotes',
)
VAR_99 = models.ForeignKey(
CLASS_12,
on_delete=models.CASCADE,
VAR_136=_('Category'),
)
VAR_11 = models.CharField(
_('Title'),
VAR_125=100,
)
VAR_100 = models.TextField(
_('Question'),
)
VAR_101 = models.TextField(
_('Answer'),
)
VAR_102 = models.IntegerField(
_('Votes'),
VAR_122=_('Total number of VAR_102 cast for this item'),
default=0,
)
VAR_103 = models.IntegerField(
_('Positive Votes'),
VAR_122=_('Number of VAR_102 for this item which were POSITIVE.'),
default=0,
)
VAR_104 = models.DateTimeField(
_('Last Updated'),
VAR_122=_('The VAR_76 on which this VAR_100 was most recently changed.'),
blank=True,
)
VAR_105 = models.ForeignKey(
helpdesk_settings.HELPDESK_TEAMS_MODEL,
on_delete=models.CASCADE,
VAR_136=_('Team'),
blank=True,
null=True,
)
VAR_106 = models.PositiveIntegerField(
_('Order'),
blank=True,
null=True,
)
VAR_107 = models.BooleanField(
_('Enabled to VAR_120 to users'),
default=True,
)
def FUNC_16(self, *VAR_40, **VAR_7):
if not self.last_updated:
self.last_updated = timezone.now()
return super(CLASS_13, self).save(*VAR_40, **VAR_7)
def FUNC_36(self):
return helpdesk_settings.HELPDESK_KBITEM_TEAM_GETTER(self)
def FUNC_37(self):
if self.votes > 0:
return (self.recommendations / self.votes) * 10
else:
return _('Unrated')
VAR_108 = property(FUNC_37)
def __str__(self):
return '%s: %s' % (self.category.title, self.title)
class CLASS_22:
VAR_129 = ('order', 'title',)
VAR_136 = _('Knowledge base item')
VAR_137 = _('Knowledge base items')
def FUNC_28(self):
from django.urls import reverse
return str(reverse('helpdesk:kb_category', VAR_40=(self.category.slug,))) + "?VAR_60=" + str(self.pk)
def FUNC_38(self):
from django.urls import reverse
return str(reverse('helpdesk:list')) + "?VAR_60=" + str(self.pk)
def FUNC_39(self):
return CLASS_2.objects.filter(VAR_60=self, status__in=(1, 2)).count()
def FUNC_40(self):
return CLASS_2.objects.filter(VAR_60=self, status__in=(1, 2), assigned_to__isnull=True)
def FUNC_1(self):
return FUNC_1(self.answer)
class CLASS_14(models.Model):
VAR_74 = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
VAR_136=_('User'),
)
VAR_11 = models.CharField(
_('Query Name'),
VAR_125=100,
VAR_122=_('User-provided VAR_90 for this query'),
)
VAR_109 = models.BooleanField(
_('Shared With Other Users?'),
blank=True,
default=False,
VAR_122=_('Should other users see this VAR_72?'),
)
VAR_72 = models.TextField(
_('Search Query'),
VAR_122=_('Pickled VAR_72 object. Be wary changing this.'),
)
def __str__(self):
if self.shared:
return '%s (*)' % self.title
else:
return '%s' % self.title
class CLASS_22:
VAR_136 = _('Saved search')
VAR_137 = _('Saved searches')
def FUNC_4(VAR_4):
    # Look up the shipped default for a user-settable option by key.
    from helpdesk.settings import DEFAULT_USER_SETTINGS
    return DEFAULT_USER_SETTINGS[VAR_4]
def FUNC_5():
    # Default for CLASS_15.VAR_112 (show ticket list on login).
    return FUNC_4('login_view_ticketlist')
def FUNC_6():
    # Default for CLASS_15.VAR_113 (e-mail on ticket change).
    return FUNC_4('email_on_ticket_change')
def FUNC_7():
    # Default for CLASS_15.VAR_114 (e-mail when assigned a ticket).
    return FUNC_4('email_on_ticket_assign')
def FUNC_8():
    # Default for CLASS_15.VAR_115 (tickets shown per page).
    return FUNC_4('tickets_per_page')
def FUNC_9():
    # Default for CLASS_15.VAR_116 (use own e-mail as submitter address).
    return FUNC_4('use_email_as_submitter')
class CLASS_15(models.Model):
    """Per-user helpdesk preferences, one row per user (created by signal).

    Field defaults are taken from DEFAULT_USER_SETTINGS via the FUNC_5..FUNC_9
    callables so changing the settings dict changes defaults for new rows only.
    """

    # Allowed page sizes for the ticket list.
    VAR_110 = ((10, '10'), (25, '25'), (50, '50'), (100, '100'))

    VAR_74 = models.OneToOneField(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="usersettings_helpdesk")

    # Legacy pickled/base64 settings blob kept only for migration purposes.
    VAR_111 = models.TextField(
        _('DEPRECATED! Settings Dictionary DEPRECATED!'),
        VAR_122=_('DEPRECATED! This is a base64-encoded representation of a pickled Python dictionary. '
                  'Do not change this VAR_83 via the admin.'),
        blank=True,
        null=True,
    )
    VAR_112 = models.BooleanField(
        VAR_136=_('Show CLASS_2 List on Login?'),
        VAR_122=_('Display the VAR_65 list upon login? Otherwise, the dashboard is shown.'),
        default=FUNC_5,
    )
    VAR_113 = models.BooleanField(
        VAR_136=_('E-mail me on VAR_65 change?'),
        VAR_122=_(
            'If you\'re the VAR_65 owner and the VAR_65 is changed via the web by somebody else,'
            'do you want to receive an e-mail?'
        ),
        default=FUNC_6,
    )
    VAR_114 = models.BooleanField(
        VAR_136=_('E-mail me when assigned a VAR_65?'),
        VAR_122=_('If you are assigned a VAR_65 via the web, do you want to receive an e-mail?'),
        default=FUNC_7,
    )
    VAR_115 = models.IntegerField(
        VAR_136=_('Number of tickets to show per page'),
        VAR_122=_('How many tickets do you want to see on the CLASS_2 List page?'),
        default=FUNC_8,
        VAR_158=VAR_110,
    )
    VAR_116 = models.BooleanField(
        VAR_136=_('Use my e-mail address when submitting tickets?'),
        VAR_122=_('When you submit a VAR_65, do you want to automatically '
                  'use your e-mail address as the submitter address? You '
                  'can type a different e-mail address when entering the '
                  'ticket if needed, this option only changes the default.'),
        default=FUNC_9,
    )

    def __str__(self):
        return 'Preferences for %s' % self.user

    class CLASS_22:
        VAR_136 = _('User Setting')
        VAR_137 = _('User Settings')
def FUNC_10(VAR_5, VAR_2, VAR_6, **VAR_7):
    """post_save handler: create a CLASS_15 row for every newly created user."""
    if VAR_6:
        CLASS_15.objects.create(VAR_74=VAR_2)


# Register the handler against the configured user model.
models.signals.post_save.connect(FUNC_10, VAR_5=settings.AUTH_USER_MODEL)
class CLASS_16(models.Model):
    """An e-mail address (possibly wildcarded) whose incoming mail is ignored."""

    class CLASS_22:
        VAR_136 = _('Ignored e-mail address')
        VAR_137 = _('Ignored e-mail addresses')

    VAR_89 = models.ManyToManyField(
        CLASS_1,
        blank=True,
        VAR_122=_('Leave blank for this e-mail to be ignored on all VAR_89, '
                  'or select those VAR_89 you wish to ignore this e-mail for.'),
    )
    VAR_90 = models.CharField(
        _('Name'),
        VAR_125=100,
    )
    VAR_76 = models.DateField(
        _('Date'),
        VAR_122=_('Date on which this e-mail address was added'),
        blank=True,
        editable=False
    )
    VAR_13 = models.CharField(
        _('E-Mail Address'),
        VAR_125=150,
        VAR_122=_('Enter a full e-mail address, or portions with '
                  'wildcards, eg *@domain.com or postmaster@*.'),
    )
    VAR_117 = models.BooleanField(
        _('Save Emails in Mailbox?'),
        blank=True,
        default=False,
        VAR_122=_('Do you want to FUNC_16 emails from this address in the mailbox? '
                  'If this is unticked, emails from this address will be deleted.'),
    )

    def __str__(self):
        return '%s' % self.name

    def FUNC_16(self, *VAR_40, **VAR_7):
        # Stamp the creation date on first save.
        if not self.date:
            self.date = timezone.now()
        return super(CLASS_16, self).save(*VAR_40, **VAR_7)

    def FUNC_41(self):
        """Display string of affected queues; '*' means all queues."""
        VAR_89 = self.queues.all().order_by('title')
        if len(VAR_89) == 0:
            return '*'
        else:
            return ', '.join([str(q) for q in VAR_89])

    def FUNC_42(self, VAR_73):
        """Return True when VAR_73 matches this entry, honouring '*' wildcards.

        NOTE(review): assumes both addresses contain an '@' (as the field's
        help text instructs); a stored value without one would raise
        IndexError -- confirm inputs are validated upstream.
        """
        VAR_155 = self.email_address.split("@")
        VAR_156 = VAR_73.split("@")
        # 'and' binds tighter than 'or', so each continuation line is one
        # complete wildcard case: exact match, *@domain, local@*, *@*.
        if self.email_address == VAR_73 or \
                VAR_155[0] == "*" and VAR_155[1] == VAR_156[1] or \
                VAR_155[1] == "*" and VAR_155[0] == VAR_156[0] or \
                VAR_155[0] == "*" and VAR_155[1] == "*":
            return True
        else:
            return False
class CLASS_17(models.Model):
    """A CC entry on a ticket: either a registered user or a bare e-mail address."""

    VAR_65 = models.ForeignKey(
        CLASS_2,
        on_delete=models.CASCADE,
        VAR_136=_('Ticket'),
    )
    VAR_74 = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        VAR_122=_('User who wishes to receive updates for this VAR_65.'),
        VAR_136=_('User'),
    )
    VAR_73 = models.EmailField(
        _('E-Mail Address'),
        blank=True,
        null=True,
        VAR_122=_('For non-VAR_74 followers, enter their e-mail address'),
    )
    VAR_118 = models.BooleanField(
        _('Can View CLASS_2?'),
        blank=True,
        default=False,
        VAR_122=_('Can this CC login to view the VAR_65 details?'),
    )
    VAR_119 = models.BooleanField(
        _('Can Update CLASS_2?'),
        blank=True,
        default=False,
        VAR_122=_('Can this CC login and update the VAR_65?'),
    )

    def FUNC_43(self):
        # Prefer the linked user's address; fall back to the raw e-mail field.
        if self.user and self.user.email is not None:
            return self.user.email
        else:
            return self.email
    VAR_13 = property(FUNC_43)

    def FUNC_44(self):
        # Display object: the user if linked, otherwise the e-mail string.
        if self.user:
            return self.user
        else:
            return self.email
    VAR_120 = property(FUNC_44)

    def __str__(self):
        return '%s for %s' % (self.display, self.ticket.title)

    def FUNC_45(self):
        # Model-level validation: a linked user must have an e-mail address.
        if self.user and not self.user.email:
            raise ValidationError('User has no VAR_73 address')
class CLASS_18(models.Manager):
    """Manager returning custom fields ordered by their 'ordering' column."""

    def FUNC_46(self):
        return super(CLASS_18, self).get_queryset().order_by('ordering')
class CLASS_19(models.Model):
    """Definition of a user-configurable extra field shown on tickets."""

    VAR_90 = models.SlugField(
        _('Field Name'),
        VAR_122=_('As used in the database and behind the scenes. '
                  'Must be unique and consist of only lowercase letters with no punctuation.'),
        unique=True,
    )
    VAR_121 = models.CharField(
        _('Label'),
        VAR_125=30,
        VAR_122=_('The VAR_120 VAR_121 for this field'),
    )
    VAR_122 = models.TextField(
        _('Help Text'),
        VAR_122=_('Shown to the VAR_74 when editing the ticket'),
        blank=True,
        null=True
    )

    # Supported data types for the rendered form widget.
    VAR_123 = (
        ('varchar', _('Character (single line)')),
        ('text', _('Text (multi-line)')),
        ('integer', _('Integer')),
        ('decimal', _('Decimal')),
        ('list', _('List')),
        ('boolean', _('Boolean (checkbox yes/no)')),
        ('date', _('Date')),
        ('time', _('Time')),
        ('datetime', _('Date & Time')),
        ('email', _('E-Mail Address')),
        ('url', _('URL')),
        ('ipaddress', _('IP Address')),
        ('slug', _('Slug')),
    )

    VAR_124 = models.CharField(
        _('Data Type'),
        VAR_125=100,
        VAR_122=_('Allows you to restrict the data entered into this field'),
        VAR_158=VAR_123,
    )
    VAR_125 = models.IntegerField(
        _('Maximum Length (characters)'),
        blank=True,
        null=True,
    )
    VAR_126 = models.IntegerField(
        _('Decimal Places'),
        VAR_122=_('Only used for decimal fields'),
        blank=True,
        null=True,
    )
    VAR_127 = models.BooleanField(
        _('Add empty first choice to List?'),
        default=False,
        VAR_122=_('Only for List: adds an empty first entry to the VAR_158 list, '
                  'which enforces that the VAR_74 makes an active choice.'),
    )
    VAR_128 = models.TextField(
        _('List Values'),
        VAR_122=_('For list fields only. Enter one option per line.'),
        blank=True,
        null=True,
    )
    VAR_129 = models.IntegerField(
        _('Ordering'),
        VAR_122=_('Lower numbers are displayed first; higher numbers are listed later'),
        blank=True,
        null=True,
    )

    def FUNC_47(self):
        """Parse the newline-separated list values into [value, value] pairs."""
        VAR_157 = StringIO(self.list_values)
        VAR_158 = [[item.strip(), item.strip()] for item in VAR_157.readlines()]
        VAR_157.close()
        return VAR_158
    VAR_130 = property(FUNC_47)

    VAR_131 = models.BooleanField(
        _('Required?'),
        VAR_122=_('Does the VAR_74 have to enter a VAR_133 for this VAR_83?'),
        default=False,
    )
    VAR_132 = models.BooleanField(
        _('Staff Only?'),
        VAR_122=_('If this is ticked, then the VAR_78 submission form '
                  'will NOT show this field'),
        default=False,
    )

    # Default manager: always ordered by the 'ordering' column.
    VAR_81 = CLASS_18()

    def __str__(self):
        return '%s' % self.name

    class CLASS_22:
        VAR_136 = _('Custom field')
        VAR_137 = _('Custom fields')
class CLASS_20(models.Model):
    """The value of one custom field (CLASS_19) on one ticket (CLASS_2)."""

    VAR_65 = models.ForeignKey(
        CLASS_2,
        on_delete=models.CASCADE,
        VAR_136=_('Ticket'),
    )
    VAR_83 = models.ForeignKey(
        CLASS_19,
        on_delete=models.CASCADE,
        VAR_136=_('Field'),
    )
    # Stored as text regardless of the field's declared data type.
    VAR_133 = models.TextField(blank=True, null=True)

    def __str__(self):
        return '%s / %s' % (self.ticket, self.field)

    class CLASS_22:
        # One value per (ticket, field) pair.
        VAR_159 = (('ticket', 'field'),)
        VAR_136 = _('Ticket custom VAR_83 value')
        VAR_137 = _('Ticket custom VAR_83 values')
class CLASS_21(models.Model):
    """A 'depends on' link between two tickets; blocks resolution of the first."""

    class CLASS_22:
        # A ticket can depend on another ticket at most once.
        VAR_159 = (('ticket', 'depends_on'),)
        VAR_136 = _('Ticket dependency')
        VAR_137 = _('Ticket dependencies')

    VAR_65 = models.ForeignKey(
        CLASS_2,
        on_delete=models.CASCADE,
        VAR_136=_('Ticket'),
        related_name='ticketdependency',
    )
    VAR_134 = models.ForeignKey(
        CLASS_2,
        on_delete=models.CASCADE,
        VAR_136=_('Depends On Ticket'),
        related_name='depends_on',
    )

    def __str__(self):
        return '%s / %s' % (self.ticket, self.depends_on)
|
# Standard library
import datetime
import mimetypes
import os
import re
import uuid
from io import StringIO

# Django / third-party
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import models
from django.utils import timezone
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _, ugettext
from markdown import markdown
from markdown.extensions import Extension

# Local
from helpdesk import settings as helpdesk_settings

from .templated_email import send_templated_mail
from .validators import validate_file_extension
def FUNC_0(VAR_0):
    """Format a timedelta-like value as 'HHh:MMm'; empty string for falsy input.

    NOTE(review): only ``.seconds`` is read, so whole days are ignored --
    presumably fine for short follow-up durations; confirm.
    """
    if not VAR_0:
        return ""
    secs = VAR_0.seconds
    hours, minutes = secs // 3600, (secs % 3600) // 60
    return "{0:02d}h:{1:02d}m".format(hours, minutes)
class CLASS_0(Extension):
    """Markdown extension that disables raw-HTML passthrough (block and inline)."""

    def FUNC_11(self, VAR_8, VAR_9):
        # Deleting these processors means literal HTML in user input is not
        # rendered as HTML by markdown().
        del VAR_8.preprocessors['html_block']
        del VAR_8.inlinePatterns['html']
def FUNC_1(VAR_1):
    """Render VAR_1 as sanitised markdown HTML (raw HTML disabled via CLASS_0).

    Markdown-style link targets whose URL scheme is not in
    ALLOWED_URL_SCHEMES get the scheme stripped before rendering.

    NOTE(review): the scheme check reads only the first link found
    (``re.search(...).group(2)``) while the substitution applies to every
    link in the text -- confirm mixed-scheme input behaves as intended.
    """
    if not VAR_1:
        return ""
    VAR_10 = fr'([\[\s\S\]]*?)\(([\s\S]*?):([\s\S]*?)\)'
    if re.match(VAR_10, VAR_1):
        VAR_135 = re.search(VAR_10, VAR_1, re.IGNORECASE).group(2)
        if VAR_135 in helpdesk_settings.ALLOWED_URL_SCHEMES:
            # Allowed scheme: keep "scheme:target" inside the link.
            VAR_160 = '\\1(\\2:\\3)'
        else:
            # Disallowed scheme: drop it, keep only the target text.
            VAR_160 = '\\1(\\3)'
        VAR_1 = re.sub(VAR_10, VAR_160, VAR_1, flags=re.IGNORECASE)
    return mark_safe(
        markdown(
            VAR_1,
            extensions=[
                CLASS_0(), 'markdown.extensions.nl2br',
                'markdown.extensions.fenced_code'
            ]
        )
    )
class CLASS_1(models.Model):
    """A helpdesk queue: groups tickets and holds its e-mail polling settings.

    Saving a new queue also creates a django.contrib.auth Permission named
    after its slug (see FUNC_15/FUNC_16); deleting the queue removes that
    permission again (FUNC_17).
    """

    VAR_11 = models.CharField(
        _('Title'),
        VAR_125=100,
    )
    VAR_12 = models.SlugField(
        _('Slug'),
        VAR_125=50,
        unique=True,
        VAR_122=_('This VAR_12 is used when building VAR_65 ID\'s. Once set, '
                  'try not to change it or e-mailing may get messy.'),
    )
    VAR_13 = models.EmailField(
        _('E-Mail Address'),
        blank=True,
        null=True,
        VAR_122=_('All outgoing e-mails for this VAR_48 will use this e-mail '
                  'address. If you use IMAP or POP3, this should be the e-mail '
                  'address for that mailbox.'),
    )
    VAR_14 = models.CharField(
        _('Locale'),
        VAR_125=10,
        blank=True,
        null=True,
        VAR_122=_('Locale of this VAR_48. All correspondence in this '
                  'queue will be in this language.'),
    )
    VAR_15 = models.BooleanField(
        _('Allow Public Submission?'),
        blank=True,
        default=False,
        VAR_122=_('Should this VAR_48 be listed on the VAR_78 submission form?'),
    )
    VAR_16 = models.BooleanField(
        _('Allow E-Mail Submission?'),
        blank=True,
        default=False,
        VAR_122=_('Do you want to poll the e-mail box below for new '
                  'tickets?'),
    )
    VAR_17 = models.IntegerField(
        _('Escalation Days'),
        blank=True,
        null=True,
        VAR_122=_('For tickets which are not held, how often do you wish to '
                  'increase their VAR_56? Set to 0 for no escalation.'),
    )
    VAR_18 = models.CharField(
        _('New CLASS_2 CC Address'),
        blank=True,
        null=True,
        VAR_125=200,
        VAR_122=_('If an e-mail address is entered here, then it will '
                  'receive notification of all new tickets VAR_6 for this VAR_48. '
                  'Enter a comma between multiple e-mail addresses.'),
    )
    VAR_19 = models.CharField(
        _('Updated CLASS_2 CC Address'),
        blank=True,
        null=True,
        VAR_125=200,
        VAR_122=_('If an e-mail address is entered here, then it will '
                  'receive notification of all activity (new tickets, closed '
                  'tickets, updates, reassignments, etc) for this VAR_48. Separate '
                  'multiple addresses with a comma.'),
    )
    VAR_20 = models.BooleanField(
        _('Notify contacts when VAR_73 updates arrive'),
        blank=True,
        default=False,
        VAR_122=_('When an VAR_73 arrives to either create a VAR_65 or to '
                  'interact with an existing discussion. Should VAR_73 notifications be sent ? '
                  'Note: the VAR_18 and VAR_19 work independently of this feature'),
    )
    VAR_21 = models.CharField(
        _('E-Mail Box Type'),
        VAR_125=5,
        VAR_158=(('pop3', _('POP 3')), ('imap', _('IMAP')), ('local', _('Local Directory'))),
        blank=True,
        null=True,
        VAR_122=_('E-Mail server type for creating tickets automatically '
                  'from a mailbox - both POP3 and IMAP are supported, as well as '
                  'reading from a local directory.'),
    )
    VAR_22 = models.CharField(
        _('E-Mail Hostname'),
        VAR_125=200,
        blank=True,
        null=True,
        VAR_122=_('Your e-mail server address - either the domain VAR_90 or '
                  'IP address. May be "localhost".'),
    )
    VAR_23 = models.IntegerField(
        _('E-Mail Port'),
        blank=True,
        null=True,
        VAR_122=_('Port number to use for accessing e-mail. Default for '
                  'POP3 is "110", and for IMAP is "143". This may differ on some '
                  'servers. Leave it blank to use the defaults.'),
    )
    VAR_24 = models.BooleanField(
        _('Use SSL for E-Mail?'),
        blank=True,
        default=False,
        VAR_122=_('Whether to use SSL for IMAP or POP3 - the default ports '
                  'when using SSL are 993 for IMAP and 995 for POP3.'),
    )
    VAR_25 = models.CharField(
        _('E-Mail Username'),
        VAR_125=200,
        blank=True,
        null=True,
        VAR_122=_('Username for accessing this mailbox.'),
    )
    VAR_26 = models.CharField(
        _('E-Mail Password'),
        VAR_125=200,
        blank=True,
        null=True,
        VAR_122=_('Password for the above username'),
    )
    VAR_27 = models.CharField(
        _('IMAP Folder'),
        VAR_125=100,
        blank=True,
        null=True,
        VAR_122=_('If using IMAP, what folder do you wish to fetch messages '
                  'from? This allows you to use one IMAP account for multiple '
                  'queues, by filtering messages on your IMAP server into separate '
                  'folders. Default: INBOX.'),
    )
    VAR_28 = models.CharField(
        _('E-Mail Local Directory'),
        VAR_125=200,
        blank=True,
        null=True,
        VAR_122=_('If using a local directory, what directory VAR_153 do you '
                  'wish to poll for new VAR_73? '
                  'Example: /var/lib/mail/helpdesk/'),
    )
    VAR_29 = models.CharField(
        _('Django auth permission name'),
        VAR_125=72,  # based on FUNC_15() pre-pending chars to VAR_12
        blank=True,
        null=True,
        editable=False,
        VAR_122=_('Name used in the django.contrib.auth permission system'),
    )
    VAR_30 = models.IntegerField(
        _('E-Mail Check Interval'),
        VAR_122=_('How often do you wish to check this mailbox? (in Minutes)'),
        blank=True,
        null=True,
        default='5',
    )
    # Timestamp of the last mailbox poll; maintained by the mail getter.
    VAR_31 = models.DateTimeField(
        blank=True,
        null=True,
        editable=False,
    )
    VAR_32 = models.CharField(
        _('Socks Proxy Type'),
        VAR_125=8,
        VAR_158=(('socks4', _('SOCKS4')), ('socks5', _('SOCKS5'))),
        blank=True,
        null=True,
        VAR_122=_('SOCKS4 or SOCKS5 allows you to proxy your connections through a SOCKS server.'),
    )
    VAR_33 = models.GenericIPAddressField(
        _('Socks Proxy Host'),
        blank=True,
        null=True,
        VAR_122=_('Socks proxy IP address. Default: 127.0.0.1'),
    )
    VAR_34 = models.IntegerField(
        _('Socks Proxy Port'),
        blank=True,
        null=True,
        VAR_122=_('Socks proxy port number. Default: 9150 (default TOR port)'),
    )
    VAR_35 = models.CharField(
        _('Logging Type'),
        VAR_125=5,
        VAR_158=(
            ('none', _('None')),
            ('debug', _('Debug')),
            ('info', _('Information')),
            ('warn', _('Warning')),
            ('error', _('Error')),
            ('crit', _('Critical'))
        ),
        blank=True,
        null=True,
        VAR_122=_('Set the default logging level. All messages at that '
                  'level or above will be logged to the directory set '
                  'below. If no level is set, logging will be disabled.'),
    )
    VAR_36 = models.CharField(
        _('Logging Directory'),
        VAR_125=200,
        blank=True,
        null=True,
        VAR_122=_('If logging is VAR_107, what directory should we use to '
                  'store log files for this VAR_48? '
                  'The standard logging mechanims are used if no directory is set'),
    )
    VAR_37 = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        related_name='default_owner',
        blank=True,
        null=True,
        VAR_136=_('Default owner'),
    )
    VAR_38 = models.DurationField(
        VAR_122=_("Time to be spent on this CLASS_1 in total"),
        blank=True, null=True
    )

    def __str__(self):
        return "%s" % self.title

    class CLASS_22:
        VAR_129 = ('title',)
        VAR_136 = _('Queue')
        VAR_137 = _('Queues')

    def FUNC_12(self):
        """Sender string for outgoing mail: 'Title <address>' or a fallback."""
        if not self.email_address:
            # Fall back to DEFAULT_FROM_EMAIL; only add angle brackets when
            # the setting does not already contain a bracketed address.
            VAR_161 = re.match(".*<(?P<VAR_73>.*@*.)>", settings.DEFAULT_FROM_EMAIL)
            if VAR_161 is not None:
                return u'NO QUEUE EMAIL ADDRESS DEFINED %s' % settings.DEFAULT_FROM_EMAIL
            else:
                return u'NO QUEUE EMAIL ADDRESS DEFINED <%s>' % settings.DEFAULT_FROM_EMAIL
        else:
            return u'%s <%s>' % (self.title, self.email_address)
    VAR_39 = property(FUNC_12)

    @property
    def VAR_0(self):
        # Sum of time_spent over all tickets in this queue.
        VAR_138 = datetime.timedelta(0)
        for val in self.ticket_set.all():
            if val.time_spent:
                VAR_138 = VAR_138 + val.time_spent
        return VAR_138

    @property
    def FUNC_14(self):
        # Human-readable "HHh:MMm" rendering of the total time spent.
        return FUNC_0(self.time_spent)

    def FUNC_15(self):
        """Compute the auth-permission codename and cache the full name on self."""
        VAR_139 = "queue_access_%s" % self.slug
        self.permission_name = "helpdesk.%s" % VAR_139
        return VAR_139

    def FUNC_16(self, *VAR_40, **VAR_7):
        # Default the IMAP folder, socks proxy address and mailbox port before
        # saving; create the queue's access Permission on first save.
        if self.email_box_type == 'imap' and not self.email_box_imap_folder:
            self.email_box_imap_folder = 'INBOX'
        if self.socks_proxy_type:
            if not self.socks_proxy_host:
                self.socks_proxy_host = '127.0.0.1'
            if not self.socks_proxy_port:
                self.socks_proxy_port = 9150
        else:
            self.socks_proxy_host = None
            self.socks_proxy_port = None
        if not self.email_box_port:
            # Standard ports: IMAP 993/143, POP3 995/110 (SSL/plain).
            if self.email_box_type == 'imap' and self.email_box_ssl:
                self.email_box_port = 993
            elif self.email_box_type == 'imap' and not self.email_box_ssl:
                self.email_box_port = 143
            elif self.email_box_type == 'pop3' and self.email_box_ssl:
                self.email_box_port = 995
            elif self.email_box_type == 'pop3' and not self.email_box_ssl:
                self.email_box_port = 110
        if not self.id:
            # First save: create the matching auth Permission.
            VAR_139 = self.prepare_permission_name()
            Permission.objects.create(
                VAR_90=_("Permission for VAR_48: ") + self.title,
                content_type=ContentType.objects.get_for_model(self.__class__),
                codename=VAR_139,
            )
        super(CLASS_1, self).save(*VAR_40, **VAR_7)

    def FUNC_17(self, *VAR_40, **VAR_7):
        # Delete the queue first, then remove its auth Permission (the stored
        # name is "helpdesk.<codename>", so strip the 9-char "helpdesk." prefix).
        VAR_29 = self.permission_name
        super(CLASS_1, self).delete(*VAR_40, **VAR_7)
        if VAR_29:
            try:
                VAR_164 = Permission.objects.get(codename=VAR_29[9:])
                VAR_164.delete()
            except ObjectDoesNotExist:
                pass
def FUNC_2():
    """Return a freshly generated random UUID4 as its 36-character string form."""
    return "%s" % uuid.uuid4()
class CLASS_2(models.Model):
    """A helpdesk ticket: the central model tying queue, submitter, assignee,
    follow-ups, CCs, custom fields and dependencies together.

    Fix: FUNC_31 previously called ``self.ticketcc_set.create(VAR_74=user)``
    and ``create(VAR_73=email)`` -- ``user`` and ``email`` are undefined
    names in that scope (NameError at runtime); the method's own parameters
    VAR_74 / VAR_73 are now used instead.
    """

    # Status codes and their display choices.
    VAR_41 = 1
    VAR_42 = 2
    VAR_43 = 3
    VAR_44 = 4
    VAR_45 = 5
    VAR_46 = (
        (VAR_41, _('Open')),
        (VAR_42, _('Reopened')),
        (VAR_43, _('Resolved')),
        (VAR_44, _('Closed')),
        (VAR_45, _('Duplicate')),
    )
    # Priority choices: 1 is most urgent, 5 least.
    VAR_47 = (
        (1, _('1. Critical')),
        (2, _('2. High')),
        (3, _('3. Normal')),
        (4, _('4. Low')),
        (5, _('5. Very Low')),
    )

    VAR_11 = models.CharField(
        _('Title'),
        VAR_125=200,
    )
    VAR_48 = models.ForeignKey(
        CLASS_1,
        on_delete=models.CASCADE,
        VAR_136=_('Queue'),
    )
    VAR_6 = models.DateTimeField(
        _('Created'),
        blank=True,
        VAR_122=_('Date this VAR_65 was first created'),
    )
    VAR_49 = models.DateTimeField(
        _('Modified'),
        blank=True,
        VAR_122=_('Date this VAR_65 was most recently changed.'),
    )
    VAR_50 = models.EmailField(
        _('Submitter E-Mail'),
        blank=True,
        null=True,
        VAR_122=_('The submitter will receive an VAR_73 for all VAR_78 '
                  'follow-ups left for this task.'),
    )
    VAR_51 = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name='assigned_to',
        blank=True,
        null=True,
        VAR_136=_('Assigned to'),
    )
    VAR_52 = models.IntegerField(
        _('Status'),
        VAR_158=VAR_46,
        default=VAR_41,
    )
    VAR_53 = models.BooleanField(
        _('On Hold'),
        blank=True,
        default=False,
        VAR_122=_('If a VAR_65 is on hold, it will not automatically be escalated.'),
    )
    VAR_54 = models.TextField(
        _('Description'),
        blank=True,
        null=True,
        VAR_122=_('The content of the customers VAR_72.'),
    )
    VAR_55 = models.TextField(
        _('Resolution'),
        blank=True,
        null=True,
        VAR_122=_('The VAR_55 provided to the customer by our staff.'),
    )
    VAR_56 = models.IntegerField(
        _('Priority'),
        VAR_158=VAR_47,
        default=3,
        blank=3,
        VAR_122=_('1 = Highest Priority, 5 = Low Priority'),
    )
    VAR_57 = models.DateTimeField(
        _('Due on'),
        blank=True,
        null=True,
    )
    VAR_58 = models.DateTimeField(
        blank=True,
        null=True,
        editable=False,
        VAR_122=_('The VAR_76 this VAR_65 was last escalated - updated '
                  'automatically by management/commands/escalate_tickets.py.'),
    )
    # Random per-ticket key allowing anonymous view/edit links.
    VAR_59 = models.CharField(
        _("Secret key needed for viewing/editing VAR_65 by non-logged in users"),
        VAR_125=36,
        default=FUNC_2,
    )
    VAR_60 = models.ForeignKey(
        "KBItem",
        blank=True,
        null=True,
        on_delete=models.CASCADE,
        VAR_136=_('Knowledge base item the VAR_74 was viewing when they VAR_6 this VAR_65.'),
    )
    VAR_61 = models.ForeignKey(
        'self',
        VAR_136=_('merged to'),
        related_name='merged_tickets',
        on_delete=models.CASCADE,
        null=True,
        blank=True
    )

    @property
    def VAR_0(self):
        # Sum of time_spent over all follow-ups on this ticket.
        VAR_138 = datetime.timedelta(0)
        for val in self.followup_set.all():
            if val.time_spent:
                VAR_138 = VAR_138 + val.time_spent
        return VAR_138

    @property
    def FUNC_14(self):
        # Human-readable "HHh:MMm" rendering of the total time spent.
        return FUNC_0(self.time_spent)

    def FUNC_18(self, VAR_62, VAR_63=None, **VAR_7):
        """Send templated mail to each interested role at most once.

        VAR_62 maps role name -> (template, context); VAR_63 is an optional
        iterable of addresses to suppress. Returns the set of addresses that
        were considered sent (including suppressed ones).
        """
        VAR_140 = set()
        if VAR_63 is not None:
            VAR_140.update(VAR_63)
        # Never mail the queue's own address back to itself.
        VAR_140.add(self.queue.email_address)

        def FUNC_48(VAR_73):
            return VAR_73 and VAR_73 not in VAR_140

        # Local helper intentionally shadows the method name within this scope.
        def FUNC_18(VAR_141, VAR_142):
            if VAR_142 and VAR_142 not in VAR_140 and VAR_141 in VAR_62:
                VAR_165, VAR_166 = VAR_62[VAR_141]
                send_templated_mail(VAR_165, VAR_166, VAR_142, VAR_5=self.queue.from_address, **VAR_7)
                VAR_140.add(VAR_142)

        FUNC_18('submitter', self.submitter_email)
        FUNC_18('ticket_cc', self.queue.updated_ticket_cc)
        FUNC_18('new_ticket_cc', self.queue.new_ticket_cc)
        if self.assigned_to:
            FUNC_18('assigned_to', self.assigned_to.email)
        if self.queue.enable_notifications_on_email_events:
            for cc in self.ticketcc_set.all():
                FUNC_18('ticket_cc', cc.email_address)
        return VAR_140

    def FUNC_19(self):
        # Display name of the assignee, or a translated "Unassigned".
        if not self.assigned_to:
            return _('Unassigned')
        else:
            if self.assigned_to.get_full_name():
                return self.assigned_to.get_full_name()
            else:
                return self.assigned_to.get_username()
    VAR_64 = property(FUNC_19)

    def FUNC_20(self):
        # Bracketed ticket id, e.g. "[queue-slug-42]".
        return u"[%s]" % self.ticket_for_url
    VAR_65 = property(FUNC_20)

    def FUNC_21(self):
        # URL-safe ticket id: "<queue slug>-<pk>".
        return u"%s-%s" % (self.queue.slug, self.id)
    VAR_66 = property(FUNC_21)

    def FUNC_22(self):
        # Bootstrap CSS class reflecting the priority.
        if self.priority == 2:
            return "warning"
        elif self.priority == 1:
            return "danger"
        elif self.priority == 5:
            return "success"
        else:
            return ""
    VAR_67 = property(FUNC_22)

    def FUNC_23(self):
        # Status display, annotated with hold / open-dependency markers.
        VAR_143 = ''
        if self.on_hold:
            VAR_143 = _(' - On Hold')
        VAR_144 = ''
        if not self.can_be_resolved:
            VAR_144 = _(' - Open dependencies')
        return u'%s%s%s' % (self.get_status_display(), VAR_143, VAR_144)
    VAR_68 = property(FUNC_23)

    def FUNC_24(self):
        """Absolute public-view URL including ticket id, submitter and secret key."""
        from django.contrib.sites.models import Site
        from django.core.exceptions import ImproperlyConfigured
        from django.urls import reverse
        try:
            VAR_162 = Site.objects.get_current()
        except ImproperlyConfigured:
            VAR_162 = Site(domain='configure-django-sites.com')
        if helpdesk_settings.HELPDESK_USE_HTTPS_IN_EMAIL_LINK:
            VAR_163 = 'https'
        else:
            VAR_163 = 'http'
        return u"%s://%s%s?VAR_65=%s&VAR_73=%s&key=%s" % (
            VAR_163,
            VAR_162.domain,
            reverse('helpdesk:public_view'),
            self.ticket_for_url,
            self.submitter_email,
            self.secret_key
        )
    VAR_69 = property(FUNC_24)

    def FUNC_25(self):
        """Absolute staff-view URL for this ticket."""
        from django.contrib.sites.models import Site
        from django.core.exceptions import ImproperlyConfigured
        from django.urls import reverse
        try:
            VAR_162 = Site.objects.get_current()
        except ImproperlyConfigured:
            VAR_162 = Site(domain='configure-django-sites.com')
        if helpdesk_settings.HELPDESK_USE_HTTPS_IN_EMAIL_LINK:
            VAR_163 = 'https'
        else:
            VAR_163 = 'http'
        return u"%s://%s%s" % (
            VAR_163,
            VAR_162.domain,
            reverse('helpdesk:view',
                    VAR_40=[self.id])
        )
    VAR_70 = property(FUNC_25)

    def FUNC_26(self):
        # Resolvable only when no dependency is still open/reopened.
        VAR_145 = (CLASS_2.OPEN_STATUS, CLASS_2.REOPENED_STATUS)
        return CLASS_21.objects.filter(VAR_65=self).filter(
            depends_on__status__in=VAR_145).count() == 0
    VAR_71 = property(FUNC_26)

    def FUNC_27(self):
        """Return the user whose e-mail matches the submitter, if unambiguous."""
        VAR_146 = get_user_model()
        try:
            return VAR_146.objects.get(VAR_73=self.submitter_email)
        except (VAR_146.DoesNotExist, VAR_146.MultipleObjectsReturned):
            return None

    class CLASS_22:
        VAR_147 = "created"
        VAR_129 = ('id',)
        VAR_136 = _('Ticket')
        VAR_137 = _('Tickets')

    def __str__(self):
        return '%s %s' % (self.id, self.title)

    def FUNC_28(self):
        from django.urls import reverse
        return reverse('helpdesk:view', VAR_40=(self.id,))

    def FUNC_16(self, *VAR_40, **VAR_7):
        # Stamp created/modified, default the priority, and clip over-long
        # titles to the 200-char column (197 chars + ellipsis).
        if not self.id:
            self.created = timezone.now()
        if not self.priority:
            self.priority = 3
        self.modified = timezone.now()
        if len(self.title) > 200:
            self.title = self.title[:197] + "..."
        super(CLASS_2, self).save(*VAR_40, **VAR_7)

    @staticmethod
    def FUNC_29(VAR_72):
        """Split a "queue-slug-id" string into (queue_slug, id)."""
        VAR_148 = VAR_72.split('-')
        VAR_48 = '-'.join(VAR_148[0:-1])
        return VAR_48, VAR_148[-1]

    def FUNC_1(self):
        return FUNC_1(self.description)

    @property
    def FUNC_30(self):
        return FUNC_1(self.resolution)

    def FUNC_31(self, VAR_73=None, VAR_74=None, VAR_75=None):
        """Attach a CC (ticketcc instance, user, or raw e-mail) unless that
        address is already a CC, the submitter, or the assignee.

        Returns the CLASS_17 instance used/created, or None when skipped.
        """
        if VAR_75:
            VAR_73 = VAR_75.display
        elif VAR_74:
            if VAR_74.email:
                VAR_73 = VAR_74.email
            else:
                return
        elif not VAR_73:
            raise ValueError('You must provide at least one parameter to get the VAR_73 from')
        VAR_149 = [x.display for x in self.ticketcc_set.all()]
        if self.submitter_email:
            VAR_149.append(self.submitter_email)
        if self.assigned_to and self.assigned_to.email:
            VAR_149.append(self.assigned_to.email)
        if VAR_73 not in VAR_149:
            if VAR_75:
                VAR_75.ticket = self
                VAR_75.save(update_fields=['ticket'])
            elif VAR_74:
                # Fixed: use the method parameters, not undefined names.
                VAR_75 = self.ticketcc_set.create(VAR_74=VAR_74)
            else:
                VAR_75 = self.ticketcc_set.create(VAR_73=VAR_73)
        return VAR_75
class CLASS_3(models.Manager):
    """Manager exposing private/public follow-up filters."""

    def FUNC_32(self):
        # Staff-only follow-ups.
        return self.filter(VAR_78=False)

    def FUNC_33(self):
        # Follow-ups visible to the submitter.
        return self.filter(VAR_78=True)
class CLASS_4(models.Model):
    """A follow-up (comment / status change) on a ticket.

    Saving a follow-up also bumps the parent ticket's 'modified' timestamp.
    """

    VAR_65 = models.ForeignKey(
        CLASS_2,
        on_delete=models.CASCADE,
        VAR_136=_('Ticket'),
    )
    VAR_76 = models.DateTimeField(
        _('Date'),
        default=timezone.now
    )
    VAR_11 = models.CharField(
        _('Title'),
        VAR_125=200,
        blank=True,
        null=True,
    )
    VAR_77 = models.TextField(
        _('Comment'),
        blank=True,
        null=True,
    )
    VAR_78 = models.BooleanField(
        _('Public'),
        blank=True,
        default=False,
        VAR_122=_(
            'Public tickets are viewable by the submitter and all '
            'staff, but non-VAR_78 tickets can only be seen by staff.'
        ),
    )
    VAR_74 = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        VAR_136=_('User'),
    )
    VAR_79 = models.IntegerField(
        _('New Status'),
        VAR_158=CLASS_2.STATUS_CHOICES,
        blank=True,
        null=True,
        VAR_122=_('If the VAR_52 was changed, what was it changed to?'),
    )
    VAR_80 = models.CharField(
        _('E-Mail ID'),
        VAR_125=256,
        blank=True,
        null=True,
        VAR_122=_("The Message ID of the submitter's VAR_73."),
        editable=False,
    )

    # Default manager with private/public helpers.
    VAR_81 = CLASS_3()

    VAR_0 = models.DurationField(
        VAR_122=_("Time spent on this follow up"),
        blank=True, null=True
    )

    class CLASS_22:
        VAR_129 = ('date',)
        VAR_136 = _('Follow-up')
        VAR_137 = _('Follow-ups')

    def __str__(self):
        return '%s' % self.title

    def FUNC_28(self):
        # Anchor link within the parent ticket's page.
        return u"%s#VAR_82%s" % (self.ticket.get_absolute_url(), self.id)

    def FUNC_16(self, *VAR_40, **VAR_7):
        # Touch the parent ticket's modified timestamp on every save.
        VAR_150 = self.ticket
        VAR_150.modified = timezone.now()
        VAR_150.save()
        super(CLASS_4, self).save(*VAR_40, **VAR_7)

    def FUNC_1(self):
        # Render the comment through the module-level markdown helper.
        return FUNC_1(self.comment)

    @property
    def FUNC_14(self):
        return FUNC_0(self.time_spent)
class CLASS_5(models.Model):
    """Audit record of one field change made as part of a follow-up."""

    VAR_82 = models.ForeignKey(
        CLASS_4,
        on_delete=models.CASCADE,
        VAR_136=_('Follow-up'),
    )
    VAR_83 = models.CharField(
        _('Field'),
        VAR_125=100,
    )
    VAR_84 = models.TextField(
        _('Old Value'),
        blank=True,
        null=True,
    )
    VAR_85 = models.TextField(
        _('New Value'),
        blank=True,
        null=True,
    )

    def __str__(self):
        # "field removed" / "field set to X" / "field changed from A to B".
        VAR_151 = '%s ' % self.field
        if not self.new_value:
            VAR_151 += ugettext('removed')
        elif not self.old_value:
            VAR_151 += ugettext('set to %s') % self.new_value
        else:
            VAR_151 += ugettext('changed from "%(VAR_84)s" to "%(VAR_85)s"') % {
                'old_value': self.old_value,
                'new_value': self.new_value
            }
        return VAR_151

    class CLASS_22:
        VAR_136 = _('Ticket change')
        VAR_137 = _('Ticket changes')
def FUNC_3(VAR_2, VAR_3):
    """FileField upload_to hook: delegate path building to the model instance."""
    return VAR_2.attachment_path(VAR_3)
class CLASS_6(models.Model):
    """Abstract base for file attachments; subclasses define where files live.

    Fix: FUNC_3 used ``assert NotImplementedError(...)`` which asserts a
    (truthy) exception *instance* and therefore never fires; it must raise.
    """

    VAR_86 = models.FileField(
        _('File'),
        upload_to=FUNC_3,
        VAR_125=1000,
        validators=[validate_file_extension]
    )
    VAR_3 = models.CharField(
        _('Filename'),
        blank=True,
        VAR_125=1000,
    )
    VAR_87 = models.CharField(
        _('MIME Type'),
        blank=True,
        VAR_125=255,
    )
    VAR_88 = models.IntegerField(
        _('Size'),
        blank=True,
        VAR_122=_('Size of this VAR_86 in bytes'),
    )

    def __str__(self):
        return '%s' % self.filename

    def FUNC_16(self, *VAR_40, **VAR_7):
        # Backfill size, filename and MIME type from the stored file.
        if not self.size:
            self.size = self.get_size()
        if not self.filename:
            self.filename = self.get_filename()
        if not self.mime_type:
            self.mime_type = \
                mimetypes.guess_type(self.filename, strict=False)[0] or \
                'application/octet-stream'
        return super(CLASS_6, self).save(*VAR_40, **VAR_7)

    def FUNC_34(self):
        return str(self.file)

    def FUNC_35(self):
        return self.file.file.size

    def FUNC_3(self, VAR_3):
        # Subclasses must provide the storage path for their attachments.
        raise NotImplementedError(
            "This method is to be implemented by CLASS_6 classes"
        )

    class CLASS_22:
        VAR_129 = ('filename',)
        VAR_136 = _('Attachment')
        VAR_137 = _('Attachments')
        VAR_152 = True
class CLASS_7(CLASS_6):
    """Attachment belonging to a follow-up; stored under MEDIA_ROOT."""

    VAR_82 = models.ForeignKey(
        CLASS_4,
        on_delete=models.CASCADE,
        VAR_136=_('Follow-up'),
    )

    def FUNC_3(self, VAR_3):
        """Build the storage path for VAR_3 inside this follow-up's directory.

        The client-supplied filename is reduced to its basename so a name
        such as '../../x' cannot escape the attachment directory
        (CWE-22 path traversal). Legitimate plain filenames are unchanged.
        """
        os.umask(0)
        # Strip any directory components from the untrusted filename.
        VAR_3 = os.path.basename(VAR_3)
        VAR_153 = 'helpdesk/attachments/{VAR_66}-{VAR_59}/{id_}'.format(
            VAR_66=self.followup.ticket.ticket_for_url,
            VAR_59=self.followup.ticket.secret_key,
            id_=self.followup.id)
        VAR_154 = os.path.join(settings.MEDIA_ROOT, VAR_153)
        if settings.DEFAULT_FILE_STORAGE == "django.core.files.storage.FileSystemStorage":
            if not os.path.exists(VAR_154):
                # NOTE(review): umask(0) + 0o777 creates a world-writable
                # directory -- confirm this permissive mode is required.
                os.makedirs(VAR_154, 0o777)
        return os.path.join(VAR_153, VAR_3)
class CLASS_8(CLASS_6):
    """Attachment belonging to a knowledge-base item; stored under MEDIA_ROOT."""

    VAR_60 = models.ForeignKey(
        "KBItem",
        on_delete=models.CASCADE,
        VAR_136=_('Knowledge base item'),
    )

    def FUNC_3(self, VAR_3):
        """Build the storage path for VAR_3 inside this KB item's directory.

        The client-supplied filename is reduced to its basename so a name
        such as '../../x' cannot escape the attachment directory
        (CWE-22 path traversal). Legitimate plain filenames are unchanged.
        """
        os.umask(0)
        # Strip any directory components from the untrusted filename.
        VAR_3 = os.path.basename(VAR_3)
        VAR_153 = 'helpdesk/attachments/kb/{VAR_99}/{kbi}'.format(
            VAR_99=self.kbitem.category,
            kbi=self.kbitem.id)
        VAR_154 = os.path.join(settings.MEDIA_ROOT, VAR_153)
        if settings.DEFAULT_FILE_STORAGE == "django.core.files.storage.FileSystemStorage":
            if not os.path.exists(VAR_154):
                # NOTE(review): umask(0) + 0o777 creates a world-writable
                # directory -- confirm this permissive mode is required.
                os.makedirs(VAR_154, 0o777)
        return os.path.join(VAR_153, VAR_3)
class CLASS_9(models.Model):
    """A canned reply staff can insert when responding to tickets."""

    class CLASS_22:
        VAR_129 = ('name',)
        VAR_136 = _('Pre-set reply')
        VAR_137 = _('Pre-set replies')

    VAR_89 = models.ManyToManyField(
        CLASS_1,
        blank=True,
        VAR_122=_('Leave blank to allow this reply to be used for all '
                  'queues, or select those VAR_89 you wish to limit this reply to.'),
    )
    VAR_90 = models.CharField(
        _('Name'),
        VAR_125=100,
        VAR_122=_('Only used to assist users with selecting a reply - not '
                  'shown to the VAR_74.'),
    )
    # Django-template body; rendered with ticket/queue/user context.
    VAR_91 = models.TextField(
        _('Body'),
        VAR_122=_('Context available: {{ VAR_65 }} - VAR_65 object (eg '
                  '{{ VAR_65.title }}); {{ VAR_48 }} - The VAR_48; and {{ VAR_74 }} '
                  '- the current VAR_74.'),
    )

    def __str__(self):
        return '%s' % self.name
class CLASS_10(models.Model):
    """A calendar date on which automatic escalation must not happen."""

    VAR_89 = models.ManyToManyField(
        CLASS_1,
        blank=True,
        VAR_122=_('Leave blank for this exclusion to be applied to all VAR_89, '
                  'or select those VAR_89 you wish to exclude with this entry.'),
    )
    VAR_90 = models.CharField(
        _('Name'),
        VAR_125=100,
    )
    VAR_76 = models.DateField(
        _('Date'),
        VAR_122=_('Date on which escalation should not happen'),
    )

    def __str__(self):
        return '%s' % self.name

    class CLASS_22:
        VAR_136 = _('Escalation exclusion')
        VAR_137 = _('Escalation exclusions')
class CLASS_11(models.Model):
    """Localised e-mail template (subject, heading, plain and HTML bodies)."""

    VAR_92 = models.CharField(
        _('Template Name'),
        VAR_125=100,
    )
    VAR_93 = models.CharField(
        _('Subject'),
        VAR_125=100,
        VAR_122=_('This will be prefixed with "[VAR_65.ticket] VAR_65.title"'
                  '. We recommend something simple such as "(Updated") or "(Closed)"'
                  ' - the same VAR_166 is available as in VAR_95, below.'),
    )
    VAR_94 = models.CharField(
        _('Heading'),
        VAR_125=100,
        VAR_122=_('In HTML e-mails, this will be the VAR_94 at the top of '
                  'the VAR_73 - the same VAR_166 is available as in VAR_95, '
                  'below.'),
    )
    VAR_95 = models.TextField(
        _('Plain Text'),
        VAR_122=_('The VAR_166 available to you includes {{ VAR_65 }}, '
                  '{{ VAR_48 }}, and depending on the time of the call: '
                  '{{ VAR_55 }} or {{ VAR_77 }}.'),
    )
    VAR_96 = models.TextField(
        _('HTML'),
        VAR_122=_('The same VAR_166 is available here as in VAR_95, above.'),
    )
    VAR_14 = models.CharField(
        _('Locale'),
        VAR_125=10,
        blank=True,
        null=True,
        VAR_122=_('Locale of this VAR_165.'),
    )

    def __str__(self):
        return '%s' % self.template_name

    class CLASS_22:
        # One template per (name, locale) sorted for admin listing.
        VAR_129 = ('template_name', 'locale')
        VAR_136 = _('e-mail template')
        VAR_137 = _('e-mail templates')
class CLASS_12(models.Model):
    """A knowledge-base category grouping CLASS_13 items."""

    VAR_90 = models.CharField(
        _('Name of the category'),
        VAR_125=100,
    )
    VAR_11 = models.CharField(
        _('Title on knowledgebase page'),
        VAR_125=100,
    )
    VAR_12 = models.SlugField(
        _('Slug'),
    )
    VAR_54 = models.TextField(
        _('Description'),
    )
    VAR_48 = models.ForeignKey(
        CLASS_1,
        blank=True,
        null=True,
        on_delete=models.CASCADE,
        VAR_136=_('Default VAR_48 when creating a VAR_65 after viewing this VAR_99.'),
    )
    VAR_78 = models.BooleanField(
        default=True,
        VAR_136=_("Is CLASS_12 publicly visible?")
    )

    def __str__(self):
        return '%s' % self.name

    class CLASS_22:
        VAR_129 = ('title',)
        VAR_136 = _('Knowledge base category')
        VAR_137 = _('Knowledge base categories')

    def FUNC_28(self):
        from django.urls import reverse
        return reverse('helpdesk:kb_category', VAR_7={'slug': self.slug})
class CLASS_13(models.Model):
VAR_97 = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='votes',
)
VAR_98 = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='downvotes',
)
VAR_99 = models.ForeignKey(
CLASS_12,
on_delete=models.CASCADE,
VAR_136=_('Category'),
)
VAR_11 = models.CharField(
_('Title'),
VAR_125=100,
)
VAR_100 = models.TextField(
_('Question'),
)
VAR_101 = models.TextField(
_('Answer'),
)
VAR_102 = models.IntegerField(
_('Votes'),
VAR_122=_('Total number of VAR_102 cast for this item'),
default=0,
)
VAR_103 = models.IntegerField(
_('Positive Votes'),
VAR_122=_('Number of VAR_102 for this item which were POSITIVE.'),
default=0,
)
VAR_104 = models.DateTimeField(
_('Last Updated'),
VAR_122=_('The VAR_76 on which this VAR_100 was most recently changed.'),
blank=True,
)
VAR_105 = models.ForeignKey(
helpdesk_settings.HELPDESK_TEAMS_MODEL,
on_delete=models.CASCADE,
VAR_136=_('Team'),
blank=True,
null=True,
)
VAR_106 = models.PositiveIntegerField(
_('Order'),
blank=True,
null=True,
)
VAR_107 = models.BooleanField(
_('Enabled to VAR_120 to users'),
default=True,
)
def FUNC_16(self, *VAR_40, **VAR_7):
if not self.last_updated:
self.last_updated = timezone.now()
return super(CLASS_13, self).save(*VAR_40, **VAR_7)
def FUNC_36(self):
return helpdesk_settings.HELPDESK_KBITEM_TEAM_GETTER(self)
def FUNC_37(self):
if self.votes > 0:
return (self.recommendations / self.votes) * 10
else:
return _('Unrated')
VAR_108 = property(FUNC_37)
def __str__(self):
return '%s: %s' % (self.category.title, self.title)
class CLASS_22:
VAR_129 = ('order', 'title',)
VAR_136 = _('Knowledge base item')
VAR_137 = _('Knowledge base items')
def FUNC_28(self):
from django.urls import reverse
return str(reverse('helpdesk:kb_category', VAR_40=(self.category.slug,))) + "?VAR_60=" + str(self.pk)
def FUNC_38(self):
from django.urls import reverse
return str(reverse('helpdesk:list')) + "?VAR_60=" + str(self.pk)
def FUNC_39(self):
return CLASS_2.objects.filter(VAR_60=self, status__in=(1, 2)).count()
def FUNC_40(self):
return CLASS_2.objects.filter(VAR_60=self, status__in=(1, 2), assigned_to__isnull=True)
def FUNC_1(self):
return FUNC_1(self.answer)
class CLASS_14(models.Model):
VAR_74 = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
VAR_136=_('User'),
)
VAR_11 = models.CharField(
_('Query Name'),
VAR_125=100,
VAR_122=_('User-provided VAR_90 for this query'),
)
VAR_109 = models.BooleanField(
_('Shared With Other Users?'),
blank=True,
default=False,
VAR_122=_('Should other users see this VAR_72?'),
)
VAR_72 = models.TextField(
_('Search Query'),
VAR_122=_('Pickled VAR_72 object. Be wary changing this.'),
)
def __str__(self):
if self.shared:
return '%s (*)' % self.title
else:
return '%s' % self.title
class CLASS_22:
VAR_136 = _('Saved search')
VAR_137 = _('Saved searches')
def FUNC_4(VAR_4):
from helpdesk.settings import DEFAULT_USER_SETTINGS
return DEFAULT_USER_SETTINGS[VAR_4]
def FUNC_5():
return FUNC_4('login_view_ticketlist')
def FUNC_6():
return FUNC_4('email_on_ticket_change')
def FUNC_7():
return FUNC_4('email_on_ticket_assign')
def FUNC_8():
return FUNC_4('tickets_per_page')
def FUNC_9():
return FUNC_4('use_email_as_submitter')
class CLASS_15(models.Model):
VAR_110 = ((10, '10'), (25, '25'), (50, '50'), (100, '100'))
VAR_74 = models.OneToOneField(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name="usersettings_helpdesk")
VAR_111 = models.TextField(
_('DEPRECATED! Settings Dictionary DEPRECATED!'),
VAR_122=_('DEPRECATED! This is a base64-encoded representation of a pickled Python dictionary. '
'Do not change this VAR_83 via the admin.'),
blank=True,
null=True,
)
VAR_112 = models.BooleanField(
VAR_136=_('Show CLASS_2 List on Login?'),
VAR_122=_('Display the VAR_65 list upon login? Otherwise, the dashboard is shown.'),
default=FUNC_5,
)
VAR_113 = models.BooleanField(
VAR_136=_('E-mail me on VAR_65 change?'),
VAR_122=_(
'If you\'re the VAR_65 owner and the VAR_65 is changed via the web by somebody else,'
'do you want to receive an e-mail?'
),
default=FUNC_6,
)
VAR_114 = models.BooleanField(
VAR_136=_('E-mail me when assigned a VAR_65?'),
VAR_122=_('If you are assigned a VAR_65 via the web, do you want to receive an e-mail?'),
default=FUNC_7,
)
VAR_115 = models.IntegerField(
VAR_136=_('Number of tickets to show per page'),
VAR_122=_('How many tickets do you want to see on the CLASS_2 List page?'),
default=FUNC_8,
VAR_158=VAR_110,
)
VAR_116 = models.BooleanField(
VAR_136=_('Use my e-mail address when submitting tickets?'),
VAR_122=_('When you submit a VAR_65, do you want to automatically '
'use your e-mail address as the submitter address? You '
'can type a different e-mail address when entering the '
'ticket if needed, this option only changes the default.'),
default=FUNC_9,
)
def __str__(self):
return 'Preferences for %s' % self.user
class CLASS_22:
VAR_136 = _('User Setting')
VAR_137 = _('User Settings')
def FUNC_10(VAR_5, VAR_2, VAR_6, **VAR_7):
if VAR_6:
CLASS_15.objects.create(VAR_74=VAR_2)
models.signals.post_save.connect(FUNC_10, VAR_5=settings.AUTH_USER_MODEL)
class CLASS_16(models.Model):
class CLASS_22:
VAR_136 = _('Ignored e-mail address')
VAR_137 = _('Ignored e-mail addresses')
VAR_89 = models.ManyToManyField(
CLASS_1,
blank=True,
VAR_122=_('Leave blank for this e-mail to be ignored on all VAR_89, '
'or select those VAR_89 you wish to ignore this e-mail for.'),
)
VAR_90 = models.CharField(
_('Name'),
VAR_125=100,
)
VAR_76 = models.DateField(
_('Date'),
VAR_122=_('Date on which this e-mail address was added'),
blank=True,
editable=False
)
VAR_13 = models.CharField(
_('E-Mail Address'),
VAR_125=150,
VAR_122=_('Enter a full e-mail address, or portions with '
'wildcards, eg *@domain.com or postmaster@*.'),
)
VAR_117 = models.BooleanField(
_('Save Emails in Mailbox?'),
blank=True,
default=False,
VAR_122=_('Do you want to FUNC_16 emails from this address in the mailbox? '
'If this is unticked, emails from this address will be deleted.'),
)
def __str__(self):
return '%s' % self.name
def FUNC_16(self, *VAR_40, **VAR_7):
if not self.date:
self.date = timezone.now()
return super(CLASS_16, self).save(*VAR_40, **VAR_7)
def FUNC_41(self):
VAR_89 = self.queues.all().order_by('title')
if len(VAR_89) == 0:
return '*'
else:
return ', '.join([str(q) for q in VAR_89])
def FUNC_42(self, VAR_73):
VAR_155 = self.email_address.split("@")
VAR_156 = VAR_73.split("@")
if self.email_address == VAR_73 or \
VAR_155[0] == "*" and VAR_155[1] == VAR_156[1] or \
VAR_155[1] == "*" and VAR_155[0] == VAR_156[0] or \
VAR_155[0] == "*" and VAR_155[1] == "*":
return True
else:
return False
class CLASS_17(models.Model):
VAR_65 = models.ForeignKey(
CLASS_2,
on_delete=models.CASCADE,
VAR_136=_('Ticket'),
)
VAR_74 = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
blank=True,
null=True,
VAR_122=_('User who wishes to receive updates for this VAR_65.'),
VAR_136=_('User'),
)
VAR_73 = models.EmailField(
_('E-Mail Address'),
blank=True,
null=True,
VAR_122=_('For non-VAR_74 followers, enter their e-mail address'),
)
VAR_118 = models.BooleanField(
_('Can View CLASS_2?'),
blank=True,
default=False,
VAR_122=_('Can this CC login to view the VAR_65 details?'),
)
VAR_119 = models.BooleanField(
_('Can Update CLASS_2?'),
blank=True,
default=False,
VAR_122=_('Can this CC login and update the VAR_65?'),
)
def FUNC_43(self):
if self.user and self.user.email is not None:
return self.user.email
else:
return self.email
VAR_13 = property(FUNC_43)
def FUNC_44(self):
if self.user:
return self.user
else:
return self.email
VAR_120 = property(FUNC_44)
def __str__(self):
return '%s for %s' % (self.display, self.ticket.title)
def FUNC_45(self):
if self.user and not self.user.email:
raise ValidationError('User has no VAR_73 address')
class CLASS_18(models.Manager):
def FUNC_46(self):
return super(CLASS_18, self).get_queryset().order_by('ordering')
class CLASS_19(models.Model):
VAR_90 = models.SlugField(
_('Field Name'),
VAR_122=_('As used in the database and behind the scenes. '
'Must be unique and consist of only lowercase letters with no punctuation.'),
unique=True,
)
VAR_121 = models.CharField(
_('Label'),
VAR_125=30,
VAR_122=_('The VAR_120 VAR_121 for this field'),
)
VAR_122 = models.TextField(
_('Help Text'),
VAR_122=_('Shown to the VAR_74 when editing the ticket'),
blank=True,
null=True
)
VAR_123 = (
('varchar', _('Character (single line)')),
('text', _('Text (multi-line)')),
('integer', _('Integer')),
('decimal', _('Decimal')),
('list', _('List')),
('boolean', _('Boolean (checkbox yes/no)')),
('date', _('Date')),
('time', _('Time')),
('datetime', _('Date & Time')),
('email', _('E-Mail Address')),
('url', _('URL')),
('ipaddress', _('IP Address')),
('slug', _('Slug')),
)
VAR_124 = models.CharField(
_('Data Type'),
VAR_125=100,
VAR_122=_('Allows you to restrict the data entered into this field'),
VAR_158=VAR_123,
)
VAR_125 = models.IntegerField(
_('Maximum Length (characters)'),
blank=True,
null=True,
)
VAR_126 = models.IntegerField(
_('Decimal Places'),
VAR_122=_('Only used for decimal fields'),
blank=True,
null=True,
)
VAR_127 = models.BooleanField(
_('Add empty first choice to List?'),
default=False,
VAR_122=_('Only for List: adds an empty first entry to the VAR_158 list, '
'which enforces that the VAR_74 makes an active choice.'),
)
VAR_128 = models.TextField(
_('List Values'),
VAR_122=_('For list fields only. Enter one option per line.'),
blank=True,
null=True,
)
VAR_129 = models.IntegerField(
_('Ordering'),
VAR_122=_('Lower numbers are displayed first; higher numbers are listed later'),
blank=True,
null=True,
)
def FUNC_47(self):
VAR_157 = StringIO(self.list_values)
VAR_158 = [[item.strip(), item.strip()] for item in VAR_157.readlines()]
VAR_157.close()
return VAR_158
VAR_130 = property(FUNC_47)
VAR_131 = models.BooleanField(
_('Required?'),
VAR_122=_('Does the VAR_74 have to enter a VAR_133 for this VAR_83?'),
default=False,
)
VAR_132 = models.BooleanField(
_('Staff Only?'),
VAR_122=_('If this is ticked, then the VAR_78 submission form '
'will NOT show this field'),
default=False,
)
VAR_81 = CLASS_18()
def __str__(self):
return '%s' % self.name
class CLASS_22:
VAR_136 = _('Custom field')
VAR_137 = _('Custom fields')
class CLASS_20(models.Model):
VAR_65 = models.ForeignKey(
CLASS_2,
on_delete=models.CASCADE,
VAR_136=_('Ticket'),
)
VAR_83 = models.ForeignKey(
CLASS_19,
on_delete=models.CASCADE,
VAR_136=_('Field'),
)
VAR_133 = models.TextField(blank=True, null=True)
def __str__(self):
return '%s / %s' % (self.ticket, self.field)
class CLASS_22:
VAR_159 = (('ticket', 'field'),)
VAR_136 = _('Ticket custom VAR_83 value')
VAR_137 = _('Ticket custom VAR_83 values')
class CLASS_21(models.Model):
class CLASS_22:
VAR_159 = (('ticket', 'depends_on'),)
VAR_136 = _('Ticket dependency')
VAR_137 = _('Ticket dependencies')
VAR_65 = models.ForeignKey(
CLASS_2,
on_delete=models.CASCADE,
VAR_136=_('Ticket'),
related_name='ticketdependency',
)
VAR_134 = models.ForeignKey(
CLASS_2,
on_delete=models.CASCADE,
VAR_136=_('Depends On Ticket'),
related_name='depends_on',
)
def __str__(self):
return '%s / %s' % (self.ticket, self.depends_on)
| [
3,
5,
9,
23,
27,
28,
30,
32,
34,
36,
37,
47,
48,
53,
54,
58,
60,
62,
64,
69,
71,
81,
82,
87,
90,
92,
97,
105,
114,
123,
130,
138,
146,
156,
167,
176,
187,
196,
205,
213,
221,
229,
240,
250,
259,
267,
272,
274,
283,
290,
297,
315,
325,
334,
339,
342,
347,
355,
358,
365,
376,
380,
385,
389,
393,
402,
412,
414,
415,
417,
423,
425,
429,
430,
437,
438,
441,
442,
449,
455,
459,
465,
473,
481,
486,
492,
498,
504,
512,
521,
527,
534,
541,
548,
556,
562,
570,
576,
584,
593,
604,
608,
612,
615,
617,
622,
624,
629,
631,
633,
636,
639,
641,
644,
650,
660,
673,
677,
680,
685,
699,
712,
738,
762,
773,
780,
786,
789,
793,
796,
798,
801,
803,
806,
808,
811,
812,
816,
819,
823,
829,
841,
845,
846,
852,
853,
863,
864,
866,
869,
872,
873,
879,
882,
886,
892,
897,
904,
910,
920,
928,
936,
945,
947,
952,
957,
960,
963,
969,
972,
976,
977,
983,
989,
994,
1000,
1006,
1019,
1023,
1024,
1028,
1029,
1035,
1042,
1048,
1054,
1060,
1063,
1065,
1068,
1071,
1076,
1078,
1081,
1084,
1092,
1098,
1099,
1101,
1107,
1109,
1120,
1121,
1123,
1129,
1131,
1141,
1142,
1150,
1158,
1165,
1172,
1179,
1182,
1183,
1188,
1190,
1194,
1201,
1206,
1211,
1214,
1218,
1219,
1224,
1228,
1233,
1241,
1249,
1256,
1261,
1269,
1272,
1277,
1278,
1284,
1289,
1294,
1298,
1302,
1310,
1315,
1318,
1323,
1327,
1328,
1347,
1352,
1356,
1360,
1366,
1372,
1378,
1386,
1392,
1397,
1402,
1405,
1413,
1416,
1421,
1425,
1429,
1432,
1435,
1438,
1439,
1456,
1462,
1469,
1474,
1480,
1484,
1485,
1489,
1490,
1493,
1494,
1497,
1498,
1501,
1502,
1505,
1506,
1509,
1510,
1518,
1523,
1531,
1537,
1546,
1552,
1559,
1568,
1571,
1575,
1576,
1582,
1588,
1589,
1591,
1592,
1602,
1609,
1614,
1621,
1628,
1636,
1639,
1644,
1654,
1663,
1666,
1669,
1677,
1678,
1684,
1688,
1694,
1703,
1710,
1717,
1724,
1731,
1738,
1741,
1745,
1746,
1748,
1751,
1752,
1757,
1764,
1770,
1777,
1793,
1800,
1806,
1813,
1820,
1827,
1834,
1841,
1847,
1854,
1856,
1859,
1863,
1864,
1871,
1877,
1879,
1882,
1887,
1888,
1899,
1906,
1913,
1916,
1,
2,
3,
4,
5,
6,
7,
8,
84,
85,
86,
87,
88,
89,
90,
91,
444,
445,
446,
447,
448,
449,
450,
451,
452,
453,
454,
455,
456,
457,
458,
875,
876,
877,
878,
879,
880,
881,
882,
883,
884,
885,
979,
980,
981,
982,
1026,
1031,
1032,
1033,
1034,
1144,
1145,
1146,
1147,
1148,
1149,
1150,
1151,
1152,
1153,
1185,
1186,
1187,
1188,
1189,
1190,
1191,
1192,
1193,
1221,
1222,
1223,
1224,
1225,
1226,
1227,
1280,
1281,
1282,
1283,
1330,
1331,
1332,
1333,
1441,
1442,
1443,
1444,
1445,
1446,
1447,
1448,
1449,
1450,
1512,
1513,
1514,
1515,
1516,
1578,
1579,
1580,
1581,
1582,
1583,
1584,
1585,
1594,
1595,
1596,
1597,
1598,
1680,
1681,
1682,
1683,
1684,
1685,
1686,
1687,
1754,
1755,
1756,
1890,
1891,
1892,
1893,
1894,
349,
350,
351,
352,
353,
368,
369,
370,
382,
383,
384,
596,
597,
598,
610,
611,
612,
613,
614,
615,
616,
617,
618,
619,
620,
621,
622,
623,
624,
625,
626,
627,
628,
629,
630,
631,
632,
633,
634,
662,
663,
664,
675,
676,
682,
687,
688,
689,
701,
702,
703,
714,
715,
716,
717,
740,
741,
742,
743,
764,
765,
766,
767,
768,
825,
826,
827,
828,
829,
830,
831,
832,
833,
834,
1086,
1087,
1088,
1407,
1646,
1647,
1648,
1656,
1657,
1658,
1659,
1660,
1661,
1662,
1663,
1664,
1665
] | [
3,
5,
9,
23,
27,
28,
30,
32,
34,
36,
37,
47,
48,
53,
54,
58,
60,
62,
64,
69,
71,
81,
82,
87,
90,
92,
97,
105,
114,
123,
130,
138,
146,
156,
167,
176,
187,
196,
205,
213,
221,
229,
240,
250,
259,
267,
272,
274,
283,
290,
297,
315,
325,
334,
339,
342,
347,
355,
358,
365,
376,
380,
385,
389,
393,
402,
412,
414,
415,
417,
423,
425,
429,
430,
437,
438,
441,
442,
449,
455,
459,
465,
473,
481,
486,
492,
498,
504,
512,
521,
527,
534,
541,
548,
556,
562,
570,
576,
584,
593,
604,
608,
612,
615,
617,
622,
624,
629,
631,
633,
636,
639,
641,
644,
650,
660,
673,
677,
680,
685,
699,
712,
738,
762,
773,
780,
786,
789,
793,
796,
798,
801,
803,
806,
808,
811,
812,
816,
819,
823,
829,
841,
845,
846,
852,
853,
863,
864,
866,
869,
872,
873,
879,
882,
886,
892,
897,
904,
910,
920,
928,
936,
945,
947,
952,
957,
960,
963,
969,
972,
976,
977,
983,
989,
994,
1000,
1006,
1019,
1023,
1024,
1028,
1029,
1035,
1042,
1048,
1054,
1060,
1063,
1065,
1068,
1071,
1076,
1078,
1081,
1084,
1092,
1098,
1099,
1101,
1107,
1109,
1120,
1121,
1123,
1129,
1131,
1141,
1142,
1150,
1158,
1165,
1172,
1179,
1182,
1183,
1188,
1190,
1194,
1201,
1206,
1211,
1214,
1218,
1219,
1224,
1228,
1233,
1241,
1249,
1256,
1261,
1269,
1272,
1277,
1278,
1284,
1289,
1294,
1298,
1302,
1310,
1315,
1318,
1323,
1327,
1328,
1347,
1352,
1356,
1360,
1366,
1372,
1378,
1386,
1392,
1397,
1402,
1405,
1413,
1416,
1421,
1425,
1429,
1432,
1435,
1438,
1439,
1456,
1462,
1469,
1474,
1480,
1484,
1485,
1489,
1490,
1493,
1494,
1497,
1498,
1501,
1502,
1505,
1506,
1509,
1510,
1518,
1523,
1531,
1537,
1546,
1552,
1559,
1568,
1571,
1575,
1576,
1582,
1588,
1589,
1591,
1592,
1602,
1609,
1614,
1621,
1628,
1636,
1639,
1644,
1654,
1663,
1666,
1669,
1677,
1678,
1684,
1688,
1694,
1703,
1710,
1717,
1724,
1731,
1738,
1741,
1745,
1746,
1748,
1751,
1752,
1757,
1764,
1770,
1777,
1793,
1800,
1806,
1813,
1820,
1827,
1834,
1841,
1847,
1854,
1856,
1859,
1863,
1864,
1871,
1877,
1879,
1882,
1887,
1888,
1899,
1906,
1913,
1916,
1,
2,
3,
4,
5,
6,
7,
8,
84,
85,
86,
87,
88,
89,
90,
91,
444,
445,
446,
447,
448,
449,
450,
451,
452,
453,
454,
455,
456,
457,
458,
875,
876,
877,
878,
879,
880,
881,
882,
883,
884,
885,
979,
980,
981,
982,
1026,
1031,
1032,
1033,
1034,
1144,
1145,
1146,
1147,
1148,
1149,
1150,
1151,
1152,
1153,
1185,
1186,
1187,
1188,
1189,
1190,
1191,
1192,
1193,
1221,
1222,
1223,
1224,
1225,
1226,
1227,
1280,
1281,
1282,
1283,
1330,
1331,
1332,
1333,
1441,
1442,
1443,
1444,
1445,
1446,
1447,
1448,
1449,
1450,
1512,
1513,
1514,
1515,
1516,
1578,
1579,
1580,
1581,
1582,
1583,
1584,
1585,
1594,
1595,
1596,
1597,
1598,
1680,
1681,
1682,
1683,
1684,
1685,
1686,
1687,
1754,
1755,
1756,
1890,
1891,
1892,
1893,
1894,
349,
350,
351,
352,
353,
368,
369,
370,
382,
383,
384,
596,
597,
598,
610,
611,
612,
613,
614,
615,
616,
617,
618,
619,
620,
621,
622,
623,
624,
625,
626,
627,
628,
629,
630,
631,
632,
633,
634,
662,
663,
664,
675,
676,
682,
687,
688,
689,
701,
702,
703,
714,
715,
716,
717,
740,
741,
742,
743,
764,
765,
766,
767,
768,
825,
826,
827,
828,
829,
830,
831,
832,
833,
834,
1086,
1087,
1088,
1407,
1646,
1647,
1648,
1656,
1657,
1658,
1659,
1660,
1661,
1662,
1663,
1664,
1665
] |
0CWE-22
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
"""
Pre-conditions:
Metadata of a data set is stored in a table <dataset> in a MySQL database.
Files (objects) belonging to a dataset are stored under DATAROOT/<dataset>/.
MySQL table stores relative path to the above directory.
Table provides keyword search to get list of objects.
Database login info is obtained from DiamondConfig.
MySQL table is indexed with:
FULLTEXT (title, keywords, description)
Requires:
pip install mysql-connector-python==8.0.6
"""
import datetime
import os
from flask import Blueprint, url_for, Response, \
stream_with_context, abort, jsonify, send_file
import logging
import mysql.connector
from werkzeug.datastructures import Headers
from xml.sax.saxutils import quoteattr
BASEURL = 'yfcc100m_mysql'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local path, otherwise http.
DATAROOT = None
DB_HOST = DB_DBNAME = DB_USER = DB_PASSWORD = DB_PORT = None
_log = logging.getLogger(__name__)
yfcc100m_s3_image_prefix = 'https://multimedia-commons.s3-us-west-2.amazonaws.com/data/images/'
def init(config):
global DATAROOT # pylint: disable=global-statement
DATAROOT = config.dataroot
global DB_HOST, DB_DBNAME, DB_USER, DB_PASSWORD, DB_PORT
DB_HOST = config.yfcc100m_db_host
DB_DBNAME = config.yfcc100m_db_dbname
DB_USER = config.yfcc100m_db_user
DB_PASSWORD = config.yfcc100m_db_password
DB_PORT = config.yfcc100m_db_port
scope_blueprint = Blueprint('mysql_store', __name__)
@scope_blueprint.route('/scope/<dataset>')
@scope_blueprint.route('/scope/<dataset>/keywords/<keywords>')
@scope_blueprint.route('/scope/<dataset>/modulo/<int:divisor>/<expression>')
@scope_blueprint.route(
'/scope/<dataset>/keywords/<keywords>/modulo/<int:divisor>/<expression>')
def get_scope(dataset, keywords=None, divisor=None, expression=None):
"""
:param expression: Can be "<3", "=3", ">3", etc.
:param dataset:
:param keywords: a string of comma-separated keywords
:param divisor: positive int
:return:
"""
# cursor.execute() can't substitute table name
query = "SELECT sequence_no, rel_path, download_link FROM " + dataset
conditions = []
substitutes = []
if keywords:
conditions.append("MATCH (title, keywords, description) AGAINST(%s)")
substitutes.append(keywords)
if divisor:
# TODO sanity check expression
conditions.append("(sequence_no % %s) " + expression)
substitutes.extend([divisor])
if conditions:
query += " WHERE " + ' AND '.join(conditions)
_log.debug("Query used: %s, substitutes: %s", query, substitutes)
def generate():
cnx = mysql.connector.connect(user=DB_USER,
password=DB_PASSWORD,
host=DB_HOST,
database=DB_DBNAME,
port=DB_PORT)
cursor = cnx.cursor()
cursor.execute(query, substitutes)
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if STYLE:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist>\n'
for seq_no, rel_path, download_link in cursor:
yield '<count adjust="1"/>\n'
yield _get_object_element(dataset, seq_no, rel_path,
download_link) + '\n'
yield '</objectlist>\n'
headers = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
@scope_blueprint.route('/id/<dataset>/<int:seq_no>')
def get_object_id(dataset, seq_no):
headers = Headers([('Content-Type', 'text/xml')])
return Response(_get_object_element(dataset, seq_no, None, None),
"200 OK",
headers=headers)
@scope_blueprint.route('/obj/<dataset>/<path:rel_path>')
def get_object_src_http(dataset, rel_path):
path = _get_obj_abosolute_path(dataset, rel_path)
response = send_file(path,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
return response
def _get_obj_abosolute_path(dataset, rel_path):
return os.path.join(DATAROOT, dataset, rel_path)
def _get_object_element(dataset, seq_no, rel_path, download_link):
"""If rel_path and download_link are not None, we are called from scope.
Otherwise we are called from ID and need to run SQL query to fetch these attrs."""
if rel_path is None:
query = "SELECT rel_path, download_link FROM " + \
dataset + \
" WHERE sequence_no = %s"
cnx = mysql.connector.connect(user=DB_USER,
password=DB_PASSWORD,
host=DB_HOST,
database=DB_DBNAME,
port=DB_PORT)
cursor = cnx.cursor()
cursor.execute(query, (seq_no,))
row = cursor.fetchone()
if not row:
return None
rel_path, download_link = row[0], row[1]
if LOCAL_OBJ_URI:
src_uri = 'file://' + os.path.join(DATAROOT, dataset, rel_path)
else:
src_uri = url_for('.get_object_src_http', dataset=dataset, rel_path=rel_path)
return '<object id={} src={} hyperfind.external-link={} />' \
.format(
quoteattr(url_for('.get_object_id', dataset=dataset, seq_no=seq_no)),
quoteattr(src_uri),
quoteattr(download_link))
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
"""
Pre-conditions:
Metadata of a data set is stored in a table <dataset> in a MySQL database.
Files (objects) belonging to a dataset are stored under DATAROOT/<dataset>/.
MySQL table stores relative path to the above directory.
Table provides keyword search to get list of objects.
Database login info is obtained from DiamondConfig.
MySQL table is indexed with:
FULLTEXT (title, keywords, description)
Requires:
pip install mysql-connector-python==8.0.6
"""
import datetime
import os
from flask import Blueprint, url_for, Response, \
stream_with_context, abort, jsonify, send_file
import logging
import mysql.connector
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
from xml.sax.saxutils import quoteattr
BASEURL = 'yfcc100m_mysql'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local path, otherwise http.
DATAROOT = None
DB_HOST = DB_DBNAME = DB_USER = DB_PASSWORD = DB_PORT = None
_log = logging.getLogger(__name__)
yfcc100m_s3_image_prefix = 'https://multimedia-commons.s3-us-west-2.amazonaws.com/data/images/'
def init(config):
global DATAROOT # pylint: disable=global-statement
DATAROOT = config.dataroot
global DB_HOST, DB_DBNAME, DB_USER, DB_PASSWORD, DB_PORT
DB_HOST = config.yfcc100m_db_host
DB_DBNAME = config.yfcc100m_db_dbname
DB_USER = config.yfcc100m_db_user
DB_PASSWORD = config.yfcc100m_db_password
DB_PORT = config.yfcc100m_db_port
scope_blueprint = Blueprint('mysql_store', __name__)
@scope_blueprint.route('/scope/<dataset>')
@scope_blueprint.route('/scope/<dataset>/keywords/<keywords>')
@scope_blueprint.route('/scope/<dataset>/modulo/<int:divisor>/<expression>')
@scope_blueprint.route(
'/scope/<dataset>/keywords/<keywords>/modulo/<int:divisor>/<expression>')
def get_scope(dataset, keywords=None, divisor=None, expression=None):
"""
:param expression: Can be "<3", "=3", ">3", etc.
:param dataset:
:param keywords: a string of comma-separated keywords
:param divisor: positive int
:return:
"""
# cursor.execute() can't substitute table name
query = "SELECT sequence_no, rel_path, download_link FROM " + dataset
conditions = []
substitutes = []
if keywords:
conditions.append("MATCH (title, keywords, description) AGAINST(%s)")
substitutes.append(keywords)
if divisor:
# TODO sanity check expression
conditions.append("(sequence_no % %s) " + expression)
substitutes.extend([divisor])
if conditions:
query += " WHERE " + ' AND '.join(conditions)
_log.debug("Query used: %s, substitutes: %s", query, substitutes)
def generate():
cnx = mysql.connector.connect(user=DB_USER,
password=DB_PASSWORD,
host=DB_HOST,
database=DB_DBNAME,
port=DB_PORT)
cursor = cnx.cursor()
cursor.execute(query, substitutes)
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if STYLE:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist>\n'
for seq_no, rel_path, download_link in cursor:
yield '<count adjust="1"/>\n'
yield _get_object_element(dataset, seq_no, rel_path,
download_link) + '\n'
yield '</objectlist>\n'
headers = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
@scope_blueprint.route('/id/<dataset>/<int:seq_no>')
def get_object_id(dataset, seq_no):
headers = Headers([('Content-Type', 'text/xml')])
return Response(_get_object_element(dataset, seq_no, None, None),
"200 OK",
headers=headers)
@scope_blueprint.route('/obj/<dataset>/<path:rel_path>')
def get_object_src_http(dataset, rel_path):
path = _get_obj_absolute_path(dataset, rel_path)
response = send_file(path,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
return response
def _get_obj_absolute_path(dataset, rel_path):
return safe_join(DATAROOT, dataset, rel_path)
def _get_object_element(dataset, seq_no, rel_path, download_link):
"""If rel_path and download_link are not None, we are called from scope.
Otherwise we are called from ID and need to run SQL query to fetch these attrs."""
if rel_path is None:
query = "SELECT rel_path, download_link FROM " + \
dataset + \
" WHERE sequence_no = %s"
cnx = mysql.connector.connect(user=DB_USER,
password=DB_PASSWORD,
host=DB_HOST,
database=DB_DBNAME,
port=DB_PORT)
cursor = cnx.cursor()
cursor.execute(query, (seq_no,))
row = cursor.fetchone()
if not row:
return None
rel_path, download_link = row[0], row[1]
if LOCAL_OBJ_URI:
src_uri = 'file://' + _get_obj_absolute_path(dataset, rel_path)
else:
src_uri = url_for('.get_object_src_http', dataset=dataset, rel_path=rel_path)
return '<object id={} src={} hyperfind.external-link={} />' \
.format(
quoteattr(url_for('.get_object_id', dataset=dataset, seq_no=seq_no)),
quoteattr(src_uri),
quoteattr(download_link))
| path_disclosure | {
"code": [
" path = _get_obj_abosolute_path(dataset, rel_path)",
"def _get_obj_abosolute_path(dataset, rel_path):",
" return os.path.join(DATAROOT, dataset, rel_path)",
" src_uri = 'file://' + os.path.join(DATAROOT, dataset, rel_path)"
],
"line_no": [
127,
136,
137,
165
]
} | {
"code": [
"from werkzeug.security import safe_join",
" path = _get_obj_absolute_path(dataset, rel_path)",
"def _get_obj_absolute_path(dataset, rel_path):",
" src_uri = 'file://' + _get_obj_absolute_path(dataset, rel_path)"
],
"line_no": [
32,
128,
137,
166
]
} |
import datetime
import os
from flask import Blueprint, url_for, Response, \
stream_with_context, abort, jsonify, send_file
import logging
import mysql.connector
from werkzeug.datastructures import Headers
from xml.sax.saxutils import quoteattr
VAR_0 = 'yfcc100m_mysql'
VAR_1 = False
VAR_2 = True # if true, return local VAR_24, otherwise http.
VAR_3 = None
VAR_4 = VAR_5 = VAR_6 = VAR_7 = VAR_8 = None
VAR_9 = logging.getLogger(__name__)
VAR_10 = 'https://multimedia-commons.s3-us-west-2.amazonaws.com/data/images/'
def FUNC_0(VAR_11):
global VAR_3 # pylint: disable=global-statement
VAR_3 = VAR_11.dataroot
global VAR_4, VAR_5, VAR_6, VAR_7, VAR_8
VAR_4 = VAR_11.yfcc100m_db_host
VAR_5 = VAR_11.yfcc100m_db_dbname
VAR_6 = VAR_11.yfcc100m_db_user
VAR_7 = VAR_11.yfcc100m_db_password
VAR_8 = VAR_11.yfcc100m_db_port
VAR_12 = Blueprint('mysql_store', __name__)
@VAR_12.route('/scope/<VAR_13>')
@VAR_12.route('/scope/<VAR_13>/VAR_14/<keywords>')
@VAR_12.route('/scope/<VAR_13>/modulo/<int:VAR_15>/<VAR_16>')
@VAR_12.route(
'/scope/<VAR_13>/VAR_14/<keywords>/modulo/<int:VAR_15>/<VAR_16>')
def FUNC_1(VAR_13, VAR_14=None, VAR_15=None, VAR_16=None):
VAR_20 = "SELECT sequence_no, VAR_18, VAR_19 FROM " + VAR_13
VAR_21 = []
VAR_22 = []
if VAR_14:
VAR_21.append("MATCH (title, VAR_14, description) AGAINST(%s)")
VAR_22.append(VAR_14)
if VAR_15:
VAR_21.append("(sequence_no % %s) " + VAR_16)
VAR_22.extend([VAR_15])
if VAR_21:
VAR_20 += " WHERE " + ' AND '.join(VAR_21)
VAR_9.debug("Query used: %s, VAR_22: %s", VAR_20, VAR_22)
def FUNC_6():
VAR_26 = mysql.connector.connect(user=VAR_6,
password=VAR_7,
host=VAR_4,
database=VAR_5,
port=VAR_8)
VAR_27 = VAR_26.cursor()
VAR_27.execute(VAR_20, VAR_22)
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist>\n'
for VAR_17, VAR_18, VAR_19 in VAR_27:
yield '<count adjust="1"/>\n'
yield FUNC_5(VAR_13, VAR_17, VAR_18,
VAR_19) + '\n'
yield '</objectlist>\n'
VAR_23 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_6()),
status="200 OK",
VAR_23=headers)
@VAR_12.route('/id/<VAR_13>/<int:VAR_17>')
def FUNC_2(VAR_13, VAR_17):
VAR_23 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_5(VAR_13, VAR_17, None, None),
"200 OK",
VAR_23=headers)
@VAR_12.route('/obj/<VAR_13>/<VAR_24:VAR_18>')
def FUNC_3(VAR_13, VAR_18):
VAR_24 = FUNC_4(VAR_13, VAR_18)
VAR_25 = send_file(VAR_24,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
return VAR_25
def FUNC_4(VAR_13, VAR_18):
return os.path.join(VAR_3, VAR_13, VAR_18)
def FUNC_5(VAR_13, VAR_17, VAR_18, VAR_19):
if VAR_18 is None:
VAR_20 = "SELECT VAR_18, VAR_19 FROM " + \
VAR_13 + \
" WHERE sequence_no = %s"
VAR_26 = mysql.connector.connect(user=VAR_6,
password=VAR_7,
host=VAR_4,
database=VAR_5,
port=VAR_8)
VAR_27 = VAR_26.cursor()
VAR_27.execute(VAR_20, (VAR_17,))
VAR_28 = VAR_27.fetchone()
if not VAR_28:
return None
VAR_18, VAR_19 = VAR_28[0], VAR_28[1]
if VAR_2:
VAR_29 = 'file://' + os.path.join(VAR_3, VAR_13, VAR_18)
else:
VAR_29 = url_for('.get_object_src_http', VAR_13=dataset, VAR_18=rel_path)
return '<object id={} src={} hyperfind.external-link={} />' \
.format(
quoteattr(url_for('.get_object_id', VAR_13=dataset, VAR_17=seq_no)),
quoteattr(VAR_29),
quoteattr(VAR_19))
|
import datetime
import os
from flask import Blueprint, url_for, Response, \
stream_with_context, abort, jsonify, send_file
import logging
import mysql.connector
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
from xml.sax.saxutils import quoteattr
VAR_0 = 'yfcc100m_mysql'
VAR_1 = False
VAR_2 = True # if true, return local VAR_24, otherwise http.
VAR_3 = None
VAR_4 = VAR_5 = VAR_6 = VAR_7 = VAR_8 = None
VAR_9 = logging.getLogger(__name__)
VAR_10 = 'https://multimedia-commons.s3-us-west-2.amazonaws.com/data/images/'
def FUNC_0(VAR_11):
global VAR_3 # pylint: disable=global-statement
VAR_3 = VAR_11.dataroot
global VAR_4, VAR_5, VAR_6, VAR_7, VAR_8
VAR_4 = VAR_11.yfcc100m_db_host
VAR_5 = VAR_11.yfcc100m_db_dbname
VAR_6 = VAR_11.yfcc100m_db_user
VAR_7 = VAR_11.yfcc100m_db_password
VAR_8 = VAR_11.yfcc100m_db_port
VAR_12 = Blueprint('mysql_store', __name__)
@VAR_12.route('/scope/<VAR_13>')
@VAR_12.route('/scope/<VAR_13>/VAR_14/<keywords>')
@VAR_12.route('/scope/<VAR_13>/modulo/<int:VAR_15>/<VAR_16>')
@VAR_12.route(
'/scope/<VAR_13>/VAR_14/<keywords>/modulo/<int:VAR_15>/<VAR_16>')
def FUNC_1(VAR_13, VAR_14=None, VAR_15=None, VAR_16=None):
VAR_20 = "SELECT sequence_no, VAR_18, VAR_19 FROM " + VAR_13
VAR_21 = []
VAR_22 = []
if VAR_14:
VAR_21.append("MATCH (title, VAR_14, description) AGAINST(%s)")
VAR_22.append(VAR_14)
if VAR_15:
VAR_21.append("(sequence_no % %s) " + VAR_16)
VAR_22.extend([VAR_15])
if VAR_21:
VAR_20 += " WHERE " + ' AND '.join(VAR_21)
VAR_9.debug("Query used: %s, VAR_22: %s", VAR_20, VAR_22)
def FUNC_6():
VAR_26 = mysql.connector.connect(user=VAR_6,
password=VAR_7,
host=VAR_4,
database=VAR_5,
port=VAR_8)
VAR_27 = VAR_26.cursor()
VAR_27.execute(VAR_20, VAR_22)
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist>\n'
for VAR_17, VAR_18, VAR_19 in VAR_27:
yield '<count adjust="1"/>\n'
yield FUNC_5(VAR_13, VAR_17, VAR_18,
VAR_19) + '\n'
yield '</objectlist>\n'
VAR_23 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_6()),
status="200 OK",
VAR_23=headers)
@VAR_12.route('/id/<VAR_13>/<int:VAR_17>')
def FUNC_2(VAR_13, VAR_17):
VAR_23 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_5(VAR_13, VAR_17, None, None),
"200 OK",
VAR_23=headers)
@VAR_12.route('/obj/<VAR_13>/<VAR_24:VAR_18>')
def FUNC_3(VAR_13, VAR_18):
VAR_24 = FUNC_4(VAR_13, VAR_18)
VAR_25 = send_file(VAR_24,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
return VAR_25
def FUNC_4(VAR_13, VAR_18):
return safe_join(VAR_3, VAR_13, VAR_18)
def FUNC_5(VAR_13, VAR_17, VAR_18, VAR_19):
if VAR_18 is None:
VAR_20 = "SELECT VAR_18, VAR_19 FROM " + \
VAR_13 + \
" WHERE sequence_no = %s"
VAR_26 = mysql.connector.connect(user=VAR_6,
password=VAR_7,
host=VAR_4,
database=VAR_5,
port=VAR_8)
VAR_27 = VAR_26.cursor()
VAR_27.execute(VAR_20, (VAR_17,))
VAR_28 = VAR_27.fetchone()
if not VAR_28:
return None
VAR_18, VAR_19 = VAR_28[0], VAR_28[1]
if VAR_2:
VAR_29 = 'file://' + FUNC_4(VAR_13, VAR_18)
else:
VAR_29 = url_for('.get_object_src_http', VAR_13=dataset, VAR_18=rel_path)
return '<object id={} src={} hyperfind.external-link={} />' \
.format(
quoteattr(url_for('.get_object_id', VAR_13=dataset, VAR_17=seq_no)),
quoteattr(VAR_29),
quoteattr(VAR_19))
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
21,
33,
39,
41,
43,
44,
54,
55,
57,
58,
66,
73,
80,
82,
85,
88,
90,
99,
103,
109,
111,
113,
117,
124,
134,
135,
138,
139,
143,
148,
156,
158,
161,
163,
168,
174,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
65,
66,
67,
68,
69,
70,
71,
72,
141,
142
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
21,
34,
40,
42,
44,
45,
55,
56,
58,
59,
67,
74,
81,
83,
86,
89,
91,
100,
104,
110,
112,
114,
118,
125,
135,
136,
139,
140,
144,
149,
157,
159,
162,
164,
169,
175,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
66,
67,
68,
69,
70,
71,
72,
73,
142,
143
] |
2CWE-601
| import datetime
import logging
import re
from flask import abort, current_app, flash, g, redirect, request, session, url_for
from flask_babel import lazy_gettext
from flask_login import login_user, logout_user
import jwt
from werkzeug.security import generate_password_hash
from wtforms import PasswordField, validators
from wtforms.validators import EqualTo
from .decorators import has_access
from .forms import LoginForm_db, LoginForm_oid, ResetPasswordForm, UserInfoEdit
from .._compat import as_unicode
from ..actions import action
from ..baseviews import BaseView
from ..charts.views import DirectByChartView
from ..fieldwidgets import BS3PasswordFieldWidget
from ..utils.base import lazy_formatter_gettext
from ..views import expose, ModelView, SimpleFormView
from ..widgets import ListWidget, ShowWidget
log = logging.getLogger(__name__)
class PermissionModelView(ModelView):
route_base = "/permissions"
base_permissions = ["can_list"]
list_title = lazy_gettext("List Base Permissions")
show_title = lazy_gettext("Show Base Permission")
add_title = lazy_gettext("Add Base Permission")
edit_title = lazy_gettext("Edit Base Permission")
label_columns = {"name": lazy_gettext("Name")}
class ViewMenuModelView(ModelView):
route_base = "/viewmenus"
base_permissions = ["can_list"]
list_title = lazy_gettext("List View Menus")
show_title = lazy_gettext("Show View Menu")
add_title = lazy_gettext("Add View Menu")
edit_title = lazy_gettext("Edit View Menu")
label_columns = {"name": lazy_gettext("Name")}
class PermissionViewModelView(ModelView):
route_base = "/permissionviews"
base_permissions = ["can_list"]
list_title = lazy_gettext("List Permissions on Views/Menus")
show_title = lazy_gettext("Show Permission on Views/Menus")
add_title = lazy_gettext("Add Permission on Views/Menus")
edit_title = lazy_gettext("Edit Permission on Views/Menus")
label_columns = {
"permission": lazy_gettext("Permission"),
"view_menu": lazy_gettext("View/Menu"),
}
list_columns = ["permission", "view_menu"]
class ResetMyPasswordView(SimpleFormView):
"""
View for resetting own user password
"""
route_base = "/resetmypassword"
form = ResetPasswordForm
form_title = lazy_gettext("Reset Password Form")
redirect_url = "/"
message = lazy_gettext("Password Changed")
def form_post(self, form):
self.appbuilder.sm.reset_password(g.user.id, form.password.data)
flash(as_unicode(self.message), "info")
class ResetPasswordView(SimpleFormView):
"""
View for reseting all users password
"""
route_base = "/resetpassword"
form = ResetPasswordForm
form_title = lazy_gettext("Reset Password Form")
redirect_url = "/"
message = lazy_gettext("Password Changed")
def form_post(self, form):
pk = request.args.get("pk")
self.appbuilder.sm.reset_password(pk, form.password.data)
flash(as_unicode(self.message), "info")
class UserInfoEditView(SimpleFormView):
form = UserInfoEdit
form_title = lazy_gettext("Edit User Information")
redirect_url = "/"
message = lazy_gettext("User information changed")
def form_get(self, form):
item = self.appbuilder.sm.get_user_by_id(g.user.id)
# fills the form generic solution
for key, value in form.data.items():
if key == "csrf_token":
continue
form_field = getattr(form, key)
form_field.data = getattr(item, key)
def form_post(self, form):
form = self.form.refresh(request.form)
item = self.appbuilder.sm.get_user_by_id(g.user.id)
form.populate_obj(item)
self.appbuilder.sm.update_user(item)
flash(as_unicode(self.message), "info")
def _roles_custom_formatter(string: str) -> str:
if current_app.config.get("AUTH_ROLES_SYNC_AT_LOGIN", False):
string += (
". <div class='alert alert-warning' role='alert'>"
"AUTH_ROLES_SYNC_AT_LOGIN is enabled, changes to this field will "
"not persist between user logins."
"</div>"
)
return string
class UserModelView(ModelView):
route_base = "/users"
list_title = lazy_gettext("List Users")
show_title = lazy_gettext("Show User")
add_title = lazy_gettext("Add User")
edit_title = lazy_gettext("Edit User")
label_columns = {
"get_full_name": lazy_gettext("Full Name"),
"first_name": lazy_gettext("First Name"),
"last_name": lazy_gettext("Last Name"),
"username": lazy_gettext("User Name"),
"password": lazy_gettext("Password"),
"active": lazy_gettext("Is Active?"),
"email": lazy_gettext("Email"),
"roles": lazy_gettext("Role"),
"last_login": lazy_gettext("Last login"),
"login_count": lazy_gettext("Login count"),
"fail_login_count": lazy_gettext("Failed login count"),
"created_on": lazy_gettext("Created on"),
"created_by": lazy_gettext("Created by"),
"changed_on": lazy_gettext("Changed on"),
"changed_by": lazy_gettext("Changed by"),
}
description_columns = {
"first_name": lazy_gettext("Write the user first name or names"),
"last_name": lazy_gettext("Write the user last name"),
"username": lazy_gettext(
"Username valid for authentication on DB or LDAP, unused for OID auth"
),
"password": lazy_gettext(
"Please use a good password policy,"
" this application does not check this for you"
),
"active": lazy_gettext(
"It's not a good policy to remove a user, just make it inactive"
),
"email": lazy_gettext("The user's email, this will also be used for OID auth"),
"roles": lazy_formatter_gettext(
"The user role on the application,"
" this will associate with a list of permissions",
_roles_custom_formatter,
),
"conf_password": lazy_gettext("Please rewrite the user's password to confirm"),
}
list_columns = ["first_name", "last_name", "username", "email", "active", "roles"]
show_fieldsets = [
(
lazy_gettext("User info"),
{"fields": ["username", "active", "roles", "login_count"]},
),
(
lazy_gettext("Personal Info"),
{"fields": ["first_name", "last_name", "email"], "expanded": True},
),
(
lazy_gettext("Audit Info"),
{
"fields": [
"last_login",
"fail_login_count",
"created_on",
"created_by",
"changed_on",
"changed_by",
],
"expanded": False,
},
),
]
user_show_fieldsets = [
(
lazy_gettext("User info"),
{"fields": ["username", "active", "roles", "login_count"]},
),
(
lazy_gettext("Personal Info"),
{"fields": ["first_name", "last_name", "email"], "expanded": True},
),
]
search_exclude_columns = ["password"]
add_columns = ["first_name", "last_name", "username", "active", "email", "roles"]
edit_columns = ["first_name", "last_name", "username", "active", "email", "roles"]
user_info_title = lazy_gettext("Your user information")
@expose("/userinfo/")
@has_access
def userinfo(self):
item = self.datamodel.get(g.user.id, self._base_filters)
widgets = self._get_show_widget(
g.user.id, item, show_fieldsets=self.user_show_fieldsets
)
self.update_redirect()
return self.render_template(
self.show_template,
title=self.user_info_title,
widgets=widgets,
appbuilder=self.appbuilder,
)
@action("userinfoedit", lazy_gettext("Edit User"), "", "fa-edit", multiple=False)
def userinfoedit(self, item):
return redirect(
url_for(self.appbuilder.sm.userinfoeditview.__name__ + ".this_form_get")
)
class UserOIDModelView(UserModelView):
"""
View that add OID specifics to User view.
Override to implement your own custom view.
Then override useroidmodelview property on SecurityManager
"""
pass
class UserLDAPModelView(UserModelView):
"""
View that add LDAP specifics to User view.
Override to implement your own custom view.
Then override userldapmodelview property on SecurityManager
"""
pass
class UserOAuthModelView(UserModelView):
"""
View that add OAUTH specifics to User view.
Override to implement your own custom view.
Then override userldapmodelview property on SecurityManager
"""
pass
class UserRemoteUserModelView(UserModelView):
"""
View that add REMOTE_USER specifics to User view.
Override to implement your own custom view.
Then override userldapmodelview property on SecurityManager
"""
pass
class UserDBModelView(UserModelView):
"""
View that add DB specifics to User view.
Override to implement your own custom view.
Then override userdbmodelview property on SecurityManager
"""
add_form_extra_fields = {
"password": PasswordField(
lazy_gettext("Password"),
description=lazy_gettext(
"Please use a good password policy,"
" this application does not check this for you"
),
validators=[validators.DataRequired()],
widget=BS3PasswordFieldWidget(),
),
"conf_password": PasswordField(
lazy_gettext("Confirm Password"),
description=lazy_gettext("Please rewrite the user's password to confirm"),
validators=[
EqualTo("password", message=lazy_gettext("Passwords must match"))
],
widget=BS3PasswordFieldWidget(),
),
}
add_columns = [
"first_name",
"last_name",
"username",
"active",
"email",
"roles",
"password",
"conf_password",
]
@expose("/show/<pk>", methods=["GET"])
@has_access
def show(self, pk):
actions = dict()
actions["resetpasswords"] = self.actions.get("resetpasswords")
item = self.datamodel.get(pk, self._base_filters)
if not item:
abort(404)
widgets = self._get_show_widget(pk, item, actions=actions)
self.update_redirect()
return self.render_template(
self.show_template,
pk=pk,
title=self.show_title,
widgets=widgets,
appbuilder=self.appbuilder,
related_views=self._related_views,
)
@expose("/userinfo/")
@has_access
def userinfo(self):
actions = dict()
actions["resetmypassword"] = self.actions.get("resetmypassword")
actions["userinfoedit"] = self.actions.get("userinfoedit")
item = self.datamodel.get(g.user.id, self._base_filters)
widgets = self._get_show_widget(
g.user.id, item, actions=actions, show_fieldsets=self.user_show_fieldsets
)
self.update_redirect()
return self.render_template(
self.show_template,
title=self.user_info_title,
widgets=widgets,
appbuilder=self.appbuilder,
)
@action(
"resetmypassword",
lazy_gettext("Reset my password"),
"",
"fa-lock",
multiple=False,
)
def resetmypassword(self, item):
return redirect(
url_for(self.appbuilder.sm.resetmypasswordview.__name__ + ".this_form_get")
)
@action(
"resetpasswords", lazy_gettext("Reset Password"), "", "fa-lock", multiple=False
)
def resetpasswords(self, item):
return redirect(
url_for(
self.appbuilder.sm.resetpasswordview.__name__ + ".this_form_get",
pk=item.id,
)
)
def pre_update(self, item):
item.changed_on = datetime.datetime.now()
item.changed_by_fk = g.user.id
def pre_add(self, item):
item.password = generate_password_hash(item.password)
class UserStatsChartView(DirectByChartView):
chart_title = lazy_gettext("User Statistics")
label_columns = {
"username": lazy_gettext("User Name"),
"login_count": lazy_gettext("Login count"),
"fail_login_count": lazy_gettext("Failed login count"),
}
search_columns = UserModelView.search_columns
definitions = [
{"label": "Login Count", "group": "username", "series": ["login_count"]},
{
"label": "Failed Login Count",
"group": "username",
"series": ["fail_login_count"],
},
]
class RoleListWidget(ListWidget):
template = "appbuilder/general/widgets/roles/list.html"
def __init__(self, **kwargs):
kwargs["appbuilder"] = current_app.appbuilder
super().__init__(**kwargs)
class RoleShowWidget(ShowWidget):
template = "appbuilder/general/widgets/roles/show.html"
def __init__(self, **kwargs):
kwargs["appbuilder"] = current_app.appbuilder
super().__init__(**kwargs)
class RoleModelView(ModelView):
route_base = "/roles"
list_title = lazy_gettext("List Roles")
show_title = lazy_gettext("Show Role")
add_title = lazy_gettext("Add Role")
edit_title = lazy_gettext("Edit Role")
list_widget = RoleListWidget
show_widget = RoleShowWidget
label_columns = {
"name": lazy_gettext("Name"),
"permissions": lazy_gettext("Permissions"),
}
list_columns = ["name", "permissions"]
show_columns = ["name", "permissions"]
edit_columns = ["name", "permissions"]
add_columns = edit_columns
order_columns = ["name"]
@action(
"copyrole",
lazy_gettext("Copy Role"),
lazy_gettext("Copy the selected roles?"),
icon="fa-copy",
single=False,
)
def copy_role(self, items):
self.update_redirect()
for item in items:
new_role = item.__class__()
new_role.name = item.name
new_role.permissions = item.permissions
new_role.name = new_role.name + " copy"
self.datamodel.add(new_role)
return redirect(self.get_redirect())
class RegisterUserModelView(ModelView):
route_base = "/registeruser"
base_permissions = ["can_list", "can_show", "can_delete"]
list_title = lazy_gettext("List of Registration Requests")
show_title = lazy_gettext("Show Registration")
list_columns = ["username", "registration_date", "email"]
show_exclude_columns = ["password"]
search_exclude_columns = ["password"]
class AuthView(BaseView):
route_base = ""
login_template = ""
invalid_login_message = lazy_gettext("Invalid login. Please try again.")
title = lazy_gettext("Sign In")
@expose("/login/", methods=["GET", "POST"])
def login(self):
pass
@expose("/logout/")
def logout(self):
logout_user()
return redirect(self.appbuilder.get_url_for_index)
class AuthDBView(AuthView):
login_template = "appbuilder/general/security/login_db.html"
@expose("/login/", methods=["GET", "POST"])
def login(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
form = LoginForm_db()
if form.validate_on_submit():
user = self.appbuilder.sm.auth_user_db(
form.username.data, form.password.data
)
if not user:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
login_user(user, remember=False)
return redirect(self.appbuilder.get_url_for_index)
return self.render_template(
self.login_template, title=self.title, form=form, appbuilder=self.appbuilder
)
class AuthLDAPView(AuthView):
login_template = "appbuilder/general/security/login_ldap.html"
@expose("/login/", methods=["GET", "POST"])
def login(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
form = LoginForm_db()
if form.validate_on_submit():
user = self.appbuilder.sm.auth_user_ldap(
form.username.data, form.password.data
)
if not user:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
login_user(user, remember=False)
return redirect(self.appbuilder.get_url_for_index)
return self.render_template(
self.login_template, title=self.title, form=form, appbuilder=self.appbuilder
)
"""
For Future Use, API Auth, must check howto keep REST stateless
"""
"""
@expose_api(name='auth',url='/api/auth')
def auth(self):
if g.user is not None and g.user.is_authenticated:
http_return_code = 401
response = make_response(
jsonify(
{
'message': 'Login Failed already authenticated',
'severity': 'critical'
}
),
http_return_code
)
username = str(request.args.get('username'))
password = str(request.args.get('password'))
user = self.appbuilder.sm.auth_user_ldap(username, password)
if not user:
http_return_code = 401
response = make_response(
jsonify(
{
'message': 'Login Failed',
'severity': 'critical'
}
),
http_return_code
)
else:
login_user(user, remember=False)
http_return_code = 201
response = make_response(
jsonify(
{
'message': 'Login Success',
'severity': 'info'
}
),
http_return_code
)
return response
"""
class AuthOIDView(AuthView):
login_template = "appbuilder/general/security/login_oid.html"
oid_ask_for = ["email"]
oid_ask_for_optional = []
def __init__(self):
super(AuthOIDView, self).__init__()
@expose("/login/", methods=["GET", "POST"])
def login(self, flag=True):
@self.appbuilder.sm.oid.loginhandler
def login_handler(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
form = LoginForm_oid()
if form.validate_on_submit():
session["remember_me"] = form.remember_me.data
return self.appbuilder.sm.oid.try_login(
form.openid.data,
ask_for=self.oid_ask_for,
ask_for_optional=self.oid_ask_for_optional,
)
return self.render_template(
self.login_template,
title=self.title,
form=form,
providers=self.appbuilder.sm.openid_providers,
appbuilder=self.appbuilder,
)
@self.appbuilder.sm.oid.after_login
def after_login(resp):
if resp.email is None or resp.email == "":
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
user = self.appbuilder.sm.auth_user_oid(resp.email)
if user is None:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
remember_me = False
if "remember_me" in session:
remember_me = session["remember_me"]
session.pop("remember_me", None)
login_user(user, remember=remember_me)
return redirect(self.appbuilder.get_url_for_index)
return login_handler(self)
class AuthOAuthView(AuthView):
login_template = "appbuilder/general/security/login_oauth.html"
@expose("/login/")
@expose("/login/<provider>")
@expose("/login/<provider>/<register>")
def login(self, provider=None, register=None):
log.debug("Provider: {0}".format(provider))
if g.user is not None and g.user.is_authenticated:
log.debug("Already authenticated {0}".format(g.user))
return redirect(self.appbuilder.get_url_for_index)
if provider is None:
if len(self.appbuilder.sm.oauth_providers) > 1:
return self.render_template(
self.login_template,
providers=self.appbuilder.sm.oauth_providers,
title=self.title,
appbuilder=self.appbuilder,
)
else:
provider = self.appbuilder.sm.oauth_providers[0]["name"]
log.debug("Going to call authorize for: {0}".format(provider))
state = jwt.encode(
request.args.to_dict(flat=False),
self.appbuilder.app.config["SECRET_KEY"],
algorithm="HS256",
)
try:
if register:
log.debug("Login to Register")
session["register"] = True
if provider == "twitter":
return self.appbuilder.sm.oauth_remotes[provider].authorize_redirect(
redirect_uri=url_for(
".oauth_authorized",
provider=provider,
_external=True,
state=state,
)
)
else:
return self.appbuilder.sm.oauth_remotes[provider].authorize_redirect(
redirect_uri=url_for(
".oauth_authorized", provider=provider, _external=True
),
state=state.decode("ascii") if isinstance(state, bytes) else state,
)
except Exception as e:
log.error("Error on OAuth authorize: {0}".format(e))
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_index)
@expose("/oauth-authorized/<provider>")
def oauth_authorized(self, provider):
log.debug("Authorized init")
resp = self.appbuilder.sm.oauth_remotes[provider].authorize_access_token()
if resp is None:
flash(u"You denied the request to sign in.", "warning")
return redirect(self.appbuilder.get_url_for_login)
log.debug("OAUTH Authorized resp: {0}".format(resp))
# Retrieves specific user info from the provider
try:
self.appbuilder.sm.set_oauth_session(provider, resp)
userinfo = self.appbuilder.sm.oauth_user_info(provider, resp)
except Exception as e:
log.error("Error returning OAuth user info: {0}".format(e))
user = None
else:
log.debug("User info retrieved from {0}: {1}".format(provider, userinfo))
# User email is not whitelisted
if provider in self.appbuilder.sm.oauth_whitelists:
whitelist = self.appbuilder.sm.oauth_whitelists[provider]
allow = False
for e in whitelist:
if re.search(e, userinfo["email"]):
allow = True
break
if not allow:
flash(u"You are not authorized.", "warning")
return redirect(self.appbuilder.get_url_for_login)
else:
log.debug("No whitelist for OAuth provider")
user = self.appbuilder.sm.auth_user_oauth(userinfo)
if user is None:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
else:
login_user(user)
try:
state = jwt.decode(
request.args["state"],
self.appbuilder.app.config["SECRET_KEY"],
algorithms=["HS256"],
)
except jwt.InvalidTokenError:
raise Exception("State signature is not valid!")
try:
next_url = state["next"][0] or self.appbuilder.get_url_for_index
except (KeyError, IndexError):
next_url = self.appbuilder.get_url_for_index
return redirect(next_url)
class AuthRemoteUserView(AuthView):
login_template = ""
@expose("/login/")
def login(self):
username = request.environ.get("REMOTE_USER")
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
if username:
user = self.appbuilder.sm.auth_user_remote_user(username)
if user is None:
flash(as_unicode(self.invalid_login_message), "warning")
else:
login_user(user)
else:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_index)
| import datetime
import logging
import re
from typing import Optional
from urllib.parse import urlparse
from flask import (
abort,
current_app,
flash,
g,
redirect,
request,
Response,
session,
url_for,
)
from flask_babel import lazy_gettext
from flask_login import login_user, logout_user
import jwt
from werkzeug.security import generate_password_hash
from wtforms import PasswordField, validators
from wtforms.validators import EqualTo
from .decorators import has_access
from .forms import LoginForm_db, LoginForm_oid, ResetPasswordForm, UserInfoEdit
from .._compat import as_unicode
from ..actions import action
from ..baseviews import BaseView
from ..charts.views import DirectByChartView
from ..fieldwidgets import BS3PasswordFieldWidget
from ..utils.base import lazy_formatter_gettext
from ..views import expose, ModelView, SimpleFormView
from ..widgets import ListWidget, ShowWidget
log = logging.getLogger(__name__)
class PermissionModelView(ModelView):
route_base = "/permissions"
base_permissions = ["can_list"]
list_title = lazy_gettext("List Base Permissions")
show_title = lazy_gettext("Show Base Permission")
add_title = lazy_gettext("Add Base Permission")
edit_title = lazy_gettext("Edit Base Permission")
label_columns = {"name": lazy_gettext("Name")}
class ViewMenuModelView(ModelView):
route_base = "/viewmenus"
base_permissions = ["can_list"]
list_title = lazy_gettext("List View Menus")
show_title = lazy_gettext("Show View Menu")
add_title = lazy_gettext("Add View Menu")
edit_title = lazy_gettext("Edit View Menu")
label_columns = {"name": lazy_gettext("Name")}
class PermissionViewModelView(ModelView):
route_base = "/permissionviews"
base_permissions = ["can_list"]
list_title = lazy_gettext("List Permissions on Views/Menus")
show_title = lazy_gettext("Show Permission on Views/Menus")
add_title = lazy_gettext("Add Permission on Views/Menus")
edit_title = lazy_gettext("Edit Permission on Views/Menus")
label_columns = {
"permission": lazy_gettext("Permission"),
"view_menu": lazy_gettext("View/Menu"),
}
list_columns = ["permission", "view_menu"]
class ResetMyPasswordView(SimpleFormView):
"""
View for resetting own user password
"""
route_base = "/resetmypassword"
form = ResetPasswordForm
form_title = lazy_gettext("Reset Password Form")
redirect_url = "/"
message = lazy_gettext("Password Changed")
def form_post(self, form):
self.appbuilder.sm.reset_password(g.user.id, form.password.data)
flash(as_unicode(self.message), "info")
class ResetPasswordView(SimpleFormView):
"""
View for reseting all users password
"""
route_base = "/resetpassword"
form = ResetPasswordForm
form_title = lazy_gettext("Reset Password Form")
redirect_url = "/"
message = lazy_gettext("Password Changed")
def form_post(self, form):
pk = request.args.get("pk")
self.appbuilder.sm.reset_password(pk, form.password.data)
flash(as_unicode(self.message), "info")
class UserInfoEditView(SimpleFormView):
form = UserInfoEdit
form_title = lazy_gettext("Edit User Information")
redirect_url = "/"
message = lazy_gettext("User information changed")
def form_get(self, form):
item = self.appbuilder.sm.get_user_by_id(g.user.id)
# fills the form generic solution
for key, value in form.data.items():
if key == "csrf_token":
continue
form_field = getattr(form, key)
form_field.data = getattr(item, key)
def form_post(self, form):
form = self.form.refresh(request.form)
item = self.appbuilder.sm.get_user_by_id(g.user.id)
form.populate_obj(item)
self.appbuilder.sm.update_user(item)
flash(as_unicode(self.message), "info")
def _roles_custom_formatter(string: str) -> str:
if current_app.config.get("AUTH_ROLES_SYNC_AT_LOGIN", False):
string += (
". <div class='alert alert-warning' role='alert'>"
"AUTH_ROLES_SYNC_AT_LOGIN is enabled, changes to this field will "
"not persist between user logins."
"</div>"
)
return string
class UserModelView(ModelView):
route_base = "/users"
list_title = lazy_gettext("List Users")
show_title = lazy_gettext("Show User")
add_title = lazy_gettext("Add User")
edit_title = lazy_gettext("Edit User")
label_columns = {
"get_full_name": lazy_gettext("Full Name"),
"first_name": lazy_gettext("First Name"),
"last_name": lazy_gettext("Last Name"),
"username": lazy_gettext("User Name"),
"password": lazy_gettext("Password"),
"active": lazy_gettext("Is Active?"),
"email": lazy_gettext("Email"),
"roles": lazy_gettext("Role"),
"last_login": lazy_gettext("Last login"),
"login_count": lazy_gettext("Login count"),
"fail_login_count": lazy_gettext("Failed login count"),
"created_on": lazy_gettext("Created on"),
"created_by": lazy_gettext("Created by"),
"changed_on": lazy_gettext("Changed on"),
"changed_by": lazy_gettext("Changed by"),
}
description_columns = {
"first_name": lazy_gettext("Write the user first name or names"),
"last_name": lazy_gettext("Write the user last name"),
"username": lazy_gettext(
"Username valid for authentication on DB or LDAP, unused for OID auth"
),
"password": lazy_gettext(
"Please use a good password policy,"
" this application does not check this for you"
),
"active": lazy_gettext(
"It's not a good policy to remove a user, just make it inactive"
),
"email": lazy_gettext("The user's email, this will also be used for OID auth"),
"roles": lazy_formatter_gettext(
"The user role on the application,"
" this will associate with a list of permissions",
_roles_custom_formatter,
),
"conf_password": lazy_gettext("Please rewrite the user's password to confirm"),
}
list_columns = ["first_name", "last_name", "username", "email", "active", "roles"]
show_fieldsets = [
(
lazy_gettext("User info"),
{"fields": ["username", "active", "roles", "login_count"]},
),
(
lazy_gettext("Personal Info"),
{"fields": ["first_name", "last_name", "email"], "expanded": True},
),
(
lazy_gettext("Audit Info"),
{
"fields": [
"last_login",
"fail_login_count",
"created_on",
"created_by",
"changed_on",
"changed_by",
],
"expanded": False,
},
),
]
user_show_fieldsets = [
(
lazy_gettext("User info"),
{"fields": ["username", "active", "roles", "login_count"]},
),
(
lazy_gettext("Personal Info"),
{"fields": ["first_name", "last_name", "email"], "expanded": True},
),
]
search_exclude_columns = ["password"]
add_columns = ["first_name", "last_name", "username", "active", "email", "roles"]
edit_columns = ["first_name", "last_name", "username", "active", "email", "roles"]
user_info_title = lazy_gettext("Your user information")
@expose("/userinfo/")
@has_access
def userinfo(self):
item = self.datamodel.get(g.user.id, self._base_filters)
widgets = self._get_show_widget(
g.user.id, item, show_fieldsets=self.user_show_fieldsets
)
self.update_redirect()
return self.render_template(
self.show_template,
title=self.user_info_title,
widgets=widgets,
appbuilder=self.appbuilder,
)
@action("userinfoedit", lazy_gettext("Edit User"), "", "fa-edit", multiple=False)
def userinfoedit(self, item):
return redirect(
url_for(self.appbuilder.sm.userinfoeditview.__name__ + ".this_form_get")
)
class UserOIDModelView(UserModelView):
"""
View that add OID specifics to User view.
Override to implement your own custom view.
Then override useroidmodelview property on SecurityManager
"""
pass
class UserLDAPModelView(UserModelView):
"""
View that add LDAP specifics to User view.
Override to implement your own custom view.
Then override userldapmodelview property on SecurityManager
"""
pass
class UserOAuthModelView(UserModelView):
"""
View that add OAUTH specifics to User view.
Override to implement your own custom view.
Then override userldapmodelview property on SecurityManager
"""
pass
class UserRemoteUserModelView(UserModelView):
"""
View that add REMOTE_USER specifics to User view.
Override to implement your own custom view.
Then override userldapmodelview property on SecurityManager
"""
pass
class UserDBModelView(UserModelView):
"""
View that add DB specifics to User view.
Override to implement your own custom view.
Then override userdbmodelview property on SecurityManager
"""
add_form_extra_fields = {
"password": PasswordField(
lazy_gettext("Password"),
description=lazy_gettext(
"Please use a good password policy,"
" this application does not check this for you"
),
validators=[validators.DataRequired()],
widget=BS3PasswordFieldWidget(),
),
"conf_password": PasswordField(
lazy_gettext("Confirm Password"),
description=lazy_gettext("Please rewrite the user's password to confirm"),
validators=[
EqualTo("password", message=lazy_gettext("Passwords must match"))
],
widget=BS3PasswordFieldWidget(),
),
}
add_columns = [
"first_name",
"last_name",
"username",
"active",
"email",
"roles",
"password",
"conf_password",
]
@expose("/show/<pk>", methods=["GET"])
@has_access
def show(self, pk):
actions = dict()
actions["resetpasswords"] = self.actions.get("resetpasswords")
item = self.datamodel.get(pk, self._base_filters)
if not item:
abort(404)
widgets = self._get_show_widget(pk, item, actions=actions)
self.update_redirect()
return self.render_template(
self.show_template,
pk=pk,
title=self.show_title,
widgets=widgets,
appbuilder=self.appbuilder,
related_views=self._related_views,
)
@expose("/userinfo/")
@has_access
def userinfo(self):
actions = dict()
actions["resetmypassword"] = self.actions.get("resetmypassword")
actions["userinfoedit"] = self.actions.get("userinfoedit")
item = self.datamodel.get(g.user.id, self._base_filters)
widgets = self._get_show_widget(
g.user.id, item, actions=actions, show_fieldsets=self.user_show_fieldsets
)
self.update_redirect()
return self.render_template(
self.show_template,
title=self.user_info_title,
widgets=widgets,
appbuilder=self.appbuilder,
)
@action(
"resetmypassword",
lazy_gettext("Reset my password"),
"",
"fa-lock",
multiple=False,
)
def resetmypassword(self, item):
return redirect(
url_for(self.appbuilder.sm.resetmypasswordview.__name__ + ".this_form_get")
)
@action(
"resetpasswords", lazy_gettext("Reset Password"), "", "fa-lock", multiple=False
)
def resetpasswords(self, item):
return redirect(
url_for(
self.appbuilder.sm.resetpasswordview.__name__ + ".this_form_get",
pk=item.id,
)
)
def pre_update(self, item):
item.changed_on = datetime.datetime.now()
item.changed_by_fk = g.user.id
def pre_add(self, item):
item.password = generate_password_hash(item.password)
class UserStatsChartView(DirectByChartView):
chart_title = lazy_gettext("User Statistics")
label_columns = {
"username": lazy_gettext("User Name"),
"login_count": lazy_gettext("Login count"),
"fail_login_count": lazy_gettext("Failed login count"),
}
search_columns = UserModelView.search_columns
definitions = [
{"label": "Login Count", "group": "username", "series": ["login_count"]},
{
"label": "Failed Login Count",
"group": "username",
"series": ["fail_login_count"],
},
]
class RoleListWidget(ListWidget):
template = "appbuilder/general/widgets/roles/list.html"
def __init__(self, **kwargs):
kwargs["appbuilder"] = current_app.appbuilder
super().__init__(**kwargs)
class RoleShowWidget(ShowWidget):
template = "appbuilder/general/widgets/roles/show.html"
def __init__(self, **kwargs):
kwargs["appbuilder"] = current_app.appbuilder
super().__init__(**kwargs)
class RoleModelView(ModelView):
route_base = "/roles"
list_title = lazy_gettext("List Roles")
show_title = lazy_gettext("Show Role")
add_title = lazy_gettext("Add Role")
edit_title = lazy_gettext("Edit Role")
list_widget = RoleListWidget
show_widget = RoleShowWidget
label_columns = {
"name": lazy_gettext("Name"),
"permissions": lazy_gettext("Permissions"),
}
list_columns = ["name", "permissions"]
show_columns = ["name", "permissions"]
edit_columns = ["name", "permissions"]
add_columns = edit_columns
order_columns = ["name"]
@action(
"copyrole",
lazy_gettext("Copy Role"),
lazy_gettext("Copy the selected roles?"),
icon="fa-copy",
single=False,
)
def copy_role(self, items):
self.update_redirect()
for item in items:
new_role = item.__class__()
new_role.name = item.name
new_role.permissions = item.permissions
new_role.name = new_role.name + " copy"
self.datamodel.add(new_role)
return redirect(self.get_redirect())
class RegisterUserModelView(ModelView):
route_base = "/registeruser"
base_permissions = ["can_list", "can_show", "can_delete"]
list_title = lazy_gettext("List of Registration Requests")
show_title = lazy_gettext("Show Registration")
list_columns = ["username", "registration_date", "email"]
show_exclude_columns = ["password"]
search_exclude_columns = ["password"]
class AuthView(BaseView):
route_base = ""
login_template = ""
invalid_login_message = lazy_gettext("Invalid login. Please try again.")
title = lazy_gettext("Sign In")
@expose("/login/", methods=["GET", "POST"])
def login(self):
pass
@expose("/logout/")
def logout(self):
logout_user()
return redirect(self.appbuilder.get_url_for_index)
class AuthDBView(AuthView):
login_template = "appbuilder/general/security/login_db.html"
@expose("/login/", methods=["GET", "POST"])
def login(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
form = LoginForm_db()
if form.validate_on_submit():
user = self.appbuilder.sm.auth_user_db(
form.username.data, form.password.data
)
if not user:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
login_user(user, remember=False)
return redirect(self.appbuilder.get_url_for_index)
return self.render_template(
self.login_template, title=self.title, form=form, appbuilder=self.appbuilder
)
class AuthLDAPView(AuthView):
login_template = "appbuilder/general/security/login_ldap.html"
@expose("/login/", methods=["GET", "POST"])
def login(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
form = LoginForm_db()
if form.validate_on_submit():
user = self.appbuilder.sm.auth_user_ldap(
form.username.data, form.password.data
)
if not user:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
login_user(user, remember=False)
return redirect(self.appbuilder.get_url_for_index)
return self.render_template(
self.login_template, title=self.title, form=form, appbuilder=self.appbuilder
)
class AuthOIDView(AuthView):
login_template = "appbuilder/general/security/login_oid.html"
oid_ask_for = ["email"]
oid_ask_for_optional = []
def __init__(self):
super(AuthOIDView, self).__init__()
@expose("/login/", methods=["GET", "POST"])
def login(self, flag=True):
@self.appbuilder.sm.oid.loginhandler
def login_handler(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
form = LoginForm_oid()
if form.validate_on_submit():
session["remember_me"] = form.remember_me.data
return self.appbuilder.sm.oid.try_login(
form.openid.data,
ask_for=self.oid_ask_for,
ask_for_optional=self.oid_ask_for_optional,
)
return self.render_template(
self.login_template,
title=self.title,
form=form,
providers=self.appbuilder.sm.openid_providers,
appbuilder=self.appbuilder,
)
@self.appbuilder.sm.oid.after_login
def after_login(resp):
if resp.email is None or resp.email == "":
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
user = self.appbuilder.sm.auth_user_oid(resp.email)
if user is None:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
remember_me = False
if "remember_me" in session:
remember_me = session["remember_me"]
session.pop("remember_me", None)
login_user(user, remember=remember_me)
return redirect(self.appbuilder.get_url_for_index)
return login_handler(self)
class AuthOAuthView(AuthView):
login_template = "appbuilder/general/security/login_oauth.html"
@expose("/login/")
@expose("/login/<provider>")
@expose("/login/<provider>/<register>")
def login(
self, provider: Optional[str] = None, register: Optional[str] = None
) -> Response:
log.debug("Provider: {0}".format(provider))
if g.user is not None and g.user.is_authenticated:
log.debug("Already authenticated {0}".format(g.user))
return redirect(self.appbuilder.get_url_for_index)
if provider is None:
if len(self.appbuilder.sm.oauth_providers) > 1:
return self.render_template(
self.login_template,
providers=self.appbuilder.sm.oauth_providers,
title=self.title,
appbuilder=self.appbuilder,
)
else:
provider = self.appbuilder.sm.oauth_providers[0]["name"]
log.debug("Going to call authorize for: {0}".format(provider))
state = jwt.encode(
request.args.to_dict(flat=False),
self.appbuilder.app.config["SECRET_KEY"],
algorithm="HS256",
)
try:
if register:
log.debug("Login to Register")
session["register"] = True
if provider == "twitter":
return self.appbuilder.sm.oauth_remotes[provider].authorize_redirect(
redirect_uri=url_for(
".oauth_authorized",
provider=provider,
_external=True,
state=state,
)
)
else:
return self.appbuilder.sm.oauth_remotes[provider].authorize_redirect(
redirect_uri=url_for(
".oauth_authorized", provider=provider, _external=True
),
state=state.decode("ascii") if isinstance(state, bytes) else state,
)
except Exception as e:
log.error("Error on OAuth authorize: {0}".format(e))
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_index)
@expose("/oauth-authorized/<provider>")
def oauth_authorized(self, provider: str) -> Response:
log.debug("Authorized init")
if provider not in self.appbuilder.sm.oauth_remotes:
flash(u"Provider not supported.", "warning")
log.warning("OAuth authorized got an unknown provider %s", provider)
return redirect(self.appbuilder.get_url_for_login)
resp = self.appbuilder.sm.oauth_remotes[provider].authorize_access_token()
if resp is None:
flash(u"You denied the request to sign in.", "warning")
return redirect(self.appbuilder.get_url_for_login)
log.debug("OAUTH Authorized resp: {0}".format(resp))
# Retrieves specific user info from the provider
try:
self.appbuilder.sm.set_oauth_session(provider, resp)
userinfo = self.appbuilder.sm.oauth_user_info(provider, resp)
except Exception as e:
log.error("Error returning OAuth user info: {0}".format(e))
user = None
else:
log.debug("User info retrieved from {0}: {1}".format(provider, userinfo))
# User email is not whitelisted
if provider in self.appbuilder.sm.oauth_whitelists:
whitelist = self.appbuilder.sm.oauth_whitelists[provider]
allow = False
for e in whitelist:
if re.search(e, userinfo["email"]):
allow = True
break
if not allow:
flash(u"You are not authorized.", "warning")
return redirect(self.appbuilder.get_url_for_login)
else:
log.debug("No whitelist for OAuth provider")
user = self.appbuilder.sm.auth_user_oauth(userinfo)
if user is None:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
else:
login_user(user)
try:
state = jwt.decode(
request.args["state"],
self.appbuilder.app.config["SECRET_KEY"],
algorithms=["HS256"],
)
except jwt.InvalidTokenError:
raise Exception("State signature is not valid!")
next_url = self.appbuilder.get_url_for_index
# Check if there is a next url on state
if "next" in state and len(state["next"]) > 0:
parsed_uri = urlparse(state["next"][0])
if parsed_uri.netloc != request.host:
log.warning("Got an invalid next URL: %s", parsed_uri.netloc)
else:
next_url = state["next"][0]
return redirect(next_url)
class AuthRemoteUserView(AuthView):
login_template = ""
@expose("/login/")
def login(self):
username = request.environ.get("REMOTE_USER")
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
if username:
user = self.appbuilder.sm.auth_user_remote_user(username)
if user is None:
flash(as_unicode(self.invalid_login_message), "warning")
else:
login_user(user)
else:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_index)
| open_redirect | {
"code": [
"from flask import abort, current_app, flash, g, redirect, request, session, url_for",
" \"\"\"",
" For Future Use, API Auth, must check howto keep REST stateless",
" \"\"\"",
" \"\"\"",
" @expose_api(name='auth',url='/api/auth')",
" def auth(self):",
" if g.user is not None and g.user.is_authenticated:",
" http_return_code = 401",
" response = make_response(",
" jsonify(",
" {",
" 'message': 'Login Failed already authenticated',",
" 'severity': 'critical'",
" }",
" ),",
" http_return_code",
" )",
" username = str(request.args.get('username'))",
" password = str(request.args.get('password'))",
" user = self.appbuilder.sm.auth_user_ldap(username, password)",
" if not user:",
" http_return_code = 401",
" response = make_response(",
" jsonify(",
" {",
" 'message': 'Login Failed',",
" 'severity': 'critical'",
" }",
" ),",
" http_return_code",
" )",
" else:",
" login_user(user, remember=False)",
" http_return_code = 201",
" response = make_response(",
" jsonify(",
" {",
" 'message': 'Login Success',",
" 'severity': 'info'",
" }",
" ),",
" http_return_code",
" )",
" return response",
" \"\"\"",
" def login(self, provider=None, register=None):",
" def oauth_authorized(self, provider):",
" try:",
" next_url = state[\"next\"][0] or self.appbuilder.get_url_for_index",
" except (KeyError, IndexError):",
" next_url = self.appbuilder.get_url_for_index"
],
"line_no": [
5,
540,
541,
542,
544,
545,
546,
547,
548,
549,
550,
551,
552,
553,
554,
555,
556,
557,
558,
559,
560,
561,
562,
563,
564,
565,
566,
567,
568,
569,
570,
571,
572,
573,
574,
575,
576,
577,
578,
579,
580,
581,
582,
583,
584,
585,
644,
693,
738,
739,
740,
741
]
} | {
"code": [
"from urllib.parse import urlparse",
"from flask import (",
" abort,",
" current_app,",
" flash,",
" g,",
" request,",
" Response,",
" session,",
" url_for,",
")",
" def login(",
" self, provider: Optional[str] = None, register: Optional[str] = None",
" ) -> Response:",
" if provider not in self.appbuilder.sm.oauth_remotes:",
" flash(u\"Provider not supported.\", \"warning\")",
" log.warning(\"OAuth authorized got an unknown provider %s\", provider)",
" return redirect(self.appbuilder.get_url_for_login)",
" if \"next\" in state and len(state[\"next\"]) > 0:",
" parsed_uri = urlparse(state[\"next\"][0])",
" if parsed_uri.netloc != request.host:",
" log.warning(\"Got an invalid next URL: %s\", parsed_uri.netloc)",
" else:",
" next_url = state[\"next\"][0]"
],
"line_no": [
5,
7,
8,
9,
10,
11,
13,
14,
15,
16,
17,
609,
610,
611,
662,
663,
664,
665,
711,
712,
713,
714,
715,
716
]
} | import datetime
import .logging
import re
from flask import abort, current_app, flash, g, redirect, request, VAR_53, url_for
from flask_babel import lazy_gettext
from flask_login import .login_user, logout_user
import jwt
from werkzeug.security import generate_password_hash
from wtforms import PasswordField, validators
from wtforms.validators import EqualTo
from .decorators import has_access
from .forms import LoginForm_db, LoginForm_oid, ResetPasswordForm, UserInfoEdit
from .._compat import as_unicode
from ..actions import action
from ..baseviews import BaseView
from ..charts.views import DirectByChartView
from ..fieldwidgets import BS3PasswordFieldWidget
from ..utils.base import lazy_formatter_gettext
from ..views import expose, ModelView, SimpleFormView
from ..widgets import ListWidget, ShowWidget
VAR_0 = logging.getLogger(__name__)
class CLASS_0(ModelView):
VAR_2 = "/permissions"
VAR_3 = ["can_list"]
VAR_4 = lazy_gettext("List Base Permissions")
VAR_5 = lazy_gettext("Show Base Permission")
VAR_6 = lazy_gettext("Add Base Permission")
VAR_7 = lazy_gettext("Edit Base Permission")
VAR_8 = {"name": lazy_gettext("Name")}
class CLASS_1(ModelView):
VAR_2 = "/viewmenus"
VAR_3 = ["can_list"]
VAR_4 = lazy_gettext("List View Menus")
VAR_5 = lazy_gettext("Show View Menu")
VAR_6 = lazy_gettext("Add View Menu")
VAR_7 = lazy_gettext("Edit View Menu")
VAR_8 = {"name": lazy_gettext("Name")}
class CLASS_2(ModelView):
VAR_2 = "/permissionviews"
VAR_3 = ["can_list"]
VAR_4 = lazy_gettext("List Permissions on Views/Menus")
VAR_5 = lazy_gettext("Show Permission on Views/Menus")
VAR_6 = lazy_gettext("Add Permission on Views/Menus")
VAR_7 = lazy_gettext("Edit Permission on Views/Menus")
VAR_8 = {
"permission": lazy_gettext("Permission"),
"view_menu": lazy_gettext("View/Menu"),
}
VAR_9 = ["permission", "view_menu"]
class CLASS_3(SimpleFormView):
VAR_2 = "/resetmypassword"
VAR_10 = ResetPasswordForm
VAR_11 = lazy_gettext("Reset Password Form")
VAR_12 = "/"
VAR_13 = lazy_gettext("Password Changed")
def FUNC_1(self, VAR_10):
self.appbuilder.sm.reset_password(g.user.id, VAR_10.password.data)
flash(as_unicode(self.message), "info")
class CLASS_4(SimpleFormView):
VAR_2 = "/resetpassword"
VAR_10 = ResetPasswordForm
VAR_11 = lazy_gettext("Reset Password Form")
VAR_12 = "/"
VAR_13 = lazy_gettext("Password Changed")
def FUNC_1(self, VAR_10):
VAR_23 = request.args.get("pk")
self.appbuilder.sm.reset_password(VAR_23, VAR_10.password.data)
flash(as_unicode(self.message), "info")
class CLASS_5(SimpleFormView):
VAR_10 = UserInfoEdit
VAR_11 = lazy_gettext("Edit User Information")
VAR_12 = "/"
VAR_13 = lazy_gettext("User information changed")
def FUNC_2(self, VAR_10):
VAR_21 = self.appbuilder.sm.get_user_by_id(g.user.id)
for key, value in VAR_10.data.items():
if key == "csrf_token":
continue
VAR_48 = getattr(VAR_10, key)
VAR_48.data = getattr(VAR_21, key)
def FUNC_1(self, VAR_10):
VAR_10 = self.form.refresh(request.form)
VAR_21 = self.appbuilder.sm.get_user_by_id(g.user.id)
VAR_10.populate_obj(VAR_21)
self.appbuilder.sm.update_user(VAR_21)
flash(as_unicode(self.message), "info")
def FUNC_0(VAR_1: str) -> str:
if current_app.config.get("AUTH_ROLES_SYNC_AT_LOGIN", False):
VAR_1 += (
". <div class='alert alert-warning' role='alert'>"
"AUTH_ROLES_SYNC_AT_LOGIN is enabled, changes to this field will "
"not persist between VAR_50 logins."
"</div>"
)
return VAR_1
class CLASS_6(ModelView):
VAR_2 = "/users"
VAR_4 = lazy_gettext("List Users")
VAR_5 = lazy_gettext("Show User")
VAR_6 = lazy_gettext("Add User")
VAR_7 = lazy_gettext("Edit User")
VAR_8 = {
"get_full_name": lazy_gettext("Full Name"),
"first_name": lazy_gettext("First Name"),
"last_name": lazy_gettext("Last Name"),
"username": lazy_gettext("User Name"),
"password": lazy_gettext("Password"),
"active": lazy_gettext("Is Active?"),
"email": lazy_gettext("Email"),
"roles": lazy_gettext("Role"),
"last_login": lazy_gettext("Last login"),
"login_count": lazy_gettext("Login count"),
"fail_login_count": lazy_gettext("Failed FUNC_11 count"),
"created_on": lazy_gettext("Created on"),
"created_by": lazy_gettext("Created by"),
"changed_on": lazy_gettext("Changed on"),
"changed_by": lazy_gettext("Changed by"),
}
VAR_14 = {
"first_name": lazy_gettext("Write the VAR_50 first name or names"),
"last_name": lazy_gettext("Write the VAR_50 last name"),
"username": lazy_gettext(
"Username valid for authentication on DB or LDAP, unused for OID auth"
),
"password": lazy_gettext(
"Please use a good password policy,"
" this application does not check this for you"
),
"active": lazy_gettext(
"It's not a good policy to remove a VAR_50, just make it inactive"
),
"email": lazy_gettext("The user's email, this will also be used for OID auth"),
"roles": lazy_formatter_gettext(
"The VAR_50 role on the application,"
" this will associate with a list of permissions",
FUNC_0,
),
"conf_password": lazy_gettext("Please rewrite the user's password to confirm"),
}
VAR_9 = ["first_name", "last_name", "username", "email", "active", "roles"]
VAR_15 = [
(
lazy_gettext("User info"),
{"fields": ["username", "active", "roles", "login_count"]},
),
(
lazy_gettext("Personal Info"),
{"fields": ["first_name", "last_name", "email"], "expanded": True},
),
(
lazy_gettext("Audit Info"),
{
"fields": [
"last_login",
"fail_login_count",
"created_on",
"created_by",
"changed_on",
"changed_by",
],
"expanded": False,
},
),
]
VAR_16 = [
(
lazy_gettext("User info"),
{"fields": ["username", "active", "roles", "login_count"]},
),
(
lazy_gettext("Personal Info"),
{"fields": ["first_name", "last_name", "email"], "expanded": True},
),
]
VAR_17 = ["password"]
VAR_18 = ["first_name", "last_name", "username", "active", "email", "roles"]
VAR_19 = ["first_name", "last_name", "username", "active", "email", "roles"]
VAR_20 = lazy_gettext("Your VAR_50 information")
@expose("/VAR_52/")
@has_access
def VAR_52(self):
VAR_21 = self.datamodel.get(g.user.id, self._base_filters)
VAR_43 = self._get_show_widget(
g.user.id, VAR_21, VAR_15=self.user_show_fieldsets
)
self.update_redirect()
return self.render_template(
self.show_template,
VAR_37=self.user_info_title,
VAR_43=widgets,
appbuilder=self.appbuilder,
)
@action("userinfoedit", lazy_gettext("Edit User"), "", "fa-edit", multiple=False)
def FUNC_4(self, VAR_21):
return redirect(
url_for(self.appbuilder.sm.userinfoeditview.__name__ + ".this_form_get")
)
class CLASS_7(CLASS_6):
pass
class CLASS_8(CLASS_6):
pass
class CLASS_9(CLASS_6):
pass
class CLASS_10(CLASS_6):
pass
class CLASS_11(CLASS_6):
VAR_22 = {
"password": PasswordField(
lazy_gettext("Password"),
description=lazy_gettext(
"Please use a good password policy,"
" this application does not check this for you"
),
validators=[validators.DataRequired()],
widget=BS3PasswordFieldWidget(),
),
"conf_password": PasswordField(
lazy_gettext("Confirm Password"),
description=lazy_gettext("Please rewrite the user's password to confirm"),
validators=[
EqualTo("password", VAR_13=lazy_gettext("Passwords must match"))
],
widget=BS3PasswordFieldWidget(),
),
}
VAR_18 = [
"first_name",
"last_name",
"username",
"active",
"email",
"roles",
"password",
"conf_password",
]
@expose("/FUNC_5/<VAR_23>", methods=["GET"])
@has_access
def FUNC_5(self, VAR_23):
VAR_44 = dict()
VAR_44["resetpasswords"] = self.actions.get("resetpasswords")
VAR_21 = self.datamodel.get(VAR_23, self._base_filters)
if not VAR_21:
abort(404)
VAR_43 = self._get_show_widget(VAR_23, VAR_21, VAR_44=actions)
self.update_redirect()
return self.render_template(
self.show_template,
VAR_23=pk,
VAR_37=self.show_title,
VAR_43=widgets,
appbuilder=self.appbuilder,
related_views=self._related_views,
)
@expose("/VAR_52/")
@has_access
def VAR_52(self):
VAR_44 = dict()
VAR_44["resetmypassword"] = self.actions.get("resetmypassword")
VAR_44["userinfoedit"] = self.actions.get("userinfoedit")
VAR_21 = self.datamodel.get(g.user.id, self._base_filters)
VAR_43 = self._get_show_widget(
g.user.id, VAR_21, VAR_44=actions, VAR_15=self.user_show_fieldsets
)
self.update_redirect()
return self.render_template(
self.show_template,
VAR_37=self.user_info_title,
VAR_43=widgets,
appbuilder=self.appbuilder,
)
@action(
"resetmypassword",
lazy_gettext("Reset my password"),
"",
"fa-lock",
multiple=False,
)
def FUNC_6(self, VAR_21):
return redirect(
url_for(self.appbuilder.sm.resetmypasswordview.__name__ + ".this_form_get")
)
@action(
"resetpasswords", lazy_gettext("Reset Password"), "", "fa-lock", multiple=False
)
def FUNC_7(self, VAR_21):
return redirect(
url_for(
self.appbuilder.sm.resetpasswordview.__name__ + ".this_form_get",
VAR_23=VAR_21.id,
)
)
def FUNC_8(self, VAR_21):
item.changed_on = datetime.datetime.now()
VAR_21.changed_by_fk = g.user.id
def FUNC_9(self, VAR_21):
item.password = generate_password_hash(VAR_21.password)
class CLASS_12(DirectByChartView):
VAR_24 = lazy_gettext("User Statistics")
VAR_8 = {
"username": lazy_gettext("User Name"),
"login_count": lazy_gettext("Login count"),
"fail_login_count": lazy_gettext("Failed FUNC_11 count"),
}
VAR_25 = CLASS_6.search_columns
VAR_26 = [
{"label": "Login Count", "group": "username", "series": ["login_count"]},
{
"label": "Failed Login Count",
"group": "username",
"series": ["fail_login_count"],
},
]
class CLASS_13(ListWidget):
VAR_27 = "appbuilder/general/VAR_43/roles/list.html"
def __init__(self, **VAR_28):
kwargs["appbuilder"] = current_app.appbuilder
super().__init__(**VAR_28)
class CLASS_14(ShowWidget):
VAR_27 = "appbuilder/general/VAR_43/roles/FUNC_5.html"
def __init__(self, **VAR_28):
kwargs["appbuilder"] = current_app.appbuilder
super().__init__(**VAR_28)
class CLASS_15(ModelView):
VAR_2 = "/roles"
VAR_4 = lazy_gettext("List Roles")
VAR_5 = lazy_gettext("Show Role")
VAR_6 = lazy_gettext("Add Role")
VAR_7 = lazy_gettext("Edit Role")
VAR_29 = CLASS_13
VAR_30 = CLASS_14
VAR_8 = {
"name": lazy_gettext("Name"),
"permissions": lazy_gettext("Permissions"),
}
VAR_9 = ["name", "permissions"]
VAR_31 = ["name", "permissions"]
VAR_19 = ["name", "permissions"]
VAR_18 = VAR_19
VAR_32 = ["name"]
@action(
"copyrole",
lazy_gettext("Copy Role"),
lazy_gettext("Copy the selected roles?"),
icon="fa-copy",
single=False,
)
def FUNC_10(self, VAR_33):
self.update_redirect()
for VAR_21 in VAR_33:
VAR_49 = VAR_21.__class__()
VAR_49.name = VAR_21.name
VAR_49.permissions = VAR_21.permissions
VAR_49.name = VAR_49.name + " copy"
self.datamodel.add(VAR_49)
return redirect(self.get_redirect())
class CLASS_16(ModelView):
VAR_2 = "/registeruser"
VAR_3 = ["can_list", "can_show", "can_delete"]
VAR_4 = lazy_gettext("List of Registration Requests")
VAR_5 = lazy_gettext("Show Registration")
VAR_9 = ["username", "registration_date", "email"]
VAR_34 = ["password"]
VAR_17 = ["password"]
class CLASS_17(BaseView):
VAR_2 = ""
VAR_35 = ""
VAR_36 = lazy_gettext("Invalid FUNC_11. Please try again.")
VAR_37 = lazy_gettext("Sign In")
@expose("/FUNC_11/", methods=["GET", "POST"])
def FUNC_11(self):
pass
@expose("/FUNC_12/")
def FUNC_12(self):
logout_user()
return redirect(self.appbuilder.get_url_for_index)
class CLASS_18(CLASS_17):
VAR_35 = "appbuilder/general/security/login_db.html"
@expose("/FUNC_11/", methods=["GET", "POST"])
def FUNC_11(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
VAR_10 = LoginForm_db()
if VAR_10.validate_on_submit():
VAR_50 = self.appbuilder.sm.auth_user_db(
VAR_10.username.data, VAR_10.password.data
)
if not VAR_50:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
login_user(VAR_50, remember=False)
return redirect(self.appbuilder.get_url_for_index)
return self.render_template(
self.login_template, VAR_37=self.title, VAR_10=form, appbuilder=self.appbuilder
)
class CLASS_19(CLASS_17):
VAR_35 = "appbuilder/general/security/login_ldap.html"
@expose("/FUNC_11/", methods=["GET", "POST"])
def FUNC_11(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
VAR_10 = LoginForm_db()
if VAR_10.validate_on_submit():
VAR_50 = self.appbuilder.sm.auth_user_ldap(
VAR_10.username.data, VAR_10.password.data
)
if not VAR_50:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
login_user(VAR_50, remember=False)
return redirect(self.appbuilder.get_url_for_index)
return self.render_template(
self.login_template, VAR_37=self.title, VAR_10=form, appbuilder=self.appbuilder
)
"""
For Future Use, API Auth, must check howto keep REST stateless
"""
"""
@expose_api(name='auth',url='/api/auth')
def auth(self):
if g.user is not None and g.user.is_authenticated:
http_return_code = 401
response = make_response(
jsonify(
{
'message': 'Login Failed already authenticated',
'severity': 'critical'
}
),
http_return_code
)
VAR_47 = str(request.args.get('username'))
password = str(request.args.get('password'))
VAR_50 = self.appbuilder.sm.auth_user_ldap(VAR_47, password)
if not VAR_50:
http_return_code = 401
response = make_response(
jsonify(
{
'message': 'Login Failed',
'severity': 'critical'
}
),
http_return_code
)
else:
login_user(VAR_50, remember=False)
http_return_code = 201
response = make_response(
jsonify(
{
'message': 'Login Success',
'severity': 'info'
}
),
http_return_code
)
return response
"""
class CLASS_20(CLASS_17):
VAR_35 = "appbuilder/general/security/login_oid.html"
VAR_38 = ["email"]
VAR_39 = []
def __init__(self):
super(CLASS_20, self).__init__()
@expose("/FUNC_11/", methods=["GET", "POST"])
def FUNC_11(self, VAR_40=True):
@self.appbuilder.sm.oid.loginhandler
def FUNC_14(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
VAR_10 = LoginForm_oid()
if VAR_10.validate_on_submit():
VAR_53["remember_me"] = VAR_10.remember_me.data
return self.appbuilder.sm.oid.try_login(
VAR_10.openid.data,
ask_for=self.oid_ask_for,
ask_for_optional=self.oid_ask_for_optional,
)
return self.render_template(
self.login_template,
VAR_37=self.title,
VAR_10=form,
providers=self.appbuilder.sm.openid_providers,
appbuilder=self.appbuilder,
)
@self.appbuilder.sm.oid.after_login
def FUNC_15(VAR_45):
if VAR_45.email is None or VAR_45.email == "":
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
VAR_50 = self.appbuilder.sm.auth_user_oid(VAR_45.email)
if VAR_50 is None:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
VAR_51 = False
if "remember_me" in VAR_53:
VAR_51 = VAR_53["remember_me"]
VAR_53.pop("remember_me", None)
login_user(VAR_50, remember=VAR_51)
return redirect(self.appbuilder.get_url_for_index)
return FUNC_14(self)
class CLASS_21(CLASS_17):
VAR_35 = "appbuilder/general/security/login_oauth.html"
@expose("/FUNC_11/")
@expose("/FUNC_11/<VAR_41>")
@expose("/FUNC_11/<VAR_41>/<VAR_42>")
def FUNC_11(self, VAR_41=None, VAR_42=None):
VAR_0.debug("Provider: {0}".format(VAR_41))
if g.user is not None and g.user.is_authenticated:
VAR_0.debug("Already authenticated {0}".format(g.user))
return redirect(self.appbuilder.get_url_for_index)
if VAR_41 is None:
if len(self.appbuilder.sm.oauth_providers) > 1:
return self.render_template(
self.login_template,
providers=self.appbuilder.sm.oauth_providers,
VAR_37=self.title,
appbuilder=self.appbuilder,
)
else:
VAR_41 = self.appbuilder.sm.oauth_providers[0]["name"]
VAR_0.debug("Going to call authorize for: {0}".format(VAR_41))
VAR_46 = jwt.encode(
request.args.to_dict(flat=False),
self.appbuilder.app.config["SECRET_KEY"],
algorithm="HS256",
)
try:
if VAR_42:
VAR_0.debug("Login to Register")
VAR_53["register"] = True
if VAR_41 == "twitter":
return self.appbuilder.sm.oauth_remotes[VAR_41].authorize_redirect(
redirect_uri=url_for(
".oauth_authorized",
VAR_41=provider,
_external=True,
VAR_46=state,
)
)
else:
return self.appbuilder.sm.oauth_remotes[VAR_41].authorize_redirect(
redirect_uri=url_for(
".oauth_authorized", VAR_41=provider, _external=True
),
VAR_46=state.decode("ascii") if isinstance(VAR_46, bytes) else VAR_46,
)
except Exception as e:
VAR_0.error("Error on OAuth authorize: {0}".format(e))
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_index)
@expose("/oauth-authorized/<VAR_41>")
def FUNC_13(self, VAR_41):
VAR_0.debug("Authorized init")
VAR_45 = self.appbuilder.sm.oauth_remotes[VAR_41].authorize_access_token()
if VAR_45 is None:
flash(u"You denied the request to sign in.", "warning")
return redirect(self.appbuilder.get_url_for_login)
VAR_0.debug("OAUTH Authorized VAR_45: {0}".format(VAR_45))
try:
self.appbuilder.sm.set_oauth_session(VAR_41, VAR_45)
VAR_52 = self.appbuilder.sm.oauth_user_info(VAR_41, VAR_45)
except Exception as e:
VAR_0.error("Error returning OAuth VAR_50 info: {0}".format(e))
VAR_50 = None
else:
VAR_0.debug("User info retrieved from {0}: {1}".format(VAR_41, VAR_52))
if VAR_41 in self.appbuilder.sm.oauth_whitelists:
VAR_54 = self.appbuilder.sm.oauth_whitelists[VAR_41]
VAR_55 = False
for e in VAR_54:
if re.search(e, VAR_52["email"]):
VAR_55 = True
break
if not VAR_55:
flash(u"You are not authorized.", "warning")
return redirect(self.appbuilder.get_url_for_login)
else:
VAR_0.debug("No VAR_54 for OAuth provider")
VAR_50 = self.appbuilder.sm.auth_user_oauth(VAR_52)
if VAR_50 is None:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
else:
login_user(VAR_50)
try:
VAR_46 = jwt.decode(
request.args["state"],
self.appbuilder.app.config["SECRET_KEY"],
algorithms=["HS256"],
)
except jwt.InvalidTokenError:
raise Exception("State signature is not valid!")
try:
VAR_56 = VAR_46["next"][0] or self.appbuilder.get_url_for_index
except (KeyError, IndexError):
VAR_56 = self.appbuilder.get_url_for_index
return redirect(VAR_56)
class CLASS_22(CLASS_17):
VAR_35 = ""
@expose("/FUNC_11/")
def FUNC_11(self):
VAR_47 = request.environ.get("REMOTE_USER")
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
if VAR_47:
VAR_50 = self.appbuilder.sm.auth_user_remote_user(VAR_47)
if VAR_50 is None:
flash(as_unicode(self.invalid_login_message), "warning")
else:
login_user(VAR_50)
else:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_index)
| import datetime
import .logging
import re
from typing import Optional
from urllib.parse import urlparse
from flask import (
abort,
current_app,
flash,
g,
redirect,
request,
Response,
VAR_54,
url_for,
)
from flask_babel import lazy_gettext
from flask_login import .login_user, logout_user
import jwt
from werkzeug.security import generate_password_hash
from wtforms import PasswordField, validators
from wtforms.validators import EqualTo
from .decorators import has_access
from .forms import LoginForm_db, LoginForm_oid, ResetPasswordForm, UserInfoEdit
from .._compat import as_unicode
from ..actions import action
from ..baseviews import BaseView
from ..charts.views import DirectByChartView
from ..fieldwidgets import BS3PasswordFieldWidget
from ..utils.base import lazy_formatter_gettext
from ..views import expose, ModelView, SimpleFormView
from ..widgets import ListWidget, ShowWidget
VAR_0 = logging.getLogger(__name__)
class CLASS_0(ModelView):
VAR_2 = "/permissions"
VAR_3 = ["can_list"]
VAR_4 = lazy_gettext("List Base Permissions")
VAR_5 = lazy_gettext("Show Base Permission")
VAR_6 = lazy_gettext("Add Base Permission")
VAR_7 = lazy_gettext("Edit Base Permission")
VAR_8 = {"name": lazy_gettext("Name")}
class CLASS_1(ModelView):
VAR_2 = "/viewmenus"
VAR_3 = ["can_list"]
VAR_4 = lazy_gettext("List View Menus")
VAR_5 = lazy_gettext("Show View Menu")
VAR_6 = lazy_gettext("Add View Menu")
VAR_7 = lazy_gettext("Edit View Menu")
VAR_8 = {"name": lazy_gettext("Name")}
class CLASS_2(ModelView):
VAR_2 = "/permissionviews"
VAR_3 = ["can_list"]
VAR_4 = lazy_gettext("List Permissions on Views/Menus")
VAR_5 = lazy_gettext("Show Permission on Views/Menus")
VAR_6 = lazy_gettext("Add Permission on Views/Menus")
VAR_7 = lazy_gettext("Edit Permission on Views/Menus")
VAR_8 = {
"permission": lazy_gettext("Permission"),
"view_menu": lazy_gettext("View/Menu"),
}
VAR_9 = ["permission", "view_menu"]
class CLASS_3(SimpleFormView):
VAR_2 = "/resetmypassword"
VAR_10 = ResetPasswordForm
VAR_11 = lazy_gettext("Reset Password Form")
VAR_12 = "/"
VAR_13 = lazy_gettext("Password Changed")
def FUNC_1(self, VAR_10):
self.appbuilder.sm.reset_password(g.user.id, VAR_10.password.data)
flash(as_unicode(self.message), "info")
class CLASS_4(SimpleFormView):
VAR_2 = "/resetpassword"
VAR_10 = ResetPasswordForm
VAR_11 = lazy_gettext("Reset Password Form")
VAR_12 = "/"
VAR_13 = lazy_gettext("Password Changed")
def FUNC_1(self, VAR_10):
VAR_23 = request.args.get("pk")
self.appbuilder.sm.reset_password(VAR_23, VAR_10.password.data)
flash(as_unicode(self.message), "info")
class CLASS_5(SimpleFormView):
VAR_10 = UserInfoEdit
VAR_11 = lazy_gettext("Edit User Information")
VAR_12 = "/"
VAR_13 = lazy_gettext("User information changed")
def FUNC_2(self, VAR_10):
VAR_21 = self.appbuilder.sm.get_user_by_id(g.user.id)
for key, value in VAR_10.data.items():
if key == "csrf_token":
continue
VAR_48 = getattr(VAR_10, key)
VAR_48.data = getattr(VAR_21, key)
def FUNC_1(self, VAR_10):
VAR_10 = self.form.refresh(request.form)
VAR_21 = self.appbuilder.sm.get_user_by_id(g.user.id)
VAR_10.populate_obj(VAR_21)
self.appbuilder.sm.update_user(VAR_21)
flash(as_unicode(self.message), "info")
def FUNC_0(VAR_1: str) -> str:
if current_app.config.get("AUTH_ROLES_SYNC_AT_LOGIN", False):
VAR_1 += (
". <div class='alert alert-warning' role='alert'>"
"AUTH_ROLES_SYNC_AT_LOGIN is enabled, changes to this field will "
"not persist between VAR_50 logins."
"</div>"
)
return VAR_1
class CLASS_6(ModelView):
VAR_2 = "/users"
VAR_4 = lazy_gettext("List Users")
VAR_5 = lazy_gettext("Show User")
VAR_6 = lazy_gettext("Add User")
VAR_7 = lazy_gettext("Edit User")
VAR_8 = {
"get_full_name": lazy_gettext("Full Name"),
"first_name": lazy_gettext("First Name"),
"last_name": lazy_gettext("Last Name"),
"username": lazy_gettext("User Name"),
"password": lazy_gettext("Password"),
"active": lazy_gettext("Is Active?"),
"email": lazy_gettext("Email"),
"roles": lazy_gettext("Role"),
"last_login": lazy_gettext("Last login"),
"login_count": lazy_gettext("Login count"),
"fail_login_count": lazy_gettext("Failed FUNC_11 count"),
"created_on": lazy_gettext("Created on"),
"created_by": lazy_gettext("Created by"),
"changed_on": lazy_gettext("Changed on"),
"changed_by": lazy_gettext("Changed by"),
}
VAR_14 = {
"first_name": lazy_gettext("Write the VAR_50 first name or names"),
"last_name": lazy_gettext("Write the VAR_50 last name"),
"username": lazy_gettext(
"Username valid for authentication on DB or LDAP, unused for OID auth"
),
"password": lazy_gettext(
"Please use a good password policy,"
" this application does not check this for you"
),
"active": lazy_gettext(
"It's not a good policy to remove a VAR_50, just make it inactive"
),
"email": lazy_gettext("The user's email, this will also be used for OID auth"),
"roles": lazy_formatter_gettext(
"The VAR_50 role on the application,"
" this will associate with a list of permissions",
FUNC_0,
),
"conf_password": lazy_gettext("Please rewrite the user's password to confirm"),
}
VAR_9 = ["first_name", "last_name", "username", "email", "active", "roles"]
VAR_15 = [
(
lazy_gettext("User info"),
{"fields": ["username", "active", "roles", "login_count"]},
),
(
lazy_gettext("Personal Info"),
{"fields": ["first_name", "last_name", "email"], "expanded": True},
),
(
lazy_gettext("Audit Info"),
{
"fields": [
"last_login",
"fail_login_count",
"created_on",
"created_by",
"changed_on",
"changed_by",
],
"expanded": False,
},
),
]
VAR_16 = [
(
lazy_gettext("User info"),
{"fields": ["username", "active", "roles", "login_count"]},
),
(
lazy_gettext("Personal Info"),
{"fields": ["first_name", "last_name", "email"], "expanded": True},
),
]
VAR_17 = ["password"]
VAR_18 = ["first_name", "last_name", "username", "active", "email", "roles"]
VAR_19 = ["first_name", "last_name", "username", "active", "email", "roles"]
VAR_20 = lazy_gettext("Your VAR_50 information")
@expose("/VAR_52/")
@has_access
def VAR_52(self):
VAR_21 = self.datamodel.get(g.user.id, self._base_filters)
VAR_43 = self._get_show_widget(
g.user.id, VAR_21, VAR_15=self.user_show_fieldsets
)
self.update_redirect()
return self.render_template(
self.show_template,
VAR_37=self.user_info_title,
VAR_43=widgets,
appbuilder=self.appbuilder,
)
@action("userinfoedit", lazy_gettext("Edit User"), "", "fa-edit", multiple=False)
def FUNC_4(self, VAR_21):
return redirect(
url_for(self.appbuilder.sm.userinfoeditview.__name__ + ".this_form_get")
)
class CLASS_7(CLASS_6):
pass
class CLASS_8(CLASS_6):
pass
class CLASS_9(CLASS_6):
pass
class CLASS_10(CLASS_6):
pass
class CLASS_11(CLASS_6):
VAR_22 = {
"password": PasswordField(
lazy_gettext("Password"),
description=lazy_gettext(
"Please use a good password policy,"
" this application does not check this for you"
),
validators=[validators.DataRequired()],
widget=BS3PasswordFieldWidget(),
),
"conf_password": PasswordField(
lazy_gettext("Confirm Password"),
description=lazy_gettext("Please rewrite the user's password to confirm"),
validators=[
EqualTo("password", VAR_13=lazy_gettext("Passwords must match"))
],
widget=BS3PasswordFieldWidget(),
),
}
VAR_18 = [
"first_name",
"last_name",
"username",
"active",
"email",
"roles",
"password",
"conf_password",
]
@expose("/FUNC_5/<VAR_23>", methods=["GET"])
@has_access
def FUNC_5(self, VAR_23):
VAR_44 = dict()
VAR_44["resetpasswords"] = self.actions.get("resetpasswords")
VAR_21 = self.datamodel.get(VAR_23, self._base_filters)
if not VAR_21:
abort(404)
VAR_43 = self._get_show_widget(VAR_23, VAR_21, VAR_44=actions)
self.update_redirect()
return self.render_template(
self.show_template,
VAR_23=pk,
VAR_37=self.show_title,
VAR_43=widgets,
appbuilder=self.appbuilder,
related_views=self._related_views,
)
@expose("/VAR_52/")
@has_access
def VAR_52(self):
VAR_44 = dict()
VAR_44["resetmypassword"] = self.actions.get("resetmypassword")
VAR_44["userinfoedit"] = self.actions.get("userinfoedit")
VAR_21 = self.datamodel.get(g.user.id, self._base_filters)
VAR_43 = self._get_show_widget(
g.user.id, VAR_21, VAR_44=actions, VAR_15=self.user_show_fieldsets
)
self.update_redirect()
return self.render_template(
self.show_template,
VAR_37=self.user_info_title,
VAR_43=widgets,
appbuilder=self.appbuilder,
)
@action(
"resetmypassword",
lazy_gettext("Reset my password"),
"",
"fa-lock",
multiple=False,
)
def FUNC_6(self, VAR_21):
return redirect(
url_for(self.appbuilder.sm.resetmypasswordview.__name__ + ".this_form_get")
)
@action(
"resetpasswords", lazy_gettext("Reset Password"), "", "fa-lock", multiple=False
)
def FUNC_7(self, VAR_21):
return redirect(
url_for(
self.appbuilder.sm.resetpasswordview.__name__ + ".this_form_get",
VAR_23=VAR_21.id,
)
)
def FUNC_8(self, VAR_21):
item.changed_on = datetime.datetime.now()
VAR_21.changed_by_fk = g.user.id
def FUNC_9(self, VAR_21):
item.password = generate_password_hash(VAR_21.password)
class CLASS_12(DirectByChartView):
VAR_24 = lazy_gettext("User Statistics")
VAR_8 = {
"username": lazy_gettext("User Name"),
"login_count": lazy_gettext("Login count"),
"fail_login_count": lazy_gettext("Failed FUNC_11 count"),
}
VAR_25 = CLASS_6.search_columns
VAR_26 = [
{"label": "Login Count", "group": "username", "series": ["login_count"]},
{
"label": "Failed Login Count",
"group": "username",
"series": ["fail_login_count"],
},
]
class CLASS_13(ListWidget):
VAR_27 = "appbuilder/general/VAR_43/roles/list.html"
def __init__(self, **VAR_28):
kwargs["appbuilder"] = current_app.appbuilder
super().__init__(**VAR_28)
class CLASS_14(ShowWidget):
VAR_27 = "appbuilder/general/VAR_43/roles/FUNC_5.html"
def __init__(self, **VAR_28):
kwargs["appbuilder"] = current_app.appbuilder
super().__init__(**VAR_28)
class CLASS_15(ModelView):
VAR_2 = "/roles"
VAR_4 = lazy_gettext("List Roles")
VAR_5 = lazy_gettext("Show Role")
VAR_6 = lazy_gettext("Add Role")
VAR_7 = lazy_gettext("Edit Role")
VAR_29 = CLASS_13
VAR_30 = CLASS_14
VAR_8 = {
"name": lazy_gettext("Name"),
"permissions": lazy_gettext("Permissions"),
}
VAR_9 = ["name", "permissions"]
VAR_31 = ["name", "permissions"]
VAR_19 = ["name", "permissions"]
VAR_18 = VAR_19
VAR_32 = ["name"]
@action(
"copyrole",
lazy_gettext("Copy Role"),
lazy_gettext("Copy the selected roles?"),
icon="fa-copy",
single=False,
)
def FUNC_10(self, VAR_33):
self.update_redirect()
for VAR_21 in VAR_33:
VAR_49 = VAR_21.__class__()
VAR_49.name = VAR_21.name
VAR_49.permissions = VAR_21.permissions
VAR_49.name = VAR_49.name + " copy"
self.datamodel.add(VAR_49)
return redirect(self.get_redirect())
class CLASS_16(ModelView):
VAR_2 = "/registeruser"
VAR_3 = ["can_list", "can_show", "can_delete"]
VAR_4 = lazy_gettext("List of Registration Requests")
VAR_5 = lazy_gettext("Show Registration")
VAR_9 = ["username", "registration_date", "email"]
VAR_34 = ["password"]
VAR_17 = ["password"]
class CLASS_17(BaseView):
VAR_2 = ""
VAR_35 = ""
VAR_36 = lazy_gettext("Invalid FUNC_11. Please try again.")
VAR_37 = lazy_gettext("Sign In")
@expose("/FUNC_11/", methods=["GET", "POST"])
def FUNC_11(self):
pass
@expose("/FUNC_12/")
def FUNC_12(self):
logout_user()
return redirect(self.appbuilder.get_url_for_index)
class CLASS_18(CLASS_17):
VAR_35 = "appbuilder/general/security/login_db.html"
@expose("/FUNC_11/", methods=["GET", "POST"])
def FUNC_11(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
VAR_10 = LoginForm_db()
if VAR_10.validate_on_submit():
VAR_50 = self.appbuilder.sm.auth_user_db(
VAR_10.username.data, VAR_10.password.data
)
if not VAR_50:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
login_user(VAR_50, remember=False)
return redirect(self.appbuilder.get_url_for_index)
return self.render_template(
self.login_template, VAR_37=self.title, VAR_10=form, appbuilder=self.appbuilder
)
class CLASS_19(CLASS_17):
VAR_35 = "appbuilder/general/security/login_ldap.html"
@expose("/FUNC_11/", methods=["GET", "POST"])
def FUNC_11(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
VAR_10 = LoginForm_db()
if VAR_10.validate_on_submit():
VAR_50 = self.appbuilder.sm.auth_user_ldap(
VAR_10.username.data, VAR_10.password.data
)
if not VAR_50:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
login_user(VAR_50, remember=False)
return redirect(self.appbuilder.get_url_for_index)
return self.render_template(
self.login_template, VAR_37=self.title, VAR_10=form, appbuilder=self.appbuilder
)
class CLASS_20(CLASS_17):
VAR_35 = "appbuilder/general/security/login_oid.html"
VAR_38 = ["email"]
VAR_39 = []
def __init__(self):
super(CLASS_20, self).__init__()
@expose("/FUNC_11/", methods=["GET", "POST"])
def FUNC_11(self, VAR_40=True):
@self.appbuilder.sm.oid.loginhandler
def FUNC_14(self):
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
VAR_10 = LoginForm_oid()
if VAR_10.validate_on_submit():
VAR_54["remember_me"] = VAR_10.remember_me.data
return self.appbuilder.sm.oid.try_login(
VAR_10.openid.data,
ask_for=self.oid_ask_for,
ask_for_optional=self.oid_ask_for_optional,
)
return self.render_template(
self.login_template,
VAR_37=self.title,
VAR_10=form,
providers=self.appbuilder.sm.openid_providers,
appbuilder=self.appbuilder,
)
@self.appbuilder.sm.oid.after_login
def FUNC_15(VAR_45):
if VAR_45.email is None or VAR_45.email == "":
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
VAR_50 = self.appbuilder.sm.auth_user_oid(VAR_45.email)
if VAR_50 is None:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
VAR_51 = False
if "remember_me" in VAR_54:
VAR_51 = VAR_54["remember_me"]
VAR_54.pop("remember_me", None)
login_user(VAR_50, remember=VAR_51)
return redirect(self.appbuilder.get_url_for_index)
return FUNC_14(self)
class CLASS_21(CLASS_17):
VAR_35 = "appbuilder/general/security/login_oauth.html"
@expose("/FUNC_11/")
@expose("/FUNC_11/<VAR_41>")
@expose("/FUNC_11/<VAR_41>/<VAR_42>")
def FUNC_11(
self, VAR_41: Optional[str] = None, VAR_42: Optional[str] = None
) -> Response:
VAR_0.debug("Provider: {0}".format(VAR_41))
if g.user is not None and g.user.is_authenticated:
VAR_0.debug("Already authenticated {0}".format(g.user))
return redirect(self.appbuilder.get_url_for_index)
if VAR_41 is None:
if len(self.appbuilder.sm.oauth_providers) > 1:
return self.render_template(
self.login_template,
providers=self.appbuilder.sm.oauth_providers,
VAR_37=self.title,
appbuilder=self.appbuilder,
)
else:
VAR_41 = self.appbuilder.sm.oauth_providers[0]["name"]
VAR_0.debug("Going to call authorize for: {0}".format(VAR_41))
VAR_46 = jwt.encode(
request.args.to_dict(flat=False),
self.appbuilder.app.config["SECRET_KEY"],
algorithm="HS256",
)
try:
if VAR_42:
VAR_0.debug("Login to Register")
VAR_54["register"] = True
if VAR_41 == "twitter":
return self.appbuilder.sm.oauth_remotes[VAR_41].authorize_redirect(
redirect_uri=url_for(
".oauth_authorized",
VAR_41=provider,
_external=True,
VAR_46=state,
)
)
else:
return self.appbuilder.sm.oauth_remotes[VAR_41].authorize_redirect(
redirect_uri=url_for(
".oauth_authorized", VAR_41=provider, _external=True
),
VAR_46=state.decode("ascii") if isinstance(VAR_46, bytes) else VAR_46,
)
except Exception as e:
VAR_0.error("Error on OAuth authorize: {0}".format(e))
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_index)
@expose("/oauth-authorized/<VAR_41>")
def FUNC_13(self, VAR_41: str) -> Response:
VAR_0.debug("Authorized init")
if VAR_41 not in self.appbuilder.sm.oauth_remotes:
flash(u"Provider not supported.", "warning")
VAR_0.warning("OAuth authorized got an unknown VAR_41 %s", VAR_41)
return redirect(self.appbuilder.get_url_for_login)
VAR_45 = self.appbuilder.sm.oauth_remotes[VAR_41].authorize_access_token()
if VAR_45 is None:
flash(u"You denied the request to sign in.", "warning")
return redirect(self.appbuilder.get_url_for_login)
VAR_0.debug("OAUTH Authorized VAR_45: {0}".format(VAR_45))
try:
self.appbuilder.sm.set_oauth_session(VAR_41, VAR_45)
VAR_52 = self.appbuilder.sm.oauth_user_info(VAR_41, VAR_45)
except Exception as e:
VAR_0.error("Error returning OAuth VAR_50 info: {0}".format(e))
VAR_50 = None
else:
VAR_0.debug("User info retrieved from {0}: {1}".format(VAR_41, VAR_52))
if VAR_41 in self.appbuilder.sm.oauth_whitelists:
VAR_55 = self.appbuilder.sm.oauth_whitelists[VAR_41]
VAR_56 = False
for e in VAR_55:
if re.search(e, VAR_52["email"]):
VAR_56 = True
break
if not VAR_56:
flash(u"You are not authorized.", "warning")
return redirect(self.appbuilder.get_url_for_login)
else:
VAR_0.debug("No VAR_55 for OAuth provider")
VAR_50 = self.appbuilder.sm.auth_user_oauth(VAR_52)
if VAR_50 is None:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_login)
else:
login_user(VAR_50)
try:
VAR_46 = jwt.decode(
request.args["state"],
self.appbuilder.app.config["SECRET_KEY"],
algorithms=["HS256"],
)
except jwt.InvalidTokenError:
raise Exception("State signature is not valid!")
VAR_53 = self.appbuilder.get_url_for_index
if "next" in VAR_46 and len(VAR_46["next"]) > 0:
VAR_57 = urlparse(VAR_46["next"][0])
if VAR_57.netloc != request.host:
VAR_0.warning("Got an invalid next URL: %s", VAR_57.netloc)
else:
VAR_53 = VAR_46["next"][0]
return redirect(VAR_53)
class CLASS_22(CLASS_17):
VAR_35 = ""
@expose("/FUNC_11/")
def FUNC_11(self):
VAR_47 = request.environ.get("REMOTE_USER")
if g.user is not None and g.user.is_authenticated:
return redirect(self.appbuilder.get_url_for_index)
if VAR_47:
VAR_50 = self.appbuilder.sm.auth_user_remote_user(VAR_47)
if VAR_50 is None:
flash(as_unicode(self.invalid_login_message), "warning")
else:
login_user(VAR_50)
else:
flash(as_unicode(self.invalid_login_message), "warning")
return redirect(self.appbuilder.get_url_for_index)
| [
4,
12,
23,
24,
26,
27,
31,
36,
38,
39,
43,
48,
50,
51,
55,
60,
66,
67,
72,
78,
82,
83,
88,
94,
99,
100,
106,
109,
115,
122,
123,
133,
134,
137,
142,
160,
182,
184,
209,
220,
222,
226,
241,
247,
248,
255,
257,
258,
265,
267,
268,
275,
277,
278,
285,
287,
288,
295,
315,
326,
345,
352,
364,
376,
387,
391,
394,
395,
403,
405,
414,
415,
418,
422,
423,
426,
430,
431,
434,
439,
442,
452,
469,
470,
479,
480,
486,
490,
495,
496,
499,
517,
518,
521,
539,
543,
586,
587,
592,
595,
617,
631,
634,
636,
637,
640,
649,
660,
691,
700,
709,
723,
737,
742,
744,
745,
748,
763,
69,
70,
71,
85,
86,
87,
250,
251,
252,
253,
254,
260,
261,
262,
263,
264,
270,
271,
272,
273,
274,
280,
281,
282,
283,
284,
290,
291,
292,
293,
294
] | [
6,
24,
35,
36,
38,
39,
43,
48,
50,
51,
55,
60,
62,
63,
67,
72,
78,
79,
84,
90,
94,
95,
100,
106,
111,
112,
118,
121,
127,
134,
135,
145,
146,
149,
154,
172,
194,
196,
221,
232,
234,
238,
253,
259,
260,
267,
269,
270,
277,
279,
280,
287,
289,
290,
297,
299,
300,
307,
327,
338,
357,
364,
376,
388,
399,
403,
406,
407,
415,
417,
426,
427,
430,
434,
435,
438,
442,
443,
446,
451,
454,
464,
481,
482,
491,
492,
498,
502,
507,
508,
511,
529,
530,
533,
551,
552,
557,
560,
582,
596,
599,
601,
602,
605,
616,
627,
658,
671,
680,
694,
708,
710,
718,
719,
722,
737,
81,
82,
83,
97,
98,
99,
262,
263,
264,
265,
266,
272,
273,
274,
275,
276,
282,
283,
284,
285,
286,
292,
293,
294,
295,
296,
302,
303,
304,
305,
306
] |
2CWE-601
| import re
import secrets
from typing import Any, Dict, Optional, Tuple
from urllib.parse import urlparse
from django.conf import settings
from django.http import HttpRequest, JsonResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework import status
from sentry_sdk import capture_exception
from statshog.defaults.django import statsd
from posthog.api.utils import get_token
from posthog.exceptions import RequestParsingError, generate_exception_response
from posthog.models import Team, User
from posthog.models.feature_flag import get_overridden_feature_flags
from posthog.utils import cors_response, load_data_from_request
from .utils import get_project_id
def on_permitted_domain(team: Team, request: HttpRequest) -> bool:
permitted_domains = ["127.0.0.1", "localhost"]
for url in team.app_urls:
hostname = parse_domain(url)
if hostname:
permitted_domains.append(hostname)
origin = parse_domain(request.headers.get("Origin"))
referer = parse_domain(request.headers.get("Referer"))
for permitted_domain in permitted_domains:
if "*" in permitted_domain:
pattern = "^{}$".format(permitted_domain.replace(".", "\\.").replace("*", "(.*)"))
if (origin and re.search(pattern, origin)) or (referer and re.search(pattern, referer)):
return True
else:
if permitted_domain == origin or permitted_domain == referer:
return True
return False
def decide_editor_params(request: HttpRequest) -> Tuple[Dict[str, Any], bool]:
if request.user.is_anonymous:
return {}, False
team = request.user.team
if team and on_permitted_domain(team, request):
response: Dict[str, Any] = {"isAuthenticated": True}
editor_params = {}
if request.user.toolbar_mode != "disabled":
editor_params["toolbarVersion"] = "toolbar"
if settings.JS_URL:
editor_params["jsURL"] = settings.JS_URL
response["editorParams"] = editor_params
return response, not request.user.temporary_token
else:
return {}, False
def parse_domain(url: Any) -> Optional[str]:
return urlparse(url).hostname
@csrf_exempt
def get_decide(request: HttpRequest):
response = {
"config": {"enable_collect_everything": True},
"editorParams": {},
"isAuthenticated": False,
"supportedCompression": ["gzip", "gzip-js", "lz64"],
}
if request.COOKIES.get(settings.TOOLBAR_COOKIE_NAME) and request.user.is_authenticated:
response["isAuthenticated"] = True
if settings.JS_URL and request.user.toolbar_mode == User.TOOLBAR:
response["editorParams"] = {"jsURL": settings.JS_URL, "toolbarVersion": "toolbar"}
if request.user.is_authenticated:
r, update_user_token = decide_editor_params(request)
response.update(r)
if update_user_token:
request.user.temporary_token = secrets.token_urlsafe(32)
request.user.save()
response["featureFlags"] = []
response["sessionRecording"] = False
if request.method == "POST":
try:
data = load_data_from_request(request)
api_version_string = request.GET.get("v")
# NOTE: This does not support semantic versioning e.g. 2.1.0
api_version = int(api_version_string) if api_version_string else 1
except ValueError:
# default value added because of bug in posthog-js 1.19.0
# see https://sentry.io/organizations/posthog2/issues/2738865125/?project=1899813
# as a tombstone if the below statsd counter hasn't seen errors for N days
# then it is likely that no clients are running posthog-js 1.19.0
# and this defaulting could be removed
statsd.incr(
f"posthog_cloud_decide_defaulted_api_version_on_value_error",
tags={"endpoint": "decide", "api_version_string": api_version_string},
)
api_version = 2
except RequestParsingError as error:
capture_exception(error) # We still capture this on Sentry to identify actual potential bugs
return cors_response(
request,
generate_exception_response("decide", f"Malformed request data: {error}", code="malformed_data"),
)
token = get_token(data, request)
team = Team.objects.get_team_from_token(token)
if team is None and token:
project_id = get_project_id(data, request)
if not project_id:
return cors_response(
request,
generate_exception_response(
"decide",
"Project API key invalid. You can find your project API key in PostHog project settings.",
code="invalid_api_key",
type="authentication_error",
status_code=status.HTTP_401_UNAUTHORIZED,
),
)
user = User.objects.get_from_personal_api_key(token)
if user is None:
return cors_response(
request,
generate_exception_response(
"decide",
"Invalid Personal API key.",
code="invalid_personal_key",
type="authentication_error",
status_code=status.HTTP_401_UNAUTHORIZED,
),
)
team = user.teams.get(id=project_id)
if team:
feature_flags = get_overridden_feature_flags(team.pk, data["distinct_id"], data.get("groups", {}))
response["featureFlags"] = feature_flags if api_version >= 2 else list(feature_flags.keys())
if team.session_recording_opt_in and (on_permitted_domain(team, request) or len(team.app_urls) == 0):
response["sessionRecording"] = {"endpoint": "/s/"}
statsd.incr(
f"posthog_cloud_raw_endpoint_success", tags={"endpoint": "decide",},
)
return cors_response(request, JsonResponse(response))
| import re
import secrets
from typing import Any, Dict, Optional, Tuple
from urllib.parse import urlparse
from django.conf import settings
from django.http import HttpRequest, JsonResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework import status
from sentry_sdk import capture_exception
from statshog.defaults.django import statsd
from posthog.api.utils import get_token
from posthog.exceptions import RequestParsingError, generate_exception_response
from posthog.models import Team, User
from posthog.models.feature_flag import get_overridden_feature_flags
from posthog.utils import cors_response, load_data_from_request
from .utils import get_project_id
def on_permitted_domain(team: Team, request: HttpRequest) -> bool:
origin = parse_domain(request.headers.get("Origin"))
referer = parse_domain(request.headers.get("Referer"))
return hostname_in_app_urls(team, origin) or hostname_in_app_urls(team, referer)
def hostname_in_app_urls(team: Team, hostname: Optional[str]) -> bool:
if not hostname:
return False
permitted_domains = ["127.0.0.1", "localhost"]
for url in team.app_urls:
host = parse_domain(url)
if host:
permitted_domains.append(host)
for permitted_domain in permitted_domains:
if "*" in permitted_domain:
pattern = "^{}$".format(re.escape(permitted_domain).replace("\\*", "(.*)"))
if re.search(pattern, hostname):
return True
elif permitted_domain == hostname:
return True
return False
def decide_editor_params(request: HttpRequest) -> Tuple[Dict[str, Any], bool]:
if request.user.is_anonymous:
return {}, False
team = request.user.team
if team and on_permitted_domain(team, request):
response: Dict[str, Any] = {"isAuthenticated": True}
editor_params = {}
if request.user.toolbar_mode != "disabled":
editor_params["toolbarVersion"] = "toolbar"
if settings.JS_URL:
editor_params["jsURL"] = settings.JS_URL
response["editorParams"] = editor_params
return response, not request.user.temporary_token
else:
return {}, False
def parse_domain(url: Any) -> Optional[str]:
return urlparse(url).hostname
@csrf_exempt
def get_decide(request: HttpRequest):
response = {
"config": {"enable_collect_everything": True},
"editorParams": {},
"isAuthenticated": False,
"supportedCompression": ["gzip", "gzip-js", "lz64"],
}
if request.COOKIES.get(settings.TOOLBAR_COOKIE_NAME) and request.user.is_authenticated:
response["isAuthenticated"] = True
if settings.JS_URL and request.user.toolbar_mode == User.TOOLBAR:
response["editorParams"] = {"jsURL": settings.JS_URL, "toolbarVersion": "toolbar"}
if request.user.is_authenticated:
r, update_user_token = decide_editor_params(request)
response.update(r)
if update_user_token:
request.user.temporary_token = secrets.token_urlsafe(32)
request.user.save()
response["featureFlags"] = []
response["sessionRecording"] = False
if request.method == "POST":
try:
data = load_data_from_request(request)
api_version_string = request.GET.get("v")
# NOTE: This does not support semantic versioning e.g. 2.1.0
api_version = int(api_version_string) if api_version_string else 1
except ValueError:
# default value added because of bug in posthog-js 1.19.0
# see https://sentry.io/organizations/posthog2/issues/2738865125/?project=1899813
# as a tombstone if the below statsd counter hasn't seen errors for N days
# then it is likely that no clients are running posthog-js 1.19.0
# and this defaulting could be removed
statsd.incr(
f"posthog_cloud_decide_defaulted_api_version_on_value_error",
tags={"endpoint": "decide", "api_version_string": api_version_string},
)
api_version = 2
except RequestParsingError as error:
capture_exception(error) # We still capture this on Sentry to identify actual potential bugs
return cors_response(
request,
generate_exception_response("decide", f"Malformed request data: {error}", code="malformed_data"),
)
token = get_token(data, request)
team = Team.objects.get_team_from_token(token)
if team is None and token:
project_id = get_project_id(data, request)
if not project_id:
return cors_response(
request,
generate_exception_response(
"decide",
"Project API key invalid. You can find your project API key in PostHog project settings.",
code="invalid_api_key",
type="authentication_error",
status_code=status.HTTP_401_UNAUTHORIZED,
),
)
user = User.objects.get_from_personal_api_key(token)
if user is None:
return cors_response(
request,
generate_exception_response(
"decide",
"Invalid Personal API key.",
code="invalid_personal_key",
type="authentication_error",
status_code=status.HTTP_401_UNAUTHORIZED,
),
)
team = user.teams.get(id=project_id)
if team:
feature_flags = get_overridden_feature_flags(team.pk, data["distinct_id"], data.get("groups", {}))
response["featureFlags"] = feature_flags if api_version >= 2 else list(feature_flags.keys())
if team.session_recording_opt_in and (on_permitted_domain(team, request) or len(team.app_urls) == 0):
response["sessionRecording"] = {"endpoint": "/s/"}
statsd.incr(
f"posthog_cloud_raw_endpoint_success", tags={"endpoint": "decide",},
)
return cors_response(request, JsonResponse(response))
| open_redirect | {
"code": [
" hostname = parse_domain(url)",
" if hostname:",
" permitted_domains.append(hostname)",
" origin = parse_domain(request.headers.get(\"Origin\"))",
" referer = parse_domain(request.headers.get(\"Referer\"))",
" pattern = \"^{}$\".format(permitted_domain.replace(\".\", \"\\\\.\").replace(\"*\", \"(.*)\"))",
" if (origin and re.search(pattern, origin)) or (referer and re.search(pattern, referer)):",
" return True",
" else:",
" if permitted_domain == origin or permitted_domain == referer:"
],
"line_no": [
26,
27,
28,
30,
31,
34,
35,
36,
37,
38
]
} | {
"code": [
" origin = parse_domain(request.headers.get(\"Origin\"))",
" return hostname_in_app_urls(team, origin) or hostname_in_app_urls(team, referer)",
"def hostname_in_app_urls(team: Team, hostname: Optional[str]) -> bool:",
" return False",
" host = parse_domain(url)",
" if host:",
" permitted_domains.append(host)",
" if re.search(pattern, hostname):",
" elif permitted_domain == hostname:",
" return True"
],
"line_no": [
23,
25,
28,
30,
35,
36,
37,
42,
44,
45
]
} | import .re
import secrets
from typing import Any, Dict, Optional, Tuple
from urllib.parse import .urlparse
from django.conf import settings
from django.http import HttpRequest, JsonResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework import status
from sentry_sdk import capture_exception
from statshog.defaults.django import statsd
from posthog.api.utils import get_token
from posthog.exceptions import RequestParsingError, generate_exception_response
from posthog.models import Team, User
from posthog.models.feature_flag import get_overridden_feature_flags
from posthog.utils import cors_response, load_data_from_request
from .utils import get_project_id
def FUNC_0(VAR_0: Team, VAR_1: HttpRequest) -> bool:
VAR_3 = ["127.0.0.1", "localhost"]
for VAR_2 in VAR_0.app_urls:
VAR_7 = FUNC_2(VAR_2)
if VAR_7:
VAR_3.append(VAR_7)
VAR_4 = FUNC_2(VAR_1.headers.get("Origin"))
VAR_5 = FUNC_2(VAR_1.headers.get("Referer"))
for permitted_domain in VAR_3:
if "*" in permitted_domain:
VAR_12 = "^{}$".format(permitted_domain.replace(".", "\\.").replace("*", "(.*)"))
if (VAR_4 and re.search(VAR_12, VAR_4)) or (VAR_5 and re.search(VAR_12, VAR_5)):
return True
else:
if permitted_domain == VAR_4 or permitted_domain == VAR_5:
return True
return False
def FUNC_1(VAR_1: HttpRequest) -> Tuple[Dict[str, Any], bool]:
if VAR_1.user.is_anonymous:
return {}, False
VAR_0 = VAR_1.user.team
if VAR_0 and FUNC_0(VAR_0, VAR_1):
VAR_6: Dict[str, Any] = {"isAuthenticated": True}
VAR_8 = {}
if VAR_1.user.toolbar_mode != "disabled":
VAR_8["toolbarVersion"] = "toolbar"
if settings.JS_URL:
VAR_8["jsURL"] = settings.JS_URL
VAR_6["editorParams"] = VAR_8
return VAR_6, not VAR_1.user.temporary_token
else:
return {}, False
def FUNC_2(VAR_2: Any) -> Optional[str]:
return urlparse(VAR_2).hostname
@csrf_exempt
def FUNC_3(VAR_1: HttpRequest):
VAR_6 = {
"config": {"enable_collect_everything": True},
"editorParams": {},
"isAuthenticated": False,
"supportedCompression": ["gzip", "gzip-js", "lz64"],
}
if VAR_1.COOKIES.get(settings.TOOLBAR_COOKIE_NAME) and VAR_1.user.is_authenticated:
VAR_6["isAuthenticated"] = True
if settings.JS_URL and VAR_1.user.toolbar_mode == User.TOOLBAR:
VAR_6["editorParams"] = {"jsURL": settings.JS_URL, "toolbarVersion": "toolbar"}
if VAR_1.user.is_authenticated:
VAR_9, VAR_10 = FUNC_1(VAR_1)
VAR_6.update(VAR_9)
if VAR_10:
VAR_1.user.temporary_token = secrets.token_urlsafe(32)
VAR_1.user.save()
VAR_6["featureFlags"] = []
VAR_6["sessionRecording"] = False
if VAR_1.method == "POST":
try:
VAR_13 = load_data_from_request(VAR_1)
VAR_14 = VAR_1.GET.get("v")
VAR_15 = int(VAR_14) if VAR_14 else 1
except ValueError:
statsd.incr(
f"posthog_cloud_decide_defaulted_api_version_on_value_error",
tags={"endpoint": "decide", "api_version_string": VAR_14},
)
VAR_15 = 2
except RequestParsingError as error:
capture_exception(error) # We still capture this on Sentry to identify actual potential bugs
return cors_response(
VAR_1,
generate_exception_response("decide", f"Malformed VAR_1 VAR_13: {error}", code="malformed_data"),
)
VAR_11 = get_token(VAR_13, VAR_1)
VAR_0 = Team.objects.get_team_from_token(VAR_11)
if VAR_0 is None and VAR_11:
VAR_16 = get_project_id(VAR_13, VAR_1)
if not VAR_16:
return cors_response(
VAR_1,
generate_exception_response(
"decide",
"Project API key invalid. You can find your project API key in PostHog project settings.",
code="invalid_api_key",
type="authentication_error",
status_code=status.HTTP_401_UNAUTHORIZED,
),
)
VAR_17 = User.objects.get_from_personal_api_key(VAR_11)
if VAR_17 is None:
return cors_response(
VAR_1,
generate_exception_response(
"decide",
"Invalid Personal API key.",
code="invalid_personal_key",
type="authentication_error",
status_code=status.HTTP_401_UNAUTHORIZED,
),
)
VAR_0 = VAR_17.teams.get(id=VAR_16)
if VAR_0:
VAR_18 = get_overridden_feature_flags(VAR_0.pk, VAR_13["distinct_id"], VAR_13.get("groups", {}))
VAR_6["featureFlags"] = VAR_18 if VAR_15 >= 2 else list(VAR_18.keys())
if VAR_0.session_recording_opt_in and (FUNC_0(VAR_0, VAR_1) or len(VAR_0.app_urls) == 0):
VAR_6["sessionRecording"] = {"endpoint": "/s/"}
statsd.incr(
f"posthog_cloud_raw_endpoint_success", tags={"endpoint": "decide",},
)
return cors_response(VAR_1, JsonResponse(VAR_6))
| import .re
import secrets
from typing import Any, Dict, Optional, Tuple
from urllib.parse import .urlparse
from django.conf import settings
from django.http import HttpRequest, JsonResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework import status
from sentry_sdk import capture_exception
from statshog.defaults.django import statsd
from posthog.api.utils import get_token
from posthog.exceptions import RequestParsingError, generate_exception_response
from posthog.models import Team, User
from posthog.models.feature_flag import get_overridden_feature_flags
from posthog.utils import cors_response, load_data_from_request
from .utils import get_project_id
def FUNC_0(VAR_0: Team, VAR_1: HttpRequest) -> bool:
VAR_4 = FUNC_3(VAR_1.headers.get("Origin"))
VAR_5 = FUNC_3(VAR_1.headers.get("Referer"))
return FUNC_1(VAR_0, VAR_4) or FUNC_1(VAR_0, VAR_5)
def FUNC_1(VAR_0: Team, VAR_2: Optional[str]) -> bool:
if not VAR_2:
return False
VAR_6 = ["127.0.0.1", "localhost"]
for VAR_3 in VAR_0.app_urls:
VAR_8 = FUNC_3(VAR_3)
if VAR_8:
VAR_6.append(VAR_8)
for permitted_domain in VAR_6:
if "*" in permitted_domain:
VAR_13 = "^{}$".format(re.escape(permitted_domain).replace("\\*", "(.*)"))
if re.search(VAR_13, VAR_2):
return True
elif permitted_domain == VAR_2:
return True
return False
def FUNC_2(VAR_1: HttpRequest) -> Tuple[Dict[str, Any], bool]:
if VAR_1.user.is_anonymous:
return {}, False
VAR_0 = VAR_1.user.team
if VAR_0 and FUNC_0(VAR_0, VAR_1):
VAR_7: Dict[str, Any] = {"isAuthenticated": True}
VAR_9 = {}
if VAR_1.user.toolbar_mode != "disabled":
VAR_9["toolbarVersion"] = "toolbar"
if settings.JS_URL:
VAR_9["jsURL"] = settings.JS_URL
VAR_7["editorParams"] = VAR_9
return VAR_7, not VAR_1.user.temporary_token
else:
return {}, False
def FUNC_3(VAR_3: Any) -> Optional[str]:
return urlparse(VAR_3).hostname
@csrf_exempt
def FUNC_4(VAR_1: HttpRequest):
VAR_7 = {
"config": {"enable_collect_everything": True},
"editorParams": {},
"isAuthenticated": False,
"supportedCompression": ["gzip", "gzip-js", "lz64"],
}
if VAR_1.COOKIES.get(settings.TOOLBAR_COOKIE_NAME) and VAR_1.user.is_authenticated:
VAR_7["isAuthenticated"] = True
if settings.JS_URL and VAR_1.user.toolbar_mode == User.TOOLBAR:
VAR_7["editorParams"] = {"jsURL": settings.JS_URL, "toolbarVersion": "toolbar"}
if VAR_1.user.is_authenticated:
VAR_10, VAR_11 = FUNC_2(VAR_1)
VAR_7.update(VAR_10)
if VAR_11:
VAR_1.user.temporary_token = secrets.token_urlsafe(32)
VAR_1.user.save()
VAR_7["featureFlags"] = []
VAR_7["sessionRecording"] = False
if VAR_1.method == "POST":
try:
VAR_14 = load_data_from_request(VAR_1)
VAR_15 = VAR_1.GET.get("v")
VAR_16 = int(VAR_15) if VAR_15 else 1
except ValueError:
statsd.incr(
f"posthog_cloud_decide_defaulted_api_version_on_value_error",
tags={"endpoint": "decide", "api_version_string": VAR_15},
)
VAR_16 = 2
except RequestParsingError as error:
capture_exception(error) # We still capture this on Sentry to identify actual potential bugs
return cors_response(
VAR_1,
generate_exception_response("decide", f"Malformed VAR_1 VAR_14: {error}", code="malformed_data"),
)
VAR_12 = get_token(VAR_14, VAR_1)
VAR_0 = Team.objects.get_team_from_token(VAR_12)
if VAR_0 is None and VAR_12:
VAR_17 = get_project_id(VAR_14, VAR_1)
if not VAR_17:
return cors_response(
VAR_1,
generate_exception_response(
"decide",
"Project API key invalid. You can find your project API key in PostHog project settings.",
code="invalid_api_key",
type="authentication_error",
status_code=status.HTTP_401_UNAUTHORIZED,
),
)
VAR_18 = User.objects.get_from_personal_api_key(VAR_12)
if VAR_18 is None:
return cors_response(
VAR_1,
generate_exception_response(
"decide",
"Invalid Personal API key.",
code="invalid_personal_key",
type="authentication_error",
status_code=status.HTTP_401_UNAUTHORIZED,
),
)
VAR_0 = VAR_18.teams.get(id=VAR_17)
if VAR_0:
VAR_19 = get_overridden_feature_flags(VAR_0.pk, VAR_14["distinct_id"], VAR_14.get("groups", {}))
VAR_7["featureFlags"] = VAR_19 if VAR_16 >= 2 else list(VAR_19.keys())
if VAR_0.session_recording_opt_in and (FUNC_0(VAR_0, VAR_1) or len(VAR_0.app_urls) == 0):
VAR_7["sessionRecording"] = {"endpoint": "/s/"}
statsd.incr(
f"posthog_cloud_raw_endpoint_success", tags={"endpoint": "decide",},
)
return cors_response(VAR_1, JsonResponse(VAR_7))
| [
5,
12,
18,
20,
21,
24,
29,
41,
42,
46,
51,
54,
57,
62,
63,
66,
67,
76,
81,
88,
91,
96,
99,
100,
101,
102,
103,
115,
120,
132,
146,
150,
157
] | [
5,
12,
18,
20,
21,
26,
27,
31,
33,
38,
46,
48,
49,
53,
58,
61,
64,
69,
70,
73,
74,
83,
88,
95,
98,
103,
106,
107,
108,
109,
110,
122,
127,
139,
153,
157,
164
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests REST events for /profile paths."""
import json
from mock import Mock
from twisted.internet import defer
import synapse.types
from synapse.api.errors import AuthError, SynapseError
from synapse.rest import admin
from synapse.rest.client.v1 import login, profile, room
from tests import unittest
from ....utils import MockHttpResource, setup_test_homeserver
myid = "@1234ABCD:test"
PATH_PREFIX = "/_matrix/client/r0"
class MockHandlerProfileTestCase(unittest.TestCase):
""" Tests rest layer of profile management.
Todo: move these into ProfileTestCase
"""
@defer.inlineCallbacks
def setUp(self):
self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
self.mock_handler = Mock(
spec=[
"get_displayname",
"set_displayname",
"get_avatar_url",
"set_avatar_url",
"check_profile_query_allowed",
]
)
self.mock_handler.get_displayname.return_value = defer.succeed(Mock())
self.mock_handler.set_displayname.return_value = defer.succeed(Mock())
self.mock_handler.get_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.set_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.check_profile_query_allowed.return_value = defer.succeed(
Mock()
)
hs = yield setup_test_homeserver(
self.addCleanup,
"test",
http_client=None,
resource_for_client=self.mock_resource,
federation=Mock(),
federation_client=Mock(),
profile_handler=self.mock_handler,
)
async def _get_user_by_req(request=None, allow_guest=False):
return synapse.types.create_requester(myid)
hs.get_auth().get_user_by_req = _get_user_by_req
profile.register_servlets(hs, self.mock_resource)
@defer.inlineCallbacks
def test_get_my_name(self):
mocked_get = self.mock_handler.get_displayname
mocked_get.return_value = defer.succeed("Frank")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % (myid), None
)
self.assertEquals(200, code)
self.assertEquals({"displayname": "Frank"}, response)
self.assertEquals(mocked_get.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def test_set_my_name(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.return_value = defer.succeed(())
(code, response) = yield self.mock_resource.trigger(
"PUT", "/profile/%s/displayname" % (myid), b'{"displayname": "Frank Jr."}'
)
self.assertEquals(200, code)
self.assertEquals(mocked_set.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][2], "Frank Jr.")
@defer.inlineCallbacks
def test_set_my_name_noauth(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.side_effect = AuthError(400, "message")
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@4567:test"),
b'{"displayname": "Frank Jr."}',
)
self.assertTrue(400 <= code < 499, msg="code %d is in the 4xx range" % (code))
@defer.inlineCallbacks
def test_get_other_name(self):
mocked_get = self.mock_handler.get_displayname
mocked_get.return_value = defer.succeed("Bob")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % ("@opaque:elsewhere"), None
)
self.assertEquals(200, code)
self.assertEquals({"displayname": "Bob"}, response)
@defer.inlineCallbacks
def test_set_other_name(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.side_effect = SynapseError(400, "message")
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@opaque:elsewhere"),
b'{"displayname":"bob"}',
)
self.assertTrue(400 <= code <= 499, msg="code %d is in the 4xx range" % (code))
@defer.inlineCallbacks
def test_get_my_avatar(self):
mocked_get = self.mock_handler.get_avatar_url
mocked_get.return_value = defer.succeed("http://my.server/me.png")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/avatar_url" % (myid), None
)
self.assertEquals(200, code)
self.assertEquals({"avatar_url": "http://my.server/me.png"}, response)
self.assertEquals(mocked_get.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def test_set_my_avatar(self):
mocked_set = self.mock_handler.set_avatar_url
mocked_set.return_value = defer.succeed(())
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/avatar_url" % (myid),
b'{"avatar_url": "http://my.server/pic.gif"}',
)
self.assertEquals(200, code)
self.assertEquals(mocked_set.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][2], "http://my.server/pic.gif")
class ProfileTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def make_homeserver(self, reactor, clock):
self.hs = self.setup_test_homeserver()
return self.hs
def prepare(self, reactor, clock, hs):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
def test_set_displayname(self):
request, channel = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test"}),
access_token=self.owner_tok,
)
self.assertEqual(channel.code, 200, channel.result)
res = self.get_displayname()
self.assertEqual(res, "test")
def test_set_displayname_too_long(self):
"""Attempts to set a stupid displayname should get a 400"""
request, channel = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test" * 100}),
access_token=self.owner_tok,
)
self.assertEqual(channel.code, 400, channel.result)
res = self.get_displayname()
self.assertEqual(res, "owner")
def get_displayname(self):
request, channel = self.make_request(
"GET", "/profile/%s/displayname" % (self.owner,)
)
self.assertEqual(channel.code, 200, channel.result)
return channel.json_body["displayname"]
class ProfilesRestrictedTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
room.register_servlets,
]
def make_homeserver(self, reactor, clock):
config = self.default_config()
config["require_auth_for_profile_requests"] = True
config["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(config=config)
return self.hs
def prepare(self, reactor, clock, hs):
# User owning the requested profile.
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
self.profile_url = "/profile/%s" % (self.owner)
# User requesting the profile.
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
self.room_id = self.helper.create_room_as(self.owner, tok=self.owner_tok)
def test_no_auth(self):
self.try_fetch_profile(401)
def test_not_in_shared_room(self):
self.ensure_requester_left_room()
self.try_fetch_profile(403, access_token=self.requester_tok)
def test_in_shared_room(self):
self.ensure_requester_left_room()
self.helper.join(room=self.room_id, user=self.requester, tok=self.requester_tok)
self.try_fetch_profile(200, self.requester_tok)
def try_fetch_profile(self, expected_code, access_token=None):
self.request_profile(expected_code, access_token=access_token)
self.request_profile(
expected_code, url_suffix="/displayname", access_token=access_token
)
self.request_profile(
expected_code, url_suffix="/avatar_url", access_token=access_token
)
def request_profile(self, expected_code, url_suffix="", access_token=None):
request, channel = self.make_request(
"GET", self.profile_url + url_suffix, access_token=access_token
)
self.assertEqual(channel.code, expected_code, channel.result)
def ensure_requester_left_room(self):
try:
self.helper.leave(
room=self.room_id, user=self.requester, tok=self.requester_tok
)
except AssertionError:
# We don't care whether the leave request didn't return a 200 (e.g.
# if the user isn't already in the room), because we only want to
# make sure the user isn't in the room.
pass
class OwnProfileUnrestrictedTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def make_homeserver(self, reactor, clock):
config = self.default_config()
config["require_auth_for_profile_requests"] = True
config["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(config=config)
return self.hs
def prepare(self, reactor, clock, hs):
# User requesting the profile.
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
def test_can_lookup_own_profile(self):
"""Tests that a user can lookup their own profile without having to be in a room
if 'require_auth_for_profile_requests' is set to true in the server's config.
"""
request, channel = self.make_request(
"GET", "/profile/" + self.requester, access_token=self.requester_tok
)
self.assertEqual(channel.code, 200, channel.result)
request, channel = self.make_request(
"GET",
"/profile/" + self.requester + "/displayname",
access_token=self.requester_tok,
)
self.assertEqual(channel.code, 200, channel.result)
request, channel = self.make_request(
"GET",
"/profile/" + self.requester + "/avatar_url",
access_token=self.requester_tok,
)
self.assertEqual(channel.code, 200, channel.result)
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests REST events for /profile paths."""
import json
from mock import Mock
from twisted.internet import defer
import synapse.types
from synapse.api.errors import AuthError, SynapseError
from synapse.rest import admin
from synapse.rest.client.v1 import login, profile, room
from tests import unittest
from ....utils import MockHttpResource, setup_test_homeserver
myid = "@1234ABCD:test"
PATH_PREFIX = "/_matrix/client/r0"
class MockHandlerProfileTestCase(unittest.TestCase):
""" Tests rest layer of profile management.
Todo: move these into ProfileTestCase
"""
@defer.inlineCallbacks
def setUp(self):
self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
self.mock_handler = Mock(
spec=[
"get_displayname",
"set_displayname",
"get_avatar_url",
"set_avatar_url",
"check_profile_query_allowed",
]
)
self.mock_handler.get_displayname.return_value = defer.succeed(Mock())
self.mock_handler.set_displayname.return_value = defer.succeed(Mock())
self.mock_handler.get_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.set_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.check_profile_query_allowed.return_value = defer.succeed(
Mock()
)
hs = yield setup_test_homeserver(
self.addCleanup,
"test",
federation_http_client=None,
resource_for_client=self.mock_resource,
federation=Mock(),
federation_client=Mock(),
profile_handler=self.mock_handler,
)
async def _get_user_by_req(request=None, allow_guest=False):
return synapse.types.create_requester(myid)
hs.get_auth().get_user_by_req = _get_user_by_req
profile.register_servlets(hs, self.mock_resource)
@defer.inlineCallbacks
def test_get_my_name(self):
mocked_get = self.mock_handler.get_displayname
mocked_get.return_value = defer.succeed("Frank")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % (myid), None
)
self.assertEquals(200, code)
self.assertEquals({"displayname": "Frank"}, response)
self.assertEquals(mocked_get.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def test_set_my_name(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.return_value = defer.succeed(())
(code, response) = yield self.mock_resource.trigger(
"PUT", "/profile/%s/displayname" % (myid), b'{"displayname": "Frank Jr."}'
)
self.assertEquals(200, code)
self.assertEquals(mocked_set.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][2], "Frank Jr.")
@defer.inlineCallbacks
def test_set_my_name_noauth(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.side_effect = AuthError(400, "message")
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@4567:test"),
b'{"displayname": "Frank Jr."}',
)
self.assertTrue(400 <= code < 499, msg="code %d is in the 4xx range" % (code))
@defer.inlineCallbacks
def test_get_other_name(self):
mocked_get = self.mock_handler.get_displayname
mocked_get.return_value = defer.succeed("Bob")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % ("@opaque:elsewhere"), None
)
self.assertEquals(200, code)
self.assertEquals({"displayname": "Bob"}, response)
@defer.inlineCallbacks
def test_set_other_name(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.side_effect = SynapseError(400, "message")
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@opaque:elsewhere"),
b'{"displayname":"bob"}',
)
self.assertTrue(400 <= code <= 499, msg="code %d is in the 4xx range" % (code))
@defer.inlineCallbacks
def test_get_my_avatar(self):
mocked_get = self.mock_handler.get_avatar_url
mocked_get.return_value = defer.succeed("http://my.server/me.png")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/avatar_url" % (myid), None
)
self.assertEquals(200, code)
self.assertEquals({"avatar_url": "http://my.server/me.png"}, response)
self.assertEquals(mocked_get.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def test_set_my_avatar(self):
mocked_set = self.mock_handler.set_avatar_url
mocked_set.return_value = defer.succeed(())
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/avatar_url" % (myid),
b'{"avatar_url": "http://my.server/pic.gif"}',
)
self.assertEquals(200, code)
self.assertEquals(mocked_set.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][2], "http://my.server/pic.gif")
class ProfileTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def make_homeserver(self, reactor, clock):
self.hs = self.setup_test_homeserver()
return self.hs
def prepare(self, reactor, clock, hs):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
def test_set_displayname(self):
request, channel = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test"}),
access_token=self.owner_tok,
)
self.assertEqual(channel.code, 200, channel.result)
res = self.get_displayname()
self.assertEqual(res, "test")
def test_set_displayname_too_long(self):
"""Attempts to set a stupid displayname should get a 400"""
request, channel = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test" * 100}),
access_token=self.owner_tok,
)
self.assertEqual(channel.code, 400, channel.result)
res = self.get_displayname()
self.assertEqual(res, "owner")
def get_displayname(self):
request, channel = self.make_request(
"GET", "/profile/%s/displayname" % (self.owner,)
)
self.assertEqual(channel.code, 200, channel.result)
return channel.json_body["displayname"]
class ProfilesRestrictedTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
room.register_servlets,
]
def make_homeserver(self, reactor, clock):
config = self.default_config()
config["require_auth_for_profile_requests"] = True
config["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(config=config)
return self.hs
def prepare(self, reactor, clock, hs):
# User owning the requested profile.
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
self.profile_url = "/profile/%s" % (self.owner)
# User requesting the profile.
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
self.room_id = self.helper.create_room_as(self.owner, tok=self.owner_tok)
def test_no_auth(self):
self.try_fetch_profile(401)
def test_not_in_shared_room(self):
self.ensure_requester_left_room()
self.try_fetch_profile(403, access_token=self.requester_tok)
def test_in_shared_room(self):
self.ensure_requester_left_room()
self.helper.join(room=self.room_id, user=self.requester, tok=self.requester_tok)
self.try_fetch_profile(200, self.requester_tok)
def try_fetch_profile(self, expected_code, access_token=None):
self.request_profile(expected_code, access_token=access_token)
self.request_profile(
expected_code, url_suffix="/displayname", access_token=access_token
)
self.request_profile(
expected_code, url_suffix="/avatar_url", access_token=access_token
)
def request_profile(self, expected_code, url_suffix="", access_token=None):
request, channel = self.make_request(
"GET", self.profile_url + url_suffix, access_token=access_token
)
self.assertEqual(channel.code, expected_code, channel.result)
def ensure_requester_left_room(self):
try:
self.helper.leave(
room=self.room_id, user=self.requester, tok=self.requester_tok
)
except AssertionError:
# We don't care whether the leave request didn't return a 200 (e.g.
# if the user isn't already in the room), because we only want to
# make sure the user isn't in the room.
pass
class OwnProfileUnrestrictedTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def make_homeserver(self, reactor, clock):
config = self.default_config()
config["require_auth_for_profile_requests"] = True
config["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(config=config)
return self.hs
def prepare(self, reactor, clock, hs):
# User requesting the profile.
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
def test_can_lookup_own_profile(self):
"""Tests that a user can lookup their own profile without having to be in a room
if 'require_auth_for_profile_requests' is set to true in the server's config.
"""
request, channel = self.make_request(
"GET", "/profile/" + self.requester, access_token=self.requester_tok
)
self.assertEqual(channel.code, 200, channel.result)
request, channel = self.make_request(
"GET",
"/profile/" + self.requester + "/displayname",
access_token=self.requester_tok,
)
self.assertEqual(channel.code, 200, channel.result)
request, channel = self.make_request(
"GET",
"/profile/" + self.requester + "/avatar_url",
access_token=self.requester_tok,
)
self.assertEqual(channel.code, 200, channel.result)
| open_redirect | {
"code": [
" http_client=None,"
],
"line_no": [
66
]
} | {
"code": [
" federation_http_client=None,"
],
"line_no": [
66
]
} |
import json
from mock import Mock
from twisted.internet import defer
import synapse.types
from synapse.api.errors import AuthError, SynapseError
from synapse.rest import admin
from synapse.rest.client.v1 import login, profile, room
from tests import unittest
from ....utils import MockHttpResource, setup_test_homeserver
VAR_0 = "@1234ABCD:test"
VAR_1 = "/_matrix/client/r0"
class CLASS_0(unittest.TestCase):
@defer.inlineCallbacks
def FUNC_0(self):
self.mock_resource = MockHttpResource(prefix=VAR_1)
self.mock_handler = Mock(
spec=[
"get_displayname",
"set_displayname",
"get_avatar_url",
"set_avatar_url",
"check_profile_query_allowed",
]
)
self.mock_handler.get_displayname.return_value = defer.succeed(Mock())
self.mock_handler.set_displayname.return_value = defer.succeed(Mock())
self.mock_handler.get_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.set_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.check_profile_query_allowed.return_value = defer.succeed(
Mock()
)
VAR_5 = yield setup_test_homeserver(
self.addCleanup,
"test",
http_client=None,
resource_for_client=self.mock_resource,
federation=Mock(),
federation_client=Mock(),
profile_handler=self.mock_handler,
)
async def FUNC_20(VAR_9=None, VAR_10=False):
return synapse.types.create_requester(VAR_0)
VAR_5.get_auth().get_user_by_req = FUNC_20
profile.register_servlets(VAR_5, self.mock_resource)
@defer.inlineCallbacks
def FUNC_1(self):
VAR_11 = self.mock_handler.get_displayname
VAR_11.return_value = defer.succeed("Frank")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % (VAR_0), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"displayname": "Frank"}, VAR_13)
self.assertEquals(VAR_11.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def FUNC_2(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.return_value = defer.succeed(())
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT", "/profile/%s/displayname" % (VAR_0), b'{"displayname": "Frank Jr."}'
)
self.assertEquals(200, VAR_12)
self.assertEquals(VAR_14.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][2], "Frank Jr.")
@defer.inlineCallbacks
def FUNC_3(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.side_effect = AuthError(400, "message")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@4567:test"),
b'{"displayname": "Frank Jr."}',
)
self.assertTrue(400 <= VAR_12 < 499, msg="code %d is in the 4xx range" % (VAR_12))
@defer.inlineCallbacks
def FUNC_4(self):
VAR_11 = self.mock_handler.get_displayname
VAR_11.return_value = defer.succeed("Bob")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % ("@opaque:elsewhere"), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"displayname": "Bob"}, VAR_13)
@defer.inlineCallbacks
def FUNC_5(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.side_effect = SynapseError(400, "message")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@opaque:elsewhere"),
b'{"displayname":"bob"}',
)
self.assertTrue(400 <= VAR_12 <= 499, msg="code %d is in the 4xx range" % (VAR_12))
@defer.inlineCallbacks
def FUNC_6(self):
VAR_11 = self.mock_handler.get_avatar_url
VAR_11.return_value = defer.succeed("http://my.server/me.png")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/avatar_url" % (VAR_0), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"avatar_url": "http://my.server/me.png"}, VAR_13)
self.assertEquals(VAR_11.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def FUNC_7(self):
VAR_14 = self.mock_handler.set_avatar_url
VAR_14.return_value = defer.succeed(())
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/avatar_url" % (VAR_0),
b'{"avatar_url": "http://my.server/pic.gif"}',
)
self.assertEquals(200, VAR_12)
self.assertEquals(VAR_14.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][2], "http://my.server/pic.gif")
class CLASS_1(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
self.hs = self.setup_test_homeserver()
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
def FUNC_10(self):
VAR_9, VAR_15 = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test"}),
VAR_7=self.owner_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_16 = self.get_displayname()
self.assertEqual(VAR_16, "test")
def FUNC_11(self):
VAR_9, VAR_15 = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test" * 100}),
VAR_7=self.owner_tok,
)
self.assertEqual(VAR_15.code, 400, VAR_15.result)
VAR_16 = self.get_displayname()
self.assertEqual(VAR_16, "owner")
def FUNC_12(self):
VAR_9, VAR_15 = self.make_request(
"GET", "/profile/%s/displayname" % (self.owner,)
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
return VAR_15.json_body["displayname"]
class CLASS_2(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
room.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
VAR_17 = self.default_config()
VAR_17["require_auth_for_profile_requests"] = True
VAR_17["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(VAR_17=config)
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
self.profile_url = "/profile/%s" % (self.owner)
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
self.room_id = self.helper.create_room_as(self.owner, tok=self.owner_tok)
def FUNC_13(self):
self.try_fetch_profile(401)
def FUNC_14(self):
self.ensure_requester_left_room()
self.try_fetch_profile(403, VAR_7=self.requester_tok)
def FUNC_15(self):
self.ensure_requester_left_room()
self.helper.join(room=self.room_id, user=self.requester, tok=self.requester_tok)
self.try_fetch_profile(200, self.requester_tok)
def FUNC_16(self, VAR_6, VAR_7=None):
self.request_profile(VAR_6, VAR_7=access_token)
self.request_profile(
VAR_6, VAR_8="/displayname", VAR_7=access_token
)
self.request_profile(
VAR_6, VAR_8="/avatar_url", VAR_7=access_token
)
def FUNC_17(self, VAR_6, VAR_8="", VAR_7=None):
VAR_9, VAR_15 = self.make_request(
"GET", self.profile_url + VAR_8, VAR_7=access_token
)
self.assertEqual(VAR_15.code, VAR_6, VAR_15.result)
def FUNC_18(self):
try:
self.helper.leave(
room=self.room_id, user=self.requester, tok=self.requester_tok
)
except AssertionError:
pass
class CLASS_3(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
VAR_17 = self.default_config()
VAR_17["require_auth_for_profile_requests"] = True
VAR_17["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(VAR_17=config)
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
def FUNC_19(self):
VAR_9, VAR_15 = self.make_request(
"GET", "/profile/" + self.requester, VAR_7=self.requester_tok
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_9, VAR_15 = self.make_request(
"GET",
"/profile/" + self.requester + "/displayname",
VAR_7=self.requester_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_9, VAR_15 = self.make_request(
"GET",
"/profile/" + self.requester + "/avatar_url",
VAR_7=self.requester_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
|
import json
from mock import Mock
from twisted.internet import defer
import synapse.types
from synapse.api.errors import AuthError, SynapseError
from synapse.rest import admin
from synapse.rest.client.v1 import login, profile, room
from tests import unittest
from ....utils import MockHttpResource, setup_test_homeserver
VAR_0 = "@1234ABCD:test"
VAR_1 = "/_matrix/client/r0"
class CLASS_0(unittest.TestCase):
@defer.inlineCallbacks
def FUNC_0(self):
self.mock_resource = MockHttpResource(prefix=VAR_1)
self.mock_handler = Mock(
spec=[
"get_displayname",
"set_displayname",
"get_avatar_url",
"set_avatar_url",
"check_profile_query_allowed",
]
)
self.mock_handler.get_displayname.return_value = defer.succeed(Mock())
self.mock_handler.set_displayname.return_value = defer.succeed(Mock())
self.mock_handler.get_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.set_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.check_profile_query_allowed.return_value = defer.succeed(
Mock()
)
VAR_5 = yield setup_test_homeserver(
self.addCleanup,
"test",
federation_http_client=None,
resource_for_client=self.mock_resource,
federation=Mock(),
federation_client=Mock(),
profile_handler=self.mock_handler,
)
async def FUNC_20(VAR_9=None, VAR_10=False):
return synapse.types.create_requester(VAR_0)
VAR_5.get_auth().get_user_by_req = FUNC_20
profile.register_servlets(VAR_5, self.mock_resource)
@defer.inlineCallbacks
def FUNC_1(self):
VAR_11 = self.mock_handler.get_displayname
VAR_11.return_value = defer.succeed("Frank")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % (VAR_0), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"displayname": "Frank"}, VAR_13)
self.assertEquals(VAR_11.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def FUNC_2(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.return_value = defer.succeed(())
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT", "/profile/%s/displayname" % (VAR_0), b'{"displayname": "Frank Jr."}'
)
self.assertEquals(200, VAR_12)
self.assertEquals(VAR_14.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][2], "Frank Jr.")
@defer.inlineCallbacks
def FUNC_3(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.side_effect = AuthError(400, "message")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@4567:test"),
b'{"displayname": "Frank Jr."}',
)
self.assertTrue(400 <= VAR_12 < 499, msg="code %d is in the 4xx range" % (VAR_12))
@defer.inlineCallbacks
def FUNC_4(self):
VAR_11 = self.mock_handler.get_displayname
VAR_11.return_value = defer.succeed("Bob")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % ("@opaque:elsewhere"), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"displayname": "Bob"}, VAR_13)
@defer.inlineCallbacks
def FUNC_5(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.side_effect = SynapseError(400, "message")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@opaque:elsewhere"),
b'{"displayname":"bob"}',
)
self.assertTrue(400 <= VAR_12 <= 499, msg="code %d is in the 4xx range" % (VAR_12))
@defer.inlineCallbacks
def FUNC_6(self):
VAR_11 = self.mock_handler.get_avatar_url
VAR_11.return_value = defer.succeed("http://my.server/me.png")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/avatar_url" % (VAR_0), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"avatar_url": "http://my.server/me.png"}, VAR_13)
self.assertEquals(VAR_11.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def FUNC_7(self):
VAR_14 = self.mock_handler.set_avatar_url
VAR_14.return_value = defer.succeed(())
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/avatar_url" % (VAR_0),
b'{"avatar_url": "http://my.server/pic.gif"}',
)
self.assertEquals(200, VAR_12)
self.assertEquals(VAR_14.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][2], "http://my.server/pic.gif")
class CLASS_1(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
self.hs = self.setup_test_homeserver()
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
def FUNC_10(self):
VAR_9, VAR_15 = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test"}),
VAR_7=self.owner_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_16 = self.get_displayname()
self.assertEqual(VAR_16, "test")
def FUNC_11(self):
VAR_9, VAR_15 = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test" * 100}),
VAR_7=self.owner_tok,
)
self.assertEqual(VAR_15.code, 400, VAR_15.result)
VAR_16 = self.get_displayname()
self.assertEqual(VAR_16, "owner")
def FUNC_12(self):
VAR_9, VAR_15 = self.make_request(
"GET", "/profile/%s/displayname" % (self.owner,)
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
return VAR_15.json_body["displayname"]
class CLASS_2(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
room.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
VAR_17 = self.default_config()
VAR_17["require_auth_for_profile_requests"] = True
VAR_17["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(VAR_17=config)
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
self.profile_url = "/profile/%s" % (self.owner)
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
self.room_id = self.helper.create_room_as(self.owner, tok=self.owner_tok)
def FUNC_13(self):
self.try_fetch_profile(401)
def FUNC_14(self):
self.ensure_requester_left_room()
self.try_fetch_profile(403, VAR_7=self.requester_tok)
def FUNC_15(self):
self.ensure_requester_left_room()
self.helper.join(room=self.room_id, user=self.requester, tok=self.requester_tok)
self.try_fetch_profile(200, self.requester_tok)
def FUNC_16(self, VAR_6, VAR_7=None):
self.request_profile(VAR_6, VAR_7=access_token)
self.request_profile(
VAR_6, VAR_8="/displayname", VAR_7=access_token
)
self.request_profile(
VAR_6, VAR_8="/avatar_url", VAR_7=access_token
)
def FUNC_17(self, VAR_6, VAR_8="", VAR_7=None):
VAR_9, VAR_15 = self.make_request(
"GET", self.profile_url + VAR_8, VAR_7=access_token
)
self.assertEqual(VAR_15.code, VAR_6, VAR_15.result)
def FUNC_18(self):
try:
self.helper.leave(
room=self.room_id, user=self.requester, tok=self.requester_tok
)
except AssertionError:
pass
class CLASS_3(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
VAR_17 = self.default_config()
VAR_17["require_auth_for_profile_requests"] = True
VAR_17["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(VAR_17=config)
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
def FUNC_19(self):
VAR_9, VAR_15 = self.make_request(
"GET", "/profile/" + self.requester, VAR_7=self.requester_tok
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_9, VAR_15 = self.make_request(
"GET",
"/profile/" + self.requester + "/displayname",
VAR_7=self.requester_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_9, VAR_15 = self.make_request(
"GET",
"/profile/" + self.requester + "/avatar_url",
VAR_7=self.requester_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
18,
20,
22,
27,
29,
31,
34,
35,
38,
41,
54,
62,
72,
75,
77,
79,
84,
88,
92,
97,
101,
106,
111,
117,
119,
124,
128,
131,
136,
142,
144,
149,
153,
157,
162,
168,
173,
174,
176,
182,
186,
190,
199,
202,
212,
215,
222,
223,
225,
232,
234,
239,
241,
243,
247,
248,
251,
253,
256,
259,
261,
264,
266,
268,
271,
275,
279,
285,
292,
293,
294,
296,
297,
299,
305,
311,
313,
315,
318,
327,
334,
341,
16,
37,
38,
39,
40,
204,
320,
321,
322
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
18,
20,
22,
27,
29,
31,
34,
35,
38,
41,
54,
62,
72,
75,
77,
79,
84,
88,
92,
97,
101,
106,
111,
117,
119,
124,
128,
131,
136,
142,
144,
149,
153,
157,
162,
168,
173,
174,
176,
182,
186,
190,
199,
202,
212,
215,
222,
223,
225,
232,
234,
239,
241,
243,
247,
248,
251,
253,
256,
259,
261,
264,
266,
268,
271,
275,
279,
285,
292,
293,
294,
296,
297,
299,
305,
311,
313,
315,
318,
327,
334,
341,
16,
37,
38,
39,
40,
204,
320,
321,
322
] |
1CWE-79
| # -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import email.mime.multipart
import email.utils
import logging
import urllib.parse
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, TypeVar
import bleach
import jinja2
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import StoreError
from synapse.config.emailconfig import EmailSubjectConfig
from synapse.events import EventBase
from synapse.logging.context import make_deferred_yieldable
from synapse.push.presentable_names import (
calculate_room_name,
descriptor_from_member_events,
name_from_member_event,
)
from synapse.types import StateMap, UserID
from synapse.util.async_helpers import concurrently_execute
from synapse.visibility import filter_events_for_client
if TYPE_CHECKING:
from synapse.app.homeserver import HomeServer
logger = logging.getLogger(__name__)
T = TypeVar("T")
CONTEXT_BEFORE = 1
CONTEXT_AFTER = 1
# From https://github.com/matrix-org/matrix-react-sdk/blob/master/src/HtmlUtils.js
ALLOWED_TAGS = [
"font", # custom to matrix for IRC-style font coloring
"del", # for markdown
# deliberately no h1/h2 to stop people shouting.
"h3",
"h4",
"h5",
"h6",
"blockquote",
"p",
"a",
"ul",
"ol",
"nl",
"li",
"b",
"i",
"u",
"strong",
"em",
"strike",
"code",
"hr",
"br",
"div",
"table",
"thead",
"caption",
"tbody",
"tr",
"th",
"td",
"pre",
]
ALLOWED_ATTRS = {
# custom ones first:
"font": ["color"], # custom to matrix
"a": ["href", "name", "target"], # remote target: custom to matrix
# We don't currently allow img itself by default, but this
# would make sense if we did
"img": ["src"],
}
# When bleach release a version with this option, we can specify schemes
# ALLOWED_SCHEMES = ["http", "https", "ftp", "mailto"]
class Mailer:
def __init__(
self,
hs: "HomeServer",
app_name: str,
template_html: jinja2.Template,
template_text: jinja2.Template,
):
self.hs = hs
self.template_html = template_html
self.template_text = template_text
self.sendmail = self.hs.get_sendmail()
self.store = self.hs.get_datastore()
self.macaroon_gen = self.hs.get_macaroon_generator()
self.state_handler = self.hs.get_state_handler()
self.storage = hs.get_storage()
self.app_name = app_name
self.email_subjects = hs.config.email_subjects # type: EmailSubjectConfig
logger.info("Created Mailer for app_name %s" % app_name)
async def send_password_reset_mail(
self, email_address: str, token: str, client_secret: str, sid: str
) -> None:
"""Send an email with a password reset link to a user
Args:
email_address: Email address we're sending the password
reset to
token: Unique token generated by the server to verify
the email was received
client_secret: Unique token generated by the client to
group together multiple email sending attempts
sid: The generated session ID
"""
params = {"token": token, "client_secret": client_secret, "sid": sid}
link = (
self.hs.config.public_baseurl
+ "_synapse/client/password_reset/email/submit_token?%s"
% urllib.parse.urlencode(params)
)
template_vars = {"link": link}
await self.send_email(
email_address,
self.email_subjects.password_reset
% {"server_name": self.hs.config.server_name},
template_vars,
)
async def send_registration_mail(
self, email_address: str, token: str, client_secret: str, sid: str
) -> None:
"""Send an email with a registration confirmation link to a user
Args:
email_address: Email address we're sending the registration
link to
token: Unique token generated by the server to verify
the email was received
client_secret: Unique token generated by the client to
group together multiple email sending attempts
sid: The generated session ID
"""
params = {"token": token, "client_secret": client_secret, "sid": sid}
link = (
self.hs.config.public_baseurl
+ "_matrix/client/unstable/registration/email/submit_token?%s"
% urllib.parse.urlencode(params)
)
template_vars = {"link": link}
await self.send_email(
email_address,
self.email_subjects.email_validation
% {"server_name": self.hs.config.server_name},
template_vars,
)
async def send_add_threepid_mail(
self, email_address: str, token: str, client_secret: str, sid: str
) -> None:
"""Send an email with a validation link to a user for adding a 3pid to their account
Args:
email_address: Email address we're sending the validation link to
token: Unique token generated by the server to verify the email was received
client_secret: Unique token generated by the client to group together
multiple email sending attempts
sid: The generated session ID
"""
params = {"token": token, "client_secret": client_secret, "sid": sid}
link = (
self.hs.config.public_baseurl
+ "_matrix/client/unstable/add_threepid/email/submit_token?%s"
% urllib.parse.urlencode(params)
)
template_vars = {"link": link}
await self.send_email(
email_address,
self.email_subjects.email_validation
% {"server_name": self.hs.config.server_name},
template_vars,
)
async def send_notification_mail(
self,
app_id: str,
user_id: str,
email_address: str,
push_actions: Iterable[Dict[str, Any]],
reason: Dict[str, Any],
) -> None:
"""Send email regarding a user's room notifications"""
rooms_in_order = deduped_ordered_list([pa["room_id"] for pa in push_actions])
notif_events = await self.store.get_events(
[pa["event_id"] for pa in push_actions]
)
notifs_by_room = {} # type: Dict[str, List[Dict[str, Any]]]
for pa in push_actions:
notifs_by_room.setdefault(pa["room_id"], []).append(pa)
# collect the current state for all the rooms in which we have
# notifications
state_by_room = {}
try:
user_display_name = await self.store.get_profile_displayname(
UserID.from_string(user_id).localpart
)
if user_display_name is None:
user_display_name = user_id
except StoreError:
user_display_name = user_id
async def _fetch_room_state(room_id):
room_state = await self.store.get_current_state_ids(room_id)
state_by_room[room_id] = room_state
# Run at most 3 of these at once: sync does 10 at a time but email
# notifs are much less realtime than sync so we can afford to wait a bit.
await concurrently_execute(_fetch_room_state, rooms_in_order, 3)
# actually sort our so-called rooms_in_order list, most recent room first
rooms_in_order.sort(key=lambda r: -(notifs_by_room[r][-1]["received_ts"] or 0))
rooms = []
for r in rooms_in_order:
roomvars = await self.get_room_vars(
r, user_id, notifs_by_room[r], notif_events, state_by_room[r]
)
rooms.append(roomvars)
reason["room_name"] = await calculate_room_name(
self.store,
state_by_room[reason["room_id"]],
user_id,
fallback_to_members=True,
)
summary_text = await self.make_summary_text(
notifs_by_room, state_by_room, notif_events, user_id, reason
)
template_vars = {
"user_display_name": user_display_name,
"unsubscribe_link": self.make_unsubscribe_link(
user_id, app_id, email_address
),
"summary_text": summary_text,
"rooms": rooms,
"reason": reason,
}
await self.send_email(email_address, summary_text, template_vars)
async def send_email(
self, email_address: str, subject: str, extra_template_vars: Dict[str, Any]
) -> None:
"""Send an email with the given information and template text"""
try:
from_string = self.hs.config.email_notif_from % {"app": self.app_name}
except TypeError:
from_string = self.hs.config.email_notif_from
raw_from = email.utils.parseaddr(from_string)[1]
raw_to = email.utils.parseaddr(email_address)[1]
if raw_to == "":
raise RuntimeError("Invalid 'to' address")
template_vars = {
"app_name": self.app_name,
"server_name": self.hs.config.server.server_name,
}
template_vars.update(extra_template_vars)
html_text = self.template_html.render(**template_vars)
html_part = MIMEText(html_text, "html", "utf8")
plain_text = self.template_text.render(**template_vars)
text_part = MIMEText(plain_text, "plain", "utf8")
multipart_msg = MIMEMultipart("alternative")
multipart_msg["Subject"] = subject
multipart_msg["From"] = from_string
multipart_msg["To"] = email_address
multipart_msg["Date"] = email.utils.formatdate()
multipart_msg["Message-ID"] = email.utils.make_msgid()
multipart_msg.attach(text_part)
multipart_msg.attach(html_part)
logger.info("Sending email to %s" % email_address)
await make_deferred_yieldable(
self.sendmail(
self.hs.config.email_smtp_host,
raw_from,
raw_to,
multipart_msg.as_string().encode("utf8"),
reactor=self.hs.get_reactor(),
port=self.hs.config.email_smtp_port,
requireAuthentication=self.hs.config.email_smtp_user is not None,
username=self.hs.config.email_smtp_user,
password=self.hs.config.email_smtp_pass,
requireTransportSecurity=self.hs.config.require_transport_security,
)
)
async def get_room_vars(
self,
room_id: str,
user_id: str,
notifs: Iterable[Dict[str, Any]],
notif_events: Dict[str, EventBase],
room_state_ids: StateMap[str],
) -> Dict[str, Any]:
# Check if one of the notifs is an invite event for the user.
is_invite = False
for n in notifs:
ev = notif_events[n["event_id"]]
if ev.type == EventTypes.Member and ev.state_key == user_id:
if ev.content.get("membership") == Membership.INVITE:
is_invite = True
break
room_name = await calculate_room_name(self.store, room_state_ids, user_id)
room_vars = {
"title": room_name,
"hash": string_ordinal_total(room_id), # See sender avatar hash
"notifs": [],
"invite": is_invite,
"link": self.make_room_link(room_id),
} # type: Dict[str, Any]
if not is_invite:
for n in notifs:
notifvars = await self.get_notif_vars(
n, user_id, notif_events[n["event_id"]], room_state_ids
)
# merge overlapping notifs together.
# relies on the notifs being in chronological order.
merge = False
if room_vars["notifs"] and "messages" in room_vars["notifs"][-1]:
prev_messages = room_vars["notifs"][-1]["messages"]
for message in notifvars["messages"]:
pm = list(
filter(lambda pm: pm["id"] == message["id"], prev_messages)
)
if pm:
if not message["is_historical"]:
pm[0]["is_historical"] = False
merge = True
elif merge:
# we're merging, so append any remaining messages
# in this notif to the previous one
prev_messages.append(message)
if not merge:
room_vars["notifs"].append(notifvars)
return room_vars
async def get_notif_vars(
self,
notif: Dict[str, Any],
user_id: str,
notif_event: EventBase,
room_state_ids: StateMap[str],
) -> Dict[str, Any]:
results = await self.store.get_events_around(
notif["room_id"],
notif["event_id"],
before_limit=CONTEXT_BEFORE,
after_limit=CONTEXT_AFTER,
)
ret = {
"link": self.make_notif_link(notif),
"ts": notif["received_ts"],
"messages": [],
}
the_events = await filter_events_for_client(
self.storage, user_id, results["events_before"]
)
the_events.append(notif_event)
for event in the_events:
messagevars = await self.get_message_vars(notif, event, room_state_ids)
if messagevars is not None:
ret["messages"].append(messagevars)
return ret
async def get_message_vars(
self, notif: Dict[str, Any], event: EventBase, room_state_ids: StateMap[str]
) -> Optional[Dict[str, Any]]:
if event.type != EventTypes.Message and event.type != EventTypes.Encrypted:
return None
sender_state_event_id = room_state_ids[("m.room.member", event.sender)]
sender_state_event = await self.store.get_event(sender_state_event_id)
sender_name = name_from_member_event(sender_state_event)
sender_avatar_url = sender_state_event.content.get("avatar_url")
# 'hash' for deterministically picking default images: use
# sender_hash % the number of default images to choose from
sender_hash = string_ordinal_total(event.sender)
ret = {
"event_type": event.type,
"is_historical": event.event_id != notif["event_id"],
"id": event.event_id,
"ts": event.origin_server_ts,
"sender_name": sender_name,
"sender_avatar_url": sender_avatar_url,
"sender_hash": sender_hash,
}
# Encrypted messages don't have any additional useful information.
if event.type == EventTypes.Encrypted:
return ret
msgtype = event.content.get("msgtype")
ret["msgtype"] = msgtype
if msgtype == "m.text":
self.add_text_message_vars(ret, event)
elif msgtype == "m.image":
self.add_image_message_vars(ret, event)
if "body" in event.content:
ret["body_text_plain"] = event.content["body"]
return ret
def add_text_message_vars(
self, messagevars: Dict[str, Any], event: EventBase
) -> None:
msgformat = event.content.get("format")
messagevars["format"] = msgformat
formatted_body = event.content.get("formatted_body")
body = event.content.get("body")
if msgformat == "org.matrix.custom.html" and formatted_body:
messagevars["body_text_html"] = safe_markup(formatted_body)
elif body:
messagevars["body_text_html"] = safe_text(body)
def add_image_message_vars(
self, messagevars: Dict[str, Any], event: EventBase
) -> None:
"""
Potentially add an image URL to the message variables.
"""
if "url" in event.content:
messagevars["image_url"] = event.content["url"]
async def make_summary_text(
self,
notifs_by_room: Dict[str, List[Dict[str, Any]]],
room_state_ids: Dict[str, StateMap[str]],
notif_events: Dict[str, EventBase],
user_id: str,
reason: Dict[str, Any],
):
if len(notifs_by_room) == 1:
# Only one room has new stuff
room_id = list(notifs_by_room.keys())[0]
# If the room has some kind of name, use it, but we don't
# want the generated-from-names one here otherwise we'll
# end up with, "new message from Bob in the Bob room"
room_name = await calculate_room_name(
self.store, room_state_ids[room_id], user_id, fallback_to_members=False
)
# See if one of the notifs is an invite event for the user
invite_event = None
for n in notifs_by_room[room_id]:
ev = notif_events[n["event_id"]]
if ev.type == EventTypes.Member and ev.state_key == user_id:
if ev.content.get("membership") == Membership.INVITE:
invite_event = ev
break
if invite_event:
inviter_member_event_id = room_state_ids[room_id].get(
("m.room.member", invite_event.sender)
)
inviter_name = invite_event.sender
if inviter_member_event_id:
inviter_member_event = await self.store.get_event(
inviter_member_event_id, allow_none=True
)
if inviter_member_event:
inviter_name = name_from_member_event(inviter_member_event)
if room_name is None:
return self.email_subjects.invite_from_person % {
"person": inviter_name,
"app": self.app_name,
}
else:
return self.email_subjects.invite_from_person_to_room % {
"person": inviter_name,
"room": room_name,
"app": self.app_name,
}
sender_name = None
if len(notifs_by_room[room_id]) == 1:
# There is just the one notification, so give some detail
event = notif_events[notifs_by_room[room_id][0]["event_id"]]
if ("m.room.member", event.sender) in room_state_ids[room_id]:
state_event_id = room_state_ids[room_id][
("m.room.member", event.sender)
]
state_event = await self.store.get_event(state_event_id)
sender_name = name_from_member_event(state_event)
if sender_name is not None and room_name is not None:
return self.email_subjects.message_from_person_in_room % {
"person": sender_name,
"room": room_name,
"app": self.app_name,
}
elif sender_name is not None:
return self.email_subjects.message_from_person % {
"person": sender_name,
"app": self.app_name,
}
else:
# There's more than one notification for this room, so just
# say there are several
if room_name is not None:
return self.email_subjects.messages_in_room % {
"room": room_name,
"app": self.app_name,
}
else:
# If the room doesn't have a name, say who the messages
# are from explicitly to avoid, "messages in the Bob room"
sender_ids = list(
{
notif_events[n["event_id"]].sender
for n in notifs_by_room[room_id]
}
)
member_events = await self.store.get_events(
[
room_state_ids[room_id][("m.room.member", s)]
for s in sender_ids
]
)
return self.email_subjects.messages_from_person % {
"person": descriptor_from_member_events(member_events.values()),
"app": self.app_name,
}
else:
# Stuff's happened in multiple different rooms
# ...but we still refer to the 'reason' room which triggered the mail
if reason["room_name"] is not None:
return self.email_subjects.messages_in_room_and_others % {
"room": reason["room_name"],
"app": self.app_name,
}
else:
# If the reason room doesn't have a name, say who the messages
# are from explicitly to avoid, "messages in the Bob room"
room_id = reason["room_id"]
sender_ids = list(
{
notif_events[n["event_id"]].sender
for n in notifs_by_room[room_id]
}
)
member_events = await self.store.get_events(
[room_state_ids[room_id][("m.room.member", s)] for s in sender_ids]
)
return self.email_subjects.messages_from_person_and_others % {
"person": descriptor_from_member_events(member_events.values()),
"app": self.app_name,
}
def make_room_link(self, room_id: str) -> str:
if self.hs.config.email_riot_base_url:
base_url = "%s/#/room" % (self.hs.config.email_riot_base_url)
elif self.app_name == "Vector":
# need /beta for Universal Links to work on iOS
base_url = "https://vector.im/beta/#/room"
else:
base_url = "https://matrix.to/#"
return "%s/%s" % (base_url, room_id)
def make_notif_link(self, notif: Dict[str, str]) -> str:
if self.hs.config.email_riot_base_url:
return "%s/#/room/%s/%s" % (
self.hs.config.email_riot_base_url,
notif["room_id"],
notif["event_id"],
)
elif self.app_name == "Vector":
# need /beta for Universal Links to work on iOS
return "https://vector.im/beta/#/room/%s/%s" % (
notif["room_id"],
notif["event_id"],
)
else:
return "https://matrix.to/#/%s/%s" % (notif["room_id"], notif["event_id"])
def make_unsubscribe_link(
self, user_id: str, app_id: str, email_address: str
) -> str:
params = {
"access_token": self.macaroon_gen.generate_delete_pusher_token(user_id),
"app_id": app_id,
"pushkey": email_address,
}
# XXX: make r0 once API is stable
return "%s_matrix/client/unstable/pushers/remove?%s" % (
self.hs.config.public_baseurl,
urllib.parse.urlencode(params),
)
def safe_markup(raw_html: str) -> jinja2.Markup:
return jinja2.Markup(
bleach.linkify(
bleach.clean(
raw_html,
tags=ALLOWED_TAGS,
attributes=ALLOWED_ATTRS,
# bleach master has this, but it isn't released yet
# protocols=ALLOWED_SCHEMES,
strip=True,
)
)
)
def safe_text(raw_text: str) -> jinja2.Markup:
"""
Process text: treat it as HTML but escape any tags (ie. just escape the
HTML) then linkify it.
"""
return jinja2.Markup(
bleach.linkify(bleach.clean(raw_text, tags=[], attributes={}, strip=False))
)
def deduped_ordered_list(it: Iterable[T]) -> List[T]:
seen = set()
ret = []
for item in it:
if item not in seen:
seen.add(item)
ret.append(item)
return ret
def string_ordinal_total(s: str) -> int:
tot = 0
for c in s:
tot += ord(c)
return tot
| # -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import email.mime.multipart
import email.utils
import logging
import urllib.parse
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, TypeVar
import bleach
import jinja2
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import StoreError
from synapse.config.emailconfig import EmailSubjectConfig
from synapse.events import EventBase
from synapse.logging.context import make_deferred_yieldable
from synapse.push.presentable_names import (
calculate_room_name,
descriptor_from_member_events,
name_from_member_event,
)
from synapse.types import StateMap, UserID
from synapse.util.async_helpers import concurrently_execute
from synapse.visibility import filter_events_for_client
if TYPE_CHECKING:
from synapse.app.homeserver import HomeServer
logger = logging.getLogger(__name__)
T = TypeVar("T")
CONTEXT_BEFORE = 1
CONTEXT_AFTER = 1
# From https://github.com/matrix-org/matrix-react-sdk/blob/master/src/HtmlUtils.js
ALLOWED_TAGS = [
"font", # custom to matrix for IRC-style font coloring
"del", # for markdown
# deliberately no h1/h2 to stop people shouting.
"h3",
"h4",
"h5",
"h6",
"blockquote",
"p",
"a",
"ul",
"ol",
"nl",
"li",
"b",
"i",
"u",
"strong",
"em",
"strike",
"code",
"hr",
"br",
"div",
"table",
"thead",
"caption",
"tbody",
"tr",
"th",
"td",
"pre",
]
ALLOWED_ATTRS = {
# custom ones first:
"font": ["color"], # custom to matrix
"a": ["href", "name", "target"], # remote target: custom to matrix
# We don't currently allow img itself by default, but this
# would make sense if we did
"img": ["src"],
}
# When bleach release a version with this option, we can specify schemes
# ALLOWED_SCHEMES = ["http", "https", "ftp", "mailto"]
class Mailer:
def __init__(
self,
hs: "HomeServer",
app_name: str,
template_html: jinja2.Template,
template_text: jinja2.Template,
):
self.hs = hs
self.template_html = template_html
self.template_text = template_text
self.sendmail = self.hs.get_sendmail()
self.store = self.hs.get_datastore()
self.macaroon_gen = self.hs.get_macaroon_generator()
self.state_handler = self.hs.get_state_handler()
self.storage = hs.get_storage()
self.app_name = app_name
self.email_subjects = hs.config.email_subjects # type: EmailSubjectConfig
logger.info("Created Mailer for app_name %s" % app_name)
async def send_password_reset_mail(
self, email_address: str, token: str, client_secret: str, sid: str
) -> None:
"""Send an email with a password reset link to a user
Args:
email_address: Email address we're sending the password
reset to
token: Unique token generated by the server to verify
the email was received
client_secret: Unique token generated by the client to
group together multiple email sending attempts
sid: The generated session ID
"""
params = {"token": token, "client_secret": client_secret, "sid": sid}
link = (
self.hs.config.public_baseurl
+ "_synapse/client/password_reset/email/submit_token?%s"
% urllib.parse.urlencode(params)
)
template_vars = {"link": link}
await self.send_email(
email_address,
self.email_subjects.password_reset
% {"server_name": self.hs.config.server_name},
template_vars,
)
async def send_registration_mail(
self, email_address: str, token: str, client_secret: str, sid: str
) -> None:
"""Send an email with a registration confirmation link to a user
Args:
email_address: Email address we're sending the registration
link to
token: Unique token generated by the server to verify
the email was received
client_secret: Unique token generated by the client to
group together multiple email sending attempts
sid: The generated session ID
"""
params = {"token": token, "client_secret": client_secret, "sid": sid}
link = (
self.hs.config.public_baseurl
+ "_matrix/client/unstable/registration/email/submit_token?%s"
% urllib.parse.urlencode(params)
)
template_vars = {"link": link}
await self.send_email(
email_address,
self.email_subjects.email_validation
% {"server_name": self.hs.config.server_name},
template_vars,
)
async def send_add_threepid_mail(
self, email_address: str, token: str, client_secret: str, sid: str
) -> None:
"""Send an email with a validation link to a user for adding a 3pid to their account
Args:
email_address: Email address we're sending the validation link to
token: Unique token generated by the server to verify the email was received
client_secret: Unique token generated by the client to group together
multiple email sending attempts
sid: The generated session ID
"""
params = {"token": token, "client_secret": client_secret, "sid": sid}
link = (
self.hs.config.public_baseurl
+ "_matrix/client/unstable/add_threepid/email/submit_token?%s"
% urllib.parse.urlencode(params)
)
template_vars = {"link": link}
await self.send_email(
email_address,
self.email_subjects.email_validation
% {"server_name": self.hs.config.server_name},
template_vars,
)
async def send_notification_mail(
self,
app_id: str,
user_id: str,
email_address: str,
push_actions: Iterable[Dict[str, Any]],
reason: Dict[str, Any],
) -> None:
"""Send email regarding a user's room notifications"""
rooms_in_order = deduped_ordered_list([pa["room_id"] for pa in push_actions])
notif_events = await self.store.get_events(
[pa["event_id"] for pa in push_actions]
)
notifs_by_room = {} # type: Dict[str, List[Dict[str, Any]]]
for pa in push_actions:
notifs_by_room.setdefault(pa["room_id"], []).append(pa)
# collect the current state for all the rooms in which we have
# notifications
state_by_room = {}
try:
user_display_name = await self.store.get_profile_displayname(
UserID.from_string(user_id).localpart
)
if user_display_name is None:
user_display_name = user_id
except StoreError:
user_display_name = user_id
async def _fetch_room_state(room_id):
room_state = await self.store.get_current_state_ids(room_id)
state_by_room[room_id] = room_state
# Run at most 3 of these at once: sync does 10 at a time but email
# notifs are much less realtime than sync so we can afford to wait a bit.
await concurrently_execute(_fetch_room_state, rooms_in_order, 3)
# actually sort our so-called rooms_in_order list, most recent room first
rooms_in_order.sort(key=lambda r: -(notifs_by_room[r][-1]["received_ts"] or 0))
rooms = []
for r in rooms_in_order:
roomvars = await self.get_room_vars(
r, user_id, notifs_by_room[r], notif_events, state_by_room[r]
)
rooms.append(roomvars)
reason["room_name"] = await calculate_room_name(
self.store,
state_by_room[reason["room_id"]],
user_id,
fallback_to_members=True,
)
summary_text = await self.make_summary_text(
notifs_by_room, state_by_room, notif_events, user_id, reason
)
template_vars = {
"user_display_name": user_display_name,
"unsubscribe_link": self.make_unsubscribe_link(
user_id, app_id, email_address
),
"summary_text": summary_text,
"rooms": rooms,
"reason": reason,
}
await self.send_email(email_address, summary_text, template_vars)
async def send_email(
self, email_address: str, subject: str, extra_template_vars: Dict[str, Any]
) -> None:
"""Send an email with the given information and template text"""
try:
from_string = self.hs.config.email_notif_from % {"app": self.app_name}
except TypeError:
from_string = self.hs.config.email_notif_from
raw_from = email.utils.parseaddr(from_string)[1]
raw_to = email.utils.parseaddr(email_address)[1]
if raw_to == "":
raise RuntimeError("Invalid 'to' address")
template_vars = {
"app_name": self.app_name,
"server_name": self.hs.config.server.server_name,
}
template_vars.update(extra_template_vars)
html_text = self.template_html.render(**template_vars)
html_part = MIMEText(html_text, "html", "utf8")
plain_text = self.template_text.render(**template_vars)
text_part = MIMEText(plain_text, "plain", "utf8")
multipart_msg = MIMEMultipart("alternative")
multipart_msg["Subject"] = subject
multipart_msg["From"] = from_string
multipart_msg["To"] = email_address
multipart_msg["Date"] = email.utils.formatdate()
multipart_msg["Message-ID"] = email.utils.make_msgid()
multipart_msg.attach(text_part)
multipart_msg.attach(html_part)
logger.info("Sending email to %s" % email_address)
await make_deferred_yieldable(
self.sendmail(
self.hs.config.email_smtp_host,
raw_from,
raw_to,
multipart_msg.as_string().encode("utf8"),
reactor=self.hs.get_reactor(),
port=self.hs.config.email_smtp_port,
requireAuthentication=self.hs.config.email_smtp_user is not None,
username=self.hs.config.email_smtp_user,
password=self.hs.config.email_smtp_pass,
requireTransportSecurity=self.hs.config.require_transport_security,
)
)
async def get_room_vars(
self,
room_id: str,
user_id: str,
notifs: Iterable[Dict[str, Any]],
notif_events: Dict[str, EventBase],
room_state_ids: StateMap[str],
) -> Dict[str, Any]:
# Check if one of the notifs is an invite event for the user.
is_invite = False
for n in notifs:
ev = notif_events[n["event_id"]]
if ev.type == EventTypes.Member and ev.state_key == user_id:
if ev.content.get("membership") == Membership.INVITE:
is_invite = True
break
room_name = await calculate_room_name(self.store, room_state_ids, user_id)
room_vars = {
"title": room_name,
"hash": string_ordinal_total(room_id), # See sender avatar hash
"notifs": [],
"invite": is_invite,
"link": self.make_room_link(room_id),
} # type: Dict[str, Any]
if not is_invite:
for n in notifs:
notifvars = await self.get_notif_vars(
n, user_id, notif_events[n["event_id"]], room_state_ids
)
# merge overlapping notifs together.
# relies on the notifs being in chronological order.
merge = False
if room_vars["notifs"] and "messages" in room_vars["notifs"][-1]:
prev_messages = room_vars["notifs"][-1]["messages"]
for message in notifvars["messages"]:
pm = list(
filter(lambda pm: pm["id"] == message["id"], prev_messages)
)
if pm:
if not message["is_historical"]:
pm[0]["is_historical"] = False
merge = True
elif merge:
# we're merging, so append any remaining messages
# in this notif to the previous one
prev_messages.append(message)
if not merge:
room_vars["notifs"].append(notifvars)
return room_vars
async def get_notif_vars(
self,
notif: Dict[str, Any],
user_id: str,
notif_event: EventBase,
room_state_ids: StateMap[str],
) -> Dict[str, Any]:
results = await self.store.get_events_around(
notif["room_id"],
notif["event_id"],
before_limit=CONTEXT_BEFORE,
after_limit=CONTEXT_AFTER,
)
ret = {
"link": self.make_notif_link(notif),
"ts": notif["received_ts"],
"messages": [],
}
the_events = await filter_events_for_client(
self.storage, user_id, results["events_before"]
)
the_events.append(notif_event)
for event in the_events:
messagevars = await self.get_message_vars(notif, event, room_state_ids)
if messagevars is not None:
ret["messages"].append(messagevars)
return ret
async def get_message_vars(
self, notif: Dict[str, Any], event: EventBase, room_state_ids: StateMap[str]
) -> Optional[Dict[str, Any]]:
if event.type != EventTypes.Message and event.type != EventTypes.Encrypted:
return None
sender_state_event_id = room_state_ids[("m.room.member", event.sender)]
sender_state_event = await self.store.get_event(sender_state_event_id)
sender_name = name_from_member_event(sender_state_event)
sender_avatar_url = sender_state_event.content.get("avatar_url")
# 'hash' for deterministically picking default images: use
# sender_hash % the number of default images to choose from
sender_hash = string_ordinal_total(event.sender)
ret = {
"event_type": event.type,
"is_historical": event.event_id != notif["event_id"],
"id": event.event_id,
"ts": event.origin_server_ts,
"sender_name": sender_name,
"sender_avatar_url": sender_avatar_url,
"sender_hash": sender_hash,
}
# Encrypted messages don't have any additional useful information.
if event.type == EventTypes.Encrypted:
return ret
msgtype = event.content.get("msgtype")
ret["msgtype"] = msgtype
if msgtype == "m.text":
self.add_text_message_vars(ret, event)
elif msgtype == "m.image":
self.add_image_message_vars(ret, event)
if "body" in event.content:
ret["body_text_plain"] = event.content["body"]
return ret
def add_text_message_vars(
self, messagevars: Dict[str, Any], event: EventBase
) -> None:
msgformat = event.content.get("format")
messagevars["format"] = msgformat
formatted_body = event.content.get("formatted_body")
body = event.content.get("body")
if msgformat == "org.matrix.custom.html" and formatted_body:
messagevars["body_text_html"] = safe_markup(formatted_body)
elif body:
messagevars["body_text_html"] = safe_text(body)
def add_image_message_vars(
self, messagevars: Dict[str, Any], event: EventBase
) -> None:
"""
Potentially add an image URL to the message variables.
"""
if "url" in event.content:
messagevars["image_url"] = event.content["url"]
async def make_summary_text(
self,
notifs_by_room: Dict[str, List[Dict[str, Any]]],
room_state_ids: Dict[str, StateMap[str]],
notif_events: Dict[str, EventBase],
user_id: str,
reason: Dict[str, Any],
):
if len(notifs_by_room) == 1:
# Only one room has new stuff
room_id = list(notifs_by_room.keys())[0]
# If the room has some kind of name, use it, but we don't
# want the generated-from-names one here otherwise we'll
# end up with, "new message from Bob in the Bob room"
room_name = await calculate_room_name(
self.store, room_state_ids[room_id], user_id, fallback_to_members=False
)
# See if one of the notifs is an invite event for the user
invite_event = None
for n in notifs_by_room[room_id]:
ev = notif_events[n["event_id"]]
if ev.type == EventTypes.Member and ev.state_key == user_id:
if ev.content.get("membership") == Membership.INVITE:
invite_event = ev
break
if invite_event:
inviter_member_event_id = room_state_ids[room_id].get(
("m.room.member", invite_event.sender)
)
inviter_name = invite_event.sender
if inviter_member_event_id:
inviter_member_event = await self.store.get_event(
inviter_member_event_id, allow_none=True
)
if inviter_member_event:
inviter_name = name_from_member_event(inviter_member_event)
if room_name is None:
return self.email_subjects.invite_from_person % {
"person": inviter_name,
"app": self.app_name,
}
else:
return self.email_subjects.invite_from_person_to_room % {
"person": inviter_name,
"room": room_name,
"app": self.app_name,
}
sender_name = None
if len(notifs_by_room[room_id]) == 1:
# There is just the one notification, so give some detail
event = notif_events[notifs_by_room[room_id][0]["event_id"]]
if ("m.room.member", event.sender) in room_state_ids[room_id]:
state_event_id = room_state_ids[room_id][
("m.room.member", event.sender)
]
state_event = await self.store.get_event(state_event_id)
sender_name = name_from_member_event(state_event)
if sender_name is not None and room_name is not None:
return self.email_subjects.message_from_person_in_room % {
"person": sender_name,
"room": room_name,
"app": self.app_name,
}
elif sender_name is not None:
return self.email_subjects.message_from_person % {
"person": sender_name,
"app": self.app_name,
}
else:
# There's more than one notification for this room, so just
# say there are several
if room_name is not None:
return self.email_subjects.messages_in_room % {
"room": room_name,
"app": self.app_name,
}
else:
# If the room doesn't have a name, say who the messages
# are from explicitly to avoid, "messages in the Bob room"
sender_ids = list(
{
notif_events[n["event_id"]].sender
for n in notifs_by_room[room_id]
}
)
member_events = await self.store.get_events(
[
room_state_ids[room_id][("m.room.member", s)]
for s in sender_ids
]
)
return self.email_subjects.messages_from_person % {
"person": descriptor_from_member_events(member_events.values()),
"app": self.app_name,
}
else:
# Stuff's happened in multiple different rooms
# ...but we still refer to the 'reason' room which triggered the mail
if reason["room_name"] is not None:
return self.email_subjects.messages_in_room_and_others % {
"room": reason["room_name"],
"app": self.app_name,
}
else:
# If the reason room doesn't have a name, say who the messages
# are from explicitly to avoid, "messages in the Bob room"
room_id = reason["room_id"]
sender_ids = list(
{
notif_events[n["event_id"]].sender
for n in notifs_by_room[room_id]
}
)
member_events = await self.store.get_events(
[room_state_ids[room_id][("m.room.member", s)] for s in sender_ids]
)
return self.email_subjects.messages_from_person_and_others % {
"person": descriptor_from_member_events(member_events.values()),
"app": self.app_name,
}
def make_room_link(self, room_id: str) -> str:
if self.hs.config.email_riot_base_url:
base_url = "%s/#/room" % (self.hs.config.email_riot_base_url)
elif self.app_name == "Vector":
# need /beta for Universal Links to work on iOS
base_url = "https://vector.im/beta/#/room"
else:
base_url = "https://matrix.to/#"
return "%s/%s" % (base_url, room_id)
def make_notif_link(self, notif: Dict[str, str]) -> str:
if self.hs.config.email_riot_base_url:
return "%s/#/room/%s/%s" % (
self.hs.config.email_riot_base_url,
notif["room_id"],
notif["event_id"],
)
elif self.app_name == "Vector":
# need /beta for Universal Links to work on iOS
return "https://vector.im/beta/#/room/%s/%s" % (
notif["room_id"],
notif["event_id"],
)
else:
return "https://matrix.to/#/%s/%s" % (notif["room_id"], notif["event_id"])
def make_unsubscribe_link(
self, user_id: str, app_id: str, email_address: str
) -> str:
params = {
"access_token": self.macaroon_gen.generate_delete_pusher_token(user_id),
"app_id": app_id,
"pushkey": email_address,
}
# XXX: make r0 once API is stable
return "%s_matrix/client/unstable/pushers/remove?%s" % (
self.hs.config.public_baseurl,
urllib.parse.urlencode(params),
)
def safe_markup(raw_html: str) -> jinja2.Markup:
"""
Sanitise a raw HTML string to a set of allowed tags and attributes, and linkify any bare URLs.
Args
raw_html: Unsafe HTML.
Returns:
A Markup object ready to safely use in a Jinja template.
"""
return jinja2.Markup(
bleach.linkify(
bleach.clean(
raw_html,
tags=ALLOWED_TAGS,
attributes=ALLOWED_ATTRS,
# bleach master has this, but it isn't released yet
# protocols=ALLOWED_SCHEMES,
strip=True,
)
)
)
def safe_text(raw_text: str) -> jinja2.Markup:
"""
Sanitise text (escape any HTML tags), and then linkify any bare URLs.
Args
raw_text: Unsafe text which might include HTML markup.
Returns:
A Markup object ready to safely use in a Jinja template.
"""
return jinja2.Markup(
bleach.linkify(bleach.clean(raw_text, tags=[], attributes={}, strip=False))
)
def deduped_ordered_list(it: Iterable[T]) -> List[T]:
seen = set()
ret = []
for item in it:
if item not in seen:
seen.add(item)
ret.append(item)
return ret
def string_ordinal_total(s: str) -> int:
tot = 0
for c in s:
tot += ord(c)
return tot
| xss | {
"code": [
" HTML) then linkify it."
],
"line_no": [
688
]
} | {
"code": [
" \"\"\"",
" Sanitise a raw HTML string to a set of allowed tags and attributes, and linkify any bare URLs.",
" Args",
" raw_html: Unsafe HTML.",
" A Markup object ready to safely use in a Jinja template.",
" \"\"\"",
" Sanitise text (escape any HTML tags), and then linkify any bare URLs.",
" Args",
" raw_text: Unsafe text which might include HTML markup.",
" Returns:",
" A Markup object ready to safely use in a Jinja template."
],
"line_no": [
671,
672,
674,
675,
678,
679,
696,
698,
699,
701,
702
]
} |
import email.mime.multipart
import email.utils
import logging
import urllib.parse
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from typing import .TYPE_CHECKING, Any, Dict, Iterable, List, Optional, TypeVar
import bleach
import jinja2
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import StoreError
from synapse.config.emailconfig import EmailSubjectConfig
from synapse.events import EventBase
from synapse.logging.context import make_deferred_yieldable
from synapse.push.presentable_names import (
calculate_room_name,
descriptor_from_member_events,
name_from_member_event,
)
from synapse.types import StateMap, UserID
from synapse.util.async_helpers import concurrently_execute
from synapse.visibility import filter_events_for_client
if TYPE_CHECKING:
from synapse.app.homeserver import HomeServer
VAR_0 = logging.getLogger(__name__)
VAR_1 = TypeVar("T")
VAR_2 = 1
VAR_3 = 1
VAR_4 = [
"font", # custom to matrix for IRC-style font coloring
"del", # for markdown
"h3",
"h4",
"h5",
"h6",
"blockquote",
"p",
"a",
"ul",
"ol",
"nl",
"li",
"b",
"i",
"u",
"strong",
"em",
"strike",
"code",
"hr",
"br",
"div",
"table",
"thead",
"caption",
"tbody",
"tr",
"th",
"td",
"pre",
]
VAR_5 = {
"font": ["color"], # custom to matrix
"a": ["href", "name", "target"], # remote target: custom to matrix
"img": ["src"],
}
class CLASS_0:
def __init__(
self,
VAR_10: "HomeServer",
VAR_11: str,
VAR_12: jinja2.Template,
VAR_13: jinja2.Template,
):
self.hs = VAR_10
self.template_html = VAR_12
self.template_text = VAR_13
self.sendmail = self.hs.get_sendmail()
self.store = self.hs.get_datastore()
self.macaroon_gen = self.hs.get_macaroon_generator()
self.state_handler = self.hs.get_state_handler()
self.storage = VAR_10.get_storage()
self.app_name = VAR_11
self.email_subjects = VAR_10.config.email_subjects # type: EmailSubjectConfig
VAR_0.info("Created CLASS_0 for VAR_11 %s" % VAR_11)
async def FUNC_4(
self, VAR_14: str, VAR_15: str, VAR_16: str, VAR_17: str
) -> None:
VAR_36 = {"token": VAR_15, "client_secret": VAR_16, "sid": VAR_17}
VAR_37 = (
self.hs.config.public_baseurl
+ "_synapse/client/password_reset/email/submit_token?%s"
% urllib.parse.urlencode(VAR_36)
)
VAR_38 = {"link": VAR_37}
await self.send_email(
VAR_14,
self.email_subjects.password_reset
% {"server_name": self.hs.config.server_name},
VAR_38,
)
async def FUNC_5(
self, VAR_14: str, VAR_15: str, VAR_16: str, VAR_17: str
) -> None:
VAR_36 = {"token": VAR_15, "client_secret": VAR_16, "sid": VAR_17}
VAR_37 = (
self.hs.config.public_baseurl
+ "_matrix/client/unstable/registration/email/submit_token?%s"
% urllib.parse.urlencode(VAR_36)
)
VAR_38 = {"link": VAR_37}
await self.send_email(
VAR_14,
self.email_subjects.email_validation
% {"server_name": self.hs.config.server_name},
VAR_38,
)
async def FUNC_6(
self, VAR_14: str, VAR_15: str, VAR_16: str, VAR_17: str
) -> None:
VAR_36 = {"token": VAR_15, "client_secret": VAR_16, "sid": VAR_17}
VAR_37 = (
self.hs.config.public_baseurl
+ "_matrix/client/unstable/add_threepid/email/submit_token?%s"
% urllib.parse.urlencode(VAR_36)
)
VAR_38 = {"link": VAR_37}
await self.send_email(
VAR_14,
self.email_subjects.email_validation
% {"server_name": self.hs.config.server_name},
VAR_38,
)
async def FUNC_7(
self,
VAR_18: str,
VAR_19: str,
VAR_14: str,
VAR_20: Iterable[Dict[str, Any]],
VAR_21: Dict[str, Any],
) -> None:
VAR_39 = FUNC_2([pa["room_id"] for pa in VAR_20])
VAR_26 = await self.store.get_events(
[pa["event_id"] for pa in VAR_20]
)
VAR_32 = {} # type: Dict[str, List[Dict[str, Any]]]
for pa in VAR_20:
VAR_32.setdefault(pa["room_id"], []).append(pa)
VAR_40 = {}
try:
VAR_64 = await self.store.get_profile_displayname(
UserID.from_string(VAR_19).localpart
)
if VAR_64 is None:
VAR_64 = VAR_19
except StoreError:
VAR_64 = VAR_19
async def FUNC_18(VAR_24):
VAR_65 = await self.store.get_current_state_ids(VAR_24)
VAR_40[VAR_24] = VAR_65
await concurrently_execute(FUNC_18, VAR_39, 3)
VAR_39.sort(key=lambda r: -(VAR_32[r][-1]["received_ts"] or 0))
VAR_41 = []
for r in VAR_39:
VAR_66 = await self.get_room_vars(
r, VAR_19, VAR_32[r], VAR_26, VAR_40[r]
)
VAR_41.append(VAR_66)
VAR_21["room_name"] = await calculate_room_name(
self.store,
VAR_40[VAR_21["room_id"]],
VAR_19,
fallback_to_members=True,
)
VAR_42 = await self.make_summary_text(
VAR_32, VAR_40, VAR_26, VAR_19, VAR_21
)
VAR_38 = {
"user_display_name": VAR_64,
"unsubscribe_link": self.make_unsubscribe_link(
VAR_19, VAR_18, VAR_14
),
"summary_text": VAR_42,
"rooms": VAR_41,
"reason": VAR_21,
}
await self.send_email(VAR_14, VAR_42, VAR_38)
async def FUNC_8(
self, VAR_14: str, VAR_22: str, VAR_23: Dict[str, Any]
) -> None:
try:
VAR_67 = self.hs.config.email_notif_from % {"app": self.app_name}
except TypeError:
VAR_67 = self.hs.config.email_notif_from
VAR_43 = email.utils.parseaddr(VAR_67)[1]
VAR_44 = email.utils.parseaddr(VAR_14)[1]
if VAR_44 == "":
raise RuntimeError("Invalid 'to' address")
VAR_38 = {
"app_name": self.app_name,
"server_name": self.hs.config.server.server_name,
}
VAR_38.update(VAR_23)
VAR_45 = self.template_html.render(**VAR_38)
VAR_46 = MIMEText(VAR_45, "html", "utf8")
VAR_47 = self.template_text.render(**VAR_38)
VAR_48 = MIMEText(VAR_47, "plain", "utf8")
VAR_49 = MIMEMultipart("alternative")
VAR_49["Subject"] = VAR_22
VAR_49["From"] = VAR_67
VAR_49["To"] = VAR_14
VAR_49["Date"] = email.utils.formatdate()
VAR_49["Message-ID"] = email.utils.make_msgid()
VAR_49.attach(VAR_48)
VAR_49.attach(VAR_46)
VAR_0.info("Sending email to %s" % VAR_14)
await make_deferred_yieldable(
self.sendmail(
self.hs.config.email_smtp_host,
VAR_43,
VAR_44,
VAR_49.as_string().encode("utf8"),
reactor=self.hs.get_reactor(),
port=self.hs.config.email_smtp_port,
requireAuthentication=self.hs.config.email_smtp_user is not None,
username=self.hs.config.email_smtp_user,
password=self.hs.config.email_smtp_pass,
requireTransportSecurity=self.hs.config.require_transport_security,
)
)
async def FUNC_9(
self,
VAR_24: str,
VAR_19: str,
VAR_25: Iterable[Dict[str, Any]],
VAR_26: Dict[str, EventBase],
VAR_27: StateMap[str],
) -> Dict[str, Any]:
VAR_50 = False
for n in VAR_25:
VAR_68 = VAR_26[n["event_id"]]
if VAR_68.type == EventTypes.Member and VAR_68.state_key == VAR_19:
if VAR_68.content.get("membership") == Membership.INVITE:
VAR_50 = True
break
VAR_51 = await calculate_room_name(self.store, VAR_27, VAR_19)
VAR_52 = {
"title": VAR_51,
"hash": FUNC_3(VAR_24), # See sender avatar hash
"notifs": [],
"invite": VAR_50,
"link": self.make_room_link(VAR_24),
} # type: Dict[str, Any]
if not VAR_50:
for n in VAR_25:
VAR_71 = await self.get_notif_vars(
n, VAR_19, VAR_26[n["event_id"]], VAR_27
)
VAR_72 = False
if VAR_52["notifs"] and "messages" in VAR_52["notifs"][-1]:
VAR_77 = VAR_52["notifs"][-1]["messages"]
for message in VAR_71["messages"]:
VAR_81 = list(
filter(lambda VAR_81: pm["id"] == message["id"], VAR_77)
)
if VAR_81:
if not message["is_historical"]:
VAR_81[0]["is_historical"] = False
VAR_72 = True
elif VAR_72:
VAR_77.append(message)
if not VAR_72:
VAR_52["notifs"].append(VAR_71)
return VAR_52
async def FUNC_10(
self,
VAR_28: Dict[str, Any],
VAR_19: str,
VAR_29: EventBase,
VAR_27: StateMap[str],
) -> Dict[str, Any]:
VAR_53 = await self.store.get_events_around(
VAR_28["room_id"],
VAR_28["event_id"],
before_limit=VAR_2,
after_limit=VAR_3,
)
VAR_34 = {
"link": self.make_notif_link(VAR_28),
"ts": VAR_28["received_ts"],
"messages": [],
}
VAR_54 = await filter_events_for_client(
self.storage, VAR_19, VAR_53["events_before"]
)
VAR_54.append(VAR_29)
for VAR_30 in VAR_54:
VAR_31 = await self.get_message_vars(VAR_28, VAR_30, VAR_27)
if VAR_31 is not None:
VAR_34["messages"].append(VAR_31)
return VAR_34
async def FUNC_11(
self, VAR_28: Dict[str, Any], VAR_30: EventBase, VAR_27: StateMap[str]
) -> Optional[Dict[str, Any]]:
if VAR_30.type != EventTypes.Message and VAR_30.type != EventTypes.Encrypted:
return None
VAR_55 = VAR_27[("m.room.member", VAR_30.sender)]
VAR_56 = await self.store.get_event(VAR_55)
VAR_57 = name_from_member_event(VAR_56)
VAR_58 = VAR_56.content.get("avatar_url")
VAR_59 = FUNC_3(VAR_30.sender)
VAR_34 = {
"event_type": VAR_30.type,
"is_historical": VAR_30.event_id != VAR_28["event_id"],
"id": VAR_30.event_id,
"ts": VAR_30.origin_server_ts,
"sender_name": VAR_57,
"sender_avatar_url": VAR_58,
"sender_hash": VAR_59,
}
if VAR_30.type == EventTypes.Encrypted:
return VAR_34
VAR_60 = VAR_30.content.get("msgtype")
VAR_34["msgtype"] = VAR_60
if VAR_60 == "m.text":
self.add_text_message_vars(VAR_34, VAR_30)
elif VAR_60 == "m.image":
self.add_image_message_vars(VAR_34, VAR_30)
if "body" in VAR_30.content:
VAR_34["body_text_plain"] = VAR_30.content["body"]
return VAR_34
def FUNC_12(
self, VAR_31: Dict[str, Any], VAR_30: EventBase
) -> None:
VAR_61 = VAR_30.content.get("format")
VAR_31["format"] = VAR_61
VAR_62 = VAR_30.content.get("formatted_body")
VAR_63 = VAR_30.content.get("body")
if VAR_61 == "org.matrix.custom.html" and VAR_62:
VAR_31["body_text_html"] = FUNC_0(VAR_62)
elif VAR_63:
VAR_31["body_text_html"] = FUNC_1(VAR_63)
def FUNC_13(
self, VAR_31: Dict[str, Any], VAR_30: EventBase
) -> None:
if "url" in VAR_30.content:
VAR_31["image_url"] = VAR_30.content["url"]
async def FUNC_14(
self,
VAR_32: Dict[str, List[Dict[str, Any]]],
VAR_27: Dict[str, StateMap[str]],
VAR_26: Dict[str, EventBase],
VAR_19: str,
VAR_21: Dict[str, Any],
):
if len(VAR_32) == 1:
VAR_24 = list(VAR_32.keys())[0]
VAR_51 = await calculate_room_name(
self.store, VAR_27[VAR_24], VAR_19, fallback_to_members=False
)
VAR_69 = None
for n in VAR_32[VAR_24]:
VAR_68 = VAR_26[n["event_id"]]
if VAR_68.type == EventTypes.Member and VAR_68.state_key == VAR_19:
if VAR_68.content.get("membership") == Membership.INVITE:
VAR_69 = VAR_68
break
if VAR_69:
VAR_73 = VAR_27[VAR_24].get(
("m.room.member", VAR_69.sender)
)
VAR_74 = VAR_69.sender
if VAR_73:
VAR_78 = await self.store.get_event(
VAR_73, allow_none=True
)
if VAR_78:
VAR_74 = name_from_member_event(VAR_78)
if VAR_51 is None:
return self.email_subjects.invite_from_person % {
"person": VAR_74,
"app": self.app_name,
}
else:
return self.email_subjects.invite_from_person_to_room % {
"person": VAR_74,
"room": VAR_51,
"app": self.app_name,
}
VAR_57 = None
if len(VAR_32[VAR_24]) == 1:
VAR_30 = VAR_26[VAR_32[VAR_24][0]["event_id"]]
if ("m.room.member", VAR_30.sender) in VAR_27[VAR_24]:
VAR_79 = VAR_27[VAR_24][
("m.room.member", VAR_30.sender)
]
VAR_80 = await self.store.get_event(VAR_79)
VAR_57 = name_from_member_event(VAR_80)
if VAR_57 is not None and VAR_51 is not None:
return self.email_subjects.message_from_person_in_room % {
"person": VAR_57,
"room": VAR_51,
"app": self.app_name,
}
elif VAR_57 is not None:
return self.email_subjects.message_from_person % {
"person": VAR_57,
"app": self.app_name,
}
else:
if VAR_51 is not None:
return self.email_subjects.messages_in_room % {
"room": VAR_51,
"app": self.app_name,
}
else:
VAR_75 = list(
{
VAR_26[n["event_id"]].sender
for n in VAR_32[VAR_24]
}
)
VAR_76 = await self.store.get_events(
[
VAR_27[VAR_24][("m.room.member", VAR_9)]
for VAR_9 in VAR_75
]
)
return self.email_subjects.messages_from_person % {
"person": descriptor_from_member_events(VAR_76.values()),
"app": self.app_name,
}
else:
if VAR_21["room_name"] is not None:
return self.email_subjects.messages_in_room_and_others % {
"room": VAR_21["room_name"],
"app": self.app_name,
}
else:
VAR_24 = VAR_21["room_id"]
VAR_75 = list(
{
VAR_26[n["event_id"]].sender
for n in VAR_32[VAR_24]
}
)
VAR_76 = await self.store.get_events(
[VAR_27[VAR_24][("m.room.member", VAR_9)] for VAR_9 in VAR_75]
)
return self.email_subjects.messages_from_person_and_others % {
"person": descriptor_from_member_events(VAR_76.values()),
"app": self.app_name,
}
def FUNC_15(self, VAR_24: str) -> str:
if self.hs.config.email_riot_base_url:
VAR_70 = "%VAR_9/#/room" % (self.hs.config.email_riot_base_url)
elif self.app_name == "Vector":
VAR_70 = "https://vector.im/beta/#/room"
else:
VAR_70 = "https://matrix.to/#"
return "%VAR_9/%s" % (VAR_70, VAR_24)
def FUNC_16(self, VAR_28: Dict[str, str]) -> str:
if self.hs.config.email_riot_base_url:
return "%VAR_9/#/room/%VAR_9/%s" % (
self.hs.config.email_riot_base_url,
VAR_28["room_id"],
VAR_28["event_id"],
)
elif self.app_name == "Vector":
return "https://vector.im/beta/#/room/%VAR_9/%s" % (
VAR_28["room_id"],
VAR_28["event_id"],
)
else:
return "https://matrix.to/#/%VAR_9/%s" % (VAR_28["room_id"], VAR_28["event_id"])
def FUNC_17(
self, VAR_19: str, VAR_18: str, VAR_14: str
) -> str:
VAR_36 = {
"access_token": self.macaroon_gen.generate_delete_pusher_token(VAR_19),
"app_id": VAR_18,
"pushkey": VAR_14,
}
return "%s_matrix/client/unstable/pushers/remove?%s" % (
self.hs.config.public_baseurl,
urllib.parse.urlencode(VAR_36),
)
def FUNC_0(VAR_6: str) -> jinja2.Markup:
return jinja2.Markup(
bleach.linkify(
bleach.clean(
VAR_6,
tags=VAR_4,
attributes=VAR_5,
strip=True,
)
)
)
def FUNC_1(VAR_7: str) -> jinja2.Markup:
return jinja2.Markup(
bleach.linkify(bleach.clean(VAR_7, tags=[], attributes={}, strip=False))
)
def FUNC_2(VAR_8: Iterable[VAR_1]) -> List[VAR_1]:
VAR_33 = set()
VAR_34 = []
for item in VAR_8:
if item not in VAR_33:
seen.add(item)
VAR_34.append(item)
return VAR_34
def FUNC_3(VAR_9: str) -> int:
VAR_35 = 0
for c in VAR_9:
VAR_35 += ord(c)
return VAR_35
|
# -- module imports ---------------------------------------------------------
import email.mime.multipart
import email.utils
import logging
import urllib.parse
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

# Fixed: "from typing import .TYPE_CHECKING" was a syntax error (stray
# leading dot); the dot has been removed.
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, TypeVar

import bleach
import jinja2

from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import StoreError
from synapse.config.emailconfig import EmailSubjectConfig
from synapse.events import EventBase
from synapse.logging.context import make_deferred_yieldable
from synapse.push.presentable_names import (
    calculate_room_name,
    descriptor_from_member_events,
    name_from_member_event,
)
from synapse.types import StateMap, UserID
from synapse.util.async_helpers import concurrently_execute
from synapse.visibility import filter_events_for_client

if TYPE_CHECKING:
    from synapse.app.homeserver import HomeServer

VAR_0 = logging.getLogger(__name__)

# Generic type variable used by the helper functions below.
VAR_1 = TypeVar("T")

# Number of context events fetched before / after each notified event
# (used as before_limit / after_limit for store.get_events_around).
VAR_2 = 1
VAR_3 = 1

# HTML tags allowed through the bleach sanitiser (see FUNC_0).
VAR_4 = [
    "font",  # custom to matrix for IRC-style font coloring
    "del",  # for markdown
    "h3",
    "h4",
    "h5",
    "h6",
    "blockquote",
    "p",
    "a",
    "ul",
    "ol",
    "nl",
    "li",
    "b",
    "i",
    "u",
    "strong",
    "em",
    "strike",
    "code",
    "hr",
    "br",
    "div",
    "table",
    "thead",
    "caption",
    "tbody",
    "tr",
    "th",
    "td",
    "pre",
]
# Attributes allowed per tag by the sanitiser.
VAR_5 = {
    "font": ["color"],  # custom to matrix
    "a": ["href", "name", "target"],  # remote target: custom to matrix
    "img": ["src"],
}
class CLASS_0:
    """Composes and sends notification / validation emails for a homeserver."""

    def __init__(
        self,
        VAR_10: "HomeServer",
        VAR_11: str,
        VAR_12: jinja2.Template,
        VAR_13: jinja2.Template,
    ):
        # VAR_10: the homeserver; VAR_11: app name used in subjects/links;
        # VAR_12 / VAR_13: HTML and plain-text body templates.
        self.hs = VAR_10
        self.template_html = VAR_12
        self.template_text = VAR_13
        self.sendmail = self.hs.get_sendmail()
        self.store = self.hs.get_datastore()
        self.macaroon_gen = self.hs.get_macaroon_generator()
        self.state_handler = self.hs.get_state_handler()
        self.storage = VAR_10.get_storage()
        self.app_name = VAR_11
        self.email_subjects = VAR_10.config.email_subjects  # type: EmailSubjectConfig
        VAR_0.info("Created CLASS_0 for VAR_11 %s" % VAR_11)
async def FUNC_4(
    self, VAR_14: str, VAR_15: str, VAR_16: str, VAR_17: str
) -> None:
    """Send a password-reset validation email.

    VAR_14: destination address; VAR_15 / VAR_16 / VAR_17 are the token,
    client secret and session id embedded in the submit-token link.
    """
    VAR_36 = {"token": VAR_15, "client_secret": VAR_16, "sid": VAR_17}
    VAR_37 = (
        self.hs.config.public_baseurl
        + "_synapse/client/password_reset/email/submit_token?%s"
        % urllib.parse.urlencode(VAR_36)
    )
    VAR_38 = {"link": VAR_37}
    await self.send_email(
        VAR_14,
        self.email_subjects.password_reset
        % {"server_name": self.hs.config.server_name},
        VAR_38,
    )
async def FUNC_5(
    self, VAR_14: str, VAR_15: str, VAR_16: str, VAR_17: str
) -> None:
    """Send a registration email-validation email (same shape as FUNC_4,
    but pointing at the registration submit-token endpoint)."""
    VAR_36 = {"token": VAR_15, "client_secret": VAR_16, "sid": VAR_17}
    VAR_37 = (
        self.hs.config.public_baseurl
        + "_matrix/client/unstable/registration/email/submit_token?%s"
        % urllib.parse.urlencode(VAR_36)
    )
    VAR_38 = {"link": VAR_37}
    await self.send_email(
        VAR_14,
        self.email_subjects.email_validation
        % {"server_name": self.hs.config.server_name},
        VAR_38,
    )
async def FUNC_6(
    self, VAR_14: str, VAR_15: str, VAR_16: str, VAR_17: str
) -> None:
    """Send an add-threepid email-validation email (same shape as FUNC_5,
    pointing at the add_threepid submit-token endpoint)."""
    VAR_36 = {"token": VAR_15, "client_secret": VAR_16, "sid": VAR_17}
    VAR_37 = (
        self.hs.config.public_baseurl
        + "_matrix/client/unstable/add_threepid/email/submit_token?%s"
        % urllib.parse.urlencode(VAR_36)
    )
    VAR_38 = {"link": VAR_37}
    await self.send_email(
        VAR_14,
        self.email_subjects.email_validation
        % {"server_name": self.hs.config.server_name},
        VAR_38,
    )
async def FUNC_7(
    self,
    VAR_18: str,
    VAR_19: str,
    VAR_14: str,
    VAR_20: Iterable[Dict[str, Any]],
    VAR_21: Dict[str, Any],
) -> None:
    """Compose and send a notification digest email.

    VAR_18: pusher app id; VAR_19: the notified user's id; VAR_14: the
    destination email address; VAR_20: push-action dicts (each with
    "room_id", "event_id", "received_ts"); VAR_21: the "reason" dict
    (must contain "room_id"; "room_name" is filled in here).

    NOTE(review): this calls self.get_room_vars / make_summary_text /
    make_unsubscribe_link / send_email, while the corresponding defs in
    this mangled copy are FUNC_9 / FUNC_14 / FUNC_17 / FUNC_8 -- the
    renaming appears de-synchronised; confirm against upstream.
    """
    # Rooms with notifications, deduplicated in first-seen order.
    VAR_39 = FUNC_2([pa["room_id"] for pa in VAR_20])
    VAR_26 = await self.store.get_events(
        [pa["event_id"] for pa in VAR_20]
    )
    # Group the push actions by room.
    VAR_32 = {}  # type: Dict[str, List[Dict[str, Any]]]
    for pa in VAR_20:
        VAR_32.setdefault(pa["room_id"], []).append(pa)
    VAR_40 = {}
    try:
        VAR_64 = await self.store.get_profile_displayname(
            UserID.from_string(VAR_19).localpart
        )
        if VAR_64 is None:
            VAR_64 = VAR_19
    except StoreError:
        # Fall back to the raw user id if the profile lookup fails.
        VAR_64 = VAR_19

    async def FUNC_18(VAR_24):
        # Fetch one room's current state ids into VAR_40.
        VAR_65 = await self.store.get_current_state_ids(VAR_24)
        VAR_40[VAR_24] = VAR_65

    await concurrently_execute(FUNC_18, VAR_39, 3)
    # Most recently notified room first.
    VAR_39.sort(key=lambda r: -(VAR_32[r][-1]["received_ts"] or 0))
    VAR_41 = []
    for r in VAR_39:
        VAR_66 = await self.get_room_vars(
            r, VAR_19, VAR_32[r], VAR_26, VAR_40[r]
        )
        VAR_41.append(VAR_66)
    VAR_21["room_name"] = await calculate_room_name(
        self.store,
        VAR_40[VAR_21["room_id"]],
        VAR_19,
        fallback_to_members=True,
    )
    VAR_42 = await self.make_summary_text(
        VAR_32, VAR_40, VAR_26, VAR_19, VAR_21
    )
    VAR_38 = {
        "user_display_name": VAR_64,
        "unsubscribe_link": self.make_unsubscribe_link(
            VAR_19, VAR_18, VAR_14
        ),
        "summary_text": VAR_42,
        "rooms": VAR_41,
        "reason": VAR_21,
    }
    await self.send_email(VAR_14, VAR_42, VAR_38)
async def FUNC_8(
    self, VAR_14: str, VAR_22: str, VAR_23: Dict[str, Any]
) -> None:
    """Render the templates and send a multipart (text + HTML) email.

    VAR_14: destination address; VAR_22: subject line; VAR_23: extra
    template variables merged over app_name / server_name.

    Raises RuntimeError if VAR_14 does not parse as an address.
    """
    try:
        VAR_67 = self.hs.config.email_notif_from % {"app": self.app_name}
    except TypeError:
        # from-address has no %(app)s placeholder; use it verbatim.
        VAR_67 = self.hs.config.email_notif_from
    VAR_43 = email.utils.parseaddr(VAR_67)[1]
    VAR_44 = email.utils.parseaddr(VAR_14)[1]
    if VAR_44 == "":
        raise RuntimeError("Invalid 'to' address")
    VAR_38 = {
        "app_name": self.app_name,
        # NOTE(review): other methods here read self.hs.config.server_name
        # directly; this one reads config.server.server_name -- confirm
        # which config layout this code targets.
        "server_name": self.hs.config.server.server_name,
    }
    VAR_38.update(VAR_23)
    VAR_45 = self.template_html.render(**VAR_38)
    VAR_46 = MIMEText(VAR_45, "html", "utf8")
    VAR_47 = self.template_text.render(**VAR_38)
    VAR_48 = MIMEText(VAR_47, "plain", "utf8")
    VAR_49 = MIMEMultipart("alternative")
    VAR_49["Subject"] = VAR_22
    VAR_49["From"] = VAR_67
    VAR_49["To"] = VAR_14
    VAR_49["Date"] = email.utils.formatdate()
    VAR_49["Message-ID"] = email.utils.make_msgid()
    # Plain part first so HTML-capable clients prefer the HTML part.
    VAR_49.attach(VAR_48)
    VAR_49.attach(VAR_46)
    VAR_0.info("Sending email to %s" % VAR_14)
    await make_deferred_yieldable(
        self.sendmail(
            self.hs.config.email_smtp_host,
            VAR_43,
            VAR_44,
            VAR_49.as_string().encode("utf8"),
            reactor=self.hs.get_reactor(),
            port=self.hs.config.email_smtp_port,
            requireAuthentication=self.hs.config.email_smtp_user is not None,
            username=self.hs.config.email_smtp_user,
            password=self.hs.config.email_smtp_pass,
            requireTransportSecurity=self.hs.config.require_transport_security,
        )
    )
async def FUNC_9(
    self,
    VAR_24: str,
    VAR_19: str,
    VAR_25: Iterable[Dict[str, Any]],
    VAR_26: Dict[str, EventBase],
    VAR_27: StateMap[str],
) -> Dict[str, Any]:
    """Build the template variables describing one room's notifications.

    VAR_24: room id; VAR_19: notified user id; VAR_25: push-action dicts
    for this room; VAR_26: event_id -> event map; VAR_27: the room's
    current state map.
    """
    # Is this room an invite the user has been notified about?
    VAR_50 = False
    for n in VAR_25:
        VAR_68 = VAR_26[n["event_id"]]
        if VAR_68.type == EventTypes.Member and VAR_68.state_key == VAR_19:
            if VAR_68.content.get("membership") == Membership.INVITE:
                VAR_50 = True
                break
    VAR_51 = await calculate_room_name(self.store, VAR_27, VAR_19)
    VAR_52 = {
        "title": VAR_51,
        "hash": FUNC_3(VAR_24),  # See sender avatar hash
        "notifs": [],
        "invite": VAR_50,
        "link": self.make_room_link(VAR_24),
    }  # type: Dict[str, Any]
    if not VAR_50:
        for n in VAR_25:
            VAR_71 = await self.get_notif_vars(
                n, VAR_19, VAR_26[n["event_id"]], VAR_27
            )
            # Merge overlapping context messages into the previous notif.
            VAR_72 = False
            if VAR_52["notifs"] and "messages" in VAR_52["notifs"][-1]:
                VAR_77 = VAR_52["notifs"][-1]["messages"]
                for message in VAR_71["messages"]:
                    # Fixed: the lambda body referenced an undefined free
                    # name ``pm`` (NameError when the filter ran); it must
                    # compare its own parameter against this message's id.
                    VAR_81 = list(
                        filter(lambda VAR_81: VAR_81["id"] == message["id"], VAR_77)
                    )
                    if VAR_81:
                        if not message["is_historical"]:
                            VAR_81[0]["is_historical"] = False
                        VAR_72 = True
                    elif VAR_72:
                        VAR_77.append(message)
            if not VAR_72:
                VAR_52["notifs"].append(VAR_71)
    return VAR_52
async def FUNC_10(
    self,
    VAR_28: Dict[str, Any],
    VAR_19: str,
    VAR_29: EventBase,
    VAR_27: StateMap[str],
) -> Dict[str, Any]:
    """Build template variables for one notification plus its context.

    Fetches VAR_2 events before / VAR_3 after the notified event,
    filters them for visibility to VAR_19, and renders each into
    message vars.
    """
    VAR_53 = await self.store.get_events_around(
        VAR_28["room_id"],
        VAR_28["event_id"],
        before_limit=VAR_2,
        after_limit=VAR_3,
    )
    VAR_34 = {
        "link": self.make_notif_link(VAR_28),
        "ts": VAR_28["received_ts"],
        "messages": [],
    }
    # Only include context events this user is allowed to see.
    VAR_54 = await filter_events_for_client(
        self.storage, VAR_19, VAR_53["events_before"]
    )
    VAR_54.append(VAR_29)
    for VAR_30 in VAR_54:
        VAR_31 = await self.get_message_vars(VAR_28, VAR_30, VAR_27)
        if VAR_31 is not None:
            VAR_34["messages"].append(VAR_31)
    return VAR_34
async def FUNC_11(
    self, VAR_28: Dict[str, Any], VAR_30: EventBase, VAR_27: StateMap[str]
) -> Optional[Dict[str, Any]]:
    """Build template variables for one message event.

    Returns None for event types other than m.room.message / encrypted.
    For encrypted events only the envelope metadata is exposed (no body).
    """
    if VAR_30.type != EventTypes.Message and VAR_30.type != EventTypes.Encrypted:
        return None
    VAR_55 = VAR_27[("m.room.member", VAR_30.sender)]
    VAR_56 = await self.store.get_event(VAR_55)
    VAR_57 = name_from_member_event(VAR_56)
    VAR_58 = VAR_56.content.get("avatar_url")
    VAR_59 = FUNC_3(VAR_30.sender)
    VAR_34 = {
        "event_type": VAR_30.type,
        # Historical == a context event, not the notified event itself.
        "is_historical": VAR_30.event_id != VAR_28["event_id"],
        "id": VAR_30.event_id,
        "ts": VAR_30.origin_server_ts,
        "sender_name": VAR_57,
        "sender_avatar_url": VAR_58,
        "sender_hash": VAR_59,
    }
    if VAR_30.type == EventTypes.Encrypted:
        return VAR_34
    VAR_60 = VAR_30.content.get("msgtype")
    VAR_34["msgtype"] = VAR_60
    if VAR_60 == "m.text":
        self.add_text_message_vars(VAR_34, VAR_30)
    elif VAR_60 == "m.image":
        self.add_image_message_vars(VAR_34, VAR_30)
    if "body" in VAR_30.content:
        VAR_34["body_text_plain"] = VAR_30.content["body"]
    return VAR_34
def FUNC_12(
    self, VAR_31: Dict[str, Any], VAR_30: EventBase
) -> None:
    """Add rendered-HTML body fields for an m.text message to VAR_31."""
    VAR_61 = VAR_30.content.get("format")
    VAR_31["format"] = VAR_61
    VAR_62 = VAR_30.content.get("formatted_body")
    VAR_63 = VAR_30.content.get("body")
    if VAR_61 == "org.matrix.custom.html" and VAR_62:
        # Sanitise sender-supplied HTML before handing it to the template.
        VAR_31["body_text_html"] = FUNC_0(VAR_62)
    elif VAR_63:
        # Escape plain text, auto-linking any URLs.
        VAR_31["body_text_html"] = FUNC_1(VAR_63)
def FUNC_13(
    self, VAR_31: Dict[str, Any], VAR_30: EventBase
) -> None:
    """Copy an m.image event's "url" (if present) into the message vars."""
    content = VAR_30.content
    if "url" in content:
        VAR_31["image_url"] = content["url"]
async def FUNC_14(
    self,
    VAR_32: Dict[str, List[Dict[str, Any]]],
    VAR_27: Dict[str, StateMap[str]],
    VAR_26: Dict[str, EventBase],
    VAR_19: str,
    VAR_21: Dict[str, Any],
):
    """Pick the email subject line for a batch of notifications.

    Chooses between the configured subject templates depending on whether
    there is a single room or several, whether the notification is an
    invite, whether the room has a name, and whether a single sender can
    be identified.
    """
    if len(VAR_32) == 1:
        # Only one room has notifications.
        VAR_24 = list(VAR_32.keys())[0]
        VAR_51 = await calculate_room_name(
            self.store, VAR_27[VAR_24], VAR_19, fallback_to_members=False
        )
        # Look for an invite directed at this user.
        VAR_69 = None
        for n in VAR_32[VAR_24]:
            VAR_68 = VAR_26[n["event_id"]]
            if VAR_68.type == EventTypes.Member and VAR_68.state_key == VAR_19:
                if VAR_68.content.get("membership") == Membership.INVITE:
                    VAR_69 = VAR_68
                    break
        if VAR_69:
            VAR_73 = VAR_27[VAR_24].get(
                ("m.room.member", VAR_69.sender)
            )
            VAR_74 = VAR_69.sender
            if VAR_73:
                VAR_78 = await self.store.get_event(
                    VAR_73, allow_none=True
                )
                if VAR_78:
                    VAR_74 = name_from_member_event(VAR_78)
            if VAR_51 is None:
                return self.email_subjects.invite_from_person % {
                    "person": VAR_74,
                    "app": self.app_name,
                }
            else:
                return self.email_subjects.invite_from_person_to_room % {
                    "person": VAR_74,
                    "room": VAR_51,
                    "app": self.app_name,
                }
        VAR_57 = None
        if len(VAR_32[VAR_24]) == 1:
            # A single message: try to name its sender.
            VAR_30 = VAR_26[VAR_32[VAR_24][0]["event_id"]]
            if ("m.room.member", VAR_30.sender) in VAR_27[VAR_24]:
                VAR_79 = VAR_27[VAR_24][
                    ("m.room.member", VAR_30.sender)
                ]
                VAR_80 = await self.store.get_event(VAR_79)
                VAR_57 = name_from_member_event(VAR_80)
        if VAR_57 is not None and VAR_51 is not None:
            return self.email_subjects.message_from_person_in_room % {
                "person": VAR_57,
                "room": VAR_51,
                "app": self.app_name,
            }
        elif VAR_57 is not None:
            return self.email_subjects.message_from_person % {
                "person": VAR_57,
                "app": self.app_name,
            }
        else:
            if VAR_51 is not None:
                return self.email_subjects.messages_in_room % {
                    "room": VAR_51,
                    "app": self.app_name,
                }
            else:
                # Unnamed room, several messages: describe the senders.
                VAR_75 = list(
                    {
                        VAR_26[n["event_id"]].sender
                        for n in VAR_32[VAR_24]
                    }
                )
                VAR_76 = await self.store.get_events(
                    [
                        VAR_27[VAR_24][("m.room.member", VAR_9)]
                        for VAR_9 in VAR_75
                    ]
                )
                return self.email_subjects.messages_from_person % {
                    "person": descriptor_from_member_events(VAR_76.values()),
                    "app": self.app_name,
                }
    else:
        # Notifications span multiple rooms.
        if VAR_21["room_name"] is not None:
            return self.email_subjects.messages_in_room_and_others % {
                "room": VAR_21["room_name"],
                "app": self.app_name,
            }
        else:
            # The "reason" room has no name: describe its senders instead.
            VAR_24 = VAR_21["room_id"]
            VAR_75 = list(
                {
                    VAR_26[n["event_id"]].sender
                    for n in VAR_32[VAR_24]
                }
            )
            VAR_76 = await self.store.get_events(
                [VAR_27[VAR_24][("m.room.member", VAR_9)] for VAR_9 in VAR_75]
            )
            return self.email_subjects.messages_from_person_and_others % {
                "person": descriptor_from_member_events(VAR_76.values()),
                "app": self.app_name,
            }
def FUNC_15(self, VAR_24: str) -> str:
    """Return a web-client URL pointing at the given room id.

    NOTE(review): the "%VAR_9..." format strings contain an invalid '%V'
    conversion (looks like mechanically mangled '%s') and would raise
    ValueError if evaluated -- confirm against upstream.
    """
    if self.hs.config.email_riot_base_url:
        VAR_70 = "%VAR_9/#/room" % (self.hs.config.email_riot_base_url)
    elif self.app_name == "Vector":
        VAR_70 = "https://vector.im/beta/#/room"
    else:
        # Fall back to the public matrix.to redirect service.
        VAR_70 = "https://matrix.to/#"
    return "%VAR_9/%s" % (VAR_70, VAR_24)
def FUNC_16(self, VAR_28: Dict[str, str]) -> str:
    """Return a web-client deep link for a single notified event.

    Only the "room_id" and "event_id" keys of VAR_28 are read.

    NOTE(review): the "%VAR_9..." format strings contain an invalid '%V'
    conversion (looks like mangled '%s') and would raise ValueError if
    evaluated -- confirm against upstream.
    """
    if self.hs.config.email_riot_base_url:
        # Prefer the configured Riot/Element base URL when present.
        return "%VAR_9/#/room/%VAR_9/%s" % (
            self.hs.config.email_riot_base_url,
            VAR_28["room_id"],
            VAR_28["event_id"],
        )
    elif self.app_name == "Vector":
        return "https://vector.im/beta/#/room/%VAR_9/%s" % (
            VAR_28["room_id"],
            VAR_28["event_id"],
        )
    else:
        # Fall back to the public matrix.to redirect service.
        return "https://matrix.to/#/%VAR_9/%s" % (VAR_28["room_id"], VAR_28["event_id"])
def FUNC_17(
    self, VAR_19: str, VAR_18: str, VAR_14: str
) -> str:
    """Build the one-click unsubscribe URL for this user's email pusher."""
    delete_token = self.macaroon_gen.generate_delete_pusher_token(VAR_19)
    query = urllib.parse.urlencode(
        {
            "access_token": delete_token,
            "app_id": VAR_18,
            "pushkey": VAR_14,
        }
    )
    return "%s_matrix/client/unstable/pushers/remove?%s" % (
        self.hs.config.public_baseurl,
        query,
    )
def FUNC_0(VAR_6: str) -> jinja2.Markup:
    """Sanitise untrusted HTML and mark the result safe for templates.

    bleach.clean strips everything outside the tag/attribute allow-lists
    (VAR_4 / VAR_5 above), then bleach.linkify turns bare URLs into
    anchors.
    """
    return jinja2.Markup(
        bleach.linkify(
            bleach.clean(
                VAR_6,
                tags=VAR_4,
                attributes=VAR_5,
                strip=True,
            )
        )
    )
def FUNC_1(VAR_7: str) -> jinja2.Markup:
    """Escape plain text for safe template use, auto-linking any URLs.

    With empty tag/attribute allow-lists bleach.clean escapes all markup;
    strip=False keeps the escaped text rather than dropping it.
    """
    return jinja2.Markup(
        bleach.linkify(bleach.clean(VAR_7, tags=[], attributes={}, strip=False))
    )
def FUNC_2(VAR_8: Iterable[VAR_1]) -> List[VAR_1]:
    """Return the items of VAR_8 deduplicated, keeping first-seen order.

    Fixed: the original called ``seen.add(item)`` but no name ``seen``
    exists (NameError on the first item); the tracking set is ``VAR_33``.
    """
    VAR_33 = set()
    VAR_34 = []
    for item in VAR_8:
        if item not in VAR_33:
            VAR_33.add(item)
            VAR_34.append(item)
    return VAR_34
def FUNC_3(VAR_9: str) -> int:
    """Return the sum of the Unicode code points of VAR_9.

    Used above as a cheap deterministic hash for room / sender "hash"
    template variables.
    """
    total = 0
    for ch in VAR_9:
        total += ord(ch)
    return total
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
23,
26,
40,
43,
45,
47,
48,
51,
52,
56,
88,
91,
92,
95,
96,
97,
98,
110,
118,
120,
125,
141,
143,
150,
155,
171,
173,
180,
185,
188,
190,
193,
202,
204,
211,
222,
226,
230,
231,
232,
234,
243,
247,
248,
249,
251,
252,
254,
256,
262,
269,
273,
283,
285,
294,
297,
300,
305,
307,
310,
313,
322,
324,
339,
348,
356,
358,
366,
372,
373,
374,
387,
388,
390,
393,
395,
409,
415,
420,
425,
427,
433,
438,
439,
440,
442,
452,
453,
456,
458,
460,
465,
468,
470,
475,
477,
480,
485,
494,
504,
506,
507,
508,
509,
513,
514,
522,
534,
546,
549,
557,
570,
571,
578,
579,
586,
593,
599,
600,
601,
608,
609,
611,
618,
622,
627,
632,
637,
646,
653,
662,
663,
668,
669,
677,
678,
683,
684,
693,
694,
703,
704,
710,
686,
687,
688,
689,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
194,
195,
220,
289,
489,
490,
491
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
23,
26,
40,
43,
45,
47,
48,
51,
52,
56,
88,
91,
92,
95,
96,
97,
98,
110,
118,
120,
125,
141,
143,
150,
155,
171,
173,
180,
185,
188,
190,
193,
202,
204,
211,
222,
226,
230,
231,
232,
234,
243,
247,
248,
249,
251,
252,
254,
256,
262,
269,
273,
283,
285,
294,
297,
300,
305,
307,
310,
313,
322,
324,
339,
348,
356,
358,
366,
372,
373,
374,
387,
388,
390,
393,
395,
409,
415,
420,
425,
427,
433,
438,
439,
440,
442,
452,
453,
456,
458,
460,
465,
468,
470,
475,
477,
480,
485,
494,
504,
506,
507,
508,
509,
513,
514,
522,
534,
546,
549,
557,
570,
571,
578,
579,
586,
593,
599,
600,
601,
608,
609,
611,
618,
622,
627,
632,
637,
646,
653,
662,
663,
668,
669,
673,
676,
686,
687,
692,
693,
697,
700,
707,
708,
717,
718,
724,
671,
672,
673,
674,
675,
676,
677,
678,
679,
695,
696,
697,
698,
699,
700,
701,
702,
703,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
194,
195,
220,
289,
489,
490,
491
] |
0CWE-22
| import pathlib
import httpcore
import jinja2
import pytest
import yaml
from openapi_python_client import GeneratorError
def test__get_project_for_url_or_path(mocker):
    """Happy path: document is fetched, parsed, and wrapped in a Project."""
    data_dict = mocker.MagicMock()
    _get_document = mocker.patch("openapi_python_client._get_document", return_value=data_dict)
    openapi = mocker.MagicMock()
    from_dict = mocker.patch("openapi_python_client.parser.GeneratorData.from_dict", return_value=openapi)
    _Project = mocker.patch("openapi_python_client.Project")
    url = mocker.MagicMock()
    path = mocker.MagicMock()

    from openapi_python_client import _get_project_for_url_or_path

    project = _get_project_for_url_or_path(url=url, path=path)

    _get_document.assert_called_once_with(url=url, path=path)
    from_dict.assert_called_once_with(data_dict)
    _Project.assert_called_once_with(openapi=openapi)
    assert project == _Project()
def test__get_project_for_url_or_path_generator_error(mocker):
    """A parser GeneratorError is returned as-is and no Project is built."""
    data_dict = mocker.MagicMock()
    _get_document = mocker.patch("openapi_python_client._get_document", return_value=data_dict)
    error = GeneratorError()
    from_dict = mocker.patch("openapi_python_client.parser.GeneratorData.from_dict", return_value=error)
    _Project = mocker.patch("openapi_python_client.Project")
    url = mocker.MagicMock()
    path = mocker.MagicMock()

    from openapi_python_client import _get_project_for_url_or_path

    project = _get_project_for_url_or_path(url=url, path=path)

    _get_document.assert_called_once_with(url=url, path=path)
    from_dict.assert_called_once_with(data_dict)
    _Project.assert_not_called()
    assert project == error
def test__get_project_for_url_or_path_document_error(mocker):
    """A document-fetch GeneratorError short-circuits before parsing."""
    error = GeneratorError()
    _get_document = mocker.patch("openapi_python_client._get_document", return_value=error)
    from_dict = mocker.patch("openapi_python_client.parser.GeneratorData.from_dict")
    url = mocker.MagicMock()
    path = mocker.MagicMock()

    from openapi_python_client import _get_project_for_url_or_path

    project = _get_project_for_url_or_path(url=url, path=path)

    _get_document.assert_called_once_with(url=url, path=path)
    from_dict.assert_not_called()
    assert project == error
def test_create_new_client(mocker):
    """create_new_client builds the resolved project and returns its result."""
    project = mocker.MagicMock()
    _get_project_for_url_or_path = mocker.patch(
        "openapi_python_client._get_project_for_url_or_path", return_value=project
    )
    url = mocker.MagicMock()
    path = mocker.MagicMock()

    from openapi_python_client import create_new_client

    result = create_new_client(url=url, path=path)

    _get_project_for_url_or_path.assert_called_once_with(url=url, path=path)
    project.build.assert_called_once()
    assert result == project.build.return_value
def test_create_new_client_project_error(mocker):
    """A GeneratorError from project resolution is returned in a list."""
    error = GeneratorError()
    _get_project_for_url_or_path = mocker.patch(
        "openapi_python_client._get_project_for_url_or_path", return_value=error
    )
    url = mocker.MagicMock()
    path = mocker.MagicMock()

    from openapi_python_client import create_new_client

    result = create_new_client(url=url, path=path)

    _get_project_for_url_or_path.assert_called_once_with(url=url, path=path)
    assert result == [error]
def test_update_existing_client(mocker):
    """update_existing_client updates the resolved project and returns its result."""
    project = mocker.MagicMock()
    _get_project_for_url_or_path = mocker.patch(
        "openapi_python_client._get_project_for_url_or_path", return_value=project
    )
    url = mocker.MagicMock()
    path = mocker.MagicMock()

    from openapi_python_client import update_existing_client

    result = update_existing_client(url=url, path=path)

    _get_project_for_url_or_path.assert_called_once_with(url=url, path=path)
    project.update.assert_called_once()
    assert result == project.update.return_value
def test_update_existing_client_project_error(mocker):
    """A GeneratorError from project resolution is returned in a list."""
    error = GeneratorError()
    _get_project_for_url_or_path = mocker.patch(
        "openapi_python_client._get_project_for_url_or_path", return_value=error
    )
    url = mocker.MagicMock()
    path = mocker.MagicMock()

    from openapi_python_client import update_existing_client

    result = update_existing_client(url=url, path=path)

    _get_project_for_url_or_path.assert_called_once_with(url=url, path=path)
    assert result == [error]
class TestGetJson:
    """Tests for openapi_python_client._get_document source selection."""

    def test__get_document_no_url_or_path(self, mocker):
        """Neither source given -> error, nothing is fetched or parsed."""
        get = mocker.patch("httpx.get")
        Path = mocker.patch("openapi_python_client.Path")
        loads = mocker.patch("yaml.safe_load")

        from openapi_python_client import _get_document

        result = _get_document(url=None, path=None)

        assert result == GeneratorError(header="No URL or Path provided")
        get.assert_not_called()
        Path.assert_not_called()
        loads.assert_not_called()

    def test__get_document_url_and_path(self, mocker):
        """Both sources given -> error, nothing is fetched or parsed."""
        get = mocker.patch("httpx.get")
        Path = mocker.patch("openapi_python_client.Path")
        loads = mocker.patch("yaml.safe_load")

        from openapi_python_client import _get_document

        result = _get_document(url=mocker.MagicMock(), path=mocker.MagicMock())

        assert result == GeneratorError(header="Provide URL or Path, not both.")
        get.assert_not_called()
        Path.assert_not_called()
        loads.assert_not_called()

    def test__get_document_bad_url(self, mocker):
        """A network failure surfaces as a GeneratorError."""
        get = mocker.patch("httpx.get", side_effect=httpcore.NetworkError)
        Path = mocker.patch("openapi_python_client.Path")
        loads = mocker.patch("yaml.safe_load")

        from openapi_python_client import _get_document

        url = mocker.MagicMock()
        result = _get_document(url=url, path=None)

        assert result == GeneratorError(header="Could not get OpenAPI document from provided URL")
        get.assert_called_once_with(url)
        Path.assert_not_called()
        loads.assert_not_called()

    def test__get_document_url_no_path(self, mocker):
        """URL source: the response body is handed to yaml.safe_load."""
        get = mocker.patch("httpx.get")
        Path = mocker.patch("openapi_python_client.Path")
        loads = mocker.patch("yaml.safe_load")

        from openapi_python_client import _get_document

        url = mocker.MagicMock()
        _get_document(url=url, path=None)

        get.assert_called_once_with(url)
        Path.assert_not_called()
        loads.assert_called_once_with(get().content)

    def test__get_document_path_no_url(self, mocker):
        """Path source: the file bytes are handed to yaml.safe_load."""
        get = mocker.patch("httpx.get")
        loads = mocker.patch("yaml.safe_load")

        from openapi_python_client import _get_document

        path = mocker.MagicMock()
        _get_document(url=None, path=path)

        get.assert_not_called()
        path.read_bytes.assert_called_once()
        loads.assert_called_once_with(path.read_bytes())

    def test__get_document_bad_yaml(self, mocker):
        """Invalid YAML surfaces as a GeneratorError."""
        get = mocker.patch("httpx.get")
        loads = mocker.patch("yaml.safe_load", side_effect=yaml.YAMLError)

        from openapi_python_client import _get_document

        path = mocker.MagicMock()
        result = _get_document(url=None, path=path)

        get.assert_not_called()
        path.read_bytes.assert_called_once()
        loads.assert_called_once_with(path.read_bytes())
        assert result == GeneratorError(header="Invalid YAML from provided source")
class TestProject:
def test___init__(self, mocker):
    """Project derives its names and description from the OpenAPI title."""
    openapi = mocker.MagicMock(title="My Test API")

    from openapi_python_client import Project

    project = Project(openapi=openapi)

    assert project.openapi == openapi
    assert project.project_name == "my-test-api-client"
    assert project.package_name == "my_test_api_client"
    assert project.package_description == "A client library for accessing My Test API"
def test_project_and_package_name_overrides(self, mocker):
    """Class-level overrides win over the names derived from the title.

    NOTE(review): these class-attribute overrides are never reset, so
    they persist for any later test using Project -- consider a fixture
    that restores them.
    """
    openapi = mocker.MagicMock(title="My Test API")

    from openapi_python_client import Project

    Project.project_name_override = "my-special-project-name"
    project = Project(openapi=openapi)

    assert project.project_name == "my-special-project-name"
    assert project.package_name == "my_special_project_name"

    Project.package_name_override = "my_special_package_name"
    project = Project(openapi=openapi)

    assert project.project_name == "my-special-project-name"
    assert project.package_name == "my_special_package_name"
def test_build(self, mocker):
    """build() creates the project dir and runs every generation step."""
    from openapi_python_client import Project

    project = Project(openapi=mocker.MagicMock(title="My Test API"))
    project.project_dir = mocker.MagicMock()
    project.package_dir = mocker.MagicMock()
    project._build_metadata = mocker.MagicMock()
    project._build_models = mocker.MagicMock()
    project._build_api = mocker.MagicMock()
    project._create_package = mocker.MagicMock()
    project._reformat = mocker.MagicMock()
    project._get_errors = mocker.MagicMock()

    result = project.build()

    project.project_dir.mkdir.assert_called_once()
    project._create_package.assert_called_once()
    project._build_metadata.assert_called_once()
    project._build_models.assert_called_once()
    project._build_api.assert_called_once()
    project._reformat.assert_called_once()
    project._get_errors.assert_called_once()
    assert result == project._get_errors.return_value
def test_build_file_exists(self, mocker):
    """An existing project directory turns into a GeneratorError list."""
    from openapi_python_client import Project

    project = Project(openapi=mocker.MagicMock(title="My Test API"))
    project.project_dir = mocker.MagicMock()
    project.project_dir.mkdir.side_effect = FileExistsError

    result = project.build()

    project.project_dir.mkdir.assert_called_once()
    assert result == [GeneratorError(detail="Directory already exists. Delete it or use the update command.")]
def test_update(self, mocker):
    """update() wipes the package dir and regenerates (no metadata step)."""
    from openapi_python_client import Project, shutil

    rmtree = mocker.patch.object(shutil, "rmtree")
    project = Project(openapi=mocker.MagicMock(title="My Test API"))
    project.package_dir = mocker.MagicMock()
    project._build_metadata = mocker.MagicMock()
    project._build_models = mocker.MagicMock()
    project._build_api = mocker.MagicMock()
    project._create_package = mocker.MagicMock()
    project._reformat = mocker.MagicMock()
    project._get_errors = mocker.MagicMock()

    result = project.update()

    rmtree.assert_called_once_with(project.package_dir)
    project._create_package.assert_called_once()
    project._build_models.assert_called_once()
    project._build_api.assert_called_once()
    project._reformat.assert_called_once()
    project._get_errors.assert_called_once()
    assert result == project._get_errors.return_value
def test_update_missing_dir(self, mocker):
    """update() on a missing package dir raises before generating anything."""
    from openapi_python_client import Project

    project = Project(openapi=mocker.MagicMock(title="My Test API"))
    project.package_dir = mocker.MagicMock()
    project.package_dir.is_dir.return_value = False
    project._build_models = mocker.MagicMock()

    with pytest.raises(FileNotFoundError):
        project.update()

    project.package_dir.is_dir.assert_called_once()
    project._build_models.assert_not_called()
def test__create_package(self, mocker):
    """_create_package writes __init__.py from its template plus py.typed."""
    from openapi_python_client import Project

    project = Project(openapi=mocker.MagicMock(title="My Test API"))
    project.package_dir = mocker.MagicMock()
    package_init_template = mocker.MagicMock()
    project.env = mocker.MagicMock()
    project.env.get_template.return_value = package_init_template
    package_init_path = mocker.MagicMock(autospec=pathlib.Path)
    pytyped_path = mocker.MagicMock(autospec=pathlib.Path)
    # Map the paths the code joins onto the mocks above.
    paths = {
        "__init__.py": package_init_path,
        "py.typed": pytyped_path,
    }
    project.package_dir.__truediv__.side_effect = lambda x: paths[x]

    project._create_package()

    project.package_dir.mkdir.assert_called_once()
    project.env.get_template.assert_called_once_with("package_init.pyi")
    package_init_template.render.assert_called_once_with(description=project.package_description)
    package_init_path.write_text.assert_called_once_with(package_init_template.render())
    pytyped_path.write_text.assert_called_once_with("# Marker file for PEP 561")
def test__build_metadata(self, mocker):
    """_build_metadata renders pyproject.toml, README.md and .gitignore."""
    from openapi_python_client import Project

    project = Project(openapi=mocker.MagicMock(title="My Test API"))
    project.project_dir = mocker.MagicMock()
    pyproject_path = mocker.MagicMock(autospec=pathlib.Path)
    readme_path = mocker.MagicMock(autospec=pathlib.Path)
    git_ignore_path = mocker.MagicMock(autospec=pathlib.Path)
    # Map joined paths onto the mocks above.
    paths = {
        "pyproject.toml": pyproject_path,
        "README.md": readme_path,
        ".gitignore": git_ignore_path,
    }
    project.project_dir.__truediv__.side_effect = lambda x: paths[x]
    pyproject_template = mocker.MagicMock(autospec=jinja2.Template)
    readme_template = mocker.MagicMock(autospec=jinja2.Template)
    git_ignore_template = mocker.MagicMock(autospec=jinja2.Template)
    project.env = mocker.MagicMock(autospec=jinja2.Environment)
    # Map template names onto the template mocks.
    templates = {
        "pyproject.toml": pyproject_template,
        "README.md": readme_template,
        ".gitignore": git_ignore_template,
    }
    project.env.get_template.side_effect = lambda x: templates[x]

    project._build_metadata()

    project.env.get_template.assert_has_calls(
        [mocker.call("pyproject.toml"), mocker.call("README.md"), mocker.call(".gitignore")]
    )
    pyproject_template.render.assert_called_once_with(
        project_name=project.project_name,
        package_name=project.package_name,
        version=project.version,
        description=project.package_description,
    )
    pyproject_path.write_text.assert_called_once_with(pyproject_template.render())
    readme_template.render.assert_called_once_with(
        description=project.package_description,
        project_name=project.project_name,
        package_name=project.package_name,
    )
    readme_path.write_text.assert_called_once_with(readme_template.render())
    git_ignore_template.render.assert_called_once()
    git_ignore_path.write_text.assert_called_once_with(git_ignore_template.render())
def test__build_models(self, mocker):
    """_build_models renders each model/enum module plus models __init__."""
    from openapi_python_client import GeneratorData, Project

    openapi = mocker.MagicMock(autospec=GeneratorData, title="My Test API")
    model_1 = mocker.MagicMock()
    model_2 = mocker.MagicMock()
    openapi.schemas.models = {"1": model_1, "2": model_2}
    enum_1 = mocker.MagicMock()
    enum_2 = mocker.MagicMock()
    openapi.enums = {"1": enum_1, "2": enum_2}
    project = Project(openapi=openapi)
    project.package_dir = mocker.MagicMock()
    models_init = mocker.MagicMock()
    types_py = mocker.MagicMock()
    models_dir = mocker.MagicMock()
    model_1_module_path = mocker.MagicMock()
    model_2_module_path = mocker.MagicMock()
    enum_1_module_path = mocker.MagicMock()
    enum_2_module_path = mocker.MagicMock()
    # Map every expected models/<name>.py join onto a mock path.
    module_paths = {
        "__init__.py": models_init,
        "types.py": types_py,
        f"{model_1.reference.module_name}.py": model_1_module_path,
        f"{model_2.reference.module_name}.py": model_2_module_path,
        f"{enum_1.reference.module_name}.py": enum_1_module_path,
        f"{enum_2.reference.module_name}.py": enum_2_module_path,
    }

    def models_dir_get(x):
        # Resolve models_dir / <filename> to the mock above.
        return module_paths[x]

    models_dir.__truediv__.side_effect = models_dir_get
    project.package_dir.__truediv__.return_value = models_dir
    model_render_1 = mocker.MagicMock()
    model_render_2 = mocker.MagicMock()
    model_template = mocker.MagicMock()
    model_template.render.side_effect = [model_render_1, model_render_2]
    enum_render_1 = mocker.MagicMock()
    enum_render_2 = mocker.MagicMock()
    enum_template = mocker.MagicMock()
    # Each enum renders to its own sentinel value.
    enum_renders = {
        enum_1: enum_render_1,
        enum_2: enum_render_2,
    }
    enum_template.render.side_effect = lambda enum: enum_renders[enum]
    models_init_template = mocker.MagicMock()
    types_template = mocker.MagicMock()
    templates = {
        "types.py": types_template,
        "model.pyi": model_template,
        "enum.pyi": enum_template,
        "models_init.pyi": models_init_template,
    }
    project.env = mocker.MagicMock()
    project.env.get_template.side_effect = lambda x: templates[x]
    imports = [
        "import_schema_1",
        "import_schema_2",
        "import_enum_1",
        "import_enum_2",
    ]
    import_string_from_reference = mocker.patch(
        "openapi_python_client.import_string_from_reference", side_effect=imports
    )

    project._build_models()

    project.package_dir.__truediv__.assert_called_once_with("models")
    models_dir.mkdir.assert_called_once()
    models_dir.__truediv__.assert_has_calls([mocker.call(key) for key in module_paths])
    project.env.get_template.assert_has_calls([mocker.call(key) for key in templates])
    model_template.render.assert_has_calls([mocker.call(model=model_1), mocker.call(model=model_2)])
    model_1_module_path.write_text.assert_called_once_with(model_render_1)
    model_2_module_path.write_text.assert_called_once_with(model_render_2)
    import_string_from_reference.assert_has_calls(
        [
            mocker.call(model_1.reference),
            mocker.call(model_2.reference),
            mocker.call(enum_1.reference),
            mocker.call(enum_2.reference),
        ]
    )
    models_init_template.render.assert_called_once_with(imports=imports)
    types_template.render.assert_called_once()
    enum_1_module_path.write_text.assert_called_once_with(enum_render_1)
    enum_2_module_path.write_text.assert_called_once_with(enum_render_2)
def test__build_api(self, mocker):
import pathlib
from jinja2 import Template
from openapi_python_client import GeneratorData, Project
openapi = mocker.MagicMock(autospec=GeneratorData, title="My Test API")
tag_1 = mocker.MagicMock(autospec=str)
tag_2 = mocker.MagicMock(autospec=str)
collection_1 = mocker.MagicMock()
collection_2 = mocker.MagicMock()
openapi.endpoint_collections_by_tag = {tag_1: collection_1, tag_2: collection_2}
project = Project(openapi=openapi)
project.package_dir = mocker.MagicMock()
api_errors = mocker.MagicMock(autospec=pathlib.Path)
client_path = mocker.MagicMock()
api_init = mocker.MagicMock(autospec=pathlib.Path)
collection_1_path = mocker.MagicMock(autospec=pathlib.Path)
collection_2_path = mocker.MagicMock(autospec=pathlib.Path)
async_api_init = mocker.MagicMock(autospec=pathlib.Path)
async_collection_1_path = mocker.MagicMock(autospec=pathlib.Path)
async_collection_2_path = mocker.MagicMock(autospec=pathlib.Path)
api_paths = {
"__init__.py": api_init,
f"{tag_1}.py": collection_1_path,
f"{tag_2}.py": collection_2_path,
}
async_api_paths = {
"__init__.py": async_api_init,
f"{tag_1}.py": async_collection_1_path,
f"{tag_2}.py": async_collection_2_path,
}
api_dir = mocker.MagicMock(autospec=pathlib.Path)
api_dir.__truediv__.side_effect = lambda x: api_paths[x]
async_api_dir = mocker.MagicMock(autospec=pathlib.Path)
async_api_dir.__truediv__.side_effect = lambda x: async_api_paths[x]
package_paths = {
"client.py": client_path,
"api": api_dir,
"async_api": async_api_dir,
"errors.py": api_errors,
}
project.package_dir.__truediv__.side_effect = lambda x: package_paths[x]
client_template = mocker.MagicMock(autospec=Template)
errors_template = mocker.MagicMock(autospec=Template)
endpoint_template = mocker.MagicMock(autospec=Template)
async_endpoint_template = mocker.MagicMock(autospec=Template)
templates = {
"client.pyi": client_template,
"errors.pyi": errors_template,
"endpoint_module.pyi": endpoint_template,
"async_endpoint_module.pyi": async_endpoint_template,
}
mocker.patch.object(project.env, "get_template", autospec=True, side_effect=lambda x: templates[x])
endpoint_renders = {
collection_1: mocker.MagicMock(),
collection_2: mocker.MagicMock(),
}
endpoint_template.render.side_effect = lambda collection: endpoint_renders[collection]
async_endpoint_renders = {
collection_1: mocker.MagicMock(),
collection_2: mocker.MagicMock(),
}
async_endpoint_template.render.side_effect = lambda collection: async_endpoint_renders[collection]
project._build_api()
project.package_dir.__truediv__.assert_has_calls([mocker.call(key) for key in package_paths])
project.env.get_template.assert_has_calls([mocker.call(key) for key in templates])
client_template.render.assert_called_once()
client_path.write_text.assert_called_once_with(client_template.render())
errors_template.render.assert_called_once()
api_errors.write_text.assert_called_once_with(errors_template.render())
api_dir.mkdir.assert_called_once()
api_dir.__truediv__.assert_has_calls([mocker.call(key) for key in api_paths])
api_init.write_text.assert_called_once_with('""" Contains synchronous methods for accessing the API """')
endpoint_template.render.assert_has_calls(
[mocker.call(collection=collection_1), mocker.call(collection=collection_2)]
)
collection_1_path.write_text.assert_called_once_with(endpoint_renders[collection_1])
collection_2_path.write_text.assert_called_once_with(endpoint_renders[collection_2])
async_api_dir.mkdir.assert_called_once()
async_api_dir.__truediv__.assert_has_calls([mocker.call(key) for key in async_api_paths])
async_api_init.write_text.assert_called_once_with('""" Contains async methods for accessing the API """')
async_endpoint_template.render.assert_has_calls(
[mocker.call(collection=collection_1), mocker.call(collection=collection_2)]
)
async_collection_1_path.write_text.assert_called_once_with(async_endpoint_renders[collection_1])
async_collection_2_path.write_text.assert_called_once_with(async_endpoint_renders[collection_2])
def test__reformat(mocker):
import subprocess
from openapi_python_client import GeneratorData, Project
sub_run = mocker.patch("subprocess.run")
openapi = mocker.MagicMock(autospec=GeneratorData, title="My Test API")
project = Project(openapi=openapi)
project.project_dir = mocker.MagicMock(autospec=pathlib.Path)
project._reformat()
sub_run.assert_has_calls(
[
mocker.call(
"isort .", cwd=project.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
),
mocker.call("black .", cwd=project.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE),
]
)
def test__get_errors(mocker):
from openapi_python_client import GeneratorData, Project
from openapi_python_client.parser.openapi import EndpointCollection, Schemas
openapi = mocker.MagicMock(
autospec=GeneratorData,
title="My Test API",
endpoint_collections_by_tag={
"default": mocker.MagicMock(autospec=EndpointCollection, parse_errors=[1]),
"other": mocker.MagicMock(autospec=EndpointCollection, parse_errors=[2]),
},
schemas=mocker.MagicMock(autospec=Schemas, errors=[3]),
)
project = Project(openapi=openapi)
assert project._get_errors() == [1, 2, 3]
| import pathlib
import httpcore
import jinja2
import pytest
import yaml
from openapi_python_client import GeneratorError
def test__get_project_for_url_or_path(mocker):
data_dict = mocker.MagicMock()
_get_document = mocker.patch("openapi_python_client._get_document", return_value=data_dict)
openapi = mocker.MagicMock()
from_dict = mocker.patch("openapi_python_client.parser.GeneratorData.from_dict", return_value=openapi)
_Project = mocker.patch("openapi_python_client.Project")
url = mocker.MagicMock()
path = mocker.MagicMock()
from openapi_python_client import _get_project_for_url_or_path
project = _get_project_for_url_or_path(url=url, path=path)
_get_document.assert_called_once_with(url=url, path=path)
from_dict.assert_called_once_with(data_dict)
_Project.assert_called_once_with(openapi=openapi)
assert project == _Project()
def test__get_project_for_url_or_path_generator_error(mocker):
data_dict = mocker.MagicMock()
_get_document = mocker.patch("openapi_python_client._get_document", return_value=data_dict)
error = GeneratorError()
from_dict = mocker.patch("openapi_python_client.parser.GeneratorData.from_dict", return_value=error)
_Project = mocker.patch("openapi_python_client.Project")
url = mocker.MagicMock()
path = mocker.MagicMock()
from openapi_python_client import _get_project_for_url_or_path
project = _get_project_for_url_or_path(url=url, path=path)
_get_document.assert_called_once_with(url=url, path=path)
from_dict.assert_called_once_with(data_dict)
_Project.assert_not_called()
assert project == error
def test__get_project_for_url_or_path_document_error(mocker):
error = GeneratorError()
_get_document = mocker.patch("openapi_python_client._get_document", return_value=error)
from_dict = mocker.patch("openapi_python_client.parser.GeneratorData.from_dict")
url = mocker.MagicMock()
path = mocker.MagicMock()
from openapi_python_client import _get_project_for_url_or_path
project = _get_project_for_url_or_path(url=url, path=path)
_get_document.assert_called_once_with(url=url, path=path)
from_dict.assert_not_called()
assert project == error
def test_create_new_client(mocker):
project = mocker.MagicMock()
_get_project_for_url_or_path = mocker.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=project
)
url = mocker.MagicMock()
path = mocker.MagicMock()
from openapi_python_client import create_new_client
result = create_new_client(url=url, path=path)
_get_project_for_url_or_path.assert_called_once_with(url=url, path=path)
project.build.assert_called_once()
assert result == project.build.return_value
def test_create_new_client_project_error(mocker):
error = GeneratorError()
_get_project_for_url_or_path = mocker.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=error
)
url = mocker.MagicMock()
path = mocker.MagicMock()
from openapi_python_client import create_new_client
result = create_new_client(url=url, path=path)
_get_project_for_url_or_path.assert_called_once_with(url=url, path=path)
assert result == [error]
def test_update_existing_client(mocker):
project = mocker.MagicMock()
_get_project_for_url_or_path = mocker.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=project
)
url = mocker.MagicMock()
path = mocker.MagicMock()
from openapi_python_client import update_existing_client
result = update_existing_client(url=url, path=path)
_get_project_for_url_or_path.assert_called_once_with(url=url, path=path)
project.update.assert_called_once()
assert result == project.update.return_value
def test_update_existing_client_project_error(mocker):
error = GeneratorError()
_get_project_for_url_or_path = mocker.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=error
)
url = mocker.MagicMock()
path = mocker.MagicMock()
from openapi_python_client import update_existing_client
result = update_existing_client(url=url, path=path)
_get_project_for_url_or_path.assert_called_once_with(url=url, path=path)
assert result == [error]
class TestGetJson:
def test__get_document_no_url_or_path(self, mocker):
get = mocker.patch("httpx.get")
Path = mocker.patch("openapi_python_client.Path")
loads = mocker.patch("yaml.safe_load")
from openapi_python_client import _get_document
result = _get_document(url=None, path=None)
assert result == GeneratorError(header="No URL or Path provided")
get.assert_not_called()
Path.assert_not_called()
loads.assert_not_called()
def test__get_document_url_and_path(self, mocker):
get = mocker.patch("httpx.get")
Path = mocker.patch("openapi_python_client.Path")
loads = mocker.patch("yaml.safe_load")
from openapi_python_client import _get_document
result = _get_document(url=mocker.MagicMock(), path=mocker.MagicMock())
assert result == GeneratorError(header="Provide URL or Path, not both.")
get.assert_not_called()
Path.assert_not_called()
loads.assert_not_called()
def test__get_document_bad_url(self, mocker):
get = mocker.patch("httpx.get", side_effect=httpcore.NetworkError)
Path = mocker.patch("openapi_python_client.Path")
loads = mocker.patch("yaml.safe_load")
from openapi_python_client import _get_document
url = mocker.MagicMock()
result = _get_document(url=url, path=None)
assert result == GeneratorError(header="Could not get OpenAPI document from provided URL")
get.assert_called_once_with(url)
Path.assert_not_called()
loads.assert_not_called()
def test__get_document_url_no_path(self, mocker):
get = mocker.patch("httpx.get")
Path = mocker.patch("openapi_python_client.Path")
loads = mocker.patch("yaml.safe_load")
from openapi_python_client import _get_document
url = mocker.MagicMock()
_get_document(url=url, path=None)
get.assert_called_once_with(url)
Path.assert_not_called()
loads.assert_called_once_with(get().content)
def test__get_document_path_no_url(self, mocker):
get = mocker.patch("httpx.get")
loads = mocker.patch("yaml.safe_load")
from openapi_python_client import _get_document
path = mocker.MagicMock()
_get_document(url=None, path=path)
get.assert_not_called()
path.read_bytes.assert_called_once()
loads.assert_called_once_with(path.read_bytes())
def test__get_document_bad_yaml(self, mocker):
get = mocker.patch("httpx.get")
loads = mocker.patch("yaml.safe_load", side_effect=yaml.YAMLError)
from openapi_python_client import _get_document
path = mocker.MagicMock()
result = _get_document(url=None, path=path)
get.assert_not_called()
path.read_bytes.assert_called_once()
loads.assert_called_once_with(path.read_bytes())
assert result == GeneratorError(header="Invalid YAML from provided source")
class TestProject:
def test___init__(self, mocker):
openapi = mocker.MagicMock(title="My Test API")
from openapi_python_client import Project
project = Project(openapi=openapi)
assert project.openapi == openapi
assert project.project_name == "my-test-api-client"
assert project.package_name == "my_test_api_client"
assert project.package_description == "A client library for accessing My Test API"
def test_project_and_package_name_overrides(self, mocker):
openapi = mocker.MagicMock(title="My Test API")
from openapi_python_client import Project
Project.project_name_override = "my-special-project-name"
project = Project(openapi=openapi)
assert project.project_name == "my-special-project-name"
assert project.package_name == "my_special_project_name"
Project.package_name_override = "my_special_package_name"
project = Project(openapi=openapi)
assert project.project_name == "my-special-project-name"
assert project.package_name == "my_special_package_name"
def test_build(self, mocker):
from openapi_python_client import Project
project = Project(openapi=mocker.MagicMock(title="My Test API"))
project.project_dir = mocker.MagicMock()
project.package_dir = mocker.MagicMock()
project._build_metadata = mocker.MagicMock()
project._build_models = mocker.MagicMock()
project._build_api = mocker.MagicMock()
project._create_package = mocker.MagicMock()
project._reformat = mocker.MagicMock()
project._get_errors = mocker.MagicMock()
result = project.build()
project.project_dir.mkdir.assert_called_once()
project._create_package.assert_called_once()
project._build_metadata.assert_called_once()
project._build_models.assert_called_once()
project._build_api.assert_called_once()
project._reformat.assert_called_once()
project._get_errors.assert_called_once()
assert result == project._get_errors.return_value
def test_build_file_exists(self, mocker):
from openapi_python_client import Project
project = Project(openapi=mocker.MagicMock(title="My Test API"))
project.project_dir = mocker.MagicMock()
project.project_dir.mkdir.side_effect = FileExistsError
result = project.build()
project.project_dir.mkdir.assert_called_once()
assert result == [GeneratorError(detail="Directory already exists. Delete it or use the update command.")]
def test_update(self, mocker):
from openapi_python_client import Project, shutil
rmtree = mocker.patch.object(shutil, "rmtree")
project = Project(openapi=mocker.MagicMock(title="My Test API"))
project.package_dir = mocker.MagicMock()
project._build_metadata = mocker.MagicMock()
project._build_models = mocker.MagicMock()
project._build_api = mocker.MagicMock()
project._create_package = mocker.MagicMock()
project._reformat = mocker.MagicMock()
project._get_errors = mocker.MagicMock()
result = project.update()
rmtree.assert_called_once_with(project.package_dir)
project._create_package.assert_called_once()
project._build_models.assert_called_once()
project._build_api.assert_called_once()
project._reformat.assert_called_once()
project._get_errors.assert_called_once()
assert result == project._get_errors.return_value
def test_update_missing_dir(self, mocker):
from openapi_python_client import Project
project = Project(openapi=mocker.MagicMock(title="My Test API"))
project.package_dir = mocker.MagicMock()
project.package_dir.is_dir.return_value = False
project._build_models = mocker.MagicMock()
with pytest.raises(FileNotFoundError):
project.update()
project.package_dir.is_dir.assert_called_once()
project._build_models.assert_not_called()
def test__create_package(self, mocker):
from openapi_python_client import Project
project = Project(openapi=mocker.MagicMock(title="My Test API"))
project.package_dir = mocker.MagicMock()
package_init_template = mocker.MagicMock()
project.env = mocker.MagicMock()
project.env.get_template.return_value = package_init_template
package_init_path = mocker.MagicMock(autospec=pathlib.Path)
pytyped_path = mocker.MagicMock(autospec=pathlib.Path)
paths = {
"__init__.py": package_init_path,
"py.typed": pytyped_path,
}
project.package_dir.__truediv__.side_effect = lambda x: paths[x]
project._create_package()
project.package_dir.mkdir.assert_called_once()
project.env.get_template.assert_called_once_with("package_init.pyi")
package_init_template.render.assert_called_once_with(description=project.package_description)
package_init_path.write_text.assert_called_once_with(package_init_template.render())
pytyped_path.write_text.assert_called_once_with("# Marker file for PEP 561")
def test__build_metadata(self, mocker):
from openapi_python_client import Project
project = Project(openapi=mocker.MagicMock(title="My Test API"))
project.project_dir = mocker.MagicMock()
pyproject_path = mocker.MagicMock(autospec=pathlib.Path)
readme_path = mocker.MagicMock(autospec=pathlib.Path)
git_ignore_path = mocker.MagicMock(autospec=pathlib.Path)
paths = {
"pyproject.toml": pyproject_path,
"README.md": readme_path,
".gitignore": git_ignore_path,
}
project.project_dir.__truediv__.side_effect = lambda x: paths[x]
pyproject_template = mocker.MagicMock(autospec=jinja2.Template)
readme_template = mocker.MagicMock(autospec=jinja2.Template)
git_ignore_template = mocker.MagicMock(autospec=jinja2.Template)
project.env = mocker.MagicMock(autospec=jinja2.Environment)
templates = {
"pyproject.toml": pyproject_template,
"README.md": readme_template,
".gitignore": git_ignore_template,
}
project.env.get_template.side_effect = lambda x: templates[x]
project._build_metadata()
project.env.get_template.assert_has_calls(
[mocker.call("pyproject.toml"), mocker.call("README.md"), mocker.call(".gitignore")]
)
pyproject_template.render.assert_called_once_with(
project_name=project.project_name,
package_name=project.package_name,
version=project.version,
description=project.package_description,
)
pyproject_path.write_text.assert_called_once_with(pyproject_template.render())
readme_template.render.assert_called_once_with(
description=project.package_description,
project_name=project.project_name,
package_name=project.package_name,
)
readme_path.write_text.assert_called_once_with(readme_template.render())
git_ignore_template.render.assert_called_once()
git_ignore_path.write_text.assert_called_once_with(git_ignore_template.render())
def test__build_models(self, mocker):
from openapi_python_client import GeneratorData, Project
openapi = mocker.MagicMock(autospec=GeneratorData, title="My Test API")
model_1 = mocker.MagicMock()
model_2 = mocker.MagicMock()
openapi.schemas.models = {"1": model_1, "2": model_2}
enum_1 = mocker.MagicMock()
enum_2 = mocker.MagicMock()
openapi.enums = {"1": enum_1, "2": enum_2}
project = Project(openapi=openapi)
project.package_dir = mocker.MagicMock()
models_init = mocker.MagicMock()
types_py = mocker.MagicMock()
models_dir = mocker.MagicMock()
model_1_module_path = mocker.MagicMock()
model_2_module_path = mocker.MagicMock()
enum_1_module_path = mocker.MagicMock()
enum_2_module_path = mocker.MagicMock()
module_paths = {
"__init__.py": models_init,
"types.py": types_py,
f"{model_1.reference.module_name}.py": model_1_module_path,
f"{model_2.reference.module_name}.py": model_2_module_path,
f"{enum_1.reference.module_name}.py": enum_1_module_path,
f"{enum_2.reference.module_name}.py": enum_2_module_path,
}
def models_dir_get(x):
return module_paths[x]
models_dir.__truediv__.side_effect = models_dir_get
project.package_dir.__truediv__.return_value = models_dir
model_render_1 = mocker.MagicMock()
model_render_2 = mocker.MagicMock()
model_template = mocker.MagicMock()
model_template.render.side_effect = [model_render_1, model_render_2]
enum_render_1 = mocker.MagicMock()
enum_render_2 = mocker.MagicMock()
enum_template = mocker.MagicMock()
enum_renders = {
enum_1: enum_render_1,
enum_2: enum_render_2,
}
enum_template.render.side_effect = lambda enum: enum_renders[enum]
models_init_template = mocker.MagicMock()
types_template = mocker.MagicMock()
templates = {
"types.py": types_template,
"model.pyi": model_template,
"enum.pyi": enum_template,
"models_init.pyi": models_init_template,
}
project.env = mocker.MagicMock()
project.env.get_template.side_effect = lambda x: templates[x]
imports = [
"import_schema_1",
"import_schema_2",
"import_enum_1",
"import_enum_2",
]
import_string_from_reference = mocker.patch(
"openapi_python_client.import_string_from_reference", side_effect=imports
)
project._build_models()
project.package_dir.__truediv__.assert_called_once_with("models")
models_dir.mkdir.assert_called_once()
models_dir.__truediv__.assert_has_calls([mocker.call(key) for key in module_paths])
project.env.get_template.assert_has_calls([mocker.call(key) for key in templates])
model_template.render.assert_has_calls([mocker.call(model=model_1), mocker.call(model=model_2)])
model_1_module_path.write_text.assert_called_once_with(model_render_1)
model_2_module_path.write_text.assert_called_once_with(model_render_2)
import_string_from_reference.assert_has_calls(
[
mocker.call(model_1.reference),
mocker.call(model_2.reference),
mocker.call(enum_1.reference),
mocker.call(enum_2.reference),
]
)
models_init_template.render.assert_called_once_with(imports=imports)
types_template.render.assert_called_once()
enum_1_module_path.write_text.assert_called_once_with(enum_render_1)
enum_2_module_path.write_text.assert_called_once_with(enum_render_2)
def test__build_api(self, mocker):
import pathlib
from jinja2 import Template
from openapi_python_client import GeneratorData, Project
openapi = mocker.MagicMock(autospec=GeneratorData, title="My Test API")
tag_1 = "a_tag"
tag_2 = "another_tag"
collection_1 = mocker.MagicMock()
collection_2 = mocker.MagicMock()
openapi.endpoint_collections_by_tag = {tag_1: collection_1, tag_2: collection_2}
project = Project(openapi=openapi)
project.package_dir = mocker.MagicMock()
api_errors = mocker.MagicMock(autospec=pathlib.Path)
client_path = mocker.MagicMock()
api_init = mocker.MagicMock(autospec=pathlib.Path)
collection_1_path = mocker.MagicMock(autospec=pathlib.Path)
collection_2_path = mocker.MagicMock(autospec=pathlib.Path)
async_api_init = mocker.MagicMock(autospec=pathlib.Path)
async_collection_1_path = mocker.MagicMock(autospec=pathlib.Path)
async_collection_2_path = mocker.MagicMock(autospec=pathlib.Path)
api_paths = {
"__init__.py": api_init,
f"{tag_1}.py": collection_1_path,
f"{tag_2}.py": collection_2_path,
}
async_api_paths = {
"__init__.py": async_api_init,
f"{tag_1}.py": async_collection_1_path,
f"{tag_2}.py": async_collection_2_path,
}
api_dir = mocker.MagicMock(autospec=pathlib.Path)
api_dir.__truediv__.side_effect = lambda x: api_paths[x]
async_api_dir = mocker.MagicMock(autospec=pathlib.Path)
async_api_dir.__truediv__.side_effect = lambda x: async_api_paths[x]
package_paths = {
"client.py": client_path,
"api": api_dir,
"async_api": async_api_dir,
"errors.py": api_errors,
}
project.package_dir.__truediv__.side_effect = lambda x: package_paths[x]
client_template = mocker.MagicMock(autospec=Template)
errors_template = mocker.MagicMock(autospec=Template)
endpoint_template = mocker.MagicMock(autospec=Template)
async_endpoint_template = mocker.MagicMock(autospec=Template)
templates = {
"client.pyi": client_template,
"errors.pyi": errors_template,
"endpoint_module.pyi": endpoint_template,
"async_endpoint_module.pyi": async_endpoint_template,
}
mocker.patch.object(project.env, "get_template", autospec=True, side_effect=lambda x: templates[x])
endpoint_renders = {
collection_1: mocker.MagicMock(),
collection_2: mocker.MagicMock(),
}
endpoint_template.render.side_effect = lambda collection: endpoint_renders[collection]
async_endpoint_renders = {
collection_1: mocker.MagicMock(),
collection_2: mocker.MagicMock(),
}
async_endpoint_template.render.side_effect = lambda collection: async_endpoint_renders[collection]
project._build_api()
project.package_dir.__truediv__.assert_has_calls([mocker.call(key) for key in package_paths])
project.env.get_template.assert_has_calls([mocker.call(key) for key in templates])
client_template.render.assert_called_once()
client_path.write_text.assert_called_once_with(client_template.render())
errors_template.render.assert_called_once()
api_errors.write_text.assert_called_once_with(errors_template.render())
api_dir.mkdir.assert_called_once()
api_dir.__truediv__.assert_has_calls([mocker.call(key) for key in api_paths])
api_init.write_text.assert_called_once_with('""" Contains synchronous methods for accessing the API """')
endpoint_template.render.assert_has_calls(
[mocker.call(collection=collection_1), mocker.call(collection=collection_2)]
)
collection_1_path.write_text.assert_called_once_with(endpoint_renders[collection_1])
collection_2_path.write_text.assert_called_once_with(endpoint_renders[collection_2])
async_api_dir.mkdir.assert_called_once()
async_api_dir.__truediv__.assert_has_calls([mocker.call(key) for key in async_api_paths])
async_api_init.write_text.assert_called_once_with('""" Contains async methods for accessing the API """')
async_endpoint_template.render.assert_has_calls(
[mocker.call(collection=collection_1), mocker.call(collection=collection_2)]
)
async_collection_1_path.write_text.assert_called_once_with(async_endpoint_renders[collection_1])
async_collection_2_path.write_text.assert_called_once_with(async_endpoint_renders[collection_2])
def test__reformat(mocker):
import subprocess
from openapi_python_client import GeneratorData, Project
sub_run = mocker.patch("subprocess.run")
openapi = mocker.MagicMock(autospec=GeneratorData, title="My Test API")
project = Project(openapi=openapi)
project.project_dir = mocker.MagicMock(autospec=pathlib.Path)
project._reformat()
sub_run.assert_has_calls(
[
mocker.call(
"isort .", cwd=project.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
),
mocker.call("black .", cwd=project.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE),
]
)
def test__get_errors(mocker):
from openapi_python_client import GeneratorData, Project
from openapi_python_client.parser.openapi import EndpointCollection, Schemas
openapi = mocker.MagicMock(
autospec=GeneratorData,
title="My Test API",
endpoint_collections_by_tag={
"default": mocker.MagicMock(autospec=EndpointCollection, parse_errors=[1]),
"other": mocker.MagicMock(autospec=EndpointCollection, parse_errors=[2]),
},
schemas=mocker.MagicMock(autospec=Schemas, errors=[3]),
)
project = Project(openapi=openapi)
assert project._get_errors() == [1, 2, 3]
| path_disclosure | {
"code": [
" tag_1 = mocker.MagicMock(autospec=str)",
" tag_2 = mocker.MagicMock(autospec=str)"
],
"line_no": [
489,
490
]
} | {
"code": [
" tag_1 = \"a_tag\"",
" tag_2 = \"another_tag\""
],
"line_no": [
489,
490
]
} | import .pathlib
import httpcore
import jinja2
import pytest
import yaml
from openapi_python_client import GeneratorError
def FUNC_0(VAR_0):
VAR_1 = VAR_0.MagicMock()
VAR_2 = VAR_0.patch("openapi_python_client._get_document", return_value=VAR_1)
VAR_3 = VAR_0.MagicMock()
VAR_4 = VAR_0.patch("openapi_python_client.parser.GeneratorData.from_dict", return_value=VAR_3)
VAR_5 = VAR_0.patch("openapi_python_client.Project")
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import ._get_project_for_url_or_path
VAR_8 = VAR_10(VAR_6=url, VAR_7=path)
VAR_2.assert_called_once_with(VAR_6=url, VAR_7=path)
VAR_4.assert_called_once_with(VAR_1)
VAR_5.assert_called_once_with(VAR_3=VAR_3)
assert VAR_8 == VAR_5()
def FUNC_1(VAR_0):
VAR_1 = VAR_0.MagicMock()
VAR_2 = VAR_0.patch("openapi_python_client._get_document", return_value=VAR_1)
VAR_9 = GeneratorError()
VAR_4 = VAR_0.patch("openapi_python_client.parser.GeneratorData.from_dict", return_value=VAR_9)
VAR_5 = VAR_0.patch("openapi_python_client.Project")
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import ._get_project_for_url_or_path
VAR_8 = VAR_10(VAR_6=url, VAR_7=path)
VAR_2.assert_called_once_with(VAR_6=url, VAR_7=path)
VAR_4.assert_called_once_with(VAR_1)
VAR_5.assert_not_called()
assert VAR_8 == VAR_9
def FUNC_2(VAR_0):
VAR_9 = GeneratorError()
VAR_2 = VAR_0.patch("openapi_python_client._get_document", return_value=VAR_9)
VAR_4 = VAR_0.patch("openapi_python_client.parser.GeneratorData.from_dict")
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import ._get_project_for_url_or_path
VAR_8 = VAR_10(VAR_6=url, VAR_7=path)
VAR_2.assert_called_once_with(VAR_6=url, VAR_7=path)
VAR_4.assert_not_called()
assert VAR_8 == VAR_9
def FUNC_3(VAR_0):
VAR_8 = VAR_0.MagicMock()
VAR_10 = VAR_0.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=VAR_8
)
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import create_new_client
VAR_11 = create_new_client(VAR_6=url, VAR_7=path)
VAR_10.assert_called_once_with(VAR_6=url, VAR_7=path)
VAR_8.build.assert_called_once()
assert VAR_11 == VAR_8.build.return_value
def FUNC_4(VAR_0):
VAR_9 = GeneratorError()
VAR_10 = VAR_0.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=VAR_9
)
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import create_new_client
VAR_11 = create_new_client(VAR_6=url, VAR_7=path)
VAR_10.assert_called_once_with(VAR_6=url, VAR_7=path)
assert VAR_11 == [VAR_9]
def FUNC_5(VAR_0):
VAR_8 = VAR_0.MagicMock()
VAR_10 = VAR_0.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=VAR_8
)
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import update_existing_client
VAR_11 = update_existing_client(VAR_6=url, VAR_7=path)
VAR_10.assert_called_once_with(VAR_6=url, VAR_7=path)
VAR_8.update.assert_called_once()
assert VAR_11 == VAR_8.update.return_value
def FUNC_6(VAR_0):
VAR_9 = GeneratorError()
VAR_10 = VAR_0.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=VAR_9
)
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import update_existing_client
VAR_11 = update_existing_client(VAR_6=url, VAR_7=path)
VAR_10.assert_called_once_with(VAR_6=url, VAR_7=path)
assert VAR_11 == [VAR_9]
class CLASS_0:
def FUNC_9(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get")
VAR_14 = VAR_0.patch("openapi_python_client.Path")
VAR_15 = VAR_0.patch("yaml.safe_load")
from openapi_python_client import ._get_document
VAR_11 = VAR_2(VAR_6=None, VAR_7=None)
assert VAR_11 == GeneratorError(header="No URL or VAR_14 provided")
VAR_13.assert_not_called()
VAR_14.assert_not_called()
VAR_15.assert_not_called()
def FUNC_10(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get")
VAR_14 = VAR_0.patch("openapi_python_client.Path")
VAR_15 = VAR_0.patch("yaml.safe_load")
from openapi_python_client import ._get_document
VAR_11 = VAR_2(VAR_6=VAR_0.MagicMock(), VAR_7=VAR_0.MagicMock())
assert VAR_11 == GeneratorError(header="Provide URL or VAR_14, not both.")
VAR_13.assert_not_called()
VAR_14.assert_not_called()
VAR_15.assert_not_called()
def FUNC_11(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get", side_effect=httpcore.NetworkError)
VAR_14 = VAR_0.patch("openapi_python_client.Path")
VAR_15 = VAR_0.patch("yaml.safe_load")
from openapi_python_client import ._get_document
VAR_6 = VAR_0.MagicMock()
VAR_11 = VAR_2(VAR_6=url, VAR_7=None)
assert VAR_11 == GeneratorError(header="Could not VAR_13 OpenAPI document from provided URL")
VAR_13.assert_called_once_with(VAR_6)
VAR_14.assert_not_called()
VAR_15.assert_not_called()
def FUNC_12(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get")
VAR_14 = VAR_0.patch("openapi_python_client.Path")
VAR_15 = VAR_0.patch("yaml.safe_load")
from openapi_python_client import ._get_document
VAR_6 = VAR_0.MagicMock()
VAR_2(VAR_6=url, VAR_7=None)
VAR_13.assert_called_once_with(VAR_6)
VAR_14.assert_not_called()
VAR_15.assert_called_once_with(VAR_13().content)
def FUNC_13(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get")
VAR_15 = VAR_0.patch("yaml.safe_load")
from openapi_python_client import ._get_document
VAR_7 = VAR_0.MagicMock()
VAR_2(VAR_6=None, VAR_7=path)
VAR_13.assert_not_called()
VAR_7.read_bytes.assert_called_once()
VAR_15.assert_called_once_with(VAR_7.read_bytes())
def FUNC_14(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get")
VAR_15 = VAR_0.patch("yaml.safe_load", side_effect=yaml.YAMLError)
from openapi_python_client import ._get_document
VAR_7 = VAR_0.MagicMock()
VAR_11 = VAR_2(VAR_6=None, VAR_7=path)
VAR_13.assert_not_called()
VAR_7.read_bytes.assert_called_once()
VAR_15.assert_called_once_with(VAR_7.read_bytes())
assert VAR_11 == GeneratorError(header="Invalid YAML from provided source")
class CLASS_1:
def test___init__(self, VAR_0):
VAR_3 = VAR_0.MagicMock(title="My Test API")
from openapi_python_client import Project
VAR_8 = Project(VAR_3=openapi)
assert VAR_8.openapi == VAR_3
assert VAR_8.project_name == "my-test-api-client"
assert VAR_8.package_name == "my_test_api_client"
assert VAR_8.package_description == "A client library for accessing My Test API"
def FUNC_15(self, VAR_0):
VAR_3 = VAR_0.MagicMock(title="My Test API")
from openapi_python_client import Project
Project.project_name_override = "my-special-VAR_8-name"
VAR_8 = Project(VAR_3=openapi)
assert VAR_8.project_name == "my-special-VAR_8-name"
assert VAR_8.package_name == "my_special_project_name"
Project.package_name_override = "my_special_package_name"
VAR_8 = Project(VAR_3=openapi)
assert VAR_8.project_name == "my-special-VAR_8-name"
assert VAR_8.package_name == "my_special_package_name"
def FUNC_16(self, VAR_0):
from openapi_python_client import Project
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.project_dir = VAR_0.MagicMock()
VAR_8.package_dir = VAR_0.MagicMock()
VAR_8._build_metadata = VAR_0.MagicMock()
VAR_8._build_models = VAR_0.MagicMock()
VAR_8._build_api = VAR_0.MagicMock()
VAR_8._create_package = VAR_0.MagicMock()
VAR_8._reformat = VAR_0.MagicMock()
VAR_8._get_errors = VAR_0.MagicMock()
VAR_11 = VAR_8.build()
VAR_8.project_dir.mkdir.assert_called_once()
VAR_8._create_package.assert_called_once()
VAR_8._build_metadata.assert_called_once()
VAR_8._build_models.assert_called_once()
VAR_8._build_api.assert_called_once()
VAR_8._reformat.assert_called_once()
VAR_8._get_errors.assert_called_once()
assert VAR_11 == VAR_8._get_errors.return_value
def FUNC_17(self, VAR_0):
from openapi_python_client import Project
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.project_dir = VAR_0.MagicMock()
VAR_8.project_dir.mkdir.side_effect = FileExistsError
VAR_11 = VAR_8.build()
VAR_8.project_dir.mkdir.assert_called_once()
assert VAR_11 == [GeneratorError(detail="Directory already exists. Delete it or use the update command.")]
def FUNC_18(self, VAR_0):
from openapi_python_client import Project, shutil
VAR_16 = VAR_0.patch.object(shutil, "rmtree")
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.package_dir = VAR_0.MagicMock()
VAR_8._build_metadata = VAR_0.MagicMock()
VAR_8._build_models = VAR_0.MagicMock()
VAR_8._build_api = VAR_0.MagicMock()
VAR_8._create_package = VAR_0.MagicMock()
VAR_8._reformat = VAR_0.MagicMock()
VAR_8._get_errors = VAR_0.MagicMock()
VAR_11 = VAR_8.update()
VAR_16.assert_called_once_with(VAR_8.package_dir)
VAR_8._create_package.assert_called_once()
VAR_8._build_models.assert_called_once()
VAR_8._build_api.assert_called_once()
VAR_8._reformat.assert_called_once()
VAR_8._get_errors.assert_called_once()
assert VAR_11 == VAR_8._get_errors.return_value
def FUNC_19(self, VAR_0):
from openapi_python_client import Project
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.package_dir = VAR_0.MagicMock()
VAR_8.package_dir.is_dir.return_value = False
VAR_8._build_models = VAR_0.MagicMock()
with pytest.raises(FileNotFoundError):
VAR_8.update()
VAR_8.package_dir.is_dir.assert_called_once()
VAR_8._build_models.assert_not_called()
def FUNC_20(self, VAR_0):
from openapi_python_client import Project
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.package_dir = VAR_0.MagicMock()
VAR_17 = VAR_0.MagicMock()
VAR_8.env = VAR_0.MagicMock()
VAR_8.env.get_template.return_value = VAR_17
VAR_18 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_19 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_20 = {
"__init__.py": VAR_18,
"py.typed": VAR_19,
}
VAR_8.package_dir.__truediv__.side_effect = lambda VAR_40: VAR_20[VAR_40]
VAR_8._create_package()
VAR_8.package_dir.mkdir.assert_called_once()
VAR_8.env.get_template.assert_called_once_with("package_init.pyi")
VAR_17.render.assert_called_once_with(description=VAR_8.package_description)
VAR_18.write_text.assert_called_once_with(VAR_17.render())
VAR_19.write_text.assert_called_once_with("# Marker file for PEP 561")
def FUNC_21(self, VAR_0):
from openapi_python_client import Project
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.project_dir = VAR_0.MagicMock()
VAR_21 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_22 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_23 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_20 = {
"pyproject.toml": VAR_21,
"README.md": VAR_22,
".gitignore": VAR_23,
}
VAR_8.project_dir.__truediv__.side_effect = lambda VAR_40: VAR_20[VAR_40]
VAR_24 = VAR_0.MagicMock(autospec=jinja2.Template)
VAR_25 = VAR_0.MagicMock(autospec=jinja2.Template)
VAR_26 = VAR_0.MagicMock(autospec=jinja2.Template)
VAR_8.env = VAR_0.MagicMock(autospec=jinja2.Environment)
VAR_27 = {
"pyproject.toml": VAR_24,
"README.md": VAR_25,
".gitignore": VAR_26,
}
VAR_8.env.get_template.side_effect = lambda VAR_40: VAR_27[VAR_40]
VAR_8._build_metadata()
VAR_8.env.get_template.assert_has_calls(
[VAR_0.call("pyproject.toml"), VAR_0.call("README.md"), VAR_0.call(".gitignore")]
)
VAR_24.render.assert_called_once_with(
project_name=VAR_8.project_name,
package_name=VAR_8.package_name,
version=VAR_8.version,
description=VAR_8.package_description,
)
VAR_21.write_text.assert_called_once_with(VAR_24.render())
VAR_25.render.assert_called_once_with(
description=VAR_8.package_description,
project_name=VAR_8.project_name,
package_name=VAR_8.package_name,
)
VAR_22.write_text.assert_called_once_with(VAR_25.render())
VAR_26.render.assert_called_once()
VAR_23.write_text.assert_called_once_with(VAR_26.render())
def FUNC_22(self, VAR_0):
from openapi_python_client import GeneratorData, Project
VAR_3 = VAR_0.MagicMock(autospec=GeneratorData, title="My Test API")
VAR_28 = VAR_0.MagicMock()
VAR_29 = VAR_0.MagicMock()
VAR_3.schemas.models = {"1": VAR_28, "2": VAR_29}
VAR_30 = VAR_0.MagicMock()
VAR_31 = VAR_0.MagicMock()
VAR_3.enums = {"1": VAR_30, "2": VAR_31}
VAR_8 = Project(VAR_3=VAR_3)
VAR_8.package_dir = VAR_0.MagicMock()
VAR_32 = VAR_0.MagicMock()
VAR_33 = VAR_0.MagicMock()
VAR_34 = VAR_0.MagicMock()
VAR_35 = VAR_0.MagicMock()
VAR_36 = VAR_0.MagicMock()
VAR_37 = VAR_0.MagicMock()
VAR_38 = VAR_0.MagicMock()
VAR_39 = {
"__init__.py": VAR_32,
"types.py": VAR_33,
f"{VAR_28.reference.module_name}.py": VAR_35,
f"{VAR_29.reference.module_name}.py": VAR_36,
f"{VAR_30.reference.module_name}.py": VAR_37,
f"{VAR_31.reference.module_name}.py": VAR_38,
}
def FUNC_24(VAR_40):
return VAR_39[VAR_40]
VAR_34.__truediv__.side_effect = FUNC_24
VAR_8.package_dir.__truediv__.return_value = VAR_34
VAR_41 = VAR_0.MagicMock()
VAR_42 = VAR_0.MagicMock()
VAR_43 = VAR_0.MagicMock()
VAR_43.render.side_effect = [VAR_41, VAR_42]
VAR_44 = VAR_0.MagicMock()
VAR_45 = VAR_0.MagicMock()
VAR_46 = VAR_0.MagicMock()
VAR_47 = {
VAR_30: VAR_44,
VAR_31: VAR_45,
}
VAR_46.render.side_effect = lambda enum: VAR_47[enum]
VAR_48 = VAR_0.MagicMock()
VAR_49 = VAR_0.MagicMock()
VAR_27 = {
"types.py": VAR_49,
"model.pyi": VAR_43,
"enum.pyi": VAR_46,
"models_init.pyi": VAR_48,
}
VAR_8.env = VAR_0.MagicMock()
VAR_8.env.get_template.side_effect = lambda VAR_40: VAR_27[VAR_40]
VAR_50 = [
"import_schema_1",
"import_schema_2",
"import_enum_1",
"import_enum_2",
]
VAR_51 = VAR_0.patch(
"openapi_python_client.import_string_from_reference", side_effect=VAR_50
)
VAR_8._build_models()
VAR_8.package_dir.__truediv__.assert_called_once_with("models")
VAR_34.mkdir.assert_called_once()
VAR_34.__truediv__.assert_has_calls([VAR_0.call(key) for key in VAR_39])
VAR_8.env.get_template.assert_has_calls([VAR_0.call(key) for key in VAR_27])
VAR_43.render.assert_has_calls([VAR_0.call(model=VAR_28), VAR_0.call(model=VAR_29)])
VAR_35.write_text.assert_called_once_with(VAR_41)
VAR_36.write_text.assert_called_once_with(VAR_42)
VAR_51.assert_has_calls(
[
VAR_0.call(VAR_28.reference),
VAR_0.call(VAR_29.reference),
VAR_0.call(VAR_30.reference),
VAR_0.call(VAR_31.reference),
]
)
VAR_48.render.assert_called_once_with(VAR_50=imports)
VAR_49.render.assert_called_once()
VAR_37.write_text.assert_called_once_with(VAR_44)
VAR_38.write_text.assert_called_once_with(VAR_45)
def FUNC_23(self, VAR_0):
import .pathlib
from jinja2 import Template
from openapi_python_client import GeneratorData, Project
VAR_3 = VAR_0.MagicMock(autospec=GeneratorData, title="My Test API")
VAR_52 = VAR_0.MagicMock(autospec=str)
VAR_53 = VAR_0.MagicMock(autospec=str)
VAR_54 = VAR_0.MagicMock()
VAR_55 = VAR_0.MagicMock()
VAR_3.endpoint_collections_by_tag = {VAR_52: VAR_54, VAR_53: VAR_55}
VAR_8 = Project(VAR_3=VAR_3)
VAR_8.package_dir = VAR_0.MagicMock()
VAR_56 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_57 = VAR_0.MagicMock()
VAR_58 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_59 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_60 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_61 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_62 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_63 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_64 = {
"__init__.py": VAR_58,
f"{VAR_52}.py": VAR_59,
f"{VAR_53}.py": VAR_60,
}
VAR_65 = {
"__init__.py": VAR_61,
f"{VAR_52}.py": VAR_62,
f"{VAR_53}.py": VAR_63,
}
VAR_66 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_66.__truediv__.side_effect = lambda VAR_40: VAR_64[VAR_40]
VAR_67 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_67.__truediv__.side_effect = lambda VAR_40: VAR_65[VAR_40]
VAR_68 = {
"client.py": VAR_57,
"api": VAR_66,
"async_api": VAR_67,
"errors.py": VAR_56,
}
VAR_8.package_dir.__truediv__.side_effect = lambda VAR_40: VAR_68[VAR_40]
VAR_69 = VAR_0.MagicMock(autospec=Template)
VAR_70 = VAR_0.MagicMock(autospec=Template)
VAR_71 = VAR_0.MagicMock(autospec=Template)
VAR_72 = VAR_0.MagicMock(autospec=Template)
VAR_27 = {
"client.pyi": VAR_69,
"errors.pyi": VAR_70,
"endpoint_module.pyi": VAR_71,
"async_endpoint_module.pyi": VAR_72,
}
VAR_0.patch.object(VAR_8.env, "get_template", autospec=True, side_effect=lambda VAR_40: VAR_27[VAR_40])
VAR_73 = {
VAR_54: VAR_0.MagicMock(),
VAR_55: VAR_0.MagicMock(),
}
VAR_71.render.side_effect = lambda collection: VAR_73[collection]
VAR_74 = {
VAR_54: VAR_0.MagicMock(),
VAR_55: VAR_0.MagicMock(),
}
VAR_72.render.side_effect = lambda collection: VAR_74[collection]
VAR_8._build_api()
VAR_8.package_dir.__truediv__.assert_has_calls([VAR_0.call(key) for key in VAR_68])
VAR_8.env.get_template.assert_has_calls([VAR_0.call(key) for key in VAR_27])
VAR_69.render.assert_called_once()
VAR_57.write_text.assert_called_once_with(VAR_69.render())
VAR_70.render.assert_called_once()
VAR_56.write_text.assert_called_once_with(VAR_70.render())
VAR_66.mkdir.assert_called_once()
VAR_66.__truediv__.assert_has_calls([VAR_0.call(key) for key in VAR_64])
VAR_58.write_text.assert_called_once_with('""" Contains synchronous methods for accessing the API """')
VAR_71.render.assert_has_calls(
[VAR_0.call(collection=VAR_54), VAR_0.call(collection=VAR_55)]
)
VAR_59.write_text.assert_called_once_with(VAR_73[VAR_54])
VAR_60.write_text.assert_called_once_with(VAR_73[VAR_55])
VAR_67.mkdir.assert_called_once()
VAR_67.__truediv__.assert_has_calls([VAR_0.call(key) for key in VAR_65])
VAR_61.write_text.assert_called_once_with('""" Contains async methods for accessing the API """')
VAR_72.render.assert_has_calls(
[VAR_0.call(collection=VAR_54), VAR_0.call(collection=VAR_55)]
)
VAR_62.write_text.assert_called_once_with(VAR_74[VAR_54])
VAR_63.write_text.assert_called_once_with(VAR_74[VAR_55])
def FUNC_7(VAR_0):
import subprocess
from openapi_python_client import GeneratorData, Project
VAR_12 = VAR_0.patch("subprocess.run")
VAR_3 = VAR_0.MagicMock(autospec=GeneratorData, title="My Test API")
VAR_8 = Project(VAR_3=VAR_3)
VAR_8.project_dir = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_8._reformat()
VAR_12.assert_has_calls(
[
VAR_0.call(
"isort .", cwd=VAR_8.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
),
VAR_0.call("black .", cwd=VAR_8.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE),
]
)
def FUNC_8(VAR_0):
from openapi_python_client import GeneratorData, Project
from openapi_python_client.parser.openapi import EndpointCollection, Schemas
VAR_3 = VAR_0.MagicMock(
autospec=GeneratorData,
title="My Test API",
endpoint_collections_by_tag={
"default": VAR_0.MagicMock(autospec=EndpointCollection, parse_errors=[1]),
"other": VAR_0.MagicMock(autospec=EndpointCollection, parse_errors=[2]),
},
schemas=VAR_0.MagicMock(autospec=Schemas, errors=[3]),
)
VAR_8 = Project(VAR_3=openapi)
assert VAR_8._get_errors() == [1, 2, 3]
| import .pathlib
import httpcore
import jinja2
import pytest
import yaml
from openapi_python_client import GeneratorError
def FUNC_0(VAR_0):
VAR_1 = VAR_0.MagicMock()
VAR_2 = VAR_0.patch("openapi_python_client._get_document", return_value=VAR_1)
VAR_3 = VAR_0.MagicMock()
VAR_4 = VAR_0.patch("openapi_python_client.parser.GeneratorData.from_dict", return_value=VAR_3)
VAR_5 = VAR_0.patch("openapi_python_client.Project")
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import ._get_project_for_url_or_path
VAR_8 = VAR_10(VAR_6=url, VAR_7=path)
VAR_2.assert_called_once_with(VAR_6=url, VAR_7=path)
VAR_4.assert_called_once_with(VAR_1)
VAR_5.assert_called_once_with(VAR_3=VAR_3)
assert VAR_8 == VAR_5()
def FUNC_1(VAR_0):
VAR_1 = VAR_0.MagicMock()
VAR_2 = VAR_0.patch("openapi_python_client._get_document", return_value=VAR_1)
VAR_9 = GeneratorError()
VAR_4 = VAR_0.patch("openapi_python_client.parser.GeneratorData.from_dict", return_value=VAR_9)
VAR_5 = VAR_0.patch("openapi_python_client.Project")
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import ._get_project_for_url_or_path
VAR_8 = VAR_10(VAR_6=url, VAR_7=path)
VAR_2.assert_called_once_with(VAR_6=url, VAR_7=path)
VAR_4.assert_called_once_with(VAR_1)
VAR_5.assert_not_called()
assert VAR_8 == VAR_9
def FUNC_2(VAR_0):
VAR_9 = GeneratorError()
VAR_2 = VAR_0.patch("openapi_python_client._get_document", return_value=VAR_9)
VAR_4 = VAR_0.patch("openapi_python_client.parser.GeneratorData.from_dict")
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import ._get_project_for_url_or_path
VAR_8 = VAR_10(VAR_6=url, VAR_7=path)
VAR_2.assert_called_once_with(VAR_6=url, VAR_7=path)
VAR_4.assert_not_called()
assert VAR_8 == VAR_9
def FUNC_3(VAR_0):
VAR_8 = VAR_0.MagicMock()
VAR_10 = VAR_0.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=VAR_8
)
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import create_new_client
VAR_11 = create_new_client(VAR_6=url, VAR_7=path)
VAR_10.assert_called_once_with(VAR_6=url, VAR_7=path)
VAR_8.build.assert_called_once()
assert VAR_11 == VAR_8.build.return_value
def FUNC_4(VAR_0):
VAR_9 = GeneratorError()
VAR_10 = VAR_0.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=VAR_9
)
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import create_new_client
VAR_11 = create_new_client(VAR_6=url, VAR_7=path)
VAR_10.assert_called_once_with(VAR_6=url, VAR_7=path)
assert VAR_11 == [VAR_9]
def FUNC_5(VAR_0):
VAR_8 = VAR_0.MagicMock()
VAR_10 = VAR_0.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=VAR_8
)
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import update_existing_client
VAR_11 = update_existing_client(VAR_6=url, VAR_7=path)
VAR_10.assert_called_once_with(VAR_6=url, VAR_7=path)
VAR_8.update.assert_called_once()
assert VAR_11 == VAR_8.update.return_value
def FUNC_6(VAR_0):
VAR_9 = GeneratorError()
VAR_10 = VAR_0.patch(
"openapi_python_client._get_project_for_url_or_path", return_value=VAR_9
)
VAR_6 = VAR_0.MagicMock()
VAR_7 = VAR_0.MagicMock()
from openapi_python_client import update_existing_client
VAR_11 = update_existing_client(VAR_6=url, VAR_7=path)
VAR_10.assert_called_once_with(VAR_6=url, VAR_7=path)
assert VAR_11 == [VAR_9]
class CLASS_0:
def FUNC_9(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get")
VAR_14 = VAR_0.patch("openapi_python_client.Path")
VAR_15 = VAR_0.patch("yaml.safe_load")
from openapi_python_client import ._get_document
VAR_11 = VAR_2(VAR_6=None, VAR_7=None)
assert VAR_11 == GeneratorError(header="No URL or VAR_14 provided")
VAR_13.assert_not_called()
VAR_14.assert_not_called()
VAR_15.assert_not_called()
def FUNC_10(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get")
VAR_14 = VAR_0.patch("openapi_python_client.Path")
VAR_15 = VAR_0.patch("yaml.safe_load")
from openapi_python_client import ._get_document
VAR_11 = VAR_2(VAR_6=VAR_0.MagicMock(), VAR_7=VAR_0.MagicMock())
assert VAR_11 == GeneratorError(header="Provide URL or VAR_14, not both.")
VAR_13.assert_not_called()
VAR_14.assert_not_called()
VAR_15.assert_not_called()
def FUNC_11(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get", side_effect=httpcore.NetworkError)
VAR_14 = VAR_0.patch("openapi_python_client.Path")
VAR_15 = VAR_0.patch("yaml.safe_load")
from openapi_python_client import ._get_document
VAR_6 = VAR_0.MagicMock()
VAR_11 = VAR_2(VAR_6=url, VAR_7=None)
assert VAR_11 == GeneratorError(header="Could not VAR_13 OpenAPI document from provided URL")
VAR_13.assert_called_once_with(VAR_6)
VAR_14.assert_not_called()
VAR_15.assert_not_called()
def FUNC_12(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get")
VAR_14 = VAR_0.patch("openapi_python_client.Path")
VAR_15 = VAR_0.patch("yaml.safe_load")
from openapi_python_client import ._get_document
VAR_6 = VAR_0.MagicMock()
VAR_2(VAR_6=url, VAR_7=None)
VAR_13.assert_called_once_with(VAR_6)
VAR_14.assert_not_called()
VAR_15.assert_called_once_with(VAR_13().content)
def FUNC_13(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get")
VAR_15 = VAR_0.patch("yaml.safe_load")
from openapi_python_client import ._get_document
VAR_7 = VAR_0.MagicMock()
VAR_2(VAR_6=None, VAR_7=path)
VAR_13.assert_not_called()
VAR_7.read_bytes.assert_called_once()
VAR_15.assert_called_once_with(VAR_7.read_bytes())
def FUNC_14(self, VAR_0):
VAR_13 = VAR_0.patch("httpx.get")
VAR_15 = VAR_0.patch("yaml.safe_load", side_effect=yaml.YAMLError)
from openapi_python_client import ._get_document
VAR_7 = VAR_0.MagicMock()
VAR_11 = VAR_2(VAR_6=None, VAR_7=path)
VAR_13.assert_not_called()
VAR_7.read_bytes.assert_called_once()
VAR_15.assert_called_once_with(VAR_7.read_bytes())
assert VAR_11 == GeneratorError(header="Invalid YAML from provided source")
class CLASS_1:
def test___init__(self, VAR_0):
VAR_3 = VAR_0.MagicMock(title="My Test API")
from openapi_python_client import Project
VAR_8 = Project(VAR_3=openapi)
assert VAR_8.openapi == VAR_3
assert VAR_8.project_name == "my-test-api-client"
assert VAR_8.package_name == "my_test_api_client"
assert VAR_8.package_description == "A client library for accessing My Test API"
def FUNC_15(self, VAR_0):
VAR_3 = VAR_0.MagicMock(title="My Test API")
from openapi_python_client import Project
Project.project_name_override = "my-special-VAR_8-name"
VAR_8 = Project(VAR_3=openapi)
assert VAR_8.project_name == "my-special-VAR_8-name"
assert VAR_8.package_name == "my_special_project_name"
Project.package_name_override = "my_special_package_name"
VAR_8 = Project(VAR_3=openapi)
assert VAR_8.project_name == "my-special-VAR_8-name"
assert VAR_8.package_name == "my_special_package_name"
def FUNC_16(self, VAR_0):
from openapi_python_client import Project
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.project_dir = VAR_0.MagicMock()
VAR_8.package_dir = VAR_0.MagicMock()
VAR_8._build_metadata = VAR_0.MagicMock()
VAR_8._build_models = VAR_0.MagicMock()
VAR_8._build_api = VAR_0.MagicMock()
VAR_8._create_package = VAR_0.MagicMock()
VAR_8._reformat = VAR_0.MagicMock()
VAR_8._get_errors = VAR_0.MagicMock()
VAR_11 = VAR_8.build()
VAR_8.project_dir.mkdir.assert_called_once()
VAR_8._create_package.assert_called_once()
VAR_8._build_metadata.assert_called_once()
VAR_8._build_models.assert_called_once()
VAR_8._build_api.assert_called_once()
VAR_8._reformat.assert_called_once()
VAR_8._get_errors.assert_called_once()
assert VAR_11 == VAR_8._get_errors.return_value
def FUNC_17(self, VAR_0):
from openapi_python_client import Project
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.project_dir = VAR_0.MagicMock()
VAR_8.project_dir.mkdir.side_effect = FileExistsError
VAR_11 = VAR_8.build()
VAR_8.project_dir.mkdir.assert_called_once()
assert VAR_11 == [GeneratorError(detail="Directory already exists. Delete it or use the update command.")]
def FUNC_18(self, VAR_0):
from openapi_python_client import Project, shutil
VAR_16 = VAR_0.patch.object(shutil, "rmtree")
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.package_dir = VAR_0.MagicMock()
VAR_8._build_metadata = VAR_0.MagicMock()
VAR_8._build_models = VAR_0.MagicMock()
VAR_8._build_api = VAR_0.MagicMock()
VAR_8._create_package = VAR_0.MagicMock()
VAR_8._reformat = VAR_0.MagicMock()
VAR_8._get_errors = VAR_0.MagicMock()
VAR_11 = VAR_8.update()
VAR_16.assert_called_once_with(VAR_8.package_dir)
VAR_8._create_package.assert_called_once()
VAR_8._build_models.assert_called_once()
VAR_8._build_api.assert_called_once()
VAR_8._reformat.assert_called_once()
VAR_8._get_errors.assert_called_once()
assert VAR_11 == VAR_8._get_errors.return_value
def FUNC_19(self, VAR_0):
from openapi_python_client import Project
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.package_dir = VAR_0.MagicMock()
VAR_8.package_dir.is_dir.return_value = False
VAR_8._build_models = VAR_0.MagicMock()
with pytest.raises(FileNotFoundError):
VAR_8.update()
VAR_8.package_dir.is_dir.assert_called_once()
VAR_8._build_models.assert_not_called()
def FUNC_20(self, VAR_0):
from openapi_python_client import Project
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.package_dir = VAR_0.MagicMock()
VAR_17 = VAR_0.MagicMock()
VAR_8.env = VAR_0.MagicMock()
VAR_8.env.get_template.return_value = VAR_17
VAR_18 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_19 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_20 = {
"__init__.py": VAR_18,
"py.typed": VAR_19,
}
VAR_8.package_dir.__truediv__.side_effect = lambda VAR_40: VAR_20[VAR_40]
VAR_8._create_package()
VAR_8.package_dir.mkdir.assert_called_once()
VAR_8.env.get_template.assert_called_once_with("package_init.pyi")
VAR_17.render.assert_called_once_with(description=VAR_8.package_description)
VAR_18.write_text.assert_called_once_with(VAR_17.render())
VAR_19.write_text.assert_called_once_with("# Marker file for PEP 561")
def FUNC_21(self, VAR_0):
from openapi_python_client import Project
VAR_8 = Project(VAR_3=VAR_0.MagicMock(title="My Test API"))
VAR_8.project_dir = VAR_0.MagicMock()
VAR_21 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_22 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_23 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_20 = {
"pyproject.toml": VAR_21,
"README.md": VAR_22,
".gitignore": VAR_23,
}
VAR_8.project_dir.__truediv__.side_effect = lambda VAR_40: VAR_20[VAR_40]
VAR_24 = VAR_0.MagicMock(autospec=jinja2.Template)
VAR_25 = VAR_0.MagicMock(autospec=jinja2.Template)
VAR_26 = VAR_0.MagicMock(autospec=jinja2.Template)
VAR_8.env = VAR_0.MagicMock(autospec=jinja2.Environment)
VAR_27 = {
"pyproject.toml": VAR_24,
"README.md": VAR_25,
".gitignore": VAR_26,
}
VAR_8.env.get_template.side_effect = lambda VAR_40: VAR_27[VAR_40]
VAR_8._build_metadata()
VAR_8.env.get_template.assert_has_calls(
[VAR_0.call("pyproject.toml"), VAR_0.call("README.md"), VAR_0.call(".gitignore")]
)
VAR_24.render.assert_called_once_with(
project_name=VAR_8.project_name,
package_name=VAR_8.package_name,
version=VAR_8.version,
description=VAR_8.package_description,
)
VAR_21.write_text.assert_called_once_with(VAR_24.render())
VAR_25.render.assert_called_once_with(
description=VAR_8.package_description,
project_name=VAR_8.project_name,
package_name=VAR_8.package_name,
)
VAR_22.write_text.assert_called_once_with(VAR_25.render())
VAR_26.render.assert_called_once()
VAR_23.write_text.assert_called_once_with(VAR_26.render())
def FUNC_22(self, VAR_0):
from openapi_python_client import GeneratorData, Project
VAR_3 = VAR_0.MagicMock(autospec=GeneratorData, title="My Test API")
VAR_28 = VAR_0.MagicMock()
VAR_29 = VAR_0.MagicMock()
VAR_3.schemas.models = {"1": VAR_28, "2": VAR_29}
VAR_30 = VAR_0.MagicMock()
VAR_31 = VAR_0.MagicMock()
VAR_3.enums = {"1": VAR_30, "2": VAR_31}
VAR_8 = Project(VAR_3=VAR_3)
VAR_8.package_dir = VAR_0.MagicMock()
VAR_32 = VAR_0.MagicMock()
VAR_33 = VAR_0.MagicMock()
VAR_34 = VAR_0.MagicMock()
VAR_35 = VAR_0.MagicMock()
VAR_36 = VAR_0.MagicMock()
VAR_37 = VAR_0.MagicMock()
VAR_38 = VAR_0.MagicMock()
VAR_39 = {
"__init__.py": VAR_32,
"types.py": VAR_33,
f"{VAR_28.reference.module_name}.py": VAR_35,
f"{VAR_29.reference.module_name}.py": VAR_36,
f"{VAR_30.reference.module_name}.py": VAR_37,
f"{VAR_31.reference.module_name}.py": VAR_38,
}
def FUNC_24(VAR_40):
return VAR_39[VAR_40]
VAR_34.__truediv__.side_effect = FUNC_24
VAR_8.package_dir.__truediv__.return_value = VAR_34
VAR_41 = VAR_0.MagicMock()
VAR_42 = VAR_0.MagicMock()
VAR_43 = VAR_0.MagicMock()
VAR_43.render.side_effect = [VAR_41, VAR_42]
VAR_44 = VAR_0.MagicMock()
VAR_45 = VAR_0.MagicMock()
VAR_46 = VAR_0.MagicMock()
VAR_47 = {
VAR_30: VAR_44,
VAR_31: VAR_45,
}
VAR_46.render.side_effect = lambda enum: VAR_47[enum]
VAR_48 = VAR_0.MagicMock()
VAR_49 = VAR_0.MagicMock()
VAR_27 = {
"types.py": VAR_49,
"model.pyi": VAR_43,
"enum.pyi": VAR_46,
"models_init.pyi": VAR_48,
}
VAR_8.env = VAR_0.MagicMock()
VAR_8.env.get_template.side_effect = lambda VAR_40: VAR_27[VAR_40]
VAR_50 = [
"import_schema_1",
"import_schema_2",
"import_enum_1",
"import_enum_2",
]
VAR_51 = VAR_0.patch(
"openapi_python_client.import_string_from_reference", side_effect=VAR_50
)
VAR_8._build_models()
VAR_8.package_dir.__truediv__.assert_called_once_with("models")
VAR_34.mkdir.assert_called_once()
VAR_34.__truediv__.assert_has_calls([VAR_0.call(key) for key in VAR_39])
VAR_8.env.get_template.assert_has_calls([VAR_0.call(key) for key in VAR_27])
VAR_43.render.assert_has_calls([VAR_0.call(model=VAR_28), VAR_0.call(model=VAR_29)])
VAR_35.write_text.assert_called_once_with(VAR_41)
VAR_36.write_text.assert_called_once_with(VAR_42)
VAR_51.assert_has_calls(
[
VAR_0.call(VAR_28.reference),
VAR_0.call(VAR_29.reference),
VAR_0.call(VAR_30.reference),
VAR_0.call(VAR_31.reference),
]
)
VAR_48.render.assert_called_once_with(VAR_50=imports)
VAR_49.render.assert_called_once()
VAR_37.write_text.assert_called_once_with(VAR_44)
VAR_38.write_text.assert_called_once_with(VAR_45)
def FUNC_23(self, VAR_0):
import .pathlib
from jinja2 import Template
from openapi_python_client import GeneratorData, Project
VAR_3 = VAR_0.MagicMock(autospec=GeneratorData, title="My Test API")
VAR_52 = "a_tag"
VAR_53 = "another_tag"
VAR_54 = VAR_0.MagicMock()
VAR_55 = VAR_0.MagicMock()
VAR_3.endpoint_collections_by_tag = {VAR_52: VAR_54, VAR_53: VAR_55}
VAR_8 = Project(VAR_3=VAR_3)
VAR_8.package_dir = VAR_0.MagicMock()
VAR_56 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_57 = VAR_0.MagicMock()
VAR_58 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_59 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_60 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_61 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_62 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_63 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_64 = {
"__init__.py": VAR_58,
f"{VAR_52}.py": VAR_59,
f"{VAR_53}.py": VAR_60,
}
VAR_65 = {
"__init__.py": VAR_61,
f"{VAR_52}.py": VAR_62,
f"{VAR_53}.py": VAR_63,
}
VAR_66 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_66.__truediv__.side_effect = lambda VAR_40: VAR_64[VAR_40]
VAR_67 = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_67.__truediv__.side_effect = lambda VAR_40: VAR_65[VAR_40]
VAR_68 = {
"client.py": VAR_57,
"api": VAR_66,
"async_api": VAR_67,
"errors.py": VAR_56,
}
VAR_8.package_dir.__truediv__.side_effect = lambda VAR_40: VAR_68[VAR_40]
VAR_69 = VAR_0.MagicMock(autospec=Template)
VAR_70 = VAR_0.MagicMock(autospec=Template)
VAR_71 = VAR_0.MagicMock(autospec=Template)
VAR_72 = VAR_0.MagicMock(autospec=Template)
VAR_27 = {
"client.pyi": VAR_69,
"errors.pyi": VAR_70,
"endpoint_module.pyi": VAR_71,
"async_endpoint_module.pyi": VAR_72,
}
VAR_0.patch.object(VAR_8.env, "get_template", autospec=True, side_effect=lambda VAR_40: VAR_27[VAR_40])
VAR_73 = {
VAR_54: VAR_0.MagicMock(),
VAR_55: VAR_0.MagicMock(),
}
VAR_71.render.side_effect = lambda collection: VAR_73[collection]
VAR_74 = {
VAR_54: VAR_0.MagicMock(),
VAR_55: VAR_0.MagicMock(),
}
VAR_72.render.side_effect = lambda collection: VAR_74[collection]
VAR_8._build_api()
VAR_8.package_dir.__truediv__.assert_has_calls([VAR_0.call(key) for key in VAR_68])
VAR_8.env.get_template.assert_has_calls([VAR_0.call(key) for key in VAR_27])
VAR_69.render.assert_called_once()
VAR_57.write_text.assert_called_once_with(VAR_69.render())
VAR_70.render.assert_called_once()
VAR_56.write_text.assert_called_once_with(VAR_70.render())
VAR_66.mkdir.assert_called_once()
VAR_66.__truediv__.assert_has_calls([VAR_0.call(key) for key in VAR_64])
VAR_58.write_text.assert_called_once_with('""" Contains synchronous methods for accessing the API """')
VAR_71.render.assert_has_calls(
[VAR_0.call(collection=VAR_54), VAR_0.call(collection=VAR_55)]
)
VAR_59.write_text.assert_called_once_with(VAR_73[VAR_54])
VAR_60.write_text.assert_called_once_with(VAR_73[VAR_55])
VAR_67.mkdir.assert_called_once()
VAR_67.__truediv__.assert_has_calls([VAR_0.call(key) for key in VAR_65])
VAR_61.write_text.assert_called_once_with('""" Contains async methods for accessing the API """')
VAR_72.render.assert_has_calls(
[VAR_0.call(collection=VAR_54), VAR_0.call(collection=VAR_55)]
)
VAR_62.write_text.assert_called_once_with(VAR_74[VAR_54])
VAR_63.write_text.assert_called_once_with(VAR_74[VAR_55])
def FUNC_7(VAR_0):
import subprocess
from openapi_python_client import GeneratorData, Project
VAR_12 = VAR_0.patch("subprocess.run")
VAR_3 = VAR_0.MagicMock(autospec=GeneratorData, title="My Test API")
VAR_8 = Project(VAR_3=VAR_3)
VAR_8.project_dir = VAR_0.MagicMock(autospec=pathlib.Path)
VAR_8._reformat()
VAR_12.assert_has_calls(
[
VAR_0.call(
"isort .", cwd=VAR_8.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
),
VAR_0.call("black .", cwd=VAR_8.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE),
]
)
def FUNC_8(VAR_0):
from openapi_python_client import GeneratorData, Project
from openapi_python_client.parser.openapi import EndpointCollection, Schemas
VAR_3 = VAR_0.MagicMock(
autospec=GeneratorData,
title="My Test API",
endpoint_collections_by_tag={
"default": VAR_0.MagicMock(autospec=EndpointCollection, parse_errors=[1]),
"other": VAR_0.MagicMock(autospec=EndpointCollection, parse_errors=[2]),
},
schemas=VAR_0.MagicMock(autospec=Schemas, errors=[3]),
)
VAR_8 = Project(VAR_3=openapi)
assert VAR_8._get_errors() == [1, 2, 3]
| [
2,
7,
9,
10,
19,
21,
23,
28,
29,
38,
40,
42,
47,
48,
52,
56,
58,
60,
64,
65,
73,
75,
77,
81,
82,
90,
92,
94,
97,
98,
106,
108,
110,
114,
115,
123,
125,
127,
130,
131,
137,
139,
141,
146,
151,
153,
155,
160,
165,
167,
170,
175,
180,
182,
185,
189,
193,
195,
198,
202,
206,
208,
211,
216,
217,
221,
223,
225,
230,
233,
235,
238,
241,
244,
247,
250,
260,
262,
271,
274,
279,
281,
283,
286,
296,
298,
306,
309,
314,
317,
320,
323,
335,
337,
339,
345,
348,
360,
371,
373,
377,
393,
396,
421,
424,
458,
460,
480,
483,
485,
487,
518,
547,
549,
572,
573,
576,
578,
583,
585,
594,
595,
599,
610,
612
] | [
2,
7,
9,
10,
19,
21,
23,
28,
29,
38,
40,
42,
47,
48,
52,
56,
58,
60,
64,
65,
73,
75,
77,
81,
82,
90,
92,
94,
97,
98,
106,
108,
110,
114,
115,
123,
125,
127,
130,
131,
137,
139,
141,
146,
151,
153,
155,
160,
165,
167,
170,
175,
180,
182,
185,
189,
193,
195,
198,
202,
206,
208,
211,
216,
217,
221,
223,
225,
230,
233,
235,
238,
241,
244,
247,
250,
260,
262,
271,
274,
279,
281,
283,
286,
296,
298,
306,
309,
314,
317,
320,
323,
335,
337,
339,
345,
348,
360,
371,
373,
377,
393,
396,
421,
424,
458,
460,
480,
483,
485,
487,
518,
547,
549,
572,
573,
576,
578,
583,
585,
594,
595,
599,
610,
612
] |
2CWE-601
| from typing import Any, Callable, List, Optional
from urllib.parse import urlparse
from django.conf import settings
from django.http import HttpResponse
from django.urls import URLPattern, include, path, re_path
from django.views.decorators import csrf
from django.views.decorators.csrf import csrf_exempt
from drf_spectacular.views import SpectacularAPIView, SpectacularRedocView, SpectacularSwaggerView
from posthog.api import (
api_not_found,
authentication,
capture,
dashboard,
decide,
organizations_router,
project_dashboards_router,
projects_router,
router,
signup,
user,
)
from posthog.demo import demo
from .utils import render_template
from .views import health, login_required, preflight_check, robots_txt, security_txt, sso_login, stats
ee_urlpatterns: List[Any] = []
try:
from ee.urls import extend_api_router
from ee.urls import urlpatterns as ee_urlpatterns
except ImportError:
pass
else:
extend_api_router(router, projects_router=projects_router, project_dashboards_router=project_dashboards_router)
try:
# See https://github.com/PostHog/posthog-cloud/blob/master/multi_tenancy/router.py
from multi_tenancy.router import extend_api_router as extend_api_router_cloud # noqa
except ImportError:
pass
else:
extend_api_router_cloud(router, organizations_router=organizations_router, projects_router=projects_router)
@csrf.ensure_csrf_cookie
def home(request, *args, **kwargs):
return render_template("index.html", request)
def authorize_and_redirect(request):
if not request.GET.get("redirect"):
return HttpResponse("You need to pass a url to ?redirect=", status=401)
if not request.META.get("HTTP_REFERER"):
return HttpResponse('You need to make a request that includes the "Referer" header.', status=400)
referer_url = urlparse(request.META["HTTP_REFERER"])
redirect_url = urlparse(request.GET["redirect"])
if referer_url.hostname != redirect_url.hostname:
return HttpResponse(f"Can only redirect to the same domain as the referer: {referer_url.hostname}", status=400)
if referer_url.scheme != redirect_url.scheme:
return HttpResponse(f"Can only redirect to the same scheme as the referer: {referer_url.scheme}", status=400)
if referer_url.port != redirect_url.port:
return HttpResponse(
f"Can only redirect to the same port as the referer: {referer_url.port or 'no port in URL'}", status=400
)
return render_template(
"authorize_and_redirect.html",
request=request,
context={"domain": redirect_url.hostname, "redirect_url": request.GET["redirect"]},
)
def opt_slash_path(route: str, view: Callable, name: Optional[str] = None) -> URLPattern:
"""Catches path with or without trailing slash, taking into account query param and hash."""
# Ignoring the type because while name can be optional on re_path, mypy doesn't agree
return re_path(fr"^{route}/?(?:[?#].*)?$", view, name=name) # type: ignore
urlpatterns = [
path("api/schema/", SpectacularAPIView.as_view(), name="schema"),
# Optional UI:
path("api/schema/swagger-ui/", SpectacularSwaggerView.as_view(url_name="schema"), name="swagger-ui"),
path("api/schema/redoc/", SpectacularRedocView.as_view(url_name="schema"), name="redoc"),
# Health check probe endpoints for K8s
# NOTE: We have _health, livez, and _readyz. _health is deprecated and
# is only included for compatability with old installations. For new
# operations livez and readyz should be used.
opt_slash_path("_health", health),
opt_slash_path("_stats", stats),
opt_slash_path("_preflight", preflight_check),
# ee
*ee_urlpatterns,
# api
path("api/", include(router.urls)),
opt_slash_path("api/user/redirect_to_site", user.redirect_to_site),
opt_slash_path("api/user/test_slack_webhook", user.test_slack_webhook),
opt_slash_path("api/signup", signup.SignupViewset.as_view()),
opt_slash_path("api/social_signup", signup.SocialSignupViewset.as_view()),
path("api/signup/<str:invite_id>/", signup.InviteSignupViewset.as_view()),
path(
"api/reset/<str:user_uuid>/",
authentication.PasswordResetCompleteViewSet.as_view({"get": "retrieve", "post": "create"}),
),
re_path(r"^api.+", api_not_found),
path("authorize_and_redirect/", login_required(authorize_and_redirect)),
path("shared_dashboard/<str:share_token>", dashboard.shared_dashboard),
re_path(r"^demo.*", login_required(demo)),
# ingestion
opt_slash_path("decide", decide.get_decide),
opt_slash_path("e", capture.get_event),
opt_slash_path("engage", capture.get_event),
opt_slash_path("track", capture.get_event),
opt_slash_path("capture", capture.get_event),
opt_slash_path("batch", capture.get_event),
opt_slash_path("s", capture.get_event), # session recordings
opt_slash_path("robots.txt", robots_txt),
opt_slash_path(".well-known/security.txt", security_txt),
# auth
path("logout", authentication.logout, name="login"),
path("signup/finish/", signup.finish_social_signup, name="signup_finish"),
path(
"login/<str:backend>/", sso_login, name="social_begin"
), # overrides from `social_django.urls` to validate proper license
path("", include("social_django.urls", namespace="social")),
]
if settings.TEST:
# Used in posthog-js e2e tests
@csrf_exempt
def delete_events(request):
from ee.clickhouse.sql.events import TRUNCATE_EVENTS_TABLE_SQL
from posthog.client import sync_execute
sync_execute(TRUNCATE_EVENTS_TABLE_SQL())
return HttpResponse()
urlpatterns.append(path("delete_events/", delete_events))
# Routes added individually to remove login requirement
frontend_unauthenticated_routes = [
"preflight",
"signup",
r"signup\/[A-Za-z0-9\-]*",
"reset",
"organization/billing/subscribed",
"login",
]
for route in frontend_unauthenticated_routes:
urlpatterns.append(re_path(route, home))
urlpatterns.append(re_path(r"^.*", login_required(home)))
| from typing import Any, Callable, List, Optional, cast
from urllib.parse import urlparse
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from django.urls import URLPattern, include, path, re_path
from django.views.decorators import csrf
from django.views.decorators.csrf import csrf_exempt
from drf_spectacular.views import SpectacularAPIView, SpectacularRedocView, SpectacularSwaggerView
from posthog.api import (
api_not_found,
authentication,
capture,
dashboard,
decide,
organizations_router,
project_dashboards_router,
projects_router,
router,
signup,
user,
)
from posthog.api.decide import hostname_in_app_urls
from posthog.demo import demo
from posthog.models import User
from .utils import render_template
from .views import health, login_required, preflight_check, robots_txt, security_txt, sso_login, stats
ee_urlpatterns: List[Any] = []
try:
from ee.urls import extend_api_router
from ee.urls import urlpatterns as ee_urlpatterns
except ImportError:
pass
else:
extend_api_router(router, projects_router=projects_router, project_dashboards_router=project_dashboards_router)
try:
# See https://github.com/PostHog/posthog-cloud/blob/master/multi_tenancy/router.py
from multi_tenancy.router import extend_api_router as extend_api_router_cloud # noqa
except ImportError:
pass
else:
extend_api_router_cloud(router, organizations_router=organizations_router, projects_router=projects_router)
@csrf.ensure_csrf_cookie
def home(request, *args, **kwargs):
return render_template("index.html", request)
def authorize_and_redirect(request: HttpRequest) -> HttpResponse:
if not request.GET.get("redirect"):
return HttpResponse("You need to pass a url to ?redirect=", status=401)
if not request.META.get("HTTP_REFERER"):
return HttpResponse('You need to make a request that includes the "Referer" header.', status=400)
current_team = cast(User, request.user).team
referer_url = urlparse(request.META["HTTP_REFERER"])
redirect_url = urlparse(request.GET["redirect"])
if not current_team or not hostname_in_app_urls(current_team, redirect_url.hostname):
return HttpResponse(f"Can only redirect to a permitted domain.", status=400)
if referer_url.hostname != redirect_url.hostname:
return HttpResponse(f"Can only redirect to the same domain as the referer: {referer_url.hostname}", status=400)
if referer_url.scheme != redirect_url.scheme:
return HttpResponse(f"Can only redirect to the same scheme as the referer: {referer_url.scheme}", status=400)
if referer_url.port != redirect_url.port:
return HttpResponse(
f"Can only redirect to the same port as the referer: {referer_url.port or 'no port in URL'}", status=400
)
return render_template(
"authorize_and_redirect.html",
request=request,
context={"domain": redirect_url.hostname, "redirect_url": request.GET["redirect"]},
)
def opt_slash_path(route: str, view: Callable, name: Optional[str] = None) -> URLPattern:
"""Catches path with or without trailing slash, taking into account query param and hash."""
# Ignoring the type because while name can be optional on re_path, mypy doesn't agree
return re_path(fr"^{route}/?(?:[?#].*)?$", view, name=name) # type: ignore
urlpatterns = [
path("api/schema/", SpectacularAPIView.as_view(), name="schema"),
# Optional UI:
path("api/schema/swagger-ui/", SpectacularSwaggerView.as_view(url_name="schema"), name="swagger-ui"),
path("api/schema/redoc/", SpectacularRedocView.as_view(url_name="schema"), name="redoc"),
# Health check probe endpoints for K8s
# NOTE: We have _health, livez, and _readyz. _health is deprecated and
# is only included for compatability with old installations. For new
# operations livez and readyz should be used.
opt_slash_path("_health", health),
opt_slash_path("_stats", stats),
opt_slash_path("_preflight", preflight_check),
# ee
*ee_urlpatterns,
# api
path("api/", include(router.urls)),
opt_slash_path("api/user/redirect_to_site", user.redirect_to_site),
opt_slash_path("api/user/test_slack_webhook", user.test_slack_webhook),
opt_slash_path("api/signup", signup.SignupViewset.as_view()),
opt_slash_path("api/social_signup", signup.SocialSignupViewset.as_view()),
path("api/signup/<str:invite_id>/", signup.InviteSignupViewset.as_view()),
path(
"api/reset/<str:user_uuid>/",
authentication.PasswordResetCompleteViewSet.as_view({"get": "retrieve", "post": "create"}),
),
re_path(r"^api.+", api_not_found),
path("authorize_and_redirect/", login_required(authorize_and_redirect)),
path("shared_dashboard/<str:share_token>", dashboard.shared_dashboard),
re_path(r"^demo.*", login_required(demo)),
# ingestion
opt_slash_path("decide", decide.get_decide),
opt_slash_path("e", capture.get_event),
opt_slash_path("engage", capture.get_event),
opt_slash_path("track", capture.get_event),
opt_slash_path("capture", capture.get_event),
opt_slash_path("batch", capture.get_event),
opt_slash_path("s", capture.get_event), # session recordings
opt_slash_path("robots.txt", robots_txt),
opt_slash_path(".well-known/security.txt", security_txt),
# auth
path("logout", authentication.logout, name="login"),
path("signup/finish/", signup.finish_social_signup, name="signup_finish"),
path(
"login/<str:backend>/", sso_login, name="social_begin"
), # overrides from `social_django.urls` to validate proper license
path("", include("social_django.urls", namespace="social")),
]
if settings.TEST:
# Used in posthog-js e2e tests
@csrf_exempt
def delete_events(request):
from ee.clickhouse.sql.events import TRUNCATE_EVENTS_TABLE_SQL
from posthog.client import sync_execute
sync_execute(TRUNCATE_EVENTS_TABLE_SQL())
return HttpResponse()
urlpatterns.append(path("delete_events/", delete_events))
# Routes added individually to remove login requirement
frontend_unauthenticated_routes = [
"preflight",
"signup",
r"signup\/[A-Za-z0-9\-]*",
"reset",
"organization/billing/subscribed",
"login",
]
for route in frontend_unauthenticated_routes:
urlpatterns.append(re_path(route, home))
urlpatterns.append(re_path(r"^.*", login_required(home)))
| open_redirect | {
"code": [
"from typing import Any, Callable, List, Optional",
"from django.http import HttpResponse",
"def authorize_and_redirect(request):"
],
"line_no": [
1,
5,
53
]
} | {
"code": [
"from typing import Any, Callable, List, Optional, cast",
"from django.http import HttpRequest, HttpResponse",
"from posthog.api.decide import hostname_in_app_urls",
"from posthog.models import User",
"def authorize_and_redirect(request: HttpRequest) -> HttpResponse:",
" if not current_team or not hostname_in_app_urls(current_team, redirect_url.hostname):",
" return HttpResponse(f\"Can only redirect to a permitted domain.\", status=400)"
],
"line_no": [
1,
5,
24,
26,
55,
65,
66
]
} | from typing import Any, Callable, List, Optional
from urllib.parse import urlparse
from django.conf import settings
from django.http import HttpResponse
from django.urls import URLPattern, include, path, re_path
from django.views.decorators import csrf
from django.views.decorators.csrf import csrf_exempt
from drf_spectacular.views import SpectacularAPIView, SpectacularRedocView, SpectacularSwaggerView
from posthog.api import (
api_not_found,
authentication,
capture,
dashboard,
decide,
organizations_router,
project_dashboards_router,
projects_router,
router,
signup,
user,
)
from posthog.demo import demo
from .utils import render_template
from .views import health, login_required, preflight_check, robots_txt, security_txt, sso_login, stats
ee_urlpatterns: List[Any] = []
try:
from ee.urls import extend_api_router
from ee.urls import .urlpatterns as ee_urlpatterns
except ImportError:
pass
else:
extend_api_router(router, projects_router=projects_router, project_dashboards_router=project_dashboards_router)
try:
from multi_tenancy.router import extend_api_router as extend_api_router_cloud # noqa
except ImportError:
pass
else:
extend_api_router_cloud(router, organizations_router=organizations_router, projects_router=projects_router)
@csrf.ensure_csrf_cookie
def FUNC_0(VAR_0, *VAR_1, **VAR_2):
return render_template("index.html", VAR_0)
def FUNC_1(VAR_0):
if not VAR_0.GET.get("redirect"):
return HttpResponse("You need to pass a url to ?redirect=", status=401)
if not VAR_0.META.get("HTTP_REFERER"):
return HttpResponse('You need to make a VAR_0 that includes the "Referer" header.', status=400)
VAR_8 = urlparse(VAR_0.META["HTTP_REFERER"])
VAR_9 = urlparse(VAR_0.GET["redirect"])
if VAR_8.hostname != VAR_9.hostname:
return HttpResponse(f"Can only redirect to the same domain as the referer: {VAR_8.hostname}", status=400)
if VAR_8.scheme != VAR_9.scheme:
return HttpResponse(f"Can only redirect to the same scheme as the referer: {VAR_8.scheme}", status=400)
if VAR_8.port != VAR_9.port:
return HttpResponse(
f"Can only redirect to the same port as the referer: {VAR_8.port or 'no port in URL'}", status=400
)
return render_template(
"authorize_and_redirect.html",
VAR_0=request,
context={"domain": VAR_9.hostname, "redirect_url": VAR_0.GET["redirect"]},
)
def FUNC_2(VAR_3: str, VAR_4: Callable, VAR_5: Optional[str] = None) -> URLPattern:
return re_path(fr"^{VAR_3}/?(?:[?#].*)?$", VAR_4, VAR_5=name) # type: ignore
VAR_6 = [
path("api/schema/", SpectacularAPIView.as_view(), VAR_5="schema"),
path("api/schema/swagger-ui/", SpectacularSwaggerView.as_view(url_name="schema"), VAR_5="swagger-ui"),
path("api/schema/redoc/", SpectacularRedocView.as_view(url_name="schema"), VAR_5="redoc"),
FUNC_2("_health", health),
FUNC_2("_stats", stats),
FUNC_2("_preflight", preflight_check),
*ee_urlpatterns,
path("api/", include(router.urls)),
FUNC_2("api/user/redirect_to_site", user.redirect_to_site),
FUNC_2("api/user/test_slack_webhook", user.test_slack_webhook),
FUNC_2("api/signup", signup.SignupViewset.as_view()),
FUNC_2("api/social_signup", signup.SocialSignupViewset.as_view()),
path("api/signup/<str:invite_id>/", signup.InviteSignupViewset.as_view()),
path(
"api/reset/<str:user_uuid>/",
authentication.PasswordResetCompleteViewSet.as_view({"get": "retrieve", "post": "create"}),
),
re_path(r"^api.+", api_not_found),
path("authorize_and_redirect/", login_required(FUNC_1)),
path("shared_dashboard/<str:share_token>", dashboard.shared_dashboard),
re_path(r"^demo.*", login_required(demo)),
FUNC_2("decide", decide.get_decide),
FUNC_2("e", capture.get_event),
FUNC_2("engage", capture.get_event),
FUNC_2("track", capture.get_event),
FUNC_2("capture", capture.get_event),
FUNC_2("batch", capture.get_event),
FUNC_2("s", capture.get_event), # session recordings
FUNC_2("robots.txt", robots_txt),
FUNC_2(".well-known/security.txt", security_txt),
path("logout", authentication.logout, VAR_5="login"),
path("signup/finish/", signup.finish_social_signup, VAR_5="signup_finish"),
path(
"login/<str:backend>/", sso_login, VAR_5="social_begin"
), # overrides from `social_django.urls` to validate proper license
path("", include("social_django.urls", namespace="social")),
]
if settings.TEST:
@csrf_exempt
def FUNC_3(VAR_0):
from ee.clickhouse.sql.events import TRUNCATE_EVENTS_TABLE_SQL
from posthog.client import sync_execute
sync_execute(TRUNCATE_EVENTS_TABLE_SQL())
return HttpResponse()
VAR_6.append(path("delete_events/", FUNC_3))
VAR_7 = [
"preflight",
"signup",
r"signup\/[A-Za-z0-9\-]*",
"reset",
"organization/billing/subscribed",
"login",
]
for VAR_3 in VAR_7:
VAR_6.append(re_path(VAR_3, FUNC_0))
VAR_6.append(re_path(r"^.*", login_required(FUNC_0)))
| from typing import Any, Callable, List, Optional, cast
from urllib.parse import urlparse
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from django.urls import URLPattern, include, path, re_path
from django.views.decorators import csrf
from django.views.decorators.csrf import csrf_exempt
from drf_spectacular.views import SpectacularAPIView, SpectacularRedocView, SpectacularSwaggerView
from posthog.api import (
api_not_found,
authentication,
capture,
dashboard,
decide,
organizations_router,
project_dashboards_router,
projects_router,
router,
signup,
user,
)
from posthog.api.decide import hostname_in_app_urls
from posthog.demo import demo
from posthog.models import User
from .utils import render_template
from .views import health, login_required, preflight_check, robots_txt, security_txt, sso_login, stats
ee_urlpatterns: List[Any] = []
try:
from ee.urls import extend_api_router
from ee.urls import .urlpatterns as ee_urlpatterns
except ImportError:
pass
else:
extend_api_router(router, projects_router=projects_router, project_dashboards_router=project_dashboards_router)
try:
from multi_tenancy.router import extend_api_router as extend_api_router_cloud # noqa
except ImportError:
pass
else:
extend_api_router_cloud(router, organizations_router=organizations_router, projects_router=projects_router)
@csrf.ensure_csrf_cookie
def FUNC_0(VAR_0, *VAR_1, **VAR_2):
return render_template("index.html", VAR_0)
def FUNC_1(VAR_0: HttpRequest) -> HttpResponse:
if not VAR_0.GET.get("redirect"):
return HttpResponse("You need to pass a url to ?redirect=", status=401)
if not VAR_0.META.get("HTTP_REFERER"):
return HttpResponse('You need to make a VAR_0 that includes the "Referer" header.', status=400)
VAR_8 = cast(User, VAR_0.user).team
VAR_9 = urlparse(VAR_0.META["HTTP_REFERER"])
VAR_10 = urlparse(VAR_0.GET["redirect"])
if not VAR_8 or not hostname_in_app_urls(VAR_8, VAR_10.hostname):
return HttpResponse(f"Can only redirect to a permitted domain.", status=400)
if VAR_9.hostname != VAR_10.hostname:
return HttpResponse(f"Can only redirect to the same domain as the referer: {VAR_9.hostname}", status=400)
if VAR_9.scheme != VAR_10.scheme:
return HttpResponse(f"Can only redirect to the same scheme as the referer: {VAR_9.scheme}", status=400)
if VAR_9.port != VAR_10.port:
return HttpResponse(
f"Can only redirect to the same port as the referer: {VAR_9.port or 'no port in URL'}", status=400
)
return render_template(
"authorize_and_redirect.html",
VAR_0=request,
context={"domain": VAR_10.hostname, "redirect_url": VAR_0.GET["redirect"]},
)
def FUNC_2(VAR_3: str, VAR_4: Callable, VAR_5: Optional[str] = None) -> URLPattern:
return re_path(fr"^{VAR_3}/?(?:[?#].*)?$", VAR_4, VAR_5=name) # type: ignore
VAR_6 = [
path("api/schema/", SpectacularAPIView.as_view(), VAR_5="schema"),
path("api/schema/swagger-ui/", SpectacularSwaggerView.as_view(url_name="schema"), VAR_5="swagger-ui"),
path("api/schema/redoc/", SpectacularRedocView.as_view(url_name="schema"), VAR_5="redoc"),
FUNC_2("_health", health),
FUNC_2("_stats", stats),
FUNC_2("_preflight", preflight_check),
*ee_urlpatterns,
path("api/", include(router.urls)),
FUNC_2("api/user/redirect_to_site", user.redirect_to_site),
FUNC_2("api/user/test_slack_webhook", user.test_slack_webhook),
FUNC_2("api/signup", signup.SignupViewset.as_view()),
FUNC_2("api/social_signup", signup.SocialSignupViewset.as_view()),
path("api/signup/<str:invite_id>/", signup.InviteSignupViewset.as_view()),
path(
"api/reset/<str:user_uuid>/",
authentication.PasswordResetCompleteViewSet.as_view({"get": "retrieve", "post": "create"}),
),
re_path(r"^api.+", api_not_found),
path("authorize_and_redirect/", login_required(FUNC_1)),
path("shared_dashboard/<str:share_token>", dashboard.shared_dashboard),
re_path(r"^demo.*", login_required(demo)),
FUNC_2("decide", decide.get_decide),
FUNC_2("e", capture.get_event),
FUNC_2("engage", capture.get_event),
FUNC_2("track", capture.get_event),
FUNC_2("capture", capture.get_event),
FUNC_2("batch", capture.get_event),
FUNC_2("s", capture.get_event), # session recordings
FUNC_2("robots.txt", robots_txt),
FUNC_2(".well-known/security.txt", security_txt),
path("logout", authentication.logout, VAR_5="login"),
path("signup/finish/", signup.finish_social_signup, VAR_5="signup_finish"),
path(
"login/<str:backend>/", sso_login, VAR_5="social_begin"
), # overrides from `social_django.urls` to validate proper license
path("", include("social_django.urls", namespace="social")),
]
if settings.TEST:
@csrf_exempt
def FUNC_3(VAR_0):
from ee.clickhouse.sql.events import TRUNCATE_EVENTS_TABLE_SQL
from posthog.client import sync_execute
sync_execute(TRUNCATE_EVENTS_TABLE_SQL())
return HttpResponse()
VAR_6.append(path("delete_events/", FUNC_3))
VAR_7 = [
"preflight",
"signup",
r"signup\/[A-Za-z0-9\-]*",
"reset",
"organization/billing/subscribed",
"login",
]
for VAR_3 in VAR_7:
VAR_6.append(re_path(VAR_3, FUNC_0))
VAR_6.append(re_path(r"^.*", login_required(FUNC_0)))
| [
3,
10,
25,
28,
37,
38,
40,
46,
47,
51,
52,
58,
61,
64,
67,
72,
78,
79,
82,
84,
85,
88,
91,
92,
93,
94,
98,
100,
115,
125,
133,
135,
136,
141,
144,
146,
147,
148,
159,
161,
81
] | [
3,
10,
27,
30,
39,
40,
42,
48,
49,
53,
54,
60,
64,
67,
70,
73,
78,
84,
85,
88,
90,
91,
94,
97,
98,
99,
100,
104,
106,
121,
131,
139,
141,
142,
147,
150,
152,
153,
154,
165,
167,
87
] |
1CWE-79
| from __future__ import absolute_import, division, unicode_literals
from six import text_type
import re
from ..constants import voidElements, booleanAttributes, spaceCharacters
from ..constants import rcdataElements, entities, xmlEntities
from .. import utils
from xml.sax.saxutils import escape
spaceCharacters = "".join(spaceCharacters)
quoteAttributeSpec = re.compile("[" + spaceCharacters + "\"'=<>`]")
try:
from codecs import register_error, xmlcharrefreplace_errors
except ImportError:
unicode_encode_errors = "strict"
else:
unicode_encode_errors = "htmlentityreplace"
encode_entity_map = {}
is_ucs4 = len("\U0010FFFF") == 1
for k, v in list(entities.items()):
# skip multi-character entities
if ((is_ucs4 and len(v) > 1) or
(not is_ucs4 and len(v) > 2)):
continue
if v != "&":
if len(v) == 2:
v = utils.surrogatePairToCodepoint(v)
else:
v = ord(v)
if v not in encode_entity_map or k.islower():
# prefer < over < and similarly for &, >, etc.
encode_entity_map[v] = k
def htmlentityreplace_errors(exc):
if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
res = []
codepoints = []
skip = False
for i, c in enumerate(exc.object[exc.start:exc.end]):
if skip:
skip = False
continue
index = i + exc.start
if utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]):
codepoint = utils.surrogatePairToCodepoint(exc.object[index:index + 2])
skip = True
else:
codepoint = ord(c)
codepoints.append(codepoint)
for cp in codepoints:
e = encode_entity_map.get(cp)
if e:
res.append("&")
res.append(e)
if not e.endswith(";"):
res.append(";")
else:
res.append("&#x%s;" % (hex(cp)[2:]))
return ("".join(res), exc.end)
else:
return xmlcharrefreplace_errors(exc)
register_error(unicode_encode_errors, htmlentityreplace_errors)
del register_error
class HTMLSerializer(object):
# attribute quoting options
quote_attr_values = False
quote_char = '"'
use_best_quote_char = True
# tag syntax options
omit_optional_tags = True
minimize_boolean_attributes = True
use_trailing_solidus = False
space_before_trailing_solidus = True
# escaping options
escape_lt_in_attrs = False
escape_rcdata = False
resolve_entities = True
# miscellaneous options
alphabetical_attributes = False
inject_meta_charset = True
strip_whitespace = False
sanitize = False
options = ("quote_attr_values", "quote_char", "use_best_quote_char",
"omit_optional_tags", "minimize_boolean_attributes",
"use_trailing_solidus", "space_before_trailing_solidus",
"escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
"alphabetical_attributes", "inject_meta_charset",
"strip_whitespace", "sanitize")
def __init__(self, **kwargs):
"""Initialize HTMLSerializer.
Keyword options (default given first unless specified) include:
inject_meta_charset=True|False
Whether it insert a meta element to define the character set of the
document.
quote_attr_values=True|False
Whether to quote attribute values that don't require quoting
per HTML5 parsing rules.
quote_char=u'"'|u"'"
Use given quote character for attribute quoting. Default is to
use double quote unless attribute value contains a double quote,
in which case single quotes are used instead.
escape_lt_in_attrs=False|True
Whether to escape < in attribute values.
escape_rcdata=False|True
Whether to escape characters that need to be escaped within normal
elements within rcdata elements such as style.
resolve_entities=True|False
Whether to resolve named character entities that appear in the
source tree. The XML predefined entities < > & " '
are unaffected by this setting.
strip_whitespace=False|True
Whether to remove semantically meaningless whitespace. (This
compresses all whitespace to a single space except within pre.)
minimize_boolean_attributes=True|False
Shortens boolean attributes to give just the attribute value,
for example <input disabled="disabled"> becomes <input disabled>.
use_trailing_solidus=False|True
Includes a close-tag slash at the end of the start tag of void
elements (empty elements whose end tag is forbidden). E.g. <hr/>.
space_before_trailing_solidus=True|False
Places a space immediately before the closing slash in a tag
using a trailing solidus. E.g. <hr />. Requires use_trailing_solidus.
sanitize=False|True
Strip all unsafe or unknown constructs from output.
See `html5lib user documentation`_
omit_optional_tags=True|False
Omit start/end tags that are optional.
alphabetical_attributes=False|True
Reorder attributes to be in alphabetical order.
.. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation
"""
if 'quote_char' in kwargs:
self.use_best_quote_char = False
for attr in self.options:
setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
self.errors = []
self.strict = False
def encode(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, unicode_encode_errors)
else:
return string
def encodeStrict(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, "strict")
else:
return string
def serialize(self, treewalker, encoding=None):
self.encoding = encoding
in_cdata = False
self.errors = []
if encoding and self.inject_meta_charset:
from ..filters.inject_meta_charset import Filter
treewalker = Filter(treewalker, encoding)
# WhitespaceFilter should be used before OptionalTagFilter
# for maximum efficiently of this latter filter
if self.strip_whitespace:
from ..filters.whitespace import Filter
treewalker = Filter(treewalker)
if self.sanitize:
from ..filters.sanitizer import Filter
treewalker = Filter(treewalker)
if self.omit_optional_tags:
from ..filters.optionaltags import Filter
treewalker = Filter(treewalker)
# Alphabetical attributes must be last, as other filters
# could add attributes and alter the order
if self.alphabetical_attributes:
from ..filters.alphabeticalattributes import Filter
treewalker = Filter(treewalker)
for token in treewalker:
type = token["type"]
if type == "Doctype":
doctype = "<!DOCTYPE %s" % token["name"]
if token["publicId"]:
doctype += ' PUBLIC "%s"' % token["publicId"]
elif token["systemId"]:
doctype += " SYSTEM"
if token["systemId"]:
if token["systemId"].find('"') >= 0:
if token["systemId"].find("'") >= 0:
self.serializeError("System identifer contains both single and double quote characters")
quote_char = "'"
else:
quote_char = '"'
doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)
doctype += ">"
yield self.encodeStrict(doctype)
elif type in ("Characters", "SpaceCharacters"):
if type == "SpaceCharacters" or in_cdata:
if in_cdata and token["data"].find("</") >= 0:
self.serializeError("Unexpected </ in CDATA")
yield self.encode(token["data"])
else:
yield self.encode(escape(token["data"]))
elif type in ("StartTag", "EmptyTag"):
name = token["name"]
yield self.encodeStrict("<%s" % name)
if name in rcdataElements and not self.escape_rcdata:
in_cdata = True
elif in_cdata:
self.serializeError("Unexpected child element of a CDATA element")
for (attr_namespace, attr_name), attr_value in token["data"].items():
# TODO: Add namespace support here
k = attr_name
v = attr_value
yield self.encodeStrict(' ')
yield self.encodeStrict(k)
if not self.minimize_boolean_attributes or \
(k not in booleanAttributes.get(name, tuple()) and
k not in booleanAttributes.get("", tuple())):
yield self.encodeStrict("=")
if self.quote_attr_values:
quote_attr = True
else:
quote_attr = len(v) == 0 or quoteAttributeSpec.search(v)
v = v.replace("&", "&")
if self.escape_lt_in_attrs:
v = v.replace("<", "<")
if quote_attr:
quote_char = self.quote_char
if self.use_best_quote_char:
if "'" in v and '"' not in v:
quote_char = '"'
elif '"' in v and "'" not in v:
quote_char = "'"
if quote_char == "'":
v = v.replace("'", "'")
else:
v = v.replace('"', """)
yield self.encodeStrict(quote_char)
yield self.encode(v)
yield self.encodeStrict(quote_char)
else:
yield self.encode(v)
if name in voidElements and self.use_trailing_solidus:
if self.space_before_trailing_solidus:
yield self.encodeStrict(" /")
else:
yield self.encodeStrict("/")
yield self.encode(">")
elif type == "EndTag":
name = token["name"]
if name in rcdataElements:
in_cdata = False
elif in_cdata:
self.serializeError("Unexpected child element of a CDATA element")
yield self.encodeStrict("</%s>" % name)
elif type == "Comment":
data = token["data"]
if data.find("--") >= 0:
self.serializeError("Comment contains --")
yield self.encodeStrict("<!--%s-->" % token["data"])
elif type == "Entity":
name = token["name"]
key = name + ";"
if key not in entities:
self.serializeError("Entity %s not recognized" % name)
if self.resolve_entities and key not in xmlEntities:
data = entities[key]
else:
data = "&%s;" % name
yield self.encodeStrict(data)
else:
self.serializeError(token["data"])
def render(self, treewalker, encoding=None):
if encoding:
return b"".join(list(self.serialize(treewalker, encoding)))
else:
return "".join(list(self.serialize(treewalker)))
def serializeError(self, data="XXX ERROR MESSAGE NEEDED"):
# XXX The idea is to make data mandatory.
self.errors.append(data)
if self.strict:
raise SerializeError
def SerializeError(Exception):
"""Error in serialized tree"""
pass
| from __future__ import absolute_import, division, unicode_literals
from six import text_type
import re
from ..constants import voidElements, booleanAttributes, spaceCharacters
from ..constants import rcdataElements, entities, xmlEntities
from .. import utils
from xml.sax.saxutils import escape
spaceCharacters = "".join(spaceCharacters)
quoteAttributeSpecChars = spaceCharacters + "\"'=<>`"
quoteAttributeSpec = re.compile("[" + quoteAttributeSpecChars + "]")
quoteAttributeLegacy = re.compile("[" + quoteAttributeSpecChars +
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n"
"\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15"
"\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
"\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000"
"\u2001\u2002\u2003\u2004\u2005\u2006\u2007"
"\u2008\u2009\u200a\u2028\u2029\u202f\u205f"
"\u3000]")
try:
from codecs import register_error, xmlcharrefreplace_errors
except ImportError:
unicode_encode_errors = "strict"
else:
unicode_encode_errors = "htmlentityreplace"
encode_entity_map = {}
is_ucs4 = len("\U0010FFFF") == 1
for k, v in list(entities.items()):
# skip multi-character entities
if ((is_ucs4 and len(v) > 1) or
(not is_ucs4 and len(v) > 2)):
continue
if v != "&":
if len(v) == 2:
v = utils.surrogatePairToCodepoint(v)
else:
v = ord(v)
if v not in encode_entity_map or k.islower():
# prefer < over < and similarly for &, >, etc.
encode_entity_map[v] = k
def htmlentityreplace_errors(exc):
if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
res = []
codepoints = []
skip = False
for i, c in enumerate(exc.object[exc.start:exc.end]):
if skip:
skip = False
continue
index = i + exc.start
if utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]):
codepoint = utils.surrogatePairToCodepoint(exc.object[index:index + 2])
skip = True
else:
codepoint = ord(c)
codepoints.append(codepoint)
for cp in codepoints:
e = encode_entity_map.get(cp)
if e:
res.append("&")
res.append(e)
if not e.endswith(";"):
res.append(";")
else:
res.append("&#x%s;" % (hex(cp)[2:]))
return ("".join(res), exc.end)
else:
return xmlcharrefreplace_errors(exc)
register_error(unicode_encode_errors, htmlentityreplace_errors)
del register_error
class HTMLSerializer(object):
# attribute quoting options
quote_attr_values = "legacy" # be secure by default
quote_char = '"'
use_best_quote_char = True
# tag syntax options
omit_optional_tags = True
minimize_boolean_attributes = True
use_trailing_solidus = False
space_before_trailing_solidus = True
# escaping options
escape_lt_in_attrs = False
escape_rcdata = False
resolve_entities = True
# miscellaneous options
alphabetical_attributes = False
inject_meta_charset = True
strip_whitespace = False
sanitize = False
options = ("quote_attr_values", "quote_char", "use_best_quote_char",
"omit_optional_tags", "minimize_boolean_attributes",
"use_trailing_solidus", "space_before_trailing_solidus",
"escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
"alphabetical_attributes", "inject_meta_charset",
"strip_whitespace", "sanitize")
def __init__(self, **kwargs):
"""Initialize HTMLSerializer.
Keyword options (default given first unless specified) include:
inject_meta_charset=True|False
Whether it insert a meta element to define the character set of the
document.
quote_attr_values="legacy"|"spec"|"always"
Whether to quote attribute values that don't require quoting
per legacy browser behaviour, when required by the standard, or always.
quote_char=u'"'|u"'"
Use given quote character for attribute quoting. Default is to
use double quote unless attribute value contains a double quote,
in which case single quotes are used instead.
escape_lt_in_attrs=False|True
Whether to escape < in attribute values.
escape_rcdata=False|True
Whether to escape characters that need to be escaped within normal
elements within rcdata elements such as style.
resolve_entities=True|False
Whether to resolve named character entities that appear in the
source tree. The XML predefined entities < > & " '
are unaffected by this setting.
strip_whitespace=False|True
Whether to remove semantically meaningless whitespace. (This
compresses all whitespace to a single space except within pre.)
minimize_boolean_attributes=True|False
Shortens boolean attributes to give just the attribute value,
for example <input disabled="disabled"> becomes <input disabled>.
use_trailing_solidus=False|True
Includes a close-tag slash at the end of the start tag of void
elements (empty elements whose end tag is forbidden). E.g. <hr/>.
space_before_trailing_solidus=True|False
Places a space immediately before the closing slash in a tag
using a trailing solidus. E.g. <hr />. Requires use_trailing_solidus.
sanitize=False|True
Strip all unsafe or unknown constructs from output.
See `html5lib user documentation`_
omit_optional_tags=True|False
Omit start/end tags that are optional.
alphabetical_attributes=False|True
Reorder attributes to be in alphabetical order.
.. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation
"""
if 'quote_char' in kwargs:
self.use_best_quote_char = False
for attr in self.options:
setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
self.errors = []
self.strict = False
def encode(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, unicode_encode_errors)
else:
return string
def encodeStrict(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, "strict")
else:
return string
def serialize(self, treewalker, encoding=None):
self.encoding = encoding
in_cdata = False
self.errors = []
if encoding and self.inject_meta_charset:
from ..filters.inject_meta_charset import Filter
treewalker = Filter(treewalker, encoding)
# WhitespaceFilter should be used before OptionalTagFilter
# for maximum efficiently of this latter filter
if self.strip_whitespace:
from ..filters.whitespace import Filter
treewalker = Filter(treewalker)
if self.sanitize:
from ..filters.sanitizer import Filter
treewalker = Filter(treewalker)
if self.omit_optional_tags:
from ..filters.optionaltags import Filter
treewalker = Filter(treewalker)
# Alphabetical attributes must be last, as other filters
# could add attributes and alter the order
if self.alphabetical_attributes:
from ..filters.alphabeticalattributes import Filter
treewalker = Filter(treewalker)
for token in treewalker:
type = token["type"]
if type == "Doctype":
doctype = "<!DOCTYPE %s" % token["name"]
if token["publicId"]:
doctype += ' PUBLIC "%s"' % token["publicId"]
elif token["systemId"]:
doctype += " SYSTEM"
if token["systemId"]:
if token["systemId"].find('"') >= 0:
if token["systemId"].find("'") >= 0:
self.serializeError("System identifer contains both single and double quote characters")
quote_char = "'"
else:
quote_char = '"'
doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)
doctype += ">"
yield self.encodeStrict(doctype)
elif type in ("Characters", "SpaceCharacters"):
if type == "SpaceCharacters" or in_cdata:
if in_cdata and token["data"].find("</") >= 0:
self.serializeError("Unexpected </ in CDATA")
yield self.encode(token["data"])
else:
yield self.encode(escape(token["data"]))
elif type in ("StartTag", "EmptyTag"):
name = token["name"]
yield self.encodeStrict("<%s" % name)
if name in rcdataElements and not self.escape_rcdata:
in_cdata = True
elif in_cdata:
self.serializeError("Unexpected child element of a CDATA element")
for (attr_namespace, attr_name), attr_value in token["data"].items():
# TODO: Add namespace support here
k = attr_name
v = attr_value
yield self.encodeStrict(' ')
yield self.encodeStrict(k)
if not self.minimize_boolean_attributes or \
(k not in booleanAttributes.get(name, tuple()) and
k not in booleanAttributes.get("", tuple())):
yield self.encodeStrict("=")
if self.quote_attr_values == "always" or len(v) == 0:
quote_attr = True
elif self.quote_attr_values == "spec":
quote_attr = quoteAttributeSpec.search(v) is not None
elif self.quote_attr_values == "legacy":
quote_attr = quoteAttributeLegacy.search(v) is not None
else:
raise ValueError("quote_attr_values must be one of: "
"'always', 'spec', or 'legacy'")
v = v.replace("&", "&")
if self.escape_lt_in_attrs:
v = v.replace("<", "<")
if quote_attr:
quote_char = self.quote_char
if self.use_best_quote_char:
if "'" in v and '"' not in v:
quote_char = '"'
elif '"' in v and "'" not in v:
quote_char = "'"
if quote_char == "'":
v = v.replace("'", "'")
else:
v = v.replace('"', """)
yield self.encodeStrict(quote_char)
yield self.encode(v)
yield self.encodeStrict(quote_char)
else:
yield self.encode(v)
if name in voidElements and self.use_trailing_solidus:
if self.space_before_trailing_solidus:
yield self.encodeStrict(" /")
else:
yield self.encodeStrict("/")
yield self.encode(">")
elif type == "EndTag":
name = token["name"]
if name in rcdataElements:
in_cdata = False
elif in_cdata:
self.serializeError("Unexpected child element of a CDATA element")
yield self.encodeStrict("</%s>" % name)
elif type == "Comment":
data = token["data"]
if data.find("--") >= 0:
self.serializeError("Comment contains --")
yield self.encodeStrict("<!--%s-->" % token["data"])
elif type == "Entity":
name = token["name"]
key = name + ";"
if key not in entities:
self.serializeError("Entity %s not recognized" % name)
if self.resolve_entities and key not in xmlEntities:
data = entities[key]
else:
data = "&%s;" % name
yield self.encodeStrict(data)
else:
self.serializeError(token["data"])
def render(self, treewalker, encoding=None):
if encoding:
return b"".join(list(self.serialize(treewalker, encoding)))
else:
return "".join(list(self.serialize(treewalker)))
def serializeError(self, data="XXX ERROR MESSAGE NEEDED"):
# XXX The idea is to make data mandatory.
self.errors.append(data)
if self.strict:
raise SerializeError
def SerializeError(Exception):
"""Error in serialized tree"""
pass
| xss | {
"code": [
"quoteAttributeSpec = re.compile(\"[\" + spaceCharacters + \"\\\"'=<>`]\")",
" quote_attr_values = False",
" per HTML5 parsing rules.",
" if self.quote_attr_values:",
" quote_attr = len(v) == 0 or quoteAttributeSpec.search(v)"
],
"line_no": [
13,
75,
113,
242,
245
]
} | {
"code": [
"quoteAttributeSpecChars = spaceCharacters + \"\\\"'=<>`\"",
"quoteAttributeLegacy = re.compile(\"[\" + quoteAttributeSpecChars +",
" \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\"",
" \"\\x0b\\x0c\\r\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\"",
" \"\\x16\\x17\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\"",
" \"\\x20\\x2f\\x60\\xa0\\u1680\\u180e\\u180f\\u2000\"",
" \"\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\"",
" \"\\u3000]\")",
" per legacy browser behaviour, when required by the standard, or always.",
" if self.quote_attr_values == \"always\" or len(v) == 0:",
" elif self.quote_attr_values == \"spec\":",
" quote_attr = quoteAttributeSpec.search(v) is not None",
" elif self.quote_attr_values == \"legacy\":",
" quote_attr = quoteAttributeLegacy.search(v) is not None",
" raise ValueError(\"quote_attr_values must be one of: \"",
" \"'always', 'spec', or 'legacy'\")"
],
"line_no": [
13,
15,
16,
17,
18,
19,
20,
22,
122,
251,
253,
254,
255,
256,
258,
259
]
} | from __future__ import absolute_import, division, unicode_literals
from six import text_type
import re
from ..constants import .voidElements, booleanAttributes, VAR_0
from ..constants import rcdataElements, entities, xmlEntities
from .. import utils
from xml.sax.saxutils import .escape
VAR_0 = "".join(VAR_0)
VAR_1 = re.compile("[" + VAR_0 + "\"'=<>`]")
try:
from codecs import register_error, xmlcharrefreplace_errors
except ImportError:
VAR_3 = "strict"
else:
VAR_3 = "htmlentityreplace"
VAR_4 = {}
VAR_5 = len("\U0010FFFF") == 1
for VAR_38, VAR_32 in list(entities.items()):
if ((VAR_5 and len(VAR_32) > 1) or
(not VAR_5 and len(VAR_32) > 2)):
continue
if VAR_32 != "&":
if len(VAR_32) == 2:
VAR_32 = utils.surrogatePairToCodepoint(VAR_32)
else:
VAR_32 = ord(VAR_32)
if VAR_32 not in VAR_4 or VAR_38.islower():
VAR_4[VAR_32] = VAR_38
def FUNC_1(VAR_6):
if isinstance(VAR_6, (UnicodeEncodeError, UnicodeTranslateError)):
VAR_28 = []
VAR_29 = []
VAR_30 = False
for i, c in enumerate(VAR_6.object[VAR_6.start:VAR_6.end]):
if VAR_30:
skip = False
continue
VAR_33 = i + VAR_6.start
if utils.isSurrogatePair(VAR_6.object[VAR_33:min([VAR_6.end, VAR_33 + 2])]):
VAR_36 = utils.surrogatePairToCodepoint(VAR_6.object[VAR_33:index + 2])
VAR_30 = True
else:
VAR_36 = ord(c)
VAR_29.append(VAR_36)
for cp in VAR_29:
VAR_34 = VAR_4.get(cp)
if VAR_34:
VAR_28.append("&")
VAR_28.append(VAR_34)
if not VAR_34.endswith(";"):
VAR_28.append(";")
else:
VAR_28.append("&#x%s;" % (hex(cp)[2:]))
return ("".join(VAR_28), VAR_6.end)
else:
return xmlcharrefreplace_errors(VAR_6)
register_error(VAR_3, FUNC_1)
del register_error
class CLASS_0(object):
VAR_7 = False
VAR_8 = '"'
VAR_9 = True
VAR_10 = True
VAR_11 = True
VAR_12 = False
VAR_13 = True
VAR_14 = False
VAR_15 = False
VAR_16 = True
VAR_17 = False
VAR_18 = True
VAR_19 = False
VAR_20 = False
VAR_21 = ("quote_attr_values", "quote_char", "use_best_quote_char",
"omit_optional_tags", "minimize_boolean_attributes",
"use_trailing_solidus", "space_before_trailing_solidus",
"escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
"alphabetical_attributes", "inject_meta_charset",
"strip_whitespace", "sanitize")
def __init__(self, **VAR_22):
if 'quote_char' in VAR_22:
self.use_best_quote_char = False
for attr in self.options:
setattr(self, attr, VAR_22.get(attr, getattr(self, attr)))
self.errors = []
self.strict = False
def FUNC_2(self, VAR_23):
assert(isinstance(VAR_23, text_type))
if self.encoding:
return VAR_23.encode(self.encoding, VAR_3)
else:
return VAR_23
def FUNC_3(self, VAR_23):
assert(isinstance(VAR_23, text_type))
if self.encoding:
return VAR_23.encode(self.encoding, "strict")
else:
return VAR_23
def FUNC_4(self, VAR_24, VAR_25=None):
self.encoding = VAR_25
VAR_27 = False
self.errors = []
if VAR_25 and self.inject_meta_charset:
from ..filters.inject_meta_charset import Filter
VAR_24 = Filter(VAR_24, VAR_25)
if self.strip_whitespace:
from ..filters.whitespace import Filter
VAR_24 = Filter(VAR_24)
if self.sanitize:
from ..filters.sanitizer import Filter
VAR_24 = Filter(VAR_24)
if self.omit_optional_tags:
from ..filters.optionaltags import Filter
VAR_24 = Filter(VAR_24)
if self.alphabetical_attributes:
from ..filters.alphabeticalattributes import Filter
VAR_24 = Filter(VAR_24)
for token in VAR_24:
VAR_31 = token["type"]
if VAR_31 == "Doctype":
VAR_35 = "<!DOCTYPE %s" % token["name"]
if token["publicId"]:
VAR_35 += ' PUBLIC "%s"' % token["publicId"]
elif token["systemId"]:
VAR_35 += " SYSTEM"
if token["systemId"]:
if token["systemId"].find('"') >= 0:
if token["systemId"].find("'") >= 0:
self.serializeError("System identifer contains both single and double quote characters")
VAR_8 = "'"
else:
VAR_8 = '"'
VAR_35 += " %s%s%s" % (VAR_8, token["systemId"], VAR_8)
VAR_35 += ">"
yield self.encodeStrict(VAR_35)
elif VAR_31 in ("Characters", "SpaceCharacters"):
if VAR_31 == "SpaceCharacters" or VAR_27:
if VAR_27 and token["data"].find("</") >= 0:
self.serializeError("Unexpected </ in CDATA")
yield self.encode(token["data"])
else:
yield self.encode(escape(token["data"]))
elif VAR_31 in ("StartTag", "EmptyTag"):
VAR_37 = token["name"]
yield self.encodeStrict("<%s" % VAR_37)
if VAR_37 in rcdataElements and not self.escape_rcdata:
VAR_27 = True
elif VAR_27:
self.serializeError("Unexpected child element of a CDATA element")
for (attr_namespace, attr_name), attr_value in token["data"].items():
VAR_38 = attr_name
VAR_32 = attr_value
yield self.encodeStrict(' ')
yield self.encodeStrict(VAR_38)
if not self.minimize_boolean_attributes or \
(VAR_38 not in booleanAttributes.get(VAR_37, tuple()) and
VAR_38 not in booleanAttributes.get("", tuple())):
yield self.encodeStrict("=")
if self.quote_attr_values:
VAR_39 = True
else:
VAR_39 = len(VAR_32) == 0 or VAR_1.search(VAR_32)
VAR_32 = VAR_32.replace("&", "&")
if self.escape_lt_in_attrs:
VAR_32 = VAR_32.replace("<", "<")
if VAR_39:
VAR_8 = self.quote_char
if self.use_best_quote_char:
if "'" in VAR_32 and '"' not in VAR_32:
VAR_8 = '"'
elif '"' in VAR_32 and "'" not in VAR_32:
VAR_8 = "'"
if VAR_8 == "'":
VAR_32 = VAR_32.replace("'", "'")
else:
VAR_32 = VAR_32.replace('"', """)
yield self.encodeStrict(VAR_8)
yield self.encode(VAR_32)
yield self.encodeStrict(VAR_8)
else:
yield self.encode(VAR_32)
if VAR_37 in voidElements and self.use_trailing_solidus:
if self.space_before_trailing_solidus:
yield self.encodeStrict(" /")
else:
yield self.encodeStrict("/")
yield self.encode(">")
elif VAR_31 == "EndTag":
VAR_37 = token["name"]
if VAR_37 in rcdataElements:
VAR_27 = False
elif VAR_27:
self.serializeError("Unexpected child element of a CDATA element")
yield self.encodeStrict("</%s>" % VAR_37)
elif VAR_31 == "Comment":
VAR_26 = token["data"]
if VAR_26.find("--") >= 0:
self.serializeError("Comment contains --")
yield self.encodeStrict("<!--%s-->" % token["data"])
elif VAR_31 == "Entity":
VAR_37 = token["name"]
VAR_40 = VAR_37 + ";"
if VAR_40 not in entities:
self.serializeError("Entity %s not recognized" % VAR_37)
if self.resolve_entities and VAR_40 not in xmlEntities:
VAR_26 = entities[VAR_40]
else:
VAR_26 = "&%s;" % VAR_37
yield self.encodeStrict(VAR_26)
else:
self.serializeError(token["data"])
def FUNC_5(self, VAR_24, VAR_25=None):
if VAR_25:
return b"".join(list(self.serialize(VAR_24, VAR_25)))
else:
return "".join(list(self.serialize(VAR_24)))
def FUNC_6(self, VAR_26="XXX ERROR MESSAGE NEEDED"):
self.errors.append(VAR_26)
if self.strict:
raise FUNC_0
def FUNC_0(VAR_2):
pass
| from __future__ import absolute_import, division, unicode_literals
from six import text_type
import re
from ..constants import .voidElements, booleanAttributes, VAR_0
from ..constants import rcdataElements, entities, xmlEntities
from .. import utils
from xml.sax.saxutils import .escape
VAR_0 = "".join(VAR_0)
VAR_1 = VAR_0 + "\"'=<>`"
VAR_2 = re.compile("[" + VAR_1 + "]")
VAR_3 = re.compile("[" + VAR_1 +
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n"
"\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15"
"\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
"\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000"
"\u2001\u2002\u2003\u2004\u2005\u2006\u2007"
"\u2008\u2009\u200a\u2028\u2029\u202f\u205f"
"\u3000]")
try:
from codecs import register_error, xmlcharrefreplace_errors
except ImportError:
VAR_5 = "strict"
else:
VAR_5 = "htmlentityreplace"
VAR_6 = {}
VAR_7 = len("\U0010FFFF") == 1
for VAR_40, VAR_34 in list(entities.items()):
if ((VAR_7 and len(VAR_34) > 1) or
(not VAR_7 and len(VAR_34) > 2)):
continue
if VAR_34 != "&":
if len(VAR_34) == 2:
VAR_34 = utils.surrogatePairToCodepoint(VAR_34)
else:
VAR_34 = ord(VAR_34)
if VAR_34 not in VAR_6 or VAR_40.islower():
VAR_6[VAR_34] = VAR_40
def FUNC_1(VAR_8):
if isinstance(VAR_8, (UnicodeEncodeError, UnicodeTranslateError)):
VAR_30 = []
VAR_31 = []
VAR_32 = False
for i, c in enumerate(VAR_8.object[VAR_8.start:VAR_8.end]):
if VAR_32:
skip = False
continue
VAR_35 = i + VAR_8.start
if utils.isSurrogatePair(VAR_8.object[VAR_35:min([VAR_8.end, VAR_35 + 2])]):
VAR_38 = utils.surrogatePairToCodepoint(VAR_8.object[VAR_35:index + 2])
VAR_32 = True
else:
VAR_38 = ord(c)
VAR_31.append(VAR_38)
for cp in VAR_31:
VAR_36 = VAR_6.get(cp)
if VAR_36:
VAR_30.append("&")
VAR_30.append(VAR_36)
if not VAR_36.endswith(";"):
VAR_30.append(";")
else:
VAR_30.append("&#x%s;" % (hex(cp)[2:]))
return ("".join(VAR_30), VAR_8.end)
else:
return xmlcharrefreplace_errors(VAR_8)
register_error(VAR_5, FUNC_1)
del register_error
class CLASS_0(object):
VAR_9 = "legacy" # be secure by default
VAR_10 = '"'
VAR_11 = True
VAR_12 = True
VAR_13 = True
VAR_14 = False
VAR_15 = True
VAR_16 = False
VAR_17 = False
VAR_18 = True
VAR_19 = False
VAR_20 = True
VAR_21 = False
VAR_22 = False
VAR_23 = ("quote_attr_values", "quote_char", "use_best_quote_char",
"omit_optional_tags", "minimize_boolean_attributes",
"use_trailing_solidus", "space_before_trailing_solidus",
"escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
"alphabetical_attributes", "inject_meta_charset",
"strip_whitespace", "sanitize")
def __init__(self, **VAR_24):
if 'quote_char' in VAR_24:
self.use_best_quote_char = False
for attr in self.options:
setattr(self, attr, VAR_24.get(attr, getattr(self, attr)))
self.errors = []
self.strict = False
def FUNC_2(self, VAR_25):
assert(isinstance(VAR_25, text_type))
if self.encoding:
return VAR_25.encode(self.encoding, VAR_5)
else:
return VAR_25
def FUNC_3(self, VAR_25):
assert(isinstance(VAR_25, text_type))
if self.encoding:
return VAR_25.encode(self.encoding, "strict")
else:
return VAR_25
def FUNC_4(self, VAR_26, VAR_27=None):
self.encoding = VAR_27
VAR_29 = False
self.errors = []
if VAR_27 and self.inject_meta_charset:
from ..filters.inject_meta_charset import Filter
VAR_26 = Filter(VAR_26, VAR_27)
if self.strip_whitespace:
from ..filters.whitespace import Filter
VAR_26 = Filter(VAR_26)
if self.sanitize:
from ..filters.sanitizer import Filter
VAR_26 = Filter(VAR_26)
if self.omit_optional_tags:
from ..filters.optionaltags import Filter
VAR_26 = Filter(VAR_26)
if self.alphabetical_attributes:
from ..filters.alphabeticalattributes import Filter
VAR_26 = Filter(VAR_26)
for token in VAR_26:
VAR_33 = token["type"]
if VAR_33 == "Doctype":
VAR_37 = "<!DOCTYPE %s" % token["name"]
if token["publicId"]:
VAR_37 += ' PUBLIC "%s"' % token["publicId"]
elif token["systemId"]:
VAR_37 += " SYSTEM"
if token["systemId"]:
if token["systemId"].find('"') >= 0:
if token["systemId"].find("'") >= 0:
self.serializeError("System identifer contains both single and double quote characters")
VAR_10 = "'"
else:
VAR_10 = '"'
VAR_37 += " %s%s%s" % (VAR_10, token["systemId"], VAR_10)
VAR_37 += ">"
yield self.encodeStrict(VAR_37)
elif VAR_33 in ("Characters", "SpaceCharacters"):
if VAR_33 == "SpaceCharacters" or VAR_29:
if VAR_29 and token["data"].find("</") >= 0:
self.serializeError("Unexpected </ in CDATA")
yield self.encode(token["data"])
else:
yield self.encode(escape(token["data"]))
elif VAR_33 in ("StartTag", "EmptyTag"):
VAR_39 = token["name"]
yield self.encodeStrict("<%s" % VAR_39)
if VAR_39 in rcdataElements and not self.escape_rcdata:
VAR_29 = True
elif VAR_29:
self.serializeError("Unexpected child element of a CDATA element")
for (attr_namespace, attr_name), attr_value in token["data"].items():
VAR_40 = attr_name
VAR_34 = attr_value
yield self.encodeStrict(' ')
yield self.encodeStrict(VAR_40)
if not self.minimize_boolean_attributes or \
(VAR_40 not in booleanAttributes.get(VAR_39, tuple()) and
VAR_40 not in booleanAttributes.get("", tuple())):
yield self.encodeStrict("=")
if self.quote_attr_values == "always" or len(VAR_34) == 0:
VAR_41 = True
elif self.quote_attr_values == "spec":
VAR_41 = VAR_2.search(VAR_34) is not None
elif self.quote_attr_values == "legacy":
VAR_41 = VAR_3.search(VAR_34) is not None
else:
raise ValueError("quote_attr_values must be one of: "
"'always', 'spec', or 'legacy'")
VAR_34 = v.replace("&", "&")
if self.escape_lt_in_attrs:
VAR_34 = v.replace("<", "<")
if VAR_41:
VAR_10 = self.quote_char
if self.use_best_quote_char:
if "'" in VAR_34 and '"' not in VAR_34:
VAR_10 = '"'
elif '"' in VAR_34 and "'" not in VAR_34:
VAR_10 = "'"
if VAR_10 == "'":
VAR_34 = v.replace("'", "'")
else:
VAR_34 = v.replace('"', """)
yield self.encodeStrict(VAR_10)
yield self.encode(VAR_34)
yield self.encodeStrict(VAR_10)
else:
yield self.encode(VAR_34)
if VAR_39 in voidElements and self.use_trailing_solidus:
if self.space_before_trailing_solidus:
yield self.encodeStrict(" /")
else:
yield self.encodeStrict("/")
yield self.encode(">")
elif VAR_33 == "EndTag":
VAR_39 = token["name"]
if VAR_39 in rcdataElements:
VAR_29 = False
elif VAR_29:
self.serializeError("Unexpected child element of a CDATA element")
yield self.encodeStrict("</%s>" % VAR_39)
elif VAR_33 == "Comment":
VAR_28 = token["data"]
if VAR_28.find("--") >= 0:
self.serializeError("Comment contains --")
yield self.encodeStrict("<!--%s-->" % token["data"])
elif VAR_33 == "Entity":
VAR_39 = token["name"]
VAR_42 = VAR_39 + ";"
if VAR_42 not in entities:
self.serializeError("Entity %s not recognized" % VAR_39)
if self.resolve_entities and VAR_42 not in xmlEntities:
VAR_28 = entities[VAR_42]
else:
VAR_28 = "&%s;" % VAR_39
yield self.encodeStrict(VAR_28)
else:
self.serializeError(token["data"])
def FUNC_5(self, VAR_26, VAR_27=None):
if VAR_27:
return b"".join(list(self.serialize(VAR_26, VAR_27)))
else:
return "".join(list(self.serialize(VAR_26)))
def FUNC_6(self, VAR_28="XXX ERROR MESSAGE NEEDED"):
self.errors.append(VAR_28)
if self.strict:
raise FUNC_0
def FUNC_0(VAR_4):
pass
| [
3,
5,
10,
12,
14,
21,
25,
35,
37,
66,
68,
70,
71,
73,
74,
78,
79,
84,
85,
89,
90,
95,
102,
105,
107,
146,
155,
162,
169,
174,
178,
179,
189,
190,
194,
199,
212,
215,
223,
232,
236,
271,
279,
285,
296,
299,
305,
307,
311,
312,
316,
314,
104,
105,
106,
107,
108,
109,
110,
111,
112,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148
] | [
3,
5,
10,
12,
23,
30,
34,
44,
46,
75,
77,
79,
80,
82,
83,
87,
88,
93,
94,
98,
99,
104,
111,
114,
116,
155,
164,
171,
178,
183,
187,
188,
198,
199,
203,
208,
221,
224,
232,
241,
245,
285,
293,
299,
310,
313,
319,
321,
325,
326,
330,
328,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157
] |
1CWE-79
| import orjson
from tests.views.message.utils import post_and_get_response
def test_message_nested_sync_input(client):
data = {"dictionary": {"name": "test"}}
action_queue = [
{"payload": {"name": "dictionary.name", "value": "test1"}, "type": "syncInput",}
]
response = post_and_get_response(
client,
url="/message/tests.views.fake_components.FakeComponent",
data=data,
action_queue=action_queue,
)
assert not response["errors"]
assert response["data"].get("dictionary") == {"name": "test1"}
| from tests.views.message.utils import post_and_get_response
def test_message_nested_sync_input(client):
data = {"dictionary": {"name": "test"}}
action_queue = [
{"payload": {"name": "dictionary.name", "value": "test1"}, "type": "syncInput",}
]
response = post_and_get_response(
client,
url="/message/tests.views.fake_components.FakeComponent",
data=data,
action_queue=action_queue,
)
assert not response["errors"]
assert response["data"].get("dictionary") == {"name": "test1"}
| xss | {
"code": [
"import orjson"
],
"line_no": [
1
]
} | {
"code": [],
"line_no": []
} | import orjson
from tests.views.message.utils import post_and_get_response
def FUNC_0(VAR_0):
VAR_1 = {"dictionary": {"name": "test"}}
VAR_2 = [
{"payload": {"name": "dictionary.name", "value": "test1"}, "type": "syncInput",}
]
VAR_3 = post_and_get_response(
VAR_0,
url="/message/tests.views.fake_components.FakeComponent",
VAR_1=data,
VAR_2=action_queue,
)
assert not VAR_3["errors"]
assert VAR_3["data"].get("dictionary") == {"name": "test1"}
| from tests.views.message.utils import post_and_get_response
def FUNC_0(VAR_0):
VAR_1 = {"dictionary": {"name": "test"}}
VAR_2 = [
{"payload": {"name": "dictionary.name", "value": "test1"}, "type": "syncInput",}
]
VAR_3 = post_and_get_response(
VAR_0,
url="/message/tests.views.fake_components.FakeComponent",
VAR_1=data,
VAR_2=action_queue,
)
assert not VAR_3["errors"]
assert VAR_3["data"].get("dictionary") == {"name": "test1"}
| [
2,
4,
5,
17,
20
] | [
2,
3,
15,
18
] |
1CWE-79
| from .base_config_test import BaseConfigTest, CollsDirMixin, fmod
import os
import tempfile
import shutil
import sys
import webtest
import time
import gevent
from six import StringIO
import webtest
from pytest import raises
from mock import patch
from pywb import get_test_dir
from pywb.warcserver.test.testutils import BaseTestClass
from pywb.manager.autoindex import AutoIndexer
from pywb.manager.manager import main
from pywb.indexer.cdxindexer import main as cdxindexer_main
from pywb.warcserver.index.cdxobject import CDXObject
from pywb.apps.frontendapp import FrontEndApp
#=============================================================================
ARCHIVE_DIR = 'archive'
INDEX_DIR = 'indexes'
COLLECTIONS = '_test_colls'
INDEX_FILE = 'index.cdxj'
AUTOINDEX_FILE = 'autoindex.cdxj'
#=============================================================================
class TestManagedColls(CollsDirMixin, BaseConfigTest):
@classmethod
def setup_class(cls):
super(TestManagedColls, cls).setup_class('config_test.yaml')
def _check_dirs(self, base, dirlist):
for dir_ in dirlist:
assert os.path.isdir(os.path.join(base, dir_))
def _get_sample_warc(self, name):
return os.path.join(get_test_dir(), 'warcs', name)
@patch('pywb.apps.cli.BaseCli.run_gevent', lambda *args, **kwargs: None)
def test_run_cli(self):
""" test new wayback cli interface
test autoindex error before collections inited
"""
from pywb.apps.cli import wayback
wayback(['-p', '0'])
# Nothing to auto-index.. yet
with raises(SystemExit):
wayback(['-a', '-p', '0'])
colls = os.path.join(self.root_dir, COLLECTIONS)
os.mkdir(colls)
wayback(['-a', '-p', '0', '--auto-interval', '0'])
def test_create_first_coll(self):
""" Test first collection creation, with all required dirs
"""
main(['init', 'test'])
colls = os.path.join(self.root_dir, COLLECTIONS)
assert os.path.isdir(colls)
test = os.path.join(colls, 'test')
assert os.path.isdir(test)
self._check_dirs(test, [INDEX_DIR, ARCHIVE_DIR, 'static', 'templates'])
def test_add_warcs(self):
""" Test adding warc to new coll, check replay
"""
warc1 = self._get_sample_warc('example.warc.gz')
main(['add', 'test', warc1])
def test_add_warcs_replay(self, fmod):
resp = self.get('/test/20140103030321{0}/http://example.com/?example=1', fmod)
assert resp.status_int == 200
def test_another_coll(self):
""" Test adding warc to a new coll, check replay
"""
warc1 = self._get_sample_warc('example.warc.gz')
main(['init', 'foo'])
main(['add', 'foo', warc1])
def test_another_coll_replay(self, fmod):
resp = self.get('/foo/20140103030321{0}/http://example.com/?example=1', fmod)
assert resp.status_int == 200
def test_add_more_warcs(self):
""" Test adding additional warcs, check replay of added content
"""
warc1 = self._get_sample_warc('iana.warc.gz')
warc2 = self._get_sample_warc('example-extra.warc')
main(['add', 'test', warc1, warc2])
# Spurrious file in collections
with open(os.path.join(self.root_dir, COLLECTIONS, 'blah'), 'w+b') as fh:
fh.write(b'foo\n')
with raises(IOError):
main(['add', 'test', 'non-existent-file.warc.gz'])
def test_add_more_warcs_replay(self, fmod):
# check new cdx
resp = self.get('/test/20140126200624{0}/http://www.iana.org/', fmod)
assert resp.status_int == 200
def test_add_custom_nested_warcs(self):
""" Test recursive indexing of custom created WARC hierarchy,
warcs/A/..., warcs/B/sub/...
Ensure CDX is relative to root archive dir, test replay
"""
main(['init', 'nested'])
nested_root = os.path.join(self.root_dir, COLLECTIONS, 'nested', ARCHIVE_DIR)
nested_a = os.path.join(nested_root, 'A')
nested_b = os.path.join(nested_root, 'B', 'sub')
os.makedirs(nested_a)
os.makedirs(nested_b)
warc1 = self._get_sample_warc('iana.warc.gz')
warc2 = self._get_sample_warc('example.warc.gz')
shutil.copy2(warc1, nested_a)
shutil.copy2(warc2, nested_b)
main(['index',
'nested',
os.path.join(nested_a, 'iana.warc.gz'),
os.path.join(nested_b, 'example.warc.gz')
])
nested_cdx = os.path.join(self.root_dir, COLLECTIONS, 'nested', INDEX_DIR, INDEX_FILE)
with open(nested_cdx) as fh:
nested_cdx_index = fh.read()
assert '1043' in nested_cdx_index
assert '333' in nested_cdx_index
assert 'B/sub/example.warc.gz' in nested_cdx_index
assert '2258' in nested_cdx_index
assert '334' in nested_cdx_index
assert 'A/iana.warc.gz' in nested_cdx_index
def test_nested_replay(self, fmod):
resp = self.get('/nested/20140126200624{0}/http://www.iana.org/', fmod)
assert resp.status_int == 200
resp = self.get('/nested/20140103030321{0}/http://example.com/?example=1', fmod)
assert resp.status_int == 200
def test_merge_vs_reindex_equality(self):
""" Test full reindex vs merged update when adding warcs
to ensure equality of indexes
"""
# ensure merged index is same as full reindex
coll_dir = os.path.join(self.root_dir, COLLECTIONS, 'test', INDEX_DIR)
orig = os.path.join(coll_dir, INDEX_FILE)
bak = os.path.join(coll_dir, 'index.bak')
shutil.copy(orig, bak)
main(['reindex', 'test'])
with open(orig) as orig_fh:
merged_cdx = orig_fh.read()
with open(bak) as bak_fh:
reindex_cdx = bak_fh.read()
assert len(reindex_cdx.splitlines()) == len(merged_cdx.splitlines())
assert merged_cdx == reindex_cdx
def test_add_static(self):
""" Test adding static file to collection, check access
"""
a_static = os.path.join(self.root_dir, COLLECTIONS, 'test', 'static', 'abc.js')
with open(a_static, 'w+b') as fh:
fh.write(b'/* Some JS File */')
resp = self.testapp.get('/static/_/test/abc.js')
assert resp.status_int == 200
assert resp.content_type == 'application/javascript'
resp.charset = 'utf-8'
assert '/* Some JS File */' in resp.text
def test_add_shared_static(self):
""" Test adding shared static file to root static/ dir, check access
"""
a_static = os.path.join(self.root_dir, 'static', 'foo.css')
with open(a_static, 'w+b') as fh:
fh.write(b'/* Some CSS File */')
resp = self.testapp.get('/static/foo.css')
assert resp.status_int == 200
assert resp.content_type == 'text/css'
resp.charset = 'utf-8'
assert '/* Some CSS File */' in resp.text
def test_add_title_metadata_index_page(self):
""" Test adding title metadata to a collection, test
retrieval on default index page
"""
main(['metadata', 'foo', '--set', 'title=Collection Title'])
resp = self.testapp.get('/')
assert resp.status_int == 200
assert resp.content_type == 'text/html'
resp.charset = 'utf-8'
assert '(Collection Title)' in resp.text
# test cache
resp = self.testapp.get('/')
resp.charset = 'utf-8'
assert '(Collection Title)' in resp.text
def test_other_metadata_search_page(self):
main(['metadata', 'foo', '--set',
'desc=Some Description Text',
'other=custom value'])
with raises(ValueError):
main(['metadata', 'foo', '--set', 'name_only'])
resp = self.testapp.get('/foo/')
resp.charset = 'utf-8'
assert resp.status_int == 200
assert resp.content_type == 'text/html'
assert 'Collection Title' in resp.text
assert 'desc' in resp.text
assert 'Some Description Text' in resp.text
assert 'other' in resp.text
assert 'custom value' in resp.text
def test_custom_template_search(self):
""" Test manually added custom search template search.html
"""
custom_search = os.path.join(self.root_dir, COLLECTIONS, 'test',
'templates', 'search.html')
with open(custom_search, 'w+b') as fh:
fh.write(b'pywb custom search page')
resp = self.testapp.get('/test/')
resp.charset = 'utf-8'
assert resp.status_int == 200
assert resp.content_type == 'text/html'
assert 'pywb custom search page' in resp.text
def test_add_custom_banner(self):
""" Test adding custom banner.html per-collection template
"""
banner_file = os.path.join(self.root_dir, COLLECTIONS, 'test',
'templates', 'banner.html')
with open(banner_file, 'w+b') as fh:
fh.write(b'<div>Custom Banner Here!</div>')
fh.write(b'\n{{ metadata | tojson }}')
def test_add_custom_banner_replay(self, fmod):
resp = self.get('/test/20140103030321/http://example.com/?example=1', fmod)
assert '<div>Custom Banner Here!</div>' in resp.text
def test_more_custom_templates(self):
"""
Test custom templates and metadata
Template is relative to collection-specific dir
Add custom metadata and test its presence in custom search page
"""
custom_search = os.path.join(self.root_dir, COLLECTIONS, 'test',
'templates', 'search.html')
# add metadata
main(['metadata', 'test', '--set', 'some=value'])
with open(custom_search, 'w+b') as fh:
fh.write(b'overriden search page: ')
fh.write(b'{{ metadata | tojson }}\n')
# force clear of jinja env cache to reload
self.app.rewriterapp.jinja_env.jinja_env.cache = {}
resp = self.testapp.get('/test/')
resp.charset = 'utf-8'
assert resp.status_int == 200
assert resp.content_type == 'text/html'
assert 'overriden search page: ' in resp.text
print(resp.text)
assert '"some":"value"' in resp.text, resp.text
def test_replay_banner_metadata(self, fmod):
""" Test adding metadata in replay banner (both framed and non-frame)
"""
resp = self.get('/test/20140103030321{0}/http://example.com/?example=1', fmod)
assert '<div>Custom Banner Here!</div>' in resp.text
assert '"some":"value"' in resp.text
def test_more_custom_templates_replay(self, fmod):
resp = self.get('/test/20140103030321{0}/http://example.com/?example=1', fmod)
assert resp.status_int == 200
def test_add_default_coll_templates(self):
""" Test add default templates: collection,
and overwrite collection template
"""
# list
main(['template', 'foo', '--list'])
# Add collection template
main(['template', 'foo', '--add', 'query_html'])
assert os.path.isfile(os.path.join(self.root_dir, COLLECTIONS, 'foo', 'templates', 'query.html'))
# overwrite -- force
main(['template', 'foo', '--add', 'query_html', '-f'])
def test_add_modify_home_template(self):
# Add shared template
main(['template', '--add', 'home_html'])
filename = os.path.join(self.root_dir, 'templates', 'index.html')
assert os.path.isfile(filename)
with open(filename, 'r+b') as fh:
buf = fh.read()
buf = buf.replace(b'Pywb Wayback Machine', b'Custom Test Homepage')
fh.seek(0)
fh.write(buf)
resp = self.testapp.get('/')
resp.charset = 'utf-8'
assert resp.content_type == 'text/html'
assert 'Custom Test Homepage' in resp.text, resp.text
@patch('pywb.manager.manager.get_input', lambda x: 'y')
def test_add_template_input_yes(self):
""" Test answer 'yes' to overwrite
"""
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'n')
def test_add_template_input_no(self):
""" Test answer 'no' to overwrite
"""
with raises(IOError):
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'other')
def test_add_template_input_other(self):
""" Test answer 'other' to overwrite
"""
with raises(IOError):
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'no')
def test_remove_not_confirm(self):
""" Test answer 'no' to remove
"""
# don't remove -- not confirmed
with raises(IOError):
main(['template', 'foo', '--remove', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'yes')
def test_remove_confirm(self):
# remove -- confirm
main(['template', 'foo', '--remove', 'query_html'])
def test_no_templates(self):
""" Test removing templates dir, using default template again
"""
shutil.rmtree(os.path.join(self.root_dir, COLLECTIONS, 'foo', 'templates'))
resp = self.testapp.get('/foo/')
resp.charset = 'utf-8'
assert resp.status_int == 200
assert resp.content_type == 'text/html'
assert 'pywb custom search page' not in resp.text
def test_list_colls(self):
""" Test collection listing, printed to stdout
"""
orig_stdout = sys.stdout
buff = StringIO()
sys.stdout = buff
try:
main(['list'])
finally:
sys.stdout = orig_stdout
output = sorted(buff.getvalue().splitlines())
assert len(output) == 4
assert 'Collections:' in output
assert '- foo' in output
assert '- nested' in output
assert '- test' in output
def test_convert_cdx(self):
""" Create non-surt cdx, then convert to cdxj
"""
migrate_dir = os.path.join(self.root_dir, '_migrate')
os.mkdir(migrate_dir)
cdxindexer_main(['-u', '-o', migrate_dir, self._get_sample_warc('')])
# try one file with -9
cdxindexer_main(['-u', '-9', '-o', migrate_dir, self._get_sample_warc('example.warc.gz')])
cdxs = os.listdir(migrate_dir)
assert all(x.endswith('.cdx') for x in cdxs)
@patch('pywb.manager.manager.get_input', lambda x: 'blah')
def do_migrate_no():
main(['cdx-convert', migrate_dir])
do_migrate_no()
assert os.listdir(migrate_dir) == cdxs
@patch('pywb.manager.manager.get_input', lambda x: 'y')
def do_migrate_yes():
main(['cdx-convert', migrate_dir])
do_migrate_yes()
cdxjs = os.listdir(migrate_dir)
assert len(cdxs) == len(cdxjs)
assert all(x.endswith('.cdxj') for x in cdxjs)
with open(os.path.join(migrate_dir, 'iana.cdxj'), 'rb') as fh:
cdx = CDXObject(fh.readline())
assert cdx['urlkey'] == 'org,iana)/'
assert cdx['timestamp'] == '20140126200624'
assert cdx['url'] == 'http://www.iana.org/'
#assert fh.readline().startswith('org,iana)/ 20140126200624 {"url": "http://www.iana.org/",')
# Nothing else to migrate
main(['cdx-convert', migrate_dir])
def test_auto_index(self):
main(['init', 'auto'])
auto_dir = os.path.join(self.root_dir, COLLECTIONS, 'auto')
archive_dir = os.path.join(auto_dir, ARCHIVE_DIR)
archive_sub_dir = os.path.join(archive_dir, 'sub')
os.makedirs(archive_sub_dir)
def do_copy():
try:
time.sleep(1.0)
shutil.copy(self._get_sample_warc('example.warc.gz'), archive_dir)
shutil.copy(self._get_sample_warc('example-extra.warc'), archive_sub_dir)
time.sleep(1.0)
finally:
indexer.interval = 0
indexer = AutoIndexer(interval=0.25)
indexer.start()
ge = gevent.spawn(do_copy)
ge.join()
index_file = os.path.join(auto_dir, INDEX_DIR, AUTOINDEX_FILE)
assert os.path.isfile(index_file)
with open(index_file, 'r') as fh:
index = fh.read()
assert '"example.warc.gz' in index, index
assert '"sub/example-extra.warc' in index, index
mtime = os.path.getmtime(index_file)
# Update
indexer.interval = 0.25
indexer.start()
os.remove(index_file)
#thread = threading.Thread(target=do_copy)
#thread.daemon = True
#thread.start()
ge = gevent.spawn(do_copy)
#wayback(['-p', '0', '-a', '--auto-interval', '0.25'])
#thread.join()
ge.join()
# assert file was update
assert os.path.getmtime(index_file) > mtime
def test_err_template_remove(self):
""" Test various error conditions for templates:
invalid template name, no collection for collection template
no template file found
"""
# no such template
with raises(KeyError):
main(['template', 'foo', '--remove', 'blah_html'])
# collection needed
with raises(IOError):
main(['template', '--remove', 'query_html'])
# already removed
with raises(IOError):
main(['template', 'foo', '--remove', 'query_html'])
def test_err_no_such_coll(self):
""" Test error adding warc to non-existant collection
"""
warc1 = self._get_sample_warc('example.warc.gz')
with raises(IOError):
main(['add', 'bar', warc1])
def test_err_wrong_warcs(self):
warc1 = self._get_sample_warc('example.warc.gz')
invalid_warc = os.path.join(self.root_dir, COLLECTIONS, 'test', ARCHIVE_DIR, 'invalid.warc.gz')
# Empty warc list, argparse calls exit
with raises(SystemExit):
main(['index', 'test'])
# Wrong paths not in collection
with raises(IOError):
main(['index', 'test', warc1])
# Non-existent
with raises(IOError):
main(['index', 'test', invalid_warc])
def test_err_invalid_name(self):
""" Invalid collection name
"""
with raises(ValueError):
main(['init', '../abc%'])
with raises(ValueError):
main(['init', '45^23'])
def test_err_missing_dirs(self):
""" Test various errors with missing warcs dir,
missing cdx dir, non dir cdx file, and missing collections root
"""
colls = os.path.join(self.root_dir, COLLECTIONS)
# No Statics -- ignorable
shutil.rmtree(os.path.join(colls, 'foo', 'static'))
# No WARCS
warcs_path = os.path.join(colls, 'foo', ARCHIVE_DIR)
shutil.rmtree(warcs_path)
with raises(IOError):
main(['add', 'foo', 'somewarc'])
# No CDX
cdx_path = os.path.join(colls, 'foo', INDEX_DIR)
shutil.rmtree(cdx_path)
# CDX a file not a dir
with open(cdx_path, 'w+b') as fh:
fh.write(b'foo\n')
shutil.rmtree(colls)
# No Collections to list
with raises(IOError):
main(['list'])
# No Collections
resp = self.testapp.get('/test/', status=404)
assert resp.status_int == 404
| from .base_config_test import BaseConfigTest, CollsDirMixin, fmod
import os
import tempfile
import shutil
import sys
import webtest
import time
import gevent
from six import StringIO
import webtest
from pytest import raises
from mock import patch
from pywb import get_test_dir
from pywb.warcserver.test.testutils import BaseTestClass
from pywb.manager.autoindex import AutoIndexer
from pywb.manager.manager import main
from pywb.indexer.cdxindexer import main as cdxindexer_main
from pywb.warcserver.index.cdxobject import CDXObject
from pywb.apps.frontendapp import FrontEndApp
#=============================================================================
ARCHIVE_DIR = 'archive'
INDEX_DIR = 'indexes'
COLLECTIONS = '_test_colls'
INDEX_FILE = 'index.cdxj'
AUTOINDEX_FILE = 'autoindex.cdxj'
#=============================================================================
class TestManagedColls(CollsDirMixin, BaseConfigTest):
@classmethod
def setup_class(cls):
super(TestManagedColls, cls).setup_class('config_test.yaml')
def _check_dirs(self, base, dirlist):
for dir_ in dirlist:
assert os.path.isdir(os.path.join(base, dir_))
def _get_sample_warc(self, name):
return os.path.join(get_test_dir(), 'warcs', name)
@patch('pywb.apps.cli.BaseCli.run_gevent', lambda *args, **kwargs: None)
def test_run_cli(self):
""" test new wayback cli interface
test autoindex error before collections inited
"""
from pywb.apps.cli import wayback
wayback(['-p', '0'])
# Nothing to auto-index.. yet
with raises(SystemExit):
wayback(['-a', '-p', '0'])
colls = os.path.join(self.root_dir, COLLECTIONS)
os.mkdir(colls)
wayback(['-a', '-p', '0', '--auto-interval', '0'])
def test_create_first_coll(self):
""" Test first collection creation, with all required dirs
"""
main(['init', 'test'])
colls = os.path.join(self.root_dir, COLLECTIONS)
assert os.path.isdir(colls)
test = os.path.join(colls, 'test')
assert os.path.isdir(test)
self._check_dirs(test, [INDEX_DIR, ARCHIVE_DIR, 'static', 'templates'])
def test_add_warcs(self):
""" Test adding warc to new coll, check replay
"""
warc1 = self._get_sample_warc('example.warc.gz')
main(['add', 'test', warc1])
def test_add_warcs_replay(self, fmod):
resp = self.get('/test/20140103030321{0}/http://example.com/?example=1', fmod)
assert resp.status_int == 200
def test_another_coll(self):
""" Test adding warc to a new coll, check replay
"""
warc1 = self._get_sample_warc('example.warc.gz')
main(['init', 'foo'])
main(['add', 'foo', warc1])
def test_another_coll_replay(self, fmod):
resp = self.get('/foo/20140103030321{0}/http://example.com/?example=1', fmod)
assert resp.status_int == 200
def test_add_more_warcs(self):
""" Test adding additional warcs, check replay of added content
"""
warc1 = self._get_sample_warc('iana.warc.gz')
warc2 = self._get_sample_warc('example-extra.warc')
main(['add', 'test', warc1, warc2])
# Spurrious file in collections
with open(os.path.join(self.root_dir, COLLECTIONS, 'blah'), 'w+b') as fh:
fh.write(b'foo\n')
with raises(IOError):
main(['add', 'test', 'non-existent-file.warc.gz'])
def test_add_more_warcs_replay(self, fmod):
# check new cdx
resp = self.get('/test/20140126200624{0}/http://www.iana.org/', fmod)
assert resp.status_int == 200
def test_add_custom_nested_warcs(self):
""" Test recursive indexing of custom created WARC hierarchy,
warcs/A/..., warcs/B/sub/...
Ensure CDX is relative to root archive dir, test replay
"""
main(['init', 'nested'])
nested_root = os.path.join(self.root_dir, COLLECTIONS, 'nested', ARCHIVE_DIR)
nested_a = os.path.join(nested_root, 'A')
nested_b = os.path.join(nested_root, 'B', 'sub')
os.makedirs(nested_a)
os.makedirs(nested_b)
warc1 = self._get_sample_warc('iana.warc.gz')
warc2 = self._get_sample_warc('example.warc.gz')
shutil.copy2(warc1, nested_a)
shutil.copy2(warc2, nested_b)
main(['index',
'nested',
os.path.join(nested_a, 'iana.warc.gz'),
os.path.join(nested_b, 'example.warc.gz')
])
nested_cdx = os.path.join(self.root_dir, COLLECTIONS, 'nested', INDEX_DIR, INDEX_FILE)
with open(nested_cdx) as fh:
nested_cdx_index = fh.read()
assert '1043' in nested_cdx_index
assert '333' in nested_cdx_index
assert 'B/sub/example.warc.gz' in nested_cdx_index
assert '2258' in nested_cdx_index
assert '334' in nested_cdx_index
assert 'A/iana.warc.gz' in nested_cdx_index
def test_nested_replay(self, fmod):
resp = self.get('/nested/20140126200624{0}/http://www.iana.org/', fmod)
assert resp.status_int == 200
resp = self.get('/nested/20140103030321{0}/http://example.com/?example=1', fmod)
assert resp.status_int == 200
def test_merge_vs_reindex_equality(self):
""" Test full reindex vs merged update when adding warcs
to ensure equality of indexes
"""
# ensure merged index is same as full reindex
coll_dir = os.path.join(self.root_dir, COLLECTIONS, 'test', INDEX_DIR)
orig = os.path.join(coll_dir, INDEX_FILE)
bak = os.path.join(coll_dir, 'index.bak')
shutil.copy(orig, bak)
main(['reindex', 'test'])
with open(orig) as orig_fh:
merged_cdx = orig_fh.read()
with open(bak) as bak_fh:
reindex_cdx = bak_fh.read()
assert len(reindex_cdx.splitlines()) == len(merged_cdx.splitlines())
assert merged_cdx == reindex_cdx
def test_add_static(self):
""" Test adding static file to collection, check access
"""
a_static = os.path.join(self.root_dir, COLLECTIONS, 'test', 'static', 'abc.js')
with open(a_static, 'w+b') as fh:
fh.write(b'/* Some JS File */')
resp = self.testapp.get('/static/_/test/abc.js')
assert resp.status_int == 200
assert resp.content_type == 'application/javascript'
resp.charset = 'utf-8'
assert '/* Some JS File */' in resp.text
def test_add_shared_static(self):
""" Test adding shared static file to root static/ dir, check access
"""
a_static = os.path.join(self.root_dir, 'static', 'foo.css')
with open(a_static, 'w+b') as fh:
fh.write(b'/* Some CSS File */')
resp = self.testapp.get('/static/foo.css')
assert resp.status_int == 200
assert resp.content_type == 'text/css'
resp.charset = 'utf-8'
assert '/* Some CSS File */' in resp.text
def test_add_title_metadata_index_page(self):
""" Test adding title metadata to a collection, test
retrieval on default index page
"""
main(['metadata', 'foo', '--set', 'title=Collection Title'])
resp = self.testapp.get('/')
assert resp.status_int == 200
assert resp.content_type == 'text/html'
resp.charset = 'utf-8'
assert '(Collection Title)' in resp.text
# test cache
resp = self.testapp.get('/')
resp.charset = 'utf-8'
assert '(Collection Title)' in resp.text
def test_other_metadata_search_page(self):
main(['metadata', 'foo', '--set',
'desc=Some Description Text',
'other=custom value'])
with raises(ValueError):
main(['metadata', 'foo', '--set', 'name_only'])
resp = self.testapp.get('/foo/')
resp.charset = 'utf-8'
assert resp.status_int == 200
assert resp.content_type == 'text/html'
assert 'Collection Title' in resp.text
assert 'desc' in resp.text
assert 'Some Description Text' in resp.text
assert 'other' in resp.text
assert 'custom value' in resp.text
def test_custom_template_search(self):
""" Test manually added custom search template search.html
"""
custom_search = os.path.join(self.root_dir, COLLECTIONS, 'test',
'templates', 'search.html')
with open(custom_search, 'w+b') as fh:
fh.write(b'pywb custom search page')
resp = self.testapp.get('/test/')
resp.charset = 'utf-8'
assert resp.status_int == 200
assert resp.content_type == 'text/html'
assert 'pywb custom search page' in resp.text
def test_add_custom_banner(self):
""" Test adding custom banner.html per-collection template
"""
banner_file = os.path.join(self.root_dir, COLLECTIONS, 'test',
'templates', 'banner.html')
with open(banner_file, 'w+b') as fh:
fh.write(b'<div>Custom Banner Here!</div>')
fh.write(b'\n{{ metadata | tojson }}')
def test_add_custom_banner_replay(self, fmod):
resp = self.get('/test/20140103030321/http://example.com/?example=1', fmod)
assert '<div>Custom Banner Here!</div>' in resp.text
def test_more_custom_templates(self):
"""
Test custom templates and metadata
Template is relative to collection-specific dir
Add custom metadata and test its presence in custom search page
"""
custom_search = os.path.join(self.root_dir, COLLECTIONS, 'test',
'templates', 'search.html')
# add metadata
main(['metadata', 'test', '--set', 'some=value'])
with open(custom_search, 'w+b') as fh:
fh.write(b'overriden search page: ')
fh.write(b'{{ metadata | tojson }}\n')
# force clear of jinja env cache to reload
self.app.rewriterapp.jinja_env.jinja_env.cache = {}
resp = self.testapp.get('/test/')
resp.charset = 'utf-8'
assert resp.status_int == 200
assert resp.content_type == 'text/html'
assert 'overriden search page: ' in resp.text
#assert '"some":"value"' in resp.text, resp.text
assert '{"some":"value"}' in resp.text, resp.text
def test_replay_banner_metadata(self, fmod):
""" Test adding metadata in replay banner (both framed and non-frame)
"""
resp = self.get('/test/20140103030321{0}/http://example.com/?example=1', fmod)
assert '<div>Custom Banner Here!</div>' in resp.text
#assert '"some":"value"' in resp.text
assert '{"some":"value"}' in resp.text, resp.text
def test_more_custom_templates_replay(self, fmod):
resp = self.get('/test/20140103030321{0}/http://example.com/?example=1', fmod)
assert resp.status_int == 200
def test_add_default_coll_templates(self):
""" Test add default templates: collection,
and overwrite collection template
"""
# list
main(['template', 'foo', '--list'])
# Add collection template
main(['template', 'foo', '--add', 'query_html'])
assert os.path.isfile(os.path.join(self.root_dir, COLLECTIONS, 'foo', 'templates', 'query.html'))
# overwrite -- force
main(['template', 'foo', '--add', 'query_html', '-f'])
def test_add_modify_home_template(self):
# Add shared template
main(['template', '--add', 'home_html'])
filename = os.path.join(self.root_dir, 'templates', 'index.html')
assert os.path.isfile(filename)
with open(filename, 'r+b') as fh:
buf = fh.read()
buf = buf.replace(b'Pywb Wayback Machine', b'Custom Test Homepage')
fh.seek(0)
fh.write(buf)
resp = self.testapp.get('/')
resp.charset = 'utf-8'
assert resp.content_type == 'text/html'
assert 'Custom Test Homepage' in resp.text, resp.text
@patch('pywb.manager.manager.get_input', lambda x: 'y')
def test_add_template_input_yes(self):
""" Test answer 'yes' to overwrite
"""
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'n')
def test_add_template_input_no(self):
""" Test answer 'no' to overwrite
"""
with raises(IOError):
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'other')
def test_add_template_input_other(self):
""" Test answer 'other' to overwrite
"""
with raises(IOError):
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'no')
def test_remove_not_confirm(self):
""" Test answer 'no' to remove
"""
# don't remove -- not confirmed
with raises(IOError):
main(['template', 'foo', '--remove', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'yes')
def test_remove_confirm(self):
# remove -- confirm
main(['template', 'foo', '--remove', 'query_html'])
def test_no_templates(self):
""" Test removing templates dir, using default template again
"""
shutil.rmtree(os.path.join(self.root_dir, COLLECTIONS, 'foo', 'templates'))
resp = self.testapp.get('/foo/')
resp.charset = 'utf-8'
assert resp.status_int == 200
assert resp.content_type == 'text/html'
assert 'pywb custom search page' not in resp.text
def test_list_colls(self):
""" Test collection listing, printed to stdout
"""
orig_stdout = sys.stdout
buff = StringIO()
sys.stdout = buff
try:
main(['list'])
finally:
sys.stdout = orig_stdout
output = sorted(buff.getvalue().splitlines())
assert len(output) == 4
assert 'Collections:' in output
assert '- foo' in output
assert '- nested' in output
assert '- test' in output
def test_convert_cdx(self):
""" Create non-surt cdx, then convert to cdxj
"""
migrate_dir = os.path.join(self.root_dir, '_migrate')
os.mkdir(migrate_dir)
cdxindexer_main(['-u', '-o', migrate_dir, self._get_sample_warc('')])
# try one file with -9
cdxindexer_main(['-u', '-9', '-o', migrate_dir, self._get_sample_warc('example.warc.gz')])
cdxs = os.listdir(migrate_dir)
assert all(x.endswith('.cdx') for x in cdxs)
@patch('pywb.manager.manager.get_input', lambda x: 'blah')
def do_migrate_no():
main(['cdx-convert', migrate_dir])
do_migrate_no()
assert os.listdir(migrate_dir) == cdxs
@patch('pywb.manager.manager.get_input', lambda x: 'y')
def do_migrate_yes():
main(['cdx-convert', migrate_dir])
do_migrate_yes()
cdxjs = os.listdir(migrate_dir)
assert len(cdxs) == len(cdxjs)
assert all(x.endswith('.cdxj') for x in cdxjs)
with open(os.path.join(migrate_dir, 'iana.cdxj'), 'rb') as fh:
cdx = CDXObject(fh.readline())
assert cdx['urlkey'] == 'org,iana)/'
assert cdx['timestamp'] == '20140126200624'
assert cdx['url'] == 'http://www.iana.org/'
#assert fh.readline().startswith('org,iana)/ 20140126200624 {"url": "http://www.iana.org/",')
# Nothing else to migrate
main(['cdx-convert', migrate_dir])
def test_auto_index(self):
main(['init', 'auto'])
auto_dir = os.path.join(self.root_dir, COLLECTIONS, 'auto')
archive_dir = os.path.join(auto_dir, ARCHIVE_DIR)
archive_sub_dir = os.path.join(archive_dir, 'sub')
os.makedirs(archive_sub_dir)
def do_copy():
try:
time.sleep(1.0)
shutil.copy(self._get_sample_warc('example.warc.gz'), archive_dir)
shutil.copy(self._get_sample_warc('example-extra.warc'), archive_sub_dir)
time.sleep(1.0)
finally:
indexer.interval = 0
indexer = AutoIndexer(interval=0.25)
indexer.start()
ge = gevent.spawn(do_copy)
ge.join()
index_file = os.path.join(auto_dir, INDEX_DIR, AUTOINDEX_FILE)
assert os.path.isfile(index_file)
with open(index_file, 'r') as fh:
index = fh.read()
assert '"example.warc.gz' in index, index
assert '"sub/example-extra.warc' in index, index
mtime = os.path.getmtime(index_file)
# Update
indexer.interval = 0.25
indexer.start()
os.remove(index_file)
#thread = threading.Thread(target=do_copy)
#thread.daemon = True
#thread.start()
ge = gevent.spawn(do_copy)
#wayback(['-p', '0', '-a', '--auto-interval', '0.25'])
#thread.join()
ge.join()
# assert file was update
assert os.path.getmtime(index_file) > mtime
def test_err_template_remove(self):
""" Test various error conditions for templates:
invalid template name, no collection for collection template
no template file found
"""
# no such template
with raises(KeyError):
main(['template', 'foo', '--remove', 'blah_html'])
# collection needed
with raises(IOError):
main(['template', '--remove', 'query_html'])
# already removed
with raises(IOError):
main(['template', 'foo', '--remove', 'query_html'])
def test_err_no_such_coll(self):
""" Test error adding warc to non-existant collection
"""
warc1 = self._get_sample_warc('example.warc.gz')
with raises(IOError):
main(['add', 'bar', warc1])
def test_err_wrong_warcs(self):
warc1 = self._get_sample_warc('example.warc.gz')
invalid_warc = os.path.join(self.root_dir, COLLECTIONS, 'test', ARCHIVE_DIR, 'invalid.warc.gz')
# Empty warc list, argparse calls exit
with raises(SystemExit):
main(['index', 'test'])
# Wrong paths not in collection
with raises(IOError):
main(['index', 'test', warc1])
# Non-existent
with raises(IOError):
main(['index', 'test', invalid_warc])
def test_err_invalid_name(self):
""" Invalid collection name
"""
with raises(ValueError):
main(['init', '../abc%'])
with raises(ValueError):
main(['init', '45^23'])
def test_err_missing_dirs(self):
""" Test various errors with missing warcs dir,
missing cdx dir, non dir cdx file, and missing collections root
"""
colls = os.path.join(self.root_dir, COLLECTIONS)
# No Statics -- ignorable
shutil.rmtree(os.path.join(colls, 'foo', 'static'))
# No WARCS
warcs_path = os.path.join(colls, 'foo', ARCHIVE_DIR)
shutil.rmtree(warcs_path)
with raises(IOError):
main(['add', 'foo', 'somewarc'])
# No CDX
cdx_path = os.path.join(colls, 'foo', INDEX_DIR)
shutil.rmtree(cdx_path)
# CDX a file not a dir
with open(cdx_path, 'w+b') as fh:
fh.write(b'foo\n')
shutil.rmtree(colls)
# No Collections to list
with raises(IOError):
main(['list'])
# No Collections
resp = self.testapp.get('/test/', status=404)
assert resp.status_int == 404
| xss | {
"code": [
" print(resp.text)",
" assert '\"some\":\"value\"' in resp.text, resp.text",
" assert '\"some\":\"value\"' in resp.text"
],
"line_no": [
316,
317,
324
]
} | {
"code": [],
"line_no": []
} | from .base_config_test import BaseConfigTest, CollsDirMixin, VAR_9
import os
import tempfile
import shutil
import sys
import webtest
import time
import .gevent
from six import StringIO
import webtest
from pytest import raises
from mock import patch
from pywb import .get_test_dir
from pywb.warcserver.test.testutils import BaseTestClass
from pywb.manager.autoindex import AutoIndexer
from pywb.manager.manager import main
from pywb.indexer.cdxindexer import main as cdxindexer_main
from pywb.warcserver.index.cdxobject import CDXObject
from pywb.apps.frontendapp import FrontEndApp
VAR_0 = 'archive'
VAR_1 = 'indexes'
VAR_2 = '_test_colls'
VAR_3 = 'index.cdxj'
VAR_4 = 'autoindex.cdxj'
class CLASS_0(CollsDirMixin, BaseConfigTest):
@classmethod
def FUNC_0(VAR_5):
super(CLASS_0, VAR_5).setup_class('config_test.yaml')
def FUNC_1(self, VAR_6, VAR_7):
for dir_ in VAR_7:
assert os.path.isdir(os.path.join(VAR_6, dir_))
def FUNC_2(self, VAR_8):
return os.path.join(get_test_dir(), 'warcs', VAR_8)
@patch('pywb.apps.cli.BaseCli.run_gevent', lambda *args, **kwargs: None)
def FUNC_3(self):
from pywb.apps.cli import wayback
wayback(['-p', '0'])
with raises(SystemExit):
wayback(['-a', '-p', '0'])
VAR_10 = os.path.join(self.root_dir, VAR_2)
os.mkdir(VAR_10)
wayback(['-a', '-p', '0', '--auto-interval', '0'])
def FUNC_4(self):
main(['init', 'test'])
VAR_10 = os.path.join(self.root_dir, VAR_2)
assert os.path.isdir(VAR_10)
VAR_11 = os.path.join(VAR_10, 'test')
assert os.path.isdir(VAR_11)
self._check_dirs(VAR_11, [VAR_1, VAR_0, 'static', 'templates'])
def FUNC_5(self):
VAR_12 = self._get_sample_warc('example.warc.gz')
main(['add', 'test', VAR_12])
def FUNC_6(self, VAR_9):
VAR_13 = self.get('/VAR_11/20140103030321{0}/http://example.com/?example=1', VAR_9)
assert VAR_13.status_int == 200
def FUNC_7(self):
VAR_12 = self._get_sample_warc('example.warc.gz')
main(['init', 'foo'])
main(['add', 'foo', VAR_12])
def FUNC_8(self, VAR_9):
VAR_13 = self.get('/foo/20140103030321{0}/http://example.com/?example=1', VAR_9)
assert VAR_13.status_int == 200
def FUNC_9(self):
VAR_12 = self._get_sample_warc('iana.warc.gz')
VAR_14 = self._get_sample_warc('example-extra.warc')
main(['add', 'test', VAR_12, VAR_14])
with open(os.path.join(self.root_dir, VAR_2, 'blah'), 'w+b') as fh:
fh.write(b'foo\n')
with raises(IOError):
main(['add', 'test', 'non-existent-file.warc.gz'])
def FUNC_10(self, VAR_9):
VAR_13 = self.get('/VAR_11/20140126200624{0}/http://www.iana.org/', VAR_9)
assert VAR_13.status_int == 200
def FUNC_11(self):
main(['init', 'nested'])
VAR_15 = os.path.join(self.root_dir, VAR_2, 'nested', VAR_0)
VAR_16 = os.path.join(VAR_15, 'A')
VAR_17 = os.path.join(VAR_15, 'B', 'sub')
os.makedirs(VAR_16)
os.makedirs(VAR_17)
VAR_12 = self._get_sample_warc('iana.warc.gz')
VAR_14 = self._get_sample_warc('example.warc.gz')
shutil.copy2(VAR_12, VAR_16)
shutil.copy2(VAR_14, VAR_17)
main(['index',
'nested',
os.path.join(VAR_16, 'iana.warc.gz'),
os.path.join(VAR_17, 'example.warc.gz')
])
VAR_18 = os.path.join(self.root_dir, VAR_2, 'nested', VAR_1, VAR_3)
with open(VAR_18) as fh:
VAR_42 = fh.read()
assert '1043' in VAR_42
assert '333' in VAR_42
assert 'B/sub/example.warc.gz' in VAR_42
assert '2258' in VAR_42
assert '334' in VAR_42
assert 'A/iana.warc.gz' in VAR_42
def FUNC_12(self, VAR_9):
VAR_13 = self.get('/nested/20140126200624{0}/http://www.iana.org/', VAR_9)
assert VAR_13.status_int == 200
VAR_13 = self.get('/nested/20140103030321{0}/http://example.com/?example=1', VAR_9)
assert VAR_13.status_int == 200
def FUNC_13(self):
VAR_19 = os.path.join(self.root_dir, VAR_2, 'test', VAR_1)
VAR_20 = os.path.join(VAR_19, VAR_3)
VAR_21 = os.path.join(VAR_19, 'index.bak')
shutil.copy(VAR_20, VAR_21)
main(['reindex', 'test'])
with open(VAR_20) as orig_fh:
VAR_43 = orig_fh.read()
with open(VAR_21) as bak_fh:
VAR_44 = bak_fh.read()
assert len(VAR_44.splitlines()) == len(VAR_43.splitlines())
assert VAR_43 == VAR_44
def FUNC_14(self):
VAR_22 = os.path.join(self.root_dir, VAR_2, 'test', 'static', 'abc.js')
with open(VAR_22, 'w+b') as fh:
fh.write(b'/* Some JS File */')
VAR_13 = self.testapp.get('/static/_/VAR_11/abc.js')
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'application/javascript'
VAR_13.charset = 'utf-8'
assert '/* Some JS File */' in VAR_13.text
def FUNC_15(self):
VAR_22 = os.path.join(self.root_dir, 'static', 'foo.css')
with open(VAR_22, 'w+b') as fh:
fh.write(b'/* Some CSS File */')
VAR_13 = self.testapp.get('/static/foo.css')
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/css'
VAR_13.charset = 'utf-8'
assert '/* Some CSS File */' in VAR_13.text
def FUNC_16(self):
main(['metadata', 'foo', '--set', 'title=Collection Title'])
VAR_13 = self.testapp.get('/')
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/html'
VAR_13.charset = 'utf-8'
assert '(Collection Title)' in VAR_13.text
VAR_13 = self.testapp.get('/')
VAR_13.charset = 'utf-8'
assert '(Collection Title)' in VAR_13.text
def FUNC_17(self):
main(['metadata', 'foo', '--set',
'desc=Some Description Text',
'other=custom value'])
with raises(ValueError):
main(['metadata', 'foo', '--set', 'name_only'])
VAR_13 = self.testapp.get('/foo/')
VAR_13.charset = 'utf-8'
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/html'
assert 'Collection Title' in VAR_13.text
assert 'desc' in VAR_13.text
assert 'Some Description Text' in VAR_13.text
assert 'other' in VAR_13.text
assert 'custom value' in VAR_13.text
def FUNC_18(self):
VAR_23 = os.path.join(self.root_dir, VAR_2, 'test',
'templates', 'search.html')
with open(VAR_23, 'w+b') as fh:
fh.write(b'pywb custom search page')
VAR_13 = self.testapp.get('/VAR_11/')
VAR_13.charset = 'utf-8'
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/html'
assert 'pywb custom search page' in VAR_13.text
def FUNC_19(self):
VAR_24 = os.path.join(self.root_dir, VAR_2, 'test',
'templates', 'banner.html')
with open(VAR_24, 'w+b') as fh:
fh.write(b'<div>Custom Banner Here!</div>')
fh.write(b'\n{{ metadata | tojson }}')
def FUNC_20(self, VAR_9):
VAR_13 = self.get('/VAR_11/20140103030321/http://example.com/?example=1', VAR_9)
assert '<div>Custom Banner Here!</div>' in VAR_13.text
def FUNC_21(self):
VAR_23 = os.path.join(self.root_dir, VAR_2, 'test',
'templates', 'search.html')
main(['metadata', 'test', '--set', 'some=value'])
with open(VAR_23, 'w+b') as fh:
fh.write(b'overriden search page: ')
fh.write(b'{{ metadata | tojson }}\n')
self.app.rewriterapp.jinja_env.jinja_env.cache = {}
VAR_13 = self.testapp.get('/VAR_11/')
VAR_13.charset = 'utf-8'
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/html'
assert 'overriden search page: ' in VAR_13.text
print(VAR_13.text)
assert '"some":"value"' in VAR_13.text, VAR_13.text
def FUNC_22(self, VAR_9):
VAR_13 = self.get('/VAR_11/20140103030321{0}/http://example.com/?example=1', VAR_9)
assert '<div>Custom Banner Here!</div>' in VAR_13.text
assert '"some":"value"' in VAR_13.text
def FUNC_23(self, VAR_9):
VAR_13 = self.get('/VAR_11/20140103030321{0}/http://example.com/?example=1', VAR_9)
assert VAR_13.status_int == 200
def FUNC_24(self):
main(['template', 'foo', '--list'])
main(['template', 'foo', '--add', 'query_html'])
assert os.path.isfile(os.path.join(self.root_dir, VAR_2, 'foo', 'templates', 'query.html'))
main(['template', 'foo', '--add', 'query_html', '-f'])
def FUNC_25(self):
main(['template', '--add', 'home_html'])
VAR_25 = os.path.join(self.root_dir, 'templates', 'index.html')
assert os.path.isfile(VAR_25)
with open(VAR_25, 'r+b') as fh:
VAR_45 = fh.read()
VAR_45 = buf.replace(b'Pywb Wayback Machine', b'Custom Test Homepage')
fh.seek(0)
fh.write(VAR_45)
VAR_13 = self.testapp.get('/')
VAR_13.charset = 'utf-8'
assert VAR_13.content_type == 'text/html'
assert 'Custom Test Homepage' in VAR_13.text, VAR_13.text
@patch('pywb.manager.manager.get_input', lambda x: 'y')
def FUNC_26(self):
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'n')
def FUNC_27(self):
with raises(IOError):
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'other')
def FUNC_28(self):
with raises(IOError):
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'no')
def FUNC_29(self):
with raises(IOError):
main(['template', 'foo', '--remove', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'yes')
def FUNC_30(self):
main(['template', 'foo', '--remove', 'query_html'])
def FUNC_31(self):
shutil.rmtree(os.path.join(self.root_dir, VAR_2, 'foo', 'templates'))
VAR_13 = self.testapp.get('/foo/')
VAR_13.charset = 'utf-8'
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/html'
assert 'pywb custom search page' not in VAR_13.text
def FUNC_32(self):
VAR_26 = sys.stdout
VAR_27 = StringIO()
sys.stdout = VAR_27
try:
main(['list'])
finally:
sys.stdout = VAR_26
VAR_28 = sorted(VAR_27.getvalue().splitlines())
assert len(VAR_28) == 4
assert 'Collections:' in VAR_28
assert '- foo' in VAR_28
assert '- nested' in VAR_28
assert '- test' in VAR_28
def FUNC_33(self):
VAR_29 = os.path.join(self.root_dir, '_migrate')
os.mkdir(VAR_29)
cdxindexer_main(['-u', '-o', VAR_29, self._get_sample_warc('')])
cdxindexer_main(['-u', '-9', '-o', VAR_29, self._get_sample_warc('example.warc.gz')])
VAR_30 = os.listdir(VAR_29)
assert all(x.endswith('.cdx') for x in VAR_30)
@patch('pywb.manager.manager.get_input', lambda x: 'blah')
def FUNC_40():
main(['cdx-convert', VAR_29])
FUNC_40()
assert os.listdir(VAR_29) == VAR_30
@patch('pywb.manager.manager.get_input', lambda x: 'y')
def FUNC_41():
main(['cdx-convert', VAR_29])
FUNC_41()
VAR_31 = os.listdir(VAR_29)
assert len(VAR_30) == len(VAR_31)
assert all(x.endswith('.cdxj') for x in VAR_31)
with open(os.path.join(VAR_29, 'iana.cdxj'), 'rb') as fh:
VAR_46 = CDXObject(fh.readline())
assert VAR_46['urlkey'] == 'org,iana)/'
assert VAR_46['timestamp'] == '20140126200624'
assert VAR_46['url'] == 'http://www.iana.org/'
main(['cdx-convert', VAR_29])
def FUNC_34(self):
main(['init', 'auto'])
VAR_32 = os.path.join(self.root_dir, VAR_2, 'auto')
VAR_33 = os.path.join(VAR_32, VAR_0)
VAR_34 = os.path.join(VAR_33, 'sub')
os.makedirs(VAR_34)
def FUNC_42():
try:
time.sleep(1.0)
shutil.copy(self._get_sample_warc('example.warc.gz'), VAR_33)
shutil.copy(self._get_sample_warc('example-extra.warc'), VAR_34)
time.sleep(1.0)
finally:
VAR_35.interval = 0
VAR_35 = AutoIndexer(interval=0.25)
VAR_35.start()
VAR_36 = gevent.spawn(FUNC_42)
VAR_36.join()
VAR_37 = os.path.join(VAR_32, VAR_1, VAR_4)
assert os.path.isfile(VAR_37)
with open(VAR_37, 'r') as fh:
VAR_47 = fh.read()
assert '"example.warc.gz' in VAR_47, index
assert '"sub/example-extra.warc' in VAR_47, index
VAR_38 = os.path.getmtime(VAR_37)
VAR_35.interval = 0.25
VAR_35.start()
os.remove(VAR_37)
VAR_36 = gevent.spawn(FUNC_42)
VAR_36.join()
assert os.path.getmtime(VAR_37) > VAR_38
def FUNC_35(self):
with raises(KeyError):
main(['template', 'foo', '--remove', 'blah_html'])
with raises(IOError):
main(['template', '--remove', 'query_html'])
with raises(IOError):
main(['template', 'foo', '--remove', 'query_html'])
def FUNC_36(self):
VAR_12 = self._get_sample_warc('example.warc.gz')
with raises(IOError):
main(['add', 'bar', VAR_12])
def FUNC_37(self):
VAR_12 = self._get_sample_warc('example.warc.gz')
VAR_39 = os.path.join(self.root_dir, VAR_2, 'test', VAR_0, 'invalid.warc.gz')
with raises(SystemExit):
main(['index', 'test'])
with raises(IOError):
main(['index', 'test', VAR_12])
with raises(IOError):
main(['index', 'test', VAR_39])
def FUNC_38(self):
with raises(ValueError):
main(['init', '../abc%'])
with raises(ValueError):
main(['init', '45^23'])
def FUNC_39(self):
VAR_10 = os.path.join(self.root_dir, VAR_2)
shutil.rmtree(os.path.join(VAR_10, 'foo', 'static'))
VAR_40 = os.path.join(VAR_10, 'foo', VAR_0)
shutil.rmtree(VAR_40)
with raises(IOError):
main(['add', 'foo', 'somewarc'])
VAR_41 = os.path.join(VAR_10, 'foo', VAR_1)
shutil.rmtree(VAR_41)
with open(VAR_41, 'w+b') as fh:
fh.write(b'foo\n')
shutil.rmtree(VAR_10)
with raises(IOError):
main(['list'])
VAR_13 = self.testapp.get('/VAR_11/', status=404)
assert VAR_13.status_int == 404
| from .base_config_test import BaseConfigTest, CollsDirMixin, VAR_9
import os
import tempfile
import shutil
import sys
import webtest
import time
import .gevent
from six import StringIO
import webtest
from pytest import raises
from mock import patch
from pywb import .get_test_dir
from pywb.warcserver.test.testutils import BaseTestClass
from pywb.manager.autoindex import AutoIndexer
from pywb.manager.manager import main
from pywb.indexer.cdxindexer import main as cdxindexer_main
from pywb.warcserver.index.cdxobject import CDXObject
from pywb.apps.frontendapp import FrontEndApp
VAR_0 = 'archive'
VAR_1 = 'indexes'
VAR_2 = '_test_colls'
VAR_3 = 'index.cdxj'
VAR_4 = 'autoindex.cdxj'
class CLASS_0(CollsDirMixin, BaseConfigTest):
@classmethod
def FUNC_0(VAR_5):
super(CLASS_0, VAR_5).setup_class('config_test.yaml')
def FUNC_1(self, VAR_6, VAR_7):
for dir_ in VAR_7:
assert os.path.isdir(os.path.join(VAR_6, dir_))
def FUNC_2(self, VAR_8):
return os.path.join(get_test_dir(), 'warcs', VAR_8)
@patch('pywb.apps.cli.BaseCli.run_gevent', lambda *args, **kwargs: None)
def FUNC_3(self):
from pywb.apps.cli import wayback
wayback(['-p', '0'])
with raises(SystemExit):
wayback(['-a', '-p', '0'])
VAR_10 = os.path.join(self.root_dir, VAR_2)
os.mkdir(VAR_10)
wayback(['-a', '-p', '0', '--auto-interval', '0'])
def FUNC_4(self):
main(['init', 'test'])
VAR_10 = os.path.join(self.root_dir, VAR_2)
assert os.path.isdir(VAR_10)
VAR_11 = os.path.join(VAR_10, 'test')
assert os.path.isdir(VAR_11)
self._check_dirs(VAR_11, [VAR_1, VAR_0, 'static', 'templates'])
def FUNC_5(self):
VAR_12 = self._get_sample_warc('example.warc.gz')
main(['add', 'test', VAR_12])
def FUNC_6(self, VAR_9):
VAR_13 = self.get('/VAR_11/20140103030321{0}/http://example.com/?example=1', VAR_9)
assert VAR_13.status_int == 200
def FUNC_7(self):
VAR_12 = self._get_sample_warc('example.warc.gz')
main(['init', 'foo'])
main(['add', 'foo', VAR_12])
def FUNC_8(self, VAR_9):
VAR_13 = self.get('/foo/20140103030321{0}/http://example.com/?example=1', VAR_9)
assert VAR_13.status_int == 200
def FUNC_9(self):
VAR_12 = self._get_sample_warc('iana.warc.gz')
VAR_14 = self._get_sample_warc('example-extra.warc')
main(['add', 'test', VAR_12, VAR_14])
with open(os.path.join(self.root_dir, VAR_2, 'blah'), 'w+b') as fh:
fh.write(b'foo\n')
with raises(IOError):
main(['add', 'test', 'non-existent-file.warc.gz'])
def FUNC_10(self, VAR_9):
VAR_13 = self.get('/VAR_11/20140126200624{0}/http://www.iana.org/', VAR_9)
assert VAR_13.status_int == 200
def FUNC_11(self):
main(['init', 'nested'])
VAR_15 = os.path.join(self.root_dir, VAR_2, 'nested', VAR_0)
VAR_16 = os.path.join(VAR_15, 'A')
VAR_17 = os.path.join(VAR_15, 'B', 'sub')
os.makedirs(VAR_16)
os.makedirs(VAR_17)
VAR_12 = self._get_sample_warc('iana.warc.gz')
VAR_14 = self._get_sample_warc('example.warc.gz')
shutil.copy2(VAR_12, VAR_16)
shutil.copy2(VAR_14, VAR_17)
main(['index',
'nested',
os.path.join(VAR_16, 'iana.warc.gz'),
os.path.join(VAR_17, 'example.warc.gz')
])
VAR_18 = os.path.join(self.root_dir, VAR_2, 'nested', VAR_1, VAR_3)
with open(VAR_18) as fh:
VAR_42 = fh.read()
assert '1043' in VAR_42
assert '333' in VAR_42
assert 'B/sub/example.warc.gz' in VAR_42
assert '2258' in VAR_42
assert '334' in VAR_42
assert 'A/iana.warc.gz' in VAR_42
def FUNC_12(self, VAR_9):
VAR_13 = self.get('/nested/20140126200624{0}/http://www.iana.org/', VAR_9)
assert VAR_13.status_int == 200
VAR_13 = self.get('/nested/20140103030321{0}/http://example.com/?example=1', VAR_9)
assert VAR_13.status_int == 200
def FUNC_13(self):
VAR_19 = os.path.join(self.root_dir, VAR_2, 'test', VAR_1)
VAR_20 = os.path.join(VAR_19, VAR_3)
VAR_21 = os.path.join(VAR_19, 'index.bak')
shutil.copy(VAR_20, VAR_21)
main(['reindex', 'test'])
with open(VAR_20) as orig_fh:
VAR_43 = orig_fh.read()
with open(VAR_21) as bak_fh:
VAR_44 = bak_fh.read()
assert len(VAR_44.splitlines()) == len(VAR_43.splitlines())
assert VAR_43 == VAR_44
def FUNC_14(self):
VAR_22 = os.path.join(self.root_dir, VAR_2, 'test', 'static', 'abc.js')
with open(VAR_22, 'w+b') as fh:
fh.write(b'/* Some JS File */')
VAR_13 = self.testapp.get('/static/_/VAR_11/abc.js')
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'application/javascript'
VAR_13.charset = 'utf-8'
assert '/* Some JS File */' in VAR_13.text
def FUNC_15(self):
VAR_22 = os.path.join(self.root_dir, 'static', 'foo.css')
with open(VAR_22, 'w+b') as fh:
fh.write(b'/* Some CSS File */')
VAR_13 = self.testapp.get('/static/foo.css')
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/css'
VAR_13.charset = 'utf-8'
assert '/* Some CSS File */' in VAR_13.text
def FUNC_16(self):
main(['metadata', 'foo', '--set', 'title=Collection Title'])
VAR_13 = self.testapp.get('/')
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/html'
VAR_13.charset = 'utf-8'
assert '(Collection Title)' in VAR_13.text
VAR_13 = self.testapp.get('/')
VAR_13.charset = 'utf-8'
assert '(Collection Title)' in VAR_13.text
def FUNC_17(self):
main(['metadata', 'foo', '--set',
'desc=Some Description Text',
'other=custom value'])
with raises(ValueError):
main(['metadata', 'foo', '--set', 'name_only'])
VAR_13 = self.testapp.get('/foo/')
VAR_13.charset = 'utf-8'
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/html'
assert 'Collection Title' in VAR_13.text
assert 'desc' in VAR_13.text
assert 'Some Description Text' in VAR_13.text
assert 'other' in VAR_13.text
assert 'custom value' in VAR_13.text
def FUNC_18(self):
VAR_23 = os.path.join(self.root_dir, VAR_2, 'test',
'templates', 'search.html')
with open(VAR_23, 'w+b') as fh:
fh.write(b'pywb custom search page')
VAR_13 = self.testapp.get('/VAR_11/')
VAR_13.charset = 'utf-8'
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/html'
assert 'pywb custom search page' in VAR_13.text
def FUNC_19(self):
VAR_24 = os.path.join(self.root_dir, VAR_2, 'test',
'templates', 'banner.html')
with open(VAR_24, 'w+b') as fh:
fh.write(b'<div>Custom Banner Here!</div>')
fh.write(b'\n{{ metadata | tojson }}')
def FUNC_20(self, VAR_9):
VAR_13 = self.get('/VAR_11/20140103030321/http://example.com/?example=1', VAR_9)
assert '<div>Custom Banner Here!</div>' in VAR_13.text
def FUNC_21(self):
VAR_23 = os.path.join(self.root_dir, VAR_2, 'test',
'templates', 'search.html')
main(['metadata', 'test', '--set', 'some=value'])
with open(VAR_23, 'w+b') as fh:
fh.write(b'overriden search page: ')
fh.write(b'{{ metadata | tojson }}\n')
self.app.rewriterapp.jinja_env.jinja_env.cache = {}
VAR_13 = self.testapp.get('/VAR_11/')
VAR_13.charset = 'utf-8'
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/html'
assert 'overriden search page: ' in VAR_13.text
assert '{"some":"value"}' in VAR_13.text, VAR_13.text
def FUNC_22(self, VAR_9):
VAR_13 = self.get('/VAR_11/20140103030321{0}/http://example.com/?example=1', VAR_9)
assert '<div>Custom Banner Here!</div>' in VAR_13.text
assert '{"some":"value"}' in VAR_13.text, VAR_13.text
def FUNC_23(self, VAR_9):
VAR_13 = self.get('/VAR_11/20140103030321{0}/http://example.com/?example=1', VAR_9)
assert VAR_13.status_int == 200
def FUNC_24(self):
main(['template', 'foo', '--list'])
main(['template', 'foo', '--add', 'query_html'])
assert os.path.isfile(os.path.join(self.root_dir, VAR_2, 'foo', 'templates', 'query.html'))
main(['template', 'foo', '--add', 'query_html', '-f'])
def FUNC_25(self):
main(['template', '--add', 'home_html'])
VAR_25 = os.path.join(self.root_dir, 'templates', 'index.html')
assert os.path.isfile(VAR_25)
with open(VAR_25, 'r+b') as fh:
VAR_45 = fh.read()
VAR_45 = buf.replace(b'Pywb Wayback Machine', b'Custom Test Homepage')
fh.seek(0)
fh.write(VAR_45)
VAR_13 = self.testapp.get('/')
VAR_13.charset = 'utf-8'
assert VAR_13.content_type == 'text/html'
assert 'Custom Test Homepage' in VAR_13.text, VAR_13.text
@patch('pywb.manager.manager.get_input', lambda x: 'y')
def FUNC_26(self):
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'n')
def FUNC_27(self):
with raises(IOError):
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'other')
def FUNC_28(self):
with raises(IOError):
main(['template', 'foo', '--add', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'no')
def FUNC_29(self):
with raises(IOError):
main(['template', 'foo', '--remove', 'query_html'])
@patch('pywb.manager.manager.get_input', lambda x: 'yes')
def FUNC_30(self):
main(['template', 'foo', '--remove', 'query_html'])
def FUNC_31(self):
shutil.rmtree(os.path.join(self.root_dir, VAR_2, 'foo', 'templates'))
VAR_13 = self.testapp.get('/foo/')
VAR_13.charset = 'utf-8'
assert VAR_13.status_int == 200
assert VAR_13.content_type == 'text/html'
assert 'pywb custom search page' not in VAR_13.text
def FUNC_32(self):
VAR_26 = sys.stdout
VAR_27 = StringIO()
sys.stdout = VAR_27
try:
main(['list'])
finally:
sys.stdout = VAR_26
VAR_28 = sorted(VAR_27.getvalue().splitlines())
assert len(VAR_28) == 4
assert 'Collections:' in VAR_28
assert '- foo' in VAR_28
assert '- nested' in VAR_28
assert '- test' in VAR_28
def FUNC_33(self):
VAR_29 = os.path.join(self.root_dir, '_migrate')
os.mkdir(VAR_29)
cdxindexer_main(['-u', '-o', VAR_29, self._get_sample_warc('')])
cdxindexer_main(['-u', '-9', '-o', VAR_29, self._get_sample_warc('example.warc.gz')])
VAR_30 = os.listdir(VAR_29)
assert all(x.endswith('.cdx') for x in VAR_30)
@patch('pywb.manager.manager.get_input', lambda x: 'blah')
def FUNC_40():
main(['cdx-convert', VAR_29])
FUNC_40()
assert os.listdir(VAR_29) == VAR_30
@patch('pywb.manager.manager.get_input', lambda x: 'y')
def FUNC_41():
main(['cdx-convert', VAR_29])
FUNC_41()
VAR_31 = os.listdir(VAR_29)
assert len(VAR_30) == len(VAR_31)
assert all(x.endswith('.cdxj') for x in VAR_31)
with open(os.path.join(VAR_29, 'iana.cdxj'), 'rb') as fh:
VAR_46 = CDXObject(fh.readline())
assert VAR_46['urlkey'] == 'org,iana)/'
assert VAR_46['timestamp'] == '20140126200624'
assert VAR_46['url'] == 'http://www.iana.org/'
main(['cdx-convert', VAR_29])
def FUNC_34(self):
main(['init', 'auto'])
VAR_32 = os.path.join(self.root_dir, VAR_2, 'auto')
VAR_33 = os.path.join(VAR_32, VAR_0)
VAR_34 = os.path.join(VAR_33, 'sub')
os.makedirs(VAR_34)
def FUNC_42():
try:
time.sleep(1.0)
shutil.copy(self._get_sample_warc('example.warc.gz'), VAR_33)
shutil.copy(self._get_sample_warc('example-extra.warc'), VAR_34)
time.sleep(1.0)
finally:
VAR_35.interval = 0
VAR_35 = AutoIndexer(interval=0.25)
VAR_35.start()
VAR_36 = gevent.spawn(FUNC_42)
VAR_36.join()
VAR_37 = os.path.join(VAR_32, VAR_1, VAR_4)
assert os.path.isfile(VAR_37)
with open(VAR_37, 'r') as fh:
VAR_47 = fh.read()
assert '"example.warc.gz' in VAR_47, index
assert '"sub/example-extra.warc' in VAR_47, index
VAR_38 = os.path.getmtime(VAR_37)
VAR_35.interval = 0.25
VAR_35.start()
os.remove(VAR_37)
VAR_36 = gevent.spawn(FUNC_42)
VAR_36.join()
assert os.path.getmtime(VAR_37) > VAR_38
def FUNC_35(self):
with raises(KeyError):
main(['template', 'foo', '--remove', 'blah_html'])
with raises(IOError):
main(['template', '--remove', 'query_html'])
with raises(IOError):
main(['template', 'foo', '--remove', 'query_html'])
def FUNC_36(self):
VAR_12 = self._get_sample_warc('example.warc.gz')
with raises(IOError):
main(['add', 'bar', VAR_12])
def FUNC_37(self):
VAR_12 = self._get_sample_warc('example.warc.gz')
VAR_39 = os.path.join(self.root_dir, VAR_2, 'test', VAR_0, 'invalid.warc.gz')
with raises(SystemExit):
main(['index', 'test'])
with raises(IOError):
main(['index', 'test', VAR_12])
with raises(IOError):
main(['index', 'test', VAR_39])
def FUNC_38(self):
with raises(ValueError):
main(['init', '../abc%'])
with raises(ValueError):
main(['init', '45^23'])
def FUNC_39(self):
VAR_10 = os.path.join(self.root_dir, VAR_2)
shutil.rmtree(os.path.join(VAR_10, 'foo', 'static'))
VAR_40 = os.path.join(VAR_10, 'foo', VAR_0)
shutil.rmtree(VAR_40)
with raises(IOError):
main(['add', 'foo', 'somewarc'])
VAR_41 = os.path.join(VAR_10, 'foo', VAR_1)
shutil.rmtree(VAR_41)
with open(VAR_41, 'w+b') as fh:
fh.write(b'foo\n')
shutil.rmtree(VAR_10)
with raises(IOError):
main(['list'])
VAR_13 = self.testapp.get('/VAR_11/', status=404)
assert VAR_13.status_int == 404
| [
2,
7,
9,
12,
14,
18,
21,
24,
27,
29,
30,
31,
35,
38,
39,
40,
45,
49,
52,
59,
61,
62,
65,
68,
70,
75,
78,
81,
83,
88,
90,
94,
99,
101,
103,
107,
113,
115,
116,
119,
122,
124,
127,
133,
135,
139,
142,
145,
148,
154,
158,
162,
166,
170,
173,
178,
182,
184,
186,
189,
192,
195,
200,
203,
209,
214,
217,
223,
229,
235,
236,
240,
245,
248,
253,
255,
258,
261,
267,
270,
276,
280,
283,
287,
291,
300,
301,
303,
307,
308,
310,
318,
325,
329,
334,
336,
337,
340,
341,
343,
345,
347,
350,
356,
361,
367,
368,
375,
382,
387,
390,
393,
395,
400,
406,
413,
418,
425,
430,
432,
434,
435,
437,
440,
444,
447,
451,
454,
457,
463,
464,
465,
467,
472,
475,
484,
487,
490,
493,
496,
499,
501,
502,
505,
507,
508,
509,
510,
512,
513,
514,
515,
517,
518,
520,
526,
529,
530,
533,
534,
537,
542,
545,
549,
550,
553,
554,
557,
558,
561,
567,
570,
576,
577,
579,
580,
583,
586,
587,
590,
591,
594,
596,
597,
600,
601,
604,
605,
55,
56,
57,
72,
73,
85,
86,
96,
97,
109,
110,
129,
130,
131,
132,
175,
176,
177,
197,
198,
211,
212,
225,
226,
227,
263,
264,
278,
279,
293,
294,
295,
296,
297,
320,
321,
331,
332,
333,
364,
365,
371,
372,
378,
379,
385,
386,
397,
398,
408,
409,
427,
428,
522,
523,
524,
525,
539,
540,
563,
564,
572,
573,
574
] | [
2,
7,
9,
12,
14,
18,
21,
24,
27,
29,
30,
31,
35,
38,
39,
40,
45,
49,
52,
59,
61,
62,
65,
68,
70,
75,
78,
81,
83,
88,
90,
94,
99,
101,
103,
107,
113,
115,
116,
119,
122,
124,
127,
133,
135,
139,
142,
145,
148,
154,
158,
162,
166,
170,
173,
178,
182,
184,
186,
189,
192,
195,
200,
203,
209,
214,
217,
223,
229,
235,
236,
240,
245,
248,
253,
255,
258,
261,
267,
270,
276,
280,
283,
287,
291,
300,
301,
303,
307,
308,
310,
316,
318,
324,
326,
330,
335,
337,
338,
341,
342,
344,
346,
348,
351,
357,
362,
368,
369,
376,
383,
388,
391,
394,
396,
401,
407,
414,
419,
426,
431,
433,
435,
436,
438,
441,
445,
448,
452,
455,
458,
464,
465,
466,
468,
473,
476,
485,
488,
491,
494,
497,
500,
502,
503,
506,
508,
509,
510,
511,
513,
514,
515,
516,
518,
519,
521,
527,
530,
531,
534,
535,
538,
543,
546,
550,
551,
554,
555,
558,
559,
562,
568,
571,
577,
578,
580,
581,
584,
587,
588,
591,
592,
595,
597,
598,
601,
602,
605,
606,
55,
56,
57,
72,
73,
85,
86,
96,
97,
109,
110,
129,
130,
131,
132,
175,
176,
177,
197,
198,
211,
212,
225,
226,
227,
263,
264,
278,
279,
293,
294,
295,
296,
297,
320,
321,
332,
333,
334,
365,
366,
372,
373,
379,
380,
386,
387,
398,
399,
409,
410,
428,
429,
523,
524,
525,
526,
540,
541,
564,
565,
573,
574,
575
] |
1CWE-79
| from __future__ import unicode_literals
import base64
import calendar
import datetime
import re
import sys
try:
from urllib import parse as urllib_parse
except ImportError: # Python 2
import urllib as urllib_parse
import urlparse
urllib_parse.urlparse = urlparse.urlparse
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_str, force_text
from django.utils.functional import allow_lazy
from django.utils import six
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
RFC1123_DATE = re.compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = re.compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))
def urlquote(url, safe='/'):
"""
A version of Python's urllib.quote() function that can operate on unicode
strings. The url is first UTF-8 encoded before quoting. The returned string
can safely be used as part of an argument to a subsequent iri_to_uri() call
without double-quoting occurring.
"""
return force_text(urllib_parse.quote(force_str(url), force_str(safe)))
urlquote = allow_lazy(urlquote, six.text_type)
def urlquote_plus(url, safe=''):
"""
A version of Python's urllib.quote_plus() function that can operate on
unicode strings. The url is first UTF-8 encoded before quoting. The
returned string can safely be used as part of an argument to a subsequent
iri_to_uri() call without double-quoting occurring.
"""
return force_text(urllib_parse.quote_plus(force_str(url), force_str(safe)))
urlquote_plus = allow_lazy(urlquote_plus, six.text_type)
def urlunquote(quoted_url):
"""
A wrapper for Python's urllib.unquote() function that can operate on
the result of django.utils.http.urlquote().
"""
return force_text(urllib_parse.unquote(force_str(quoted_url)))
urlunquote = allow_lazy(urlunquote, six.text_type)
def urlunquote_plus(quoted_url):
"""
A wrapper for Python's urllib.unquote_plus() function that can operate on
the result of django.utils.http.urlquote_plus().
"""
return force_text(urllib_parse.unquote_plus(force_str(quoted_url)))
urlunquote_plus = allow_lazy(urlunquote_plus, six.text_type)
def urlencode(query, doseq=0):
"""
A version of Python's urllib.urlencode() function that can operate on
unicode strings. The parameters are first cast to UTF-8 encoded strings and
then encoded as per normal.
"""
if isinstance(query, MultiValueDict):
query = query.lists()
elif hasattr(query, 'items'):
query = query.items()
return urllib_parse.urlencode(
[(force_str(k),
[force_str(i) for i in v] if isinstance(v, (list,tuple)) else force_str(v))
for k, v in query],
doseq)
def cookie_date(epoch_seconds=None):
"""
Formats the time to ensure compatibility with Netscape's cookie standard.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
"""
rfcdate = formatdate(epoch_seconds)
return '%s-%s-%s GMT' % (rfcdate[:7], rfcdate[8:11], rfcdate[12:25])
def http_date(epoch_seconds=None):
"""
Formats the time to match the RFC1123 date format as specified by HTTP
RFC2616 section 3.3.1.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
"""
return formatdate(epoch_seconds, usegmt=True)
def parse_http_date(date):
"""
Parses a date format as specified by HTTP RFC2616 section 3.3.1.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Returns an integer expressed in seconds since the epoch, in UTC.
"""
# emails.Util.parsedate does the job for RFC1123 dates; unfortunately
# RFC2616 makes it mandatory to support RFC850 dates too. So we roll
# our own RFC-compliant parsing.
for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
m = regex.match(date)
if m is not None:
break
else:
raise ValueError("%r is not in a valid HTTP date format" % date)
try:
year = int(m.group('year'))
if year < 100:
if year < 70:
year += 2000
else:
year += 1900
month = MONTHS.index(m.group('mon').lower()) + 1
day = int(m.group('day'))
hour = int(m.group('hour'))
min = int(m.group('min'))
sec = int(m.group('sec'))
result = datetime.datetime(year, month, day, hour, min, sec)
return calendar.timegm(result.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % date), sys.exc_info()[2])
def parse_http_date_safe(date):
"""
Same as parse_http_date, but returns None if the input is invalid.
"""
try:
return parse_http_date(date)
except Exception:
pass
# Base 36 functions: useful for generating compact URLs
def base36_to_int(s):
"""
Converts a base 36 string to an ``int``. Raises ``ValueError` if the
input won't fit into an int.
"""
# To prevent overconsumption of server resources, reject any
# base36 string that is long than 13 base36 digits (13 digits
# is sufficient to base36-encode any 64-bit integer)
if len(s) > 13:
raise ValueError("Base36 input too large")
value = int(s, 36)
# ... then do a final check that the value will fit into an int to avoid
# returning a long (#15067). The long type was removed in Python 3.
if not six.PY3 and value > sys.maxint:
raise ValueError("Base36 input too large")
return value
def int_to_base36(i):
"""
Converts an integer to a base36 string
"""
digits = "0123456789abcdefghijklmnopqrstuvwxyz"
factor = 0
if i < 0:
raise ValueError("Negative base36 conversion input.")
if not six.PY3:
if not isinstance(i, six.integer_types):
raise TypeError("Non-integer base36 conversion input.")
if i > sys.maxint:
raise ValueError("Base36 conversion input too large.")
# Find starting factor
while True:
factor += 1
if i < 36 ** factor:
factor -= 1
break
base36 = []
# Construct base36 representation
while factor >= 0:
j = 36 ** factor
base36.append(digits[i // j])
i = i % j
factor -= 1
return ''.join(base36)
def urlsafe_base64_encode(s):
"""
Encodes a bytestring in base64 for use in URLs, stripping any trailing
equal signs.
"""
return base64.urlsafe_b64encode(s).rstrip(b'\n=')
def urlsafe_base64_decode(s):
"""
Decodes a base64 encoded string, adding back any trailing equal signs that
might have been stripped.
"""
s = s.encode('utf-8') # base64encode should only return ASCII.
try:
return base64.urlsafe_b64decode(s.ljust(len(s) + len(s) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def parse_etags(etag_str):
"""
Parses a string with one or several etags passed in If-None-Match and
If-Match headers by the rules in RFC 2616. Returns a list of etags
without surrounding double quotes (") and unescaped from \<CHAR>.
"""
etags = ETAG_MATCH.findall(etag_str)
if not etags:
# etag_str has wrong format, treat it as an opaque string then
return [etag_str]
etags = [e.encode('ascii').decode('unicode_escape') for e in etags]
return etags
def quote_etag(etag):
"""
Wraps a string in double quotes escaping contents as necessary.
"""
return '"%s"' % etag.replace('\\', '\\\\').replace('"', '\\"')
def same_origin(url1, url2):
"""
Checks if two URLs are 'same-origin'
"""
p1, p2 = urllib_parse.urlparse(url1), urllib_parse.urlparse(url2)
try:
return (p1.scheme, p1.hostname, p1.port) == (p2.scheme, p2.hostname, p2.port)
except ValueError:
return False
def is_safe_url(url, host=None):
"""
Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
a different host).
Always returns ``False`` on an empty url.
"""
if not url:
return False
netloc = urllib_parse.urlparse(url)[1]
return not netloc or netloc == host
| from __future__ import unicode_literals
import base64
import calendar
import datetime
import re
import sys
try:
from urllib import parse as urllib_parse
except ImportError: # Python 2
import urllib as urllib_parse
import urlparse
urllib_parse.urlparse = urlparse.urlparse
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_str, force_text
from django.utils.functional import allow_lazy
from django.utils import six
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
RFC1123_DATE = re.compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = re.compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))
def urlquote(url, safe='/'):
"""
A version of Python's urllib.quote() function that can operate on unicode
strings. The url is first UTF-8 encoded before quoting. The returned string
can safely be used as part of an argument to a subsequent iri_to_uri() call
without double-quoting occurring.
"""
return force_text(urllib_parse.quote(force_str(url), force_str(safe)))
urlquote = allow_lazy(urlquote, six.text_type)
def urlquote_plus(url, safe=''):
"""
A version of Python's urllib.quote_plus() function that can operate on
unicode strings. The url is first UTF-8 encoded before quoting. The
returned string can safely be used as part of an argument to a subsequent
iri_to_uri() call without double-quoting occurring.
"""
return force_text(urllib_parse.quote_plus(force_str(url), force_str(safe)))
urlquote_plus = allow_lazy(urlquote_plus, six.text_type)
def urlunquote(quoted_url):
"""
A wrapper for Python's urllib.unquote() function that can operate on
the result of django.utils.http.urlquote().
"""
return force_text(urllib_parse.unquote(force_str(quoted_url)))
urlunquote = allow_lazy(urlunquote, six.text_type)
def urlunquote_plus(quoted_url):
"""
A wrapper for Python's urllib.unquote_plus() function that can operate on
the result of django.utils.http.urlquote_plus().
"""
return force_text(urllib_parse.unquote_plus(force_str(quoted_url)))
urlunquote_plus = allow_lazy(urlunquote_plus, six.text_type)
def urlencode(query, doseq=0):
"""
A version of Python's urllib.urlencode() function that can operate on
unicode strings. The parameters are first cast to UTF-8 encoded strings and
then encoded as per normal.
"""
if isinstance(query, MultiValueDict):
query = query.lists()
elif hasattr(query, 'items'):
query = query.items()
return urllib_parse.urlencode(
[(force_str(k),
[force_str(i) for i in v] if isinstance(v, (list,tuple)) else force_str(v))
for k, v in query],
doseq)
def cookie_date(epoch_seconds=None):
"""
Formats the time to ensure compatibility with Netscape's cookie standard.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
"""
rfcdate = formatdate(epoch_seconds)
return '%s-%s-%s GMT' % (rfcdate[:7], rfcdate[8:11], rfcdate[12:25])
def http_date(epoch_seconds=None):
"""
Formats the time to match the RFC1123 date format as specified by HTTP
RFC2616 section 3.3.1.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
"""
return formatdate(epoch_seconds, usegmt=True)
def parse_http_date(date):
"""
Parses a date format as specified by HTTP RFC2616 section 3.3.1.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Returns an integer expressed in seconds since the epoch, in UTC.
"""
# emails.Util.parsedate does the job for RFC1123 dates; unfortunately
# RFC2616 makes it mandatory to support RFC850 dates too. So we roll
# our own RFC-compliant parsing.
for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
m = regex.match(date)
if m is not None:
break
else:
raise ValueError("%r is not in a valid HTTP date format" % date)
try:
year = int(m.group('year'))
if year < 100:
if year < 70:
year += 2000
else:
year += 1900
month = MONTHS.index(m.group('mon').lower()) + 1
day = int(m.group('day'))
hour = int(m.group('hour'))
min = int(m.group('min'))
sec = int(m.group('sec'))
result = datetime.datetime(year, month, day, hour, min, sec)
return calendar.timegm(result.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % date), sys.exc_info()[2])
def parse_http_date_safe(date):
"""
Same as parse_http_date, but returns None if the input is invalid.
"""
try:
return parse_http_date(date)
except Exception:
pass
# Base 36 functions: useful for generating compact URLs
def base36_to_int(s):
"""
Converts a base 36 string to an ``int``. Raises ``ValueError` if the
input won't fit into an int.
"""
# To prevent overconsumption of server resources, reject any
# base36 string that is long than 13 base36 digits (13 digits
# is sufficient to base36-encode any 64-bit integer)
if len(s) > 13:
raise ValueError("Base36 input too large")
value = int(s, 36)
# ... then do a final check that the value will fit into an int to avoid
# returning a long (#15067). The long type was removed in Python 3.
if not six.PY3 and value > sys.maxint:
raise ValueError("Base36 input too large")
return value
def int_to_base36(i):
"""
Converts an integer to a base36 string
"""
digits = "0123456789abcdefghijklmnopqrstuvwxyz"
factor = 0
if i < 0:
raise ValueError("Negative base36 conversion input.")
if not six.PY3:
if not isinstance(i, six.integer_types):
raise TypeError("Non-integer base36 conversion input.")
if i > sys.maxint:
raise ValueError("Base36 conversion input too large.")
# Find starting factor
while True:
factor += 1
if i < 36 ** factor:
factor -= 1
break
base36 = []
# Construct base36 representation
while factor >= 0:
j = 36 ** factor
base36.append(digits[i // j])
i = i % j
factor -= 1
return ''.join(base36)
def urlsafe_base64_encode(s):
"""
Encodes a bytestring in base64 for use in URLs, stripping any trailing
equal signs.
"""
return base64.urlsafe_b64encode(s).rstrip(b'\n=')
def urlsafe_base64_decode(s):
"""
Decodes a base64 encoded string, adding back any trailing equal signs that
might have been stripped.
"""
s = s.encode('utf-8') # base64encode should only return ASCII.
try:
return base64.urlsafe_b64decode(s.ljust(len(s) + len(s) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def parse_etags(etag_str):
"""
Parses a string with one or several etags passed in If-None-Match and
If-Match headers by the rules in RFC 2616. Returns a list of etags
without surrounding double quotes (") and unescaped from \<CHAR>.
"""
etags = ETAG_MATCH.findall(etag_str)
if not etags:
# etag_str has wrong format, treat it as an opaque string then
return [etag_str]
etags = [e.encode('ascii').decode('unicode_escape') for e in etags]
return etags
def quote_etag(etag):
"""
Wraps a string in double quotes escaping contents as necessary.
"""
return '"%s"' % etag.replace('\\', '\\\\').replace('"', '\\"')
def same_origin(url1, url2):
"""
Checks if two URLs are 'same-origin'
"""
p1, p2 = urllib_parse.urlparse(url1), urllib_parse.urlparse(url2)
try:
return (p1.scheme, p1.hostname, p1.port) == (p2.scheme, p2.hostname, p2.port)
except ValueError:
return False
def is_safe_url(url, host=None):
"""
Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
a different host and uses a safe scheme).
Always returns ``False`` on an empty url.
"""
if not url:
return False
url_info = urllib_parse.urlparse(url)
return (not url_info.netloc or url_info.netloc == host) and \
(not url_info.scheme or url_info.scheme in ['http', 'https'])
| xss | {
"code": [
" netloc = urllib_parse.urlparse(url)[1]",
" return not netloc or netloc == host"
],
"line_no": [
261,
262
]
} | {
"code": [
" url_info = urllib_parse.urlparse(url)",
" return (not url_info.netloc or url_info.netloc == host) and \\"
],
"line_no": [
261,
262
]
} | from __future__ import unicode_literals
import base64
import calendar
import .datetime
import re
import .sys
try:
from urllib import parse as urllib_parse
except ImportError: # Python 2
import .urllib as urllib_parse
import .urlparse
urllib_parse.urlparse = urlparse.urlparse
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_str, force_text
from django.utils.functional import allow_lazy
from django.utils import .six
VAR_0 = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
VAR_1 = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
VAR_2 = r'(?P<VAR_41>\d{2})'
VAR_3 = r'(?P<VAR_41>[ \d]\d)'
VAR_4 = r'(?P<mon>\w{3})'
VAR_5 = r'(?P<VAR_39>\d{4})'
VAR_6 = r'(?P<VAR_39>\d{2})'
VAR_7 = r'(?P<VAR_42>\d{2}):(?P<VAR_43>\d{2}):(?P<VAR_44>\d{2})'
VAR_8 = re.compile(r'^\w{3}, %VAR_22 %VAR_22 %VAR_22 %s GMT$' % (VAR_2, VAR_4, VAR_5, VAR_7))
VAR_9 = re.compile(r'^\w{6,9}, %VAR_22-%VAR_22-%VAR_22 %s GMT$' % (VAR_2, VAR_4, VAR_6, VAR_7))
VAR_10 = re.compile(r'^\w{3} %VAR_22 %VAR_22 %VAR_22 %s$' % (VAR_4, VAR_3, VAR_7, VAR_5))
def VAR_13(VAR_11, VAR_12='/'):
return force_text(urllib_parse.quote(force_str(VAR_11), force_str(VAR_12)))
VAR_13 = allow_lazy(VAR_13, six.text_type)
def VAR_14(VAR_11, VAR_12=''):
return force_text(urllib_parse.quote_plus(force_str(VAR_11), force_str(VAR_12)))
VAR_14 = allow_lazy(VAR_14, six.text_type)
def VAR_16(VAR_15):
return force_text(urllib_parse.unquote(force_str(VAR_15)))
VAR_16 = allow_lazy(VAR_16, six.text_type)
def VAR_17(VAR_15):
return force_text(urllib_parse.unquote_plus(force_str(VAR_15)))
VAR_17 = allow_lazy(VAR_17, six.text_type)
def FUNC_4(VAR_18, VAR_19=0):
if isinstance(VAR_18, MultiValueDict):
VAR_18 = query.lists()
elif hasattr(VAR_18, 'items'):
VAR_18 = query.items()
return urllib_parse.urlencode(
[(force_str(k),
[force_str(VAR_23) for VAR_23 in v] if isinstance(v, (list,tuple)) else force_str(v))
for k, v in VAR_18],
VAR_19)
def FUNC_5(VAR_20=None):
VAR_29 = formatdate(VAR_20)
return '%VAR_22-%s-%VAR_22 GMT' % (VAR_29[:7], VAR_29[8:11], VAR_29[12:25])
def FUNC_6(VAR_20=None):
return formatdate(VAR_20, usegmt=True)
def FUNC_7(VAR_21):
for regex in VAR_8, VAR_9, VAR_10:
VAR_38 = regex.match(VAR_21)
if VAR_38 is not None:
break
else:
raise ValueError("%r is not in a valid HTTP VAR_21 format" % VAR_21)
try:
VAR_39 = int(VAR_38.group('year'))
if VAR_39 < 100:
if VAR_39 < 70:
VAR_39 += 2000
else:
VAR_39 += 1900
VAR_40 = VAR_1.index(VAR_38.group('mon').lower()) + 1
VAR_41 = int(VAR_38.group('day'))
VAR_42 = int(VAR_38.group('hour'))
VAR_43 = int(VAR_38.group('min'))
VAR_44 = int(VAR_38.group('sec'))
VAR_45 = datetime.datetime(VAR_39, VAR_40, VAR_41, VAR_42, VAR_43, VAR_44)
return calendar.timegm(VAR_45.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % VAR_21), sys.exc_info()[2])
def FUNC_8(VAR_21):
try:
return FUNC_7(VAR_21)
except Exception:
pass
def FUNC_9(VAR_22):
if len(VAR_22) > 13:
raise ValueError("Base36 input too large")
VAR_30 = int(VAR_22, 36)
if not six.PY3 and VAR_30 > sys.maxint:
raise ValueError("Base36 input too large")
return VAR_30
def FUNC_10(VAR_23):
VAR_31 = "0123456789abcdefghijklmnopqrstuvwxyz"
VAR_32 = 0
if VAR_23 < 0:
raise ValueError("Negative VAR_33 conversion input.")
if not six.PY3:
if not isinstance(VAR_23, six.integer_types):
raise TypeError("Non-integer VAR_33 conversion input.")
if VAR_23 > sys.maxint:
raise ValueError("Base36 conversion input too large.")
while True:
VAR_32 += 1
if VAR_23 < 36 ** VAR_32:
factor -= 1
break
VAR_33 = []
while VAR_32 >= 0:
VAR_46 = 36 ** VAR_32
VAR_33.append(VAR_31[VAR_23 // VAR_46])
VAR_23 = VAR_23 % VAR_46
VAR_32 -= 1
return ''.join(VAR_33)
def FUNC_11(VAR_22):
return base64.urlsafe_b64encode(VAR_22).rstrip(b'\n=')
def FUNC_12(VAR_22):
VAR_22 = s.encode('utf-8') # base64encode should only return ASCII.
try:
return base64.urlsafe_b64decode(VAR_22.ljust(len(VAR_22) + len(VAR_22) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def FUNC_13(VAR_24):
VAR_34 = VAR_0.findall(VAR_24)
if not VAR_34:
return [VAR_24]
VAR_34 = [e.encode('ascii').decode('unicode_escape') for e in VAR_34]
return VAR_34
def FUNC_14(VAR_25):
return '"%s"' % VAR_25.replace('\\', '\\\\').replace('"', '\\"')
def FUNC_15(VAR_26, VAR_27):
VAR_35, VAR_36 = urllib_parse.urlparse(VAR_26), urllib_parse.urlparse(VAR_27)
try:
return (VAR_35.scheme, VAR_35.hostname, VAR_35.port) == (VAR_36.scheme, VAR_36.hostname, VAR_36.port)
except ValueError:
return False
def FUNC_16(VAR_11, VAR_28=None):
if not VAR_11:
return False
VAR_37 = urllib_parse.urlparse(VAR_11)[1]
return not VAR_37 or VAR_37 == VAR_28
| from __future__ import unicode_literals
import base64
import calendar
import .datetime
import re
import .sys
try:
from urllib import parse as urllib_parse
except ImportError: # Python 2
import .urllib as urllib_parse
import .urlparse
urllib_parse.urlparse = urlparse.urlparse
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_str, force_text
from django.utils.functional import allow_lazy
from django.utils import .six
VAR_0 = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
VAR_1 = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
VAR_2 = r'(?P<VAR_41>\d{2})'
VAR_3 = r'(?P<VAR_41>[ \d]\d)'
VAR_4 = r'(?P<mon>\w{3})'
VAR_5 = r'(?P<VAR_39>\d{4})'
VAR_6 = r'(?P<VAR_39>\d{2})'
VAR_7 = r'(?P<VAR_42>\d{2}):(?P<VAR_43>\d{2}):(?P<VAR_44>\d{2})'
VAR_8 = re.compile(r'^\w{3}, %VAR_22 %VAR_22 %VAR_22 %s GMT$' % (VAR_2, VAR_4, VAR_5, VAR_7))
VAR_9 = re.compile(r'^\w{6,9}, %VAR_22-%VAR_22-%VAR_22 %s GMT$' % (VAR_2, VAR_4, VAR_6, VAR_7))
VAR_10 = re.compile(r'^\w{3} %VAR_22 %VAR_22 %VAR_22 %s$' % (VAR_4, VAR_3, VAR_7, VAR_5))
def VAR_13(VAR_11, VAR_12='/'):
return force_text(urllib_parse.quote(force_str(VAR_11), force_str(VAR_12)))
VAR_13 = allow_lazy(VAR_13, six.text_type)
def VAR_14(VAR_11, VAR_12=''):
return force_text(urllib_parse.quote_plus(force_str(VAR_11), force_str(VAR_12)))
VAR_14 = allow_lazy(VAR_14, six.text_type)
def VAR_16(VAR_15):
return force_text(urllib_parse.unquote(force_str(VAR_15)))
VAR_16 = allow_lazy(VAR_16, six.text_type)
def VAR_17(VAR_15):
return force_text(urllib_parse.unquote_plus(force_str(VAR_15)))
VAR_17 = allow_lazy(VAR_17, six.text_type)
def FUNC_4(VAR_18, VAR_19=0):
if isinstance(VAR_18, MultiValueDict):
VAR_18 = query.lists()
elif hasattr(VAR_18, 'items'):
VAR_18 = query.items()
return urllib_parse.urlencode(
[(force_str(k),
[force_str(VAR_23) for VAR_23 in v] if isinstance(v, (list,tuple)) else force_str(v))
for k, v in VAR_18],
VAR_19)
def FUNC_5(VAR_20=None):
VAR_29 = formatdate(VAR_20)
return '%VAR_22-%s-%VAR_22 GMT' % (VAR_29[:7], VAR_29[8:11], VAR_29[12:25])
def FUNC_6(VAR_20=None):
return formatdate(VAR_20, usegmt=True)
def FUNC_7(VAR_21):
for regex in VAR_8, VAR_9, VAR_10:
VAR_38 = regex.match(VAR_21)
if VAR_38 is not None:
break
else:
raise ValueError("%r is not in a valid HTTP VAR_21 format" % VAR_21)
try:
VAR_39 = int(VAR_38.group('year'))
if VAR_39 < 100:
if VAR_39 < 70:
VAR_39 += 2000
else:
VAR_39 += 1900
VAR_40 = VAR_1.index(VAR_38.group('mon').lower()) + 1
VAR_41 = int(VAR_38.group('day'))
VAR_42 = int(VAR_38.group('hour'))
VAR_43 = int(VAR_38.group('min'))
VAR_44 = int(VAR_38.group('sec'))
VAR_45 = datetime.datetime(VAR_39, VAR_40, VAR_41, VAR_42, VAR_43, VAR_44)
return calendar.timegm(VAR_45.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % VAR_21), sys.exc_info()[2])
def FUNC_8(VAR_21):
try:
return FUNC_7(VAR_21)
except Exception:
pass
def FUNC_9(VAR_22):
if len(VAR_22) > 13:
raise ValueError("Base36 input too large")
VAR_30 = int(VAR_22, 36)
if not six.PY3 and VAR_30 > sys.maxint:
raise ValueError("Base36 input too large")
return VAR_30
def FUNC_10(VAR_23):
VAR_31 = "0123456789abcdefghijklmnopqrstuvwxyz"
VAR_32 = 0
if VAR_23 < 0:
raise ValueError("Negative VAR_33 conversion input.")
if not six.PY3:
if not isinstance(VAR_23, six.integer_types):
raise TypeError("Non-integer VAR_33 conversion input.")
if VAR_23 > sys.maxint:
raise ValueError("Base36 conversion input too large.")
while True:
VAR_32 += 1
if VAR_23 < 36 ** VAR_32:
factor -= 1
break
VAR_33 = []
while VAR_32 >= 0:
VAR_46 = 36 ** VAR_32
VAR_33.append(VAR_31[VAR_23 // VAR_46])
VAR_23 = VAR_23 % VAR_46
VAR_32 -= 1
return ''.join(VAR_33)
def FUNC_11(VAR_22):
return base64.urlsafe_b64encode(VAR_22).rstrip(b'\n=')
def FUNC_12(VAR_22):
VAR_22 = s.encode('utf-8') # base64encode should only return ASCII.
try:
return base64.urlsafe_b64decode(VAR_22.ljust(len(VAR_22) + len(VAR_22) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def FUNC_13(VAR_24):
VAR_34 = VAR_0.findall(VAR_24)
if not VAR_34:
return [VAR_24]
VAR_34 = [e.encode('ascii').decode('unicode_escape') for e in VAR_34]
return VAR_34
def FUNC_14(VAR_25):
return '"%s"' % VAR_25.replace('\\', '\\\\').replace('"', '\\"')
def FUNC_15(VAR_26, VAR_27):
VAR_35, VAR_36 = urllib_parse.urlparse(VAR_26), urllib_parse.urlparse(VAR_27)
try:
return (VAR_35.scheme, VAR_35.hostname, VAR_35.port) == (VAR_36.scheme, VAR_36.hostname, VAR_36.port)
except ValueError:
return False
def FUNC_16(VAR_11, VAR_28=None):
if not VAR_11:
return False
VAR_37 = urllib_parse.urlparse(VAR_11)
return (not VAR_37.netloc or VAR_37.netloc == VAR_28) and \
(not VAR_37.scheme or VAR_37.scheme in ['http', 'https'])
| [
2,
14,
17,
22,
24,
35,
45,
55,
63,
71,
87,
91,
95,
100,
105,
109,
113,
117,
120,
123,
124,
125,
148,
157,
158,
159,
165,
166,
167,
171,
172,
176,
190,
197,
204,
211,
222,
231,
235,
241,
251,
256,
263,
37,
38,
39,
40,
41,
42,
47,
48,
49,
50,
51,
52,
57,
58,
59,
60,
65,
66,
67,
68,
73,
74,
75,
76,
77,
89,
90,
91,
92,
93,
94,
95,
96,
97,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
115,
116,
117,
118,
119,
120,
121,
122,
150,
151,
152,
161,
162,
163,
164,
178,
179,
180,
206,
207,
208,
209,
213,
214,
215,
216,
224,
225,
226,
227,
228,
237,
238,
239,
243,
244,
245,
253,
254,
255,
256,
257,
258
] | [
2,
14,
17,
22,
24,
35,
45,
55,
63,
71,
87,
91,
95,
100,
105,
109,
113,
117,
120,
123,
124,
125,
148,
157,
158,
159,
165,
166,
167,
171,
172,
176,
190,
197,
204,
211,
222,
231,
235,
241,
251,
256,
264,
37,
38,
39,
40,
41,
42,
47,
48,
49,
50,
51,
52,
57,
58,
59,
60,
65,
66,
67,
68,
73,
74,
75,
76,
77,
89,
90,
91,
92,
93,
94,
95,
96,
97,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
115,
116,
117,
118,
119,
120,
121,
122,
150,
151,
152,
161,
162,
163,
164,
178,
179,
180,
206,
207,
208,
209,
213,
214,
215,
216,
224,
225,
226,
227,
228,
237,
238,
239,
243,
244,
245,
253,
254,
255,
256,
257,
258
] |
1CWE-79
| from django.core.exceptions import ValidationError
from shuup.utils.django_compat import force_text
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
class Problem(Exception):
""" User-visible exception. """
message = property(lambda self: self.args[0] if self.args else None)
def __init__(self, message, title=None):
super(Problem, self).__init__(message)
self.title = title
self.links = []
def with_link(self, url, title):
"""
Append a link to this Problem and return itself.
This API is designed after `Exception.with_traceback()`,
so you can fluently chain this in a `raise` statement::
raise Problem("Oops").with_link("...", "...")
:param url: URL string.
:type url: str
:param title: Title text.
:type title: str
:return: This same Problem.
:rtype: shuup.utils.excs.Problem
"""
self.links.append({"url": url, "title": title})
return self
class ExceptionalResponse(Exception):
def __init__(self, response):
self.response = response
super(ExceptionalResponse, self).__init__(force_text(response))
def extract_messages(obj_list):
"""
Extract "messages" from a list of exceptions or other objects.
For ValidationErrors, `messages` are flattened into the output.
For Exceptions, `args[0]` is added into the output.
For other objects, `force_text` is called.
:param obj_list: List of exceptions etc.
:type obj_list: Iterable[object]
:rtype: Iterable[str]
"""
for obj in obj_list:
if isinstance(obj, ValidationError):
for msg in obj.messages:
yield force_text(msg)
continue
if isinstance(obj, Exception):
if len(obj.args):
yield force_text(obj.args[0])
continue
yield force_text(obj)
| from django.core.exceptions import ValidationError
from django.utils.html import escape
from shuup.utils.django_compat import force_text
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
class Problem(Exception):
""" User-visible exception. """
message = property(lambda self: self.args[0] if self.args else None)
def __init__(self, message, title=None):
super(Problem, self).__init__(message)
self.title = title
self.links = []
def with_link(self, url, title):
"""
Append a link to this Problem and return itself.
This API is designed after `Exception.with_traceback()`,
so you can fluently chain this in a `raise` statement::
raise Problem("Oops").with_link("...", "...")
:param url: URL string.
:type url: str
:param title: Title text.
:type title: str
:return: This same Problem.
:rtype: shuup.utils.excs.Problem
"""
self.links.append({"url": url, "title": title})
return self
class ExceptionalResponse(Exception):
def __init__(self, response):
self.response = response
super(ExceptionalResponse, self).__init__(force_text(response))
def extract_messages(obj_list):
"""
Extract "messages" from a list of exceptions or other objects.
For ValidationErrors, `messages` are flattened into the output.
For Exceptions, `args[0]` is added into the output.
For other objects, `force_text` is called.
:param obj_list: List of exceptions etc.
:type obj_list: Iterable[object]
:rtype: Iterable[str]
"""
for obj in obj_list:
if isinstance(obj, ValidationError):
for msg in obj.messages:
yield escape(force_text(msg))
continue
if isinstance(obj, Exception):
if len(obj.args):
yield escape(force_text(obj.args[0]))
continue
yield escape(force_text(obj))
| xss | {
"code": [
" yield force_text(msg)",
" yield force_text(obj.args[0])",
" yield force_text(obj)"
],
"line_no": [
64,
68,
70
]
} | {
"code": [
" yield escape(force_text(msg))",
" yield escape(force_text(obj.args[0]))"
],
"line_no": [
65,
69
]
} | from django.core.exceptions import ValidationError
from shuup.utils.django_compat import force_text
class CLASS_0(Exception):
VAR_1 = property(lambda self: self.args[0] if self.args else None)
def __init__(self, VAR_1, VAR_2=None):
super(CLASS_0, self).__init__(VAR_1)
self.title = VAR_2
self.links = []
def FUNC_1(self, VAR_3, VAR_2):
self.links.append({"url": VAR_3, "title": VAR_2})
return self
class CLASS_1(Exception):
def __init__(self, VAR_4):
self.response = VAR_4
super(CLASS_1, self).__init__(force_text(VAR_4))
def FUNC_0(VAR_0):
for obj in VAR_0:
if isinstance(obj, ValidationError):
for msg in obj.messages:
yield force_text(msg)
continue
if isinstance(obj, Exception):
if len(obj.args):
yield force_text(obj.args[0])
continue
yield force_text(obj)
| from django.core.exceptions import ValidationError
from django.utils.html import escape
from shuup.utils.django_compat import force_text
class CLASS_0(Exception):
VAR_1 = property(lambda self: self.args[0] if self.args else None)
def __init__(self, VAR_1, VAR_2=None):
super(CLASS_0, self).__init__(VAR_1)
self.title = VAR_2
self.links = []
def FUNC_1(self, VAR_3, VAR_2):
self.links.append({"url": VAR_3, "title": VAR_2})
return self
class CLASS_1(Exception):
def __init__(self, VAR_4):
self.response = VAR_4
super(CLASS_1, self).__init__(force_text(VAR_4))
def FUNC_0(VAR_0):
for obj in VAR_0:
if isinstance(obj, ValidationError):
for msg in obj.messages:
yield escape(force_text(msg))
continue
if isinstance(obj, Exception):
if len(obj.args):
yield escape(force_text(obj.args[0]))
continue
yield escape(force_text(obj))
| [
2,
4,
5,
6,
7,
8,
9,
10,
11,
12,
15,
17,
22,
26,
29,
31,
41,
42,
47,
48,
52,
56,
71,
14,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38
] | [
3,
5,
6,
7,
8,
9,
10,
11,
12,
13,
16,
18,
23,
27,
30,
32,
42,
43,
48,
49,
53,
57,
72,
15,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from mock import ANY, Mock, call
from twisted.internet import defer
from synapse.api.errors import AuthError
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import override_config
from tests.utils import register_federation_servlets
# Some local users to test with
U_APPLE = UserID.from_string("@apple:test")
U_BANANA = UserID.from_string("@banana:test")
# Remote user
U_ONION = UserID.from_string("@onion:farm")
# Test room id
ROOM_ID = "a-room"
def _expect_edu_transaction(edu_type, content, origin="test"):
return {
"origin": origin,
"origin_server_ts": 1000000,
"pdus": [],
"edus": [{"edu_type": edu_type, "content": content}],
}
def _make_edu_transaction_json(edu_type, content):
return json.dumps(_expect_edu_transaction(edu_type, content)).encode("utf8")
class TypingNotificationsTestCase(unittest.HomeserverTestCase):
servlets = [register_federation_servlets]
def make_homeserver(self, reactor, clock):
# we mock out the keyring so as to skip the authentication check on the
# federation API call.
mock_keyring = Mock(spec=["verify_json_for_server"])
mock_keyring.verify_json_for_server.return_value = defer.succeed(True)
# we mock out the federation client too
mock_federation_client = Mock(spec=["put_json"])
mock_federation_client.put_json.return_value = defer.succeed((200, "OK"))
# the tests assume that we are starting at unix time 1000
reactor.pump((1000,))
hs = self.setup_test_homeserver(
notifier=Mock(),
http_client=mock_federation_client,
keyring=mock_keyring,
replication_streams={},
)
return hs
def prepare(self, reactor, clock, hs):
mock_notifier = hs.get_notifier()
self.on_new_event = mock_notifier.on_new_event
self.handler = hs.get_typing_handler()
self.event_source = hs.get_event_sources().sources["typing"]
self.datastore = hs.get_datastore()
retry_timings_res = {
"destination": "",
"retry_last_ts": 0,
"retry_interval": 0,
"failure_ts": None,
}
self.datastore.get_destination_retry_timings = Mock(
return_value=defer.succeed(retry_timings_res)
)
self.datastore.get_device_updates_by_remote = Mock(
return_value=make_awaitable((0, []))
)
self.datastore.get_destination_last_successful_stream_ordering = Mock(
return_value=make_awaitable(None)
)
def get_received_txn_response(*args):
return defer.succeed(None)
self.datastore.get_received_txn_response = get_received_txn_response
self.room_members = []
async def check_user_in_room(room_id, user_id):
if user_id not in [u.to_string() for u in self.room_members]:
raise AuthError(401, "User is not in the room")
return None
hs.get_auth().check_user_in_room = check_user_in_room
def get_joined_hosts_for_room(room_id):
return {member.domain for member in self.room_members}
self.datastore.get_joined_hosts_for_room = get_joined_hosts_for_room
async def get_users_in_room(room_id):
return {str(u) for u in self.room_members}
self.datastore.get_users_in_room = get_users_in_room
self.datastore.get_user_directory_stream_pos = Mock(
side_effect=(
# we deliberately return a non-None stream pos to avoid doing an initial_spam
lambda: make_awaitable(1)
)
)
self.datastore.get_current_state_deltas = Mock(return_value=(0, None))
self.datastore.get_to_device_stream_token = lambda: 0
self.datastore.get_new_device_msgs_for_remote = lambda *args, **kargs: make_awaitable(
([], 0)
)
self.datastore.delete_device_msgs_for_remote = lambda *args, **kargs: make_awaitable(
None
)
self.datastore.set_received_txn_response = lambda *args, **kwargs: make_awaitable(
None
)
def test_started_typing_local(self):
self.room_members = [U_APPLE, U_BANANA]
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.started_typing(
target_user=U_APPLE,
requester=create_requester(U_APPLE),
room_id=ROOM_ID,
timeout=20000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[ROOM_ID])])
self.assertEquals(self.event_source.get_current_key(), 1)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0)
)
self.assertEquals(
events[0],
[
{
"type": "m.typing",
"room_id": ROOM_ID,
"content": {"user_ids": [U_APPLE.to_string()]},
}
],
)
@override_config({"send_federation": True})
def test_started_typing_remote_send(self):
self.room_members = [U_APPLE, U_ONION]
self.get_success(
self.handler.started_typing(
target_user=U_APPLE,
requester=create_requester(U_APPLE),
room_id=ROOM_ID,
timeout=20000,
)
)
put_json = self.hs.get_http_client().put_json
put_json.assert_called_once_with(
"farm",
path="/_matrix/federation/v1/send/1000000",
data=_expect_edu_transaction(
"m.typing",
content={
"room_id": ROOM_ID,
"user_id": U_APPLE.to_string(),
"typing": True,
},
),
json_data_callback=ANY,
long_retries=True,
backoff_on_404=True,
try_trailing_slash_on_400=True,
)
def test_started_typing_remote_recv(self):
self.room_members = [U_APPLE, U_ONION]
self.assertEquals(self.event_source.get_current_key(), 0)
(request, channel) = self.make_request(
"PUT",
"/_matrix/federation/v1/send/1000000",
_make_edu_transaction_json(
"m.typing",
content={
"room_id": ROOM_ID,
"user_id": U_ONION.to_string(),
"typing": True,
},
),
federation_auth_origin=b"farm",
)
self.assertEqual(channel.code, 200)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[ROOM_ID])])
self.assertEquals(self.event_source.get_current_key(), 1)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0)
)
self.assertEquals(
events[0],
[
{
"type": "m.typing",
"room_id": ROOM_ID,
"content": {"user_ids": [U_ONION.to_string()]},
}
],
)
@override_config({"send_federation": True})
def test_stopped_typing(self):
self.room_members = [U_APPLE, U_BANANA, U_ONION]
# Gut-wrenching
from synapse.handlers.typing import RoomMember
member = RoomMember(ROOM_ID, U_APPLE.to_string())
self.handler._member_typing_until[member] = 1002000
self.handler._room_typing[ROOM_ID] = {U_APPLE.to_string()}
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.stopped_typing(
target_user=U_APPLE,
requester=create_requester(U_APPLE),
room_id=ROOM_ID,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[ROOM_ID])])
put_json = self.hs.get_http_client().put_json
put_json.assert_called_once_with(
"farm",
path="/_matrix/federation/v1/send/1000000",
data=_expect_edu_transaction(
"m.typing",
content={
"room_id": ROOM_ID,
"user_id": U_APPLE.to_string(),
"typing": False,
},
),
json_data_callback=ANY,
long_retries=True,
backoff_on_404=True,
try_trailing_slash_on_400=True,
)
self.assertEquals(self.event_source.get_current_key(), 1)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0)
)
self.assertEquals(
events[0],
[{"type": "m.typing", "room_id": ROOM_ID, "content": {"user_ids": []}}],
)
def test_typing_timeout(self):
self.room_members = [U_APPLE, U_BANANA]
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.started_typing(
target_user=U_APPLE,
requester=create_requester(U_APPLE),
room_id=ROOM_ID,
timeout=10000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[ROOM_ID])])
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 1)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0)
)
self.assertEquals(
events[0],
[
{
"type": "m.typing",
"room_id": ROOM_ID,
"content": {"user_ids": [U_APPLE.to_string()]},
}
],
)
self.reactor.pump([16])
self.on_new_event.assert_has_calls([call("typing_key", 2, rooms=[ROOM_ID])])
self.assertEquals(self.event_source.get_current_key(), 2)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=1)
)
self.assertEquals(
events[0],
[{"type": "m.typing", "room_id": ROOM_ID, "content": {"user_ids": []}}],
)
# SYN-230 - see if we can still set after timeout
self.get_success(
self.handler.started_typing(
target_user=U_APPLE,
requester=create_requester(U_APPLE),
room_id=ROOM_ID,
timeout=10000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 3, rooms=[ROOM_ID])])
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 3)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0)
)
self.assertEquals(
events[0],
[
{
"type": "m.typing",
"room_id": ROOM_ID,
"content": {"user_ids": [U_APPLE.to_string()]},
}
],
)
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from mock import ANY, Mock, call
from twisted.internet import defer
from synapse.api.errors import AuthError
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import override_config
from tests.utils import register_federation_servlets
# Some local users to test with
U_APPLE = UserID.from_string("@apple:test")
U_BANANA = UserID.from_string("@banana:test")
# Remote user
U_ONION = UserID.from_string("@onion:farm")
# Test room id
ROOM_ID = "a-room"
def _expect_edu_transaction(edu_type, content, origin="test"):
return {
"origin": origin,
"origin_server_ts": 1000000,
"pdus": [],
"edus": [{"edu_type": edu_type, "content": content}],
}
def _make_edu_transaction_json(edu_type, content):
return json.dumps(_expect_edu_transaction(edu_type, content)).encode("utf8")
class TypingNotificationsTestCase(unittest.HomeserverTestCase):
servlets = [register_federation_servlets]
def make_homeserver(self, reactor, clock):
# we mock out the keyring so as to skip the authentication check on the
# federation API call.
mock_keyring = Mock(spec=["verify_json_for_server"])
mock_keyring.verify_json_for_server.return_value = defer.succeed(True)
# we mock out the federation client too
mock_federation_client = Mock(spec=["put_json"])
mock_federation_client.put_json.return_value = defer.succeed((200, "OK"))
# the tests assume that we are starting at unix time 1000
reactor.pump((1000,))
hs = self.setup_test_homeserver(
notifier=Mock(),
federation_http_client=mock_federation_client,
keyring=mock_keyring,
replication_streams={},
)
return hs
def prepare(self, reactor, clock, hs):
mock_notifier = hs.get_notifier()
self.on_new_event = mock_notifier.on_new_event
self.handler = hs.get_typing_handler()
self.event_source = hs.get_event_sources().sources["typing"]
self.datastore = hs.get_datastore()
retry_timings_res = {
"destination": "",
"retry_last_ts": 0,
"retry_interval": 0,
"failure_ts": None,
}
self.datastore.get_destination_retry_timings = Mock(
return_value=defer.succeed(retry_timings_res)
)
self.datastore.get_device_updates_by_remote = Mock(
return_value=make_awaitable((0, []))
)
self.datastore.get_destination_last_successful_stream_ordering = Mock(
return_value=make_awaitable(None)
)
def get_received_txn_response(*args):
return defer.succeed(None)
self.datastore.get_received_txn_response = get_received_txn_response
self.room_members = []
async def check_user_in_room(room_id, user_id):
if user_id not in [u.to_string() for u in self.room_members]:
raise AuthError(401, "User is not in the room")
return None
hs.get_auth().check_user_in_room = check_user_in_room
def get_joined_hosts_for_room(room_id):
return {member.domain for member in self.room_members}
self.datastore.get_joined_hosts_for_room = get_joined_hosts_for_room
async def get_users_in_room(room_id):
return {str(u) for u in self.room_members}
self.datastore.get_users_in_room = get_users_in_room
self.datastore.get_user_directory_stream_pos = Mock(
side_effect=(
# we deliberately return a non-None stream pos to avoid doing an initial_spam
lambda: make_awaitable(1)
)
)
self.datastore.get_current_state_deltas = Mock(return_value=(0, None))
self.datastore.get_to_device_stream_token = lambda: 0
self.datastore.get_new_device_msgs_for_remote = lambda *args, **kargs: make_awaitable(
([], 0)
)
self.datastore.delete_device_msgs_for_remote = lambda *args, **kargs: make_awaitable(
None
)
self.datastore.set_received_txn_response = lambda *args, **kwargs: make_awaitable(
None
)
def test_started_typing_local(self):
self.room_members = [U_APPLE, U_BANANA]
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.started_typing(
target_user=U_APPLE,
requester=create_requester(U_APPLE),
room_id=ROOM_ID,
timeout=20000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[ROOM_ID])])
self.assertEquals(self.event_source.get_current_key(), 1)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0)
)
self.assertEquals(
events[0],
[
{
"type": "m.typing",
"room_id": ROOM_ID,
"content": {"user_ids": [U_APPLE.to_string()]},
}
],
)
@override_config({"send_federation": True})
def test_started_typing_remote_send(self):
self.room_members = [U_APPLE, U_ONION]
self.get_success(
self.handler.started_typing(
target_user=U_APPLE,
requester=create_requester(U_APPLE),
room_id=ROOM_ID,
timeout=20000,
)
)
put_json = self.hs.get_federation_http_client().put_json
put_json.assert_called_once_with(
"farm",
path="/_matrix/federation/v1/send/1000000",
data=_expect_edu_transaction(
"m.typing",
content={
"room_id": ROOM_ID,
"user_id": U_APPLE.to_string(),
"typing": True,
},
),
json_data_callback=ANY,
long_retries=True,
backoff_on_404=True,
try_trailing_slash_on_400=True,
)
def test_started_typing_remote_recv(self):
self.room_members = [U_APPLE, U_ONION]
self.assertEquals(self.event_source.get_current_key(), 0)
(request, channel) = self.make_request(
"PUT",
"/_matrix/federation/v1/send/1000000",
_make_edu_transaction_json(
"m.typing",
content={
"room_id": ROOM_ID,
"user_id": U_ONION.to_string(),
"typing": True,
},
),
federation_auth_origin=b"farm",
)
self.assertEqual(channel.code, 200)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[ROOM_ID])])
self.assertEquals(self.event_source.get_current_key(), 1)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0)
)
self.assertEquals(
events[0],
[
{
"type": "m.typing",
"room_id": ROOM_ID,
"content": {"user_ids": [U_ONION.to_string()]},
}
],
)
@override_config({"send_federation": True})
def test_stopped_typing(self):
self.room_members = [U_APPLE, U_BANANA, U_ONION]
# Gut-wrenching
from synapse.handlers.typing import RoomMember
member = RoomMember(ROOM_ID, U_APPLE.to_string())
self.handler._member_typing_until[member] = 1002000
self.handler._room_typing[ROOM_ID] = {U_APPLE.to_string()}
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.stopped_typing(
target_user=U_APPLE,
requester=create_requester(U_APPLE),
room_id=ROOM_ID,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[ROOM_ID])])
put_json = self.hs.get_federation_http_client().put_json
put_json.assert_called_once_with(
"farm",
path="/_matrix/federation/v1/send/1000000",
data=_expect_edu_transaction(
"m.typing",
content={
"room_id": ROOM_ID,
"user_id": U_APPLE.to_string(),
"typing": False,
},
),
json_data_callback=ANY,
long_retries=True,
backoff_on_404=True,
try_trailing_slash_on_400=True,
)
self.assertEquals(self.event_source.get_current_key(), 1)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0)
)
self.assertEquals(
events[0],
[{"type": "m.typing", "room_id": ROOM_ID, "content": {"user_ids": []}}],
)
def test_typing_timeout(self):
self.room_members = [U_APPLE, U_BANANA]
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.started_typing(
target_user=U_APPLE,
requester=create_requester(U_APPLE),
room_id=ROOM_ID,
timeout=10000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[ROOM_ID])])
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 1)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0)
)
self.assertEquals(
events[0],
[
{
"type": "m.typing",
"room_id": ROOM_ID,
"content": {"user_ids": [U_APPLE.to_string()]},
}
],
)
self.reactor.pump([16])
self.on_new_event.assert_has_calls([call("typing_key", 2, rooms=[ROOM_ID])])
self.assertEquals(self.event_source.get_current_key(), 2)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=1)
)
self.assertEquals(
events[0],
[{"type": "m.typing", "room_id": ROOM_ID, "content": {"user_ids": []}}],
)
# SYN-230 - see if we can still set after timeout
self.get_success(
self.handler.started_typing(
target_user=U_APPLE,
requester=create_requester(U_APPLE),
room_id=ROOM_ID,
timeout=10000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 3, rooms=[ROOM_ID])])
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 3)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0)
)
self.assertEquals(
events[0],
[
{
"type": "m.typing",
"room_id": ROOM_ID,
"content": {"user_ids": [U_APPLE.to_string()]},
}
],
)
| open_redirect | {
"code": [
" http_client=mock_federation_client,",
" put_json = self.hs.get_http_client().put_json",
" put_json = self.hs.get_http_client().put_json"
],
"line_no": [
73,
195,
273
]
} | {
"code": [
" federation_http_client=mock_federation_client,",
" put_json = self.hs.get_federation_http_client().put_json",
" put_json = self.hs.get_federation_http_client().put_json"
],
"line_no": [
73,
195,
273
]
} |
import json
from mock import ANY, Mock, call
from twisted.internet import defer
from synapse.api.errors import AuthError
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import override_config
from tests.utils import register_federation_servlets
VAR_0 = UserID.from_string("@apple:test")
VAR_1 = UserID.from_string("@banana:test")
VAR_2 = UserID.from_string("@onion:farm")
VAR_3 = "a-room"
def FUNC_0(VAR_4, VAR_5, VAR_6="test"):
return {
"origin": VAR_6,
"origin_server_ts": 1000000,
"pdus": [],
"edus": [{"edu_type": VAR_4, "content": VAR_5}],
}
def FUNC_1(VAR_4, VAR_5):
return json.dumps(FUNC_0(VAR_4, VAR_5)).encode("utf8")
class CLASS_0(unittest.HomeserverTestCase):
VAR_7 = [register_federation_servlets]
def FUNC_2(self, VAR_8, VAR_9):
VAR_11 = Mock(spec=["verify_json_for_server"])
VAR_11.verify_json_for_server.return_value = defer.succeed(True)
VAR_12 = Mock(spec=["put_json"])
VAR_12.put_json.return_value = defer.succeed((200, "OK"))
VAR_8.pump((1000,))
VAR_10 = self.setup_test_homeserver(
notifier=Mock(),
http_client=VAR_12,
keyring=VAR_11,
replication_streams={},
)
return VAR_10
def FUNC_3(self, VAR_8, VAR_9, VAR_10):
VAR_13 = VAR_10.get_notifier()
self.on_new_event = VAR_13.on_new_event
self.handler = VAR_10.get_typing_handler()
self.event_source = VAR_10.get_event_sources().sources["typing"]
self.datastore = VAR_10.get_datastore()
VAR_14 = {
"destination": "",
"retry_last_ts": 0,
"retry_interval": 0,
"failure_ts": None,
}
self.datastore.get_destination_retry_timings = Mock(
return_value=defer.succeed(VAR_14)
)
self.datastore.get_device_updates_by_remote = Mock(
return_value=make_awaitable((0, []))
)
self.datastore.get_destination_last_successful_stream_ordering = Mock(
return_value=make_awaitable(None)
)
def FUNC_9(*VAR_15):
return defer.succeed(None)
self.datastore.get_received_txn_response = FUNC_9
self.room_members = []
async def FUNC_10(VAR_16, VAR_17):
if VAR_17 not in [u.to_string() for u in self.room_members]:
raise AuthError(401, "User is not in the room")
return None
VAR_10.get_auth().check_user_in_room = FUNC_10
def FUNC_11(VAR_16):
return {VAR_22.domain for VAR_22 in self.room_members}
self.datastore.get_joined_hosts_for_room = FUNC_11
async def FUNC_12(VAR_16):
return {str(u) for u in self.room_members}
self.datastore.get_users_in_room = FUNC_12
self.datastore.get_user_directory_stream_pos = Mock(
side_effect=(
lambda: make_awaitable(1)
)
)
self.datastore.get_current_state_deltas = Mock(return_value=(0, None))
self.datastore.get_to_device_stream_token = lambda: 0
self.datastore.get_new_device_msgs_for_remote = lambda *VAR_15, **kargs: make_awaitable(
([], 0)
)
self.datastore.delete_device_msgs_for_remote = lambda *VAR_15, **kargs: make_awaitable(
None
)
self.datastore.set_received_txn_response = lambda *VAR_15, **kwargs: make_awaitable(
None
)
def FUNC_4(self):
self.room_members = [VAR_0, VAR_1]
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.started_typing(
target_user=VAR_0,
requester=create_requester(VAR_0),
VAR_16=VAR_3,
timeout=20000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[VAR_3])])
self.assertEquals(self.event_source.get_current_key(), 1)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=0)
)
self.assertEquals(
VAR_18[0],
[
{
"type": "m.typing",
"room_id": VAR_3,
"content": {"user_ids": [VAR_0.to_string()]},
}
],
)
@override_config({"send_federation": True})
def FUNC_5(self):
self.room_members = [VAR_0, VAR_2]
self.get_success(
self.handler.started_typing(
target_user=VAR_0,
requester=create_requester(VAR_0),
VAR_16=VAR_3,
timeout=20000,
)
)
VAR_19 = self.hs.get_http_client().put_json
VAR_19.assert_called_once_with(
"farm",
path="/_matrix/federation/v1/send/1000000",
data=FUNC_0(
"m.typing",
VAR_5={
"room_id": VAR_3,
"user_id": VAR_0.to_string(),
"typing": True,
},
),
json_data_callback=ANY,
long_retries=True,
backoff_on_404=True,
try_trailing_slash_on_400=True,
)
def FUNC_6(self):
self.room_members = [VAR_0, VAR_2]
self.assertEquals(self.event_source.get_current_key(), 0)
(VAR_20, VAR_21) = self.make_request(
"PUT",
"/_matrix/federation/v1/send/1000000",
FUNC_1(
"m.typing",
VAR_5={
"room_id": VAR_3,
"user_id": VAR_2.to_string(),
"typing": True,
},
),
federation_auth_origin=b"farm",
)
self.assertEqual(VAR_21.code, 200)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[VAR_3])])
self.assertEquals(self.event_source.get_current_key(), 1)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=0)
)
self.assertEquals(
VAR_18[0],
[
{
"type": "m.typing",
"room_id": VAR_3,
"content": {"user_ids": [VAR_2.to_string()]},
}
],
)
@override_config({"send_federation": True})
def FUNC_7(self):
self.room_members = [VAR_0, VAR_1, VAR_2]
from synapse.handlers.typing import RoomMember
VAR_22 = RoomMember(VAR_3, VAR_0.to_string())
self.handler._member_typing_until[VAR_22] = 1002000
self.handler._room_typing[VAR_3] = {VAR_0.to_string()}
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.stopped_typing(
target_user=VAR_0,
requester=create_requester(VAR_0),
VAR_16=VAR_3,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[VAR_3])])
VAR_19 = self.hs.get_http_client().put_json
VAR_19.assert_called_once_with(
"farm",
path="/_matrix/federation/v1/send/1000000",
data=FUNC_0(
"m.typing",
VAR_5={
"room_id": VAR_3,
"user_id": VAR_0.to_string(),
"typing": False,
},
),
json_data_callback=ANY,
long_retries=True,
backoff_on_404=True,
try_trailing_slash_on_400=True,
)
self.assertEquals(self.event_source.get_current_key(), 1)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=0)
)
self.assertEquals(
VAR_18[0],
[{"type": "m.typing", "room_id": VAR_3, "content": {"user_ids": []}}],
)
def FUNC_8(self):
self.room_members = [VAR_0, VAR_1]
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.started_typing(
target_user=VAR_0,
requester=create_requester(VAR_0),
VAR_16=VAR_3,
timeout=10000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[VAR_3])])
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 1)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=0)
)
self.assertEquals(
VAR_18[0],
[
{
"type": "m.typing",
"room_id": VAR_3,
"content": {"user_ids": [VAR_0.to_string()]},
}
],
)
self.reactor.pump([16])
self.on_new_event.assert_has_calls([call("typing_key", 2, rooms=[VAR_3])])
self.assertEquals(self.event_source.get_current_key(), 2)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=1)
)
self.assertEquals(
VAR_18[0],
[{"type": "m.typing", "room_id": VAR_3, "content": {"user_ids": []}}],
)
self.get_success(
self.handler.started_typing(
target_user=VAR_0,
requester=create_requester(VAR_0),
VAR_16=VAR_3,
timeout=10000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 3, rooms=[VAR_3])])
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 3)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=0)
)
self.assertEquals(
VAR_18[0],
[
{
"type": "m.typing",
"room_id": VAR_3,
"content": {"user_ids": [VAR_0.to_string()]},
}
],
)
|
import json
from mock import ANY, Mock, call
from twisted.internet import defer
from synapse.api.errors import AuthError
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import override_config
from tests.utils import register_federation_servlets
VAR_0 = UserID.from_string("@apple:test")
VAR_1 = UserID.from_string("@banana:test")
VAR_2 = UserID.from_string("@onion:farm")
VAR_3 = "a-room"
def FUNC_0(VAR_4, VAR_5, VAR_6="test"):
return {
"origin": VAR_6,
"origin_server_ts": 1000000,
"pdus": [],
"edus": [{"edu_type": VAR_4, "content": VAR_5}],
}
def FUNC_1(VAR_4, VAR_5):
return json.dumps(FUNC_0(VAR_4, VAR_5)).encode("utf8")
class CLASS_0(unittest.HomeserverTestCase):
VAR_7 = [register_federation_servlets]
def FUNC_2(self, VAR_8, VAR_9):
VAR_11 = Mock(spec=["verify_json_for_server"])
VAR_11.verify_json_for_server.return_value = defer.succeed(True)
VAR_12 = Mock(spec=["put_json"])
VAR_12.put_json.return_value = defer.succeed((200, "OK"))
VAR_8.pump((1000,))
VAR_10 = self.setup_test_homeserver(
notifier=Mock(),
federation_http_client=VAR_12,
keyring=VAR_11,
replication_streams={},
)
return VAR_10
def FUNC_3(self, VAR_8, VAR_9, VAR_10):
VAR_13 = VAR_10.get_notifier()
self.on_new_event = VAR_13.on_new_event
self.handler = VAR_10.get_typing_handler()
self.event_source = VAR_10.get_event_sources().sources["typing"]
self.datastore = VAR_10.get_datastore()
VAR_14 = {
"destination": "",
"retry_last_ts": 0,
"retry_interval": 0,
"failure_ts": None,
}
self.datastore.get_destination_retry_timings = Mock(
return_value=defer.succeed(VAR_14)
)
self.datastore.get_device_updates_by_remote = Mock(
return_value=make_awaitable((0, []))
)
self.datastore.get_destination_last_successful_stream_ordering = Mock(
return_value=make_awaitable(None)
)
def FUNC_9(*VAR_15):
return defer.succeed(None)
self.datastore.get_received_txn_response = FUNC_9
self.room_members = []
async def FUNC_10(VAR_16, VAR_17):
if VAR_17 not in [u.to_string() for u in self.room_members]:
raise AuthError(401, "User is not in the room")
return None
VAR_10.get_auth().check_user_in_room = FUNC_10
def FUNC_11(VAR_16):
return {VAR_22.domain for VAR_22 in self.room_members}
self.datastore.get_joined_hosts_for_room = FUNC_11
async def FUNC_12(VAR_16):
return {str(u) for u in self.room_members}
self.datastore.get_users_in_room = FUNC_12
self.datastore.get_user_directory_stream_pos = Mock(
side_effect=(
lambda: make_awaitable(1)
)
)
self.datastore.get_current_state_deltas = Mock(return_value=(0, None))
self.datastore.get_to_device_stream_token = lambda: 0
self.datastore.get_new_device_msgs_for_remote = lambda *VAR_15, **kargs: make_awaitable(
([], 0)
)
self.datastore.delete_device_msgs_for_remote = lambda *VAR_15, **kargs: make_awaitable(
None
)
self.datastore.set_received_txn_response = lambda *VAR_15, **kwargs: make_awaitable(
None
)
def FUNC_4(self):
self.room_members = [VAR_0, VAR_1]
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.started_typing(
target_user=VAR_0,
requester=create_requester(VAR_0),
VAR_16=VAR_3,
timeout=20000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[VAR_3])])
self.assertEquals(self.event_source.get_current_key(), 1)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=0)
)
self.assertEquals(
VAR_18[0],
[
{
"type": "m.typing",
"room_id": VAR_3,
"content": {"user_ids": [VAR_0.to_string()]},
}
],
)
@override_config({"send_federation": True})
def FUNC_5(self):
self.room_members = [VAR_0, VAR_2]
self.get_success(
self.handler.started_typing(
target_user=VAR_0,
requester=create_requester(VAR_0),
VAR_16=VAR_3,
timeout=20000,
)
)
VAR_19 = self.hs.get_federation_http_client().put_json
VAR_19.assert_called_once_with(
"farm",
path="/_matrix/federation/v1/send/1000000",
data=FUNC_0(
"m.typing",
VAR_5={
"room_id": VAR_3,
"user_id": VAR_0.to_string(),
"typing": True,
},
),
json_data_callback=ANY,
long_retries=True,
backoff_on_404=True,
try_trailing_slash_on_400=True,
)
def FUNC_6(self):
self.room_members = [VAR_0, VAR_2]
self.assertEquals(self.event_source.get_current_key(), 0)
(VAR_20, VAR_21) = self.make_request(
"PUT",
"/_matrix/federation/v1/send/1000000",
FUNC_1(
"m.typing",
VAR_5={
"room_id": VAR_3,
"user_id": VAR_2.to_string(),
"typing": True,
},
),
federation_auth_origin=b"farm",
)
self.assertEqual(VAR_21.code, 200)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[VAR_3])])
self.assertEquals(self.event_source.get_current_key(), 1)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=0)
)
self.assertEquals(
VAR_18[0],
[
{
"type": "m.typing",
"room_id": VAR_3,
"content": {"user_ids": [VAR_2.to_string()]},
}
],
)
@override_config({"send_federation": True})
def FUNC_7(self):
self.room_members = [VAR_0, VAR_1, VAR_2]
from synapse.handlers.typing import RoomMember
VAR_22 = RoomMember(VAR_3, VAR_0.to_string())
self.handler._member_typing_until[VAR_22] = 1002000
self.handler._room_typing[VAR_3] = {VAR_0.to_string()}
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.stopped_typing(
target_user=VAR_0,
requester=create_requester(VAR_0),
VAR_16=VAR_3,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[VAR_3])])
VAR_19 = self.hs.get_federation_http_client().put_json
VAR_19.assert_called_once_with(
"farm",
path="/_matrix/federation/v1/send/1000000",
data=FUNC_0(
"m.typing",
VAR_5={
"room_id": VAR_3,
"user_id": VAR_0.to_string(),
"typing": False,
},
),
json_data_callback=ANY,
long_retries=True,
backoff_on_404=True,
try_trailing_slash_on_400=True,
)
self.assertEquals(self.event_source.get_current_key(), 1)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=0)
)
self.assertEquals(
VAR_18[0],
[{"type": "m.typing", "room_id": VAR_3, "content": {"user_ids": []}}],
)
def FUNC_8(self):
self.room_members = [VAR_0, VAR_1]
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.started_typing(
target_user=VAR_0,
requester=create_requester(VAR_0),
VAR_16=VAR_3,
timeout=10000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[VAR_3])])
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 1)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=0)
)
self.assertEquals(
VAR_18[0],
[
{
"type": "m.typing",
"room_id": VAR_3,
"content": {"user_ids": [VAR_0.to_string()]},
}
],
)
self.reactor.pump([16])
self.on_new_event.assert_has_calls([call("typing_key", 2, rooms=[VAR_3])])
self.assertEquals(self.event_source.get_current_key(), 2)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=1)
)
self.assertEquals(
VAR_18[0],
[{"type": "m.typing", "room_id": VAR_3, "content": {"user_ids": []}}],
)
self.get_success(
self.handler.started_typing(
target_user=VAR_0,
requester=create_requester(VAR_0),
VAR_16=VAR_3,
timeout=10000,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 3, rooms=[VAR_3])])
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 3)
VAR_18 = self.get_success(
self.event_source.get_new_events(room_ids=[VAR_3], from_key=0)
)
self.assertEquals(
VAR_18[0],
[
{
"type": "m.typing",
"room_id": VAR_3,
"content": {"user_ids": [VAR_0.to_string()]},
}
],
)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
20,
22,
25,
30,
31,
34,
35,
37,
38,
40,
41,
49,
50,
53,
54,
57,
59,
60,
63,
64,
67,
68,
70,
77,
79,
83,
85,
87,
98,
102,
106,
109,
111,
113,
118,
120,
123,
125,
128,
130,
133,
137,
139,
150,
153,
155,
164,
166,
181,
185,
194,
212,
215,
217,
232,
234,
249,
253,
254,
256,
260,
262,
270,
272,
290,
299,
302,
304,
313,
316,
331,
333,
335,
344,
345,
346,
355,
358,
373
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
20,
22,
25,
30,
31,
34,
35,
37,
38,
40,
41,
49,
50,
53,
54,
57,
59,
60,
63,
64,
67,
68,
70,
77,
79,
83,
85,
87,
98,
102,
106,
109,
111,
113,
118,
120,
123,
125,
128,
130,
133,
137,
139,
150,
153,
155,
164,
166,
181,
185,
194,
212,
215,
217,
232,
234,
249,
253,
254,
256,
260,
262,
270,
272,
290,
299,
302,
304,
313,
316,
331,
333,
335,
344,
345,
346,
355,
358,
373
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2012-2019 mutschler, jkrehm, cervinko, janeczku, OzzieIsaacs, csitko
# ok11, issmirnov, idalin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function, unicode_literals
import os
import sys
import datetime
import itertools
import uuid
from flask import session as flask_session
from binascii import hexlify
from flask_login import AnonymousUserMixin, current_user
from flask_login import user_logged_in
from contextlib import contextmanager
try:
from flask_dance.consumer.backend.sqla import OAuthConsumerMixin
oauth_support = True
except ImportError as e:
# fails on flask-dance >1.3, due to renaming
try:
from flask_dance.consumer.storage.sqla import OAuthConsumerMixin
oauth_support = True
except ImportError as e:
oauth_support = False
from sqlalchemy import create_engine, exc, exists, event, text
from sqlalchemy import Column, ForeignKey
from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime, Float, JSON
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.sql.expression import func
try:
# Compatibility with sqlalchemy 2.0
from sqlalchemy.orm import declarative_base
except ImportError:
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import backref, relationship, sessionmaker, Session, scoped_session
from werkzeug.security import generate_password_hash
from . import constants, logger, cli
log = logger.create()
session = None
app_DB_path = None
Base = declarative_base()
searched_ids = {}
def signal_store_user_session(object, user):
store_user_session()
def store_user_session():
if flask_session.get('_user_id', ""):
try:
if not check_user_session(flask_session.get('_user_id', ""), flask_session.get('_id', "")):
user_session = User_Sessions(flask_session.get('_user_id', ""), flask_session.get('_id', ""))
session.add(user_session)
session.commit()
log.info("Login and store session : " + flask_session.get('_id', ""))
else:
log.info("Found stored session : " + flask_session.get('_id', ""))
except (exc.OperationalError, exc.InvalidRequestError) as e:
session.rollback()
log.exception(e)
else:
log.error("No user id in session")
def delete_user_session(user_id, session_key):
try:
log.info("Deleted session_key : " + session_key)
session.query(User_Sessions).filter(User_Sessions.user_id==user_id,
User_Sessions.session_key==session_key).delete()
session.commit()
except (exc.OperationalError, exc.InvalidRequestError):
session.rollback()
log.exception(e)
def check_user_session(user_id, session_key):
try:
return bool(session.query(User_Sessions).filter(User_Sessions.user_id==user_id,
User_Sessions.session_key==session_key).one_or_none())
except (exc.OperationalError, exc.InvalidRequestError):
session.rollback()
log.exception(e)
user_logged_in.connect(signal_store_user_session)
def store_ids(result):
ids = list()
for element in result:
ids.append(element.id)
searched_ids[current_user.id] = ids
class UserBase:
@property
def is_authenticated(self):
return self.is_active
def _has_role(self, role_flag):
return constants.has_flag(self.role, role_flag)
def role_admin(self):
return self._has_role(constants.ROLE_ADMIN)
def role_download(self):
return self._has_role(constants.ROLE_DOWNLOAD)
def role_upload(self):
return self._has_role(constants.ROLE_UPLOAD)
def role_edit(self):
return self._has_role(constants.ROLE_EDIT)
def role_passwd(self):
return self._has_role(constants.ROLE_PASSWD)
def role_anonymous(self):
return self._has_role(constants.ROLE_ANONYMOUS)
def role_edit_shelfs(self):
return self._has_role(constants.ROLE_EDIT_SHELFS)
def role_delete_books(self):
return self._has_role(constants.ROLE_DELETE_BOOKS)
def role_viewer(self):
return self._has_role(constants.ROLE_VIEWER)
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return self.role_anonymous()
def get_id(self):
return str(self.id)
def filter_language(self):
return self.default_language
def check_visibility(self, value):
if value == constants.SIDEBAR_RECENT:
return True
return constants.has_flag(self.sidebar_view, value)
def show_detail_random(self):
return self.check_visibility(constants.DETAIL_RANDOM)
def list_denied_tags(self):
mct = self.denied_tags or ""
return [t.strip() for t in mct.split(",")]
def list_allowed_tags(self):
mct = self.allowed_tags or ""
return [t.strip() for t in mct.split(",")]
def list_denied_column_values(self):
mct = self.denied_column_value or ""
return [t.strip() for t in mct.split(",")]
def list_allowed_column_values(self):
mct = self.allowed_column_value or ""
return [t.strip() for t in mct.split(",")]
def get_view_property(self, page, prop):
if not self.view_settings.get(page):
return None
return self.view_settings[page].get(prop)
def set_view_property(self, page, prop, value):
if not self.view_settings.get(page):
self.view_settings[page] = dict()
self.view_settings[page][prop] = value
try:
flag_modified(self, "view_settings")
except AttributeError:
pass
try:
session.commit()
except (exc.OperationalError, exc.InvalidRequestError):
session.rollback()
# ToDo: Error message
def __repr__(self):
return '<User %r>' % self.name
# Baseclass for Users in Calibre-Web, settings which are depending on certain users are stored here. It is derived from
# User Base (all access methods are declared there)
class User(UserBase, Base):
__tablename__ = 'user'
__table_args__ = {'sqlite_autoincrement': True}
id = Column(Integer, primary_key=True)
name = Column(String(64), unique=True)
email = Column(String(120), unique=True, default="")
role = Column(SmallInteger, default=constants.ROLE_USER)
password = Column(String)
kindle_mail = Column(String(120), default="")
shelf = relationship('Shelf', backref='user', lazy='dynamic', order_by='Shelf.name')
downloads = relationship('Downloads', backref='user', lazy='dynamic')
locale = Column(String(2), default="en")
sidebar_view = Column(Integer, default=1)
default_language = Column(String(3), default="all")
denied_tags = Column(String, default="")
allowed_tags = Column(String, default="")
denied_column_value = Column(String, default="")
allowed_column_value = Column(String, default="")
remote_auth_token = relationship('RemoteAuthToken', backref='user', lazy='dynamic')
view_settings = Column(JSON, default={})
kobo_only_shelves_sync = Column(Integer, default=0)
if oauth_support:
class OAuth(OAuthConsumerMixin, Base):
provider_user_id = Column(String(256))
user_id = Column(Integer, ForeignKey(User.id))
user = relationship(User)
class OAuthProvider(Base):
__tablename__ = 'oauthProvider'
id = Column(Integer, primary_key=True)
provider_name = Column(String)
oauth_client_id = Column(String)
oauth_client_secret = Column(String)
active = Column(Boolean)
# Class for anonymous user is derived from User base and completly overrides methods and properties for the
# anonymous user
class Anonymous(AnonymousUserMixin, UserBase):
    """Stand-in user object for unauthenticated visitors.

    Mirrors the settings of the database "Guest" account, but keeps the
    per-page view preferences in the Flask session instead of the database.
    """

    def __init__(self):
        self.loadSettings()

    def loadSettings(self):
        # Copy the Guest account's settings onto this transient object.
        data = session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS)\
            .first()  # type: User
        for attr in ('name', 'role', 'id', 'sidebar_view', 'default_language',
                     'locale', 'kindle_mail', 'denied_tags', 'allowed_tags',
                     'denied_column_value', 'allowed_column_value',
                     'view_settings', 'kobo_only_shelves_sync'):
            setattr(self, attr, getattr(data, attr))

    def role_admin(self):
        return False

    @property
    def is_active(self):
        return False

    @property
    def is_anonymous(self):
        return True

    @property
    def is_authenticated(self):
        return False

    def get_view_property(self, page, prop):
        # View settings live in the Flask session for anonymous users.
        if 'view' not in flask_session:
            return None
        page_settings = flask_session['view'].get(page)
        if not page_settings:
            return None
        return page_settings.get(prop)

    def set_view_property(self, page, prop, value):
        if 'view' not in flask_session:
            return None
        if not flask_session['view'].get(page):
            flask_session['view'][page] = dict()
        flask_session['view'][page][prop] = value
        return None
class User_Sessions(Base):
    """Maps a stored Flask session key to the user it belongs to."""
    __tablename__ = 'user_session'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    session_key = Column(String, default="")
    def __init__(self, user_id, session_key):
        self.user_id = user_id
        self.session_key = session_key
# Baseclass representing Shelfs in calibre-web in app.db
class Shelf(Base):
    """A user-created book shelf stored in app.db."""
    __tablename__ = 'shelf'
    id = Column(Integer, primary_key=True)
    uuid = Column(String, default=lambda: str(uuid.uuid4()))
    name = Column(String)
    is_public = Column(Integer, default=0)
    user_id = Column(Integer, ForeignKey('user.id'))
    # Whether this shelf is synchronized to paired Kobo devices.
    kobo_sync = Column(Boolean, default=False)
    books = relationship("BookShelf", backref="ub_shelf", cascade="all, delete-orphan", lazy="dynamic")
    created = Column(DateTime, default=datetime.datetime.utcnow)
    # Also bumped by the before_flush listener when link rows change.
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    def __repr__(self):
        return '<Shelf %d:%r>' % (self.id, self.name)
# Baseclass representing Relationship between books and Shelfs in Calibre-Web in app.db (N:M)
class BookShelf(Base):
    """N:M link row connecting a book to a shelf."""
    __tablename__ = 'book_shelf_link'
    id = Column(Integer, primary_key=True)
    book_id = Column(Integer)
    # Manual sort position of the book inside the shelf.
    order = Column(Integer)
    shelf = Column(Integer, ForeignKey('shelf.id'))
    date_added = Column(DateTime, default=datetime.datetime.utcnow)
    def __repr__(self):
        return '<Book %r>' % self.id
# This table keeps track of deleted Shelves so that deletes can be propagated to any paired Kobo device.
class ShelfArchive(Base):
    """Tombstone for a deleted shelf, so deletions propagate to Kobo devices."""
    __tablename__ = 'shelf_archive'
    id = Column(Integer, primary_key=True)
    uuid = Column(String)
    user_id = Column(Integer, ForeignKey('user.id'))
    last_modified = Column(DateTime, default=datetime.datetime.utcnow)
class ReadBook(Base):
    """Per-user read progress for a book; linked 1:1 to a KoboReadingState."""
    __tablename__ = 'book_read_link'
    # Allowed values for read_status.
    STATUS_UNREAD = 0
    STATUS_FINISHED = 1
    STATUS_IN_PROGRESS = 2
    id = Column(Integer, primary_key=True)
    book_id = Column(Integer, unique=False)
    user_id = Column(Integer, ForeignKey('user.id'), unique=False)
    read_status = Column(Integer, unique=False, default=STATUS_UNREAD, nullable=False)
    kobo_reading_state = relationship("KoboReadingState", uselist=False,
                                      primaryjoin="and_(ReadBook.user_id == foreign(KoboReadingState.user_id), "
                                                  "ReadBook.book_id == foreign(KoboReadingState.book_id))",
                                      cascade="all",
                                      backref=backref("book_read_link",
                                                      uselist=False))
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    last_time_started_reading = Column(DateTime, nullable=True)
    times_started_reading = Column(Integer, default=0, nullable=False)
class Bookmark(Base):
    """Reader position bookmark for one user / book / format combination."""
    __tablename__ = 'bookmark'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    book_id = Column(Integer)
    format = Column(String(collation='NOCASE'))
    bookmark_key = Column(String)
# Baseclass representing books that are archived on the user's Kobo device.
class ArchivedBook(Base):
    """Per-user flag for books archived on the user's Kobo device."""
    __tablename__ = 'archived_book'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    book_id = Column(Integer)
    is_archived = Column(Boolean, unique=False)
    last_modified = Column(DateTime, default=datetime.datetime.utcnow)
# The Kobo ReadingState API keeps track of 4 timestamped entities:
# ReadingState, StatusInfo, Statistics, CurrentBookmark
# Which we map to the following 4 tables:
# KoboReadingState, ReadBook, KoboStatistics and KoboBookmark
class KoboReadingState(Base):
    """Root entity of the Kobo ReadingState API; owns bookmark and statistics."""
    __tablename__ = 'kobo_reading_state'
    id = Column(Integer, primary_key=True, autoincrement=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    book_id = Column(Integer)
    # Both timestamps are bumped by the before_flush listener when children change.
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    priority_timestamp = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    current_bookmark = relationship("KoboBookmark", uselist=False, backref="kobo_reading_state", cascade="all")
    statistics = relationship("KoboStatistics", uselist=False, backref="kobo_reading_state", cascade="all")
class KoboBookmark(Base):
    """Kobo CurrentBookmark: last reading position reported by the device."""
    __tablename__ = 'kobo_bookmark'
    id = Column(Integer, primary_key=True)
    kobo_reading_state_id = Column(Integer, ForeignKey('kobo_reading_state.id'))
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    location_source = Column(String)
    location_type = Column(String)
    location_value = Column(String)
    progress_percent = Column(Float)
    content_source_progress_percent = Column(Float)
class KoboStatistics(Base):
    """Kobo Statistics entity: reading-time counters reported by the device."""
    __tablename__ = 'kobo_statistics'
    id = Column(Integer, primary_key=True)
    kobo_reading_state_id = Column(Integer, ForeignKey('kobo_reading_state.id'))
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    remaining_time_minutes = Column(Integer)
    spent_reading_minutes = Column(Integer)
# Updates the last_modified timestamp in the KoboReadingState table if any of its children tables are modified.
@event.listens_for(Session, 'before_flush')
def receive_before_flush(session, flush_context, instances):
    """Bump parent last_modified timestamps when dependent rows change.

    New/dirty ReadBook, KoboStatistics or KoboBookmark rows refresh the linked
    KoboReadingState; added or deleted BookShelf link rows refresh the owning
    Shelf, so paired Kobo devices can detect the change on the next sync.
    """
    for obj in itertools.chain(session.new, session.dirty):
        if isinstance(obj, (ReadBook, KoboStatistics, KoboBookmark)) and obj.kobo_reading_state:
            obj.kobo_reading_state.last_modified = datetime.datetime.utcnow()
    for obj in itertools.chain(session.new, session.deleted):
        if isinstance(obj, BookShelf):
            obj.ub_shelf.last_modified = datetime.datetime.utcnow()
# Baseclass representing Downloads from calibre-web in app.db
class Downloads(Base):
    """Record of a book downloaded by a user."""
    __tablename__ = 'downloads'
    id = Column(Integer, primary_key=True)
    book_id = Column(Integer)
    user_id = Column(Integer, ForeignKey('user.id'))
    def __repr__(self):
        return '<Download %r' % self.book_id
# Baseclass representing allowed domains for registration
class Registration(Base):
    """Domain pattern that is allowed (or denied) for self-registration."""
    __tablename__ = 'registration'
    id = Column(Integer, primary_key=True)
    domain = Column(String)
    # 1 = allow-list entry, 0 = deny-list entry.
    allow = Column(Integer)
    def __repr__(self):
        return u"<Registration('{0}')>".format(self.domain)
class RemoteAuthToken(Base):
    """Short-lived token used for remote-login style authentication."""
    __tablename__ = 'remote_auth_token'
    id = Column(Integer, primary_key=True)
    auth_token = Column(String, unique=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    verified = Column(Boolean, default=False)
    expiration = Column(DateTime)
    # Tokens with token_type == 1 are exempt from expiry cleanup (see clean_database).
    token_type = Column(Integer, default=0)
    def __init__(self):
        # 8 random hex characters, valid for ten minutes.
        self.auth_token = (hexlify(os.urandom(4))).decode('utf-8')
        self.expiration = datetime.datetime.now() + datetime.timedelta(minutes=10)  # 10 min from now
    def __repr__(self):
        return '<Token %r>' % self.id
# Add missing tables during migration of database
def add_missing_tables(engine, session):
    """Create any app-db tables that were added after the database was built.

    The registration table additionally gets a default row that allows every
    domain to register.
    """
    added_later = (
        ("book_read_link", ReadBook),
        ("bookmark", Bookmark),
        ("kobo_reading_state", KoboReadingState),
        ("kobo_bookmark", KoboBookmark),
        ("kobo_statistics", KoboStatistics),
        ("archived_book", ArchivedBook),
    )
    for table_name, model in added_later:
        if not engine.dialect.has_table(engine.connect(), table_name):
            model.__table__.create(bind=engine)
    if not engine.dialect.has_table(engine.connect(), "registration"):
        Registration.__table__.create(bind=engine)
        with engine.connect() as conn:
            conn.execute("insert into registration (domain, allow) values('%.%',1)")
            session.commit()
# migrate all settings missing in registration table
def migrate_registration_table(engine, session):
    """Add the 'allow' column to legacy registration tables and seed a default row."""
    try:
        # Probe for the column; raises OperationalError on legacy schemas.
        session.query(exists().where(Registration.allow)).scalar()
        session.commit()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        with engine.connect() as conn:
            conn.execute("ALTER TABLE registration ADD column 'allow' INTEGER")
            conn.execute("update registration set 'allow' = 1")
        session.commit()
    try:
        # Handle table exists, but no content
        cnt = session.query(Registration).count()
        if not cnt:
            with engine.connect() as conn:
                # Default entry: every domain may register.
                conn.execute("insert into registration (domain, allow) values('%.%',1)")
            session.commit()
    except exc.OperationalError:  # Database is not writeable
        print('Settings database is not writeable. Exiting...')
        sys.exit(2)
# Remove login capability of user Guest
def migrate_guest_password(engine):
    """Blank any stored password of the Guest account so it cannot log in."""
    try:
        with engine.connect() as conn:
            trans = conn.begin()
            conn.execute(text("UPDATE user SET password='' where name = 'Guest' and password !=''"))
            trans.commit()
    except exc.OperationalError:
        print('Settings database is not writeable. Exiting...')
        sys.exit(2)
def migrate_shelfs(engine, session):
    """Bring the shelf and book_shelf_link tables up to the current schema."""
    try:
        # Probe for Shelf.uuid; very old databases lack all of these columns.
        session.query(exists().where(Shelf.uuid)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute("ALTER TABLE shelf ADD column 'uuid' STRING")
            conn.execute("ALTER TABLE shelf ADD column 'created' DATETIME")
            conn.execute("ALTER TABLE shelf ADD column 'last_modified' DATETIME")
            conn.execute("ALTER TABLE book_shelf_link ADD column 'date_added' DATETIME")
            conn.execute("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false")
        # Backfill the newly added columns for pre-existing rows.
        for shelf in session.query(Shelf).all():
            shelf.uuid = str(uuid.uuid4())
            shelf.created = datetime.datetime.now()
            shelf.last_modified = datetime.datetime.now()
        for book_shelf in session.query(BookShelf).all():
            book_shelf.date_added = datetime.datetime.now()
        session.commit()
    try:
        session.query(exists().where(Shelf.kobo_sync)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false")
        session.commit()
    try:
        session.query(exists().where(BookShelf.order)).scalar()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        with engine.connect() as conn:
            conn.execute("ALTER TABLE book_shelf_link ADD column 'order' INTEGER DEFAULT 1")
        session.commit()
def migrate_readBook(engine, session):
    """Migrate the legacy boolean is_read flag to the read_status state machine."""
    try:
        session.query(exists().where(ReadBook.read_status)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute("ALTER TABLE book_read_link ADD column 'read_status' INTEGER DEFAULT 0")
            # Carry the old boolean over: is_read -> STATUS_FINISHED (1).
            conn.execute("UPDATE book_read_link SET 'read_status' = 1 WHERE is_read")
            conn.execute("ALTER TABLE book_read_link ADD column 'last_modified' DATETIME")
            conn.execute("ALTER TABLE book_read_link ADD column 'last_time_started_reading' DATETIME")
            conn.execute("ALTER TABLE book_read_link ADD column 'times_started_reading' INTEGER DEFAULT 0")
        session.commit()
    # Ensure every existing row carries a last_modified timestamp.
    test = session.query(ReadBook).filter(ReadBook.last_modified == None).all()
    for book in test:
        book.last_modified = datetime.datetime.utcnow()
    session.commit()
def migrate_remoteAuthToken(engine, session):
    """Add the token_type column to legacy remote_auth_token tables."""
    try:
        session.query(exists().where(RemoteAuthToken.token_type)).scalar()
        session.commit()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        with engine.connect() as conn:
            conn.execute("ALTER TABLE remote_auth_token ADD column 'token_type' INTEGER DEFAULT 0")
            conn.execute("update remote_auth_token set 'token_type' = 0")
        session.commit()
# Migrate database to current version, has to be updated after every database change. Currently migration from
# everywhere to current should work. Migration is done by checking if relevant columns are existing, and than adding
# rows with SQL commands
def migrate_Database(session):
    """Migrate app.db from any previous schema version to the current one.

    Every step probes for a table/column and only applies the matching
    CREATE/ALTER statements when it is missing, so re-running is safe.
    """
    engine = session.bind
    add_missing_tables(engine, session)
    migrate_registration_table(engine, session)
    migrate_readBook(engine, session)
    migrate_remoteAuthToken(engine, session)
    migrate_shelfs(engine, session)
    try:
        create = False
        session.query(exists().where(User.sidebar_view)).scalar()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        with engine.connect() as conn:
            conn.execute("ALTER TABLE user ADD column `sidebar_view` Integer DEFAULT 1")
        session.commit()
        create = True
    try:
        if create:
            with engine.connect() as conn:
                conn.execute("SELECT language_books FROM user")
            session.commit()
    except exc.OperationalError:
        # Legacy per-category boolean columns exist: fold them into the
        # sidebar_view bitmask.
        with engine.connect() as conn:
            conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
                         "+ series_books * :side_series + category_books * :side_category + hot_books * "
                         ":side_hot + :side_autor + :detail_random)",
                         {'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE,
                          'side_series': constants.SIDEBAR_SERIES, 'side_category': constants.SIDEBAR_CATEGORY,
                          'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
                          'detail_random': constants.DETAIL_RANDOM})
        session.commit()
    try:
        session.query(exists().where(User.denied_tags)).scalar()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        with engine.connect() as conn:
            conn.execute("ALTER TABLE user ADD column `denied_tags` String DEFAULT ''")
            conn.execute("ALTER TABLE user ADD column `allowed_tags` String DEFAULT ''")
            conn.execute("ALTER TABLE user ADD column `denied_column_value` String DEFAULT ''")
            conn.execute("ALTER TABLE user ADD column `allowed_column_value` String DEFAULT ''")
        session.commit()
    try:
        session.query(exists().where(User.view_settings)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute("ALTER TABLE user ADD column `view_settings` VARCHAR(10) DEFAULT '{}'")
        session.commit()
    try:
        session.query(exists().where(User.kobo_only_shelves_sync)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute("ALTER TABLE user ADD column `kobo_only_shelves_sync` SMALLINT DEFAULT 0")
        session.commit()
    try:
        # check if name is in User table instead of nickname
        session.query(exists().where(User.name)).scalar()
    except exc.OperationalError:
        # Create new table user_id and copy contents of table user into it
        with engine.connect() as conn:
            conn.execute(text("CREATE TABLE user_id (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                              "name VARCHAR(64),"
                              "email VARCHAR(120),"
                              "role SMALLINT,"
                              "password VARCHAR,"
                              "kindle_mail VARCHAR(120),"
                              "locale VARCHAR(2),"
                              "sidebar_view INTEGER,"
                              "default_language VARCHAR(3),"
                              "denied_tags VARCHAR,"
                              "allowed_tags VARCHAR,"
                              "denied_column_value VARCHAR,"
                              "allowed_column_value VARCHAR,"
                              "view_settings JSON,"
                              "kobo_only_shelves_sync SMALLINT,"
                              "UNIQUE (name),"
                              "UNIQUE (email))"))
            conn.execute(text("INSERT INTO user_id(id, name, email, role, password, kindle_mail,locale,"
                              "sidebar_view, default_language, denied_tags, allowed_tags, denied_column_value, "
                              "allowed_column_value, view_settings, kobo_only_shelves_sync)"
                              "SELECT id, nickname, email, role, password, kindle_mail, locale,"
                              "sidebar_view, default_language, denied_tags, allowed_tags, denied_column_value, "
                              "allowed_column_value, view_settings, kobo_only_shelves_sync FROM user"))
            # delete old user table and rename new user_id table to user:
            conn.execute(text("DROP TABLE user"))
            conn.execute(text("ALTER TABLE user_id RENAME TO user"))
        session.commit()
    # Guarantee the anonymous Guest account exists and cannot log in.
    if session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
            is None:
        create_anonymous_user(session)
    migrate_guest_password(engine)
def clean_database(session):
    """Purge expired remote-login tokens; type-1 tokens are kept indefinitely."""
    now = datetime.datetime.now()
    expired = (session.query(RemoteAuthToken)
               .filter(now > RemoteAuthToken.expiration)
               .filter(RemoteAuthToken.token_type != 1))
    expired.delete()
    session.commit()
# Save downloaded books per user in calibre-web's own database
def update_download(book_id, user_id):
    """Record that user_id downloaded book_id, unless already recorded."""
    already_logged = session.query(Downloads) \
        .filter(Downloads.user_id == user_id) \
        .filter(Downloads.book_id == book_id).first()
    if already_logged:
        return
    session.add(Downloads(user_id=user_id, book_id=book_id))
    try:
        session.commit()
    except exc.OperationalError:
        session.rollback()
# Delete non exisiting downloaded books in calibre-web's own database
def delete_download(book_id):
    """Remove every download record referencing book_id (e.g. book deleted)."""
    session.query(Downloads).filter(Downloads.book_id == book_id).delete()
    try:
        session.commit()
    except exc.OperationalError:
        session.rollback()
# Generate user Guest (translated text), as anonymous user, no rights
def create_anonymous_user(session):
    """Insert the built-in 'Guest' account used for anonymous browsing."""
    guest = User()
    guest.name = "Guest"
    guest.email = 'no@email'
    guest.role = constants.ROLE_ANONYMOUS
    guest.password = ''
    session.add(guest)
    try:
        session.commit()
    except Exception:
        # Best effort: the account may already exist (unique name).
        session.rollback()
# Generate User admin with admin123 password, and access to everything
def create_admin_user(session):
    """Seed the default 'admin' account with full rights and the default password."""
    admin = User()
    admin.name = "admin"
    admin.role = constants.ADMIN_USER_ROLES
    admin.sidebar_view = constants.ADMIN_USER_SIDEBAR
    admin.password = generate_password_hash(constants.DEFAULT_PASSWORD)
    session.add(admin)
    try:
        session.commit()
    except Exception:
        # Best effort: the account may already exist (unique name).
        session.rollback()
def init_db(app_db_path):
    """Open (or create) app.db, migrate it, and handle the CLI password reset.

    Side effects: sets the module globals `session` and `app_DB_path`; may
    terminate the process when a password change was requested on the CLI.
    """
    # Open session for database connection
    global session
    global app_DB_path
    app_DB_path = app_db_path
    engine = create_engine(u'sqlite:///{0}'.format(app_db_path), echo=False)
    Session = scoped_session(sessionmaker())
    Session.configure(bind=engine)
    session = Session()
    if os.path.exists(app_db_path):
        Base.metadata.create_all(engine)
        migrate_Database(session)
        clean_database(session)
    else:
        # Fresh database: create schema plus the default admin and Guest users.
        Base.metadata.create_all(engine)
        create_admin_user(session)
        create_anonymous_user(session)
    if cli.user_credentials:
        # CLI-supplied credentials in 'user:password' form (split on first ':').
        username, password = cli.user_credentials.split(':', 1)
        user = session.query(User).filter(func.lower(User.name) == username.lower()).first()
        if user:
            if not password:
                print("Empty password is not allowed")
                sys.exit(4)
            user.password = generate_password_hash(password)
            if session_commit() == "":
                print("Password for user '{}' changed".format(username))
                sys.exit(0)
            else:
                print("Failed changing password")
                sys.exit(3)
        else:
            print("Username '{}' not valid, can't change password".format(username))
            sys.exit(3)
def dispose():
    """Close the module-level session and dispose of its engine, ignoring errors."""
    global session
    old_session, session = session, None
    if not old_session:
        return
    try:
        old_session.close()
    except Exception:
        pass
    if old_session.bind:
        try:
            old_session.bind.dispose()
        except Exception:
            pass
def session_commit(success=None):
    """Commit the module-level session, optionally logging a success message.

    Returns an empty string on success and the stringified database error on
    failure (after rolling back). Bug fix: previously "" was returned
    unconditionally, so callers testing ``session_commit() == ""`` (e.g. the
    password-change branch in init_db) could never detect a failed commit.
    """
    try:
        session.commit()
        if success:
            log.info(success)
    except (exc.OperationalError, exc.InvalidRequestError) as e:
        session.rollback()
        log.debug_or_exception(e)
        return str(e)
    return ""
| # -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2012-2019 mutschler, jkrehm, cervinko, janeczku, OzzieIsaacs, csitko
# ok11, issmirnov, idalin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function, unicode_literals
import os
import sys
import datetime
import itertools
import uuid
from flask import session as flask_session
from binascii import hexlify
from flask_login import AnonymousUserMixin, current_user
from flask_login import user_logged_in
from contextlib import contextmanager
try:
from flask_dance.consumer.backend.sqla import OAuthConsumerMixin
oauth_support = True
except ImportError as e:
# fails on flask-dance >1.3, due to renaming
try:
from flask_dance.consumer.storage.sqla import OAuthConsumerMixin
oauth_support = True
except ImportError as e:
oauth_support = False
from sqlalchemy import create_engine, exc, exists, event, text
from sqlalchemy import Column, ForeignKey
from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime, Float, JSON
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.sql.expression import func
try:
# Compatibility with sqlalchemy 2.0
from sqlalchemy.orm import declarative_base
except ImportError:
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import backref, relationship, sessionmaker, Session, scoped_session
from werkzeug.security import generate_password_hash
from . import constants, logger, cli
log = logger.create()
# Module-level scoped session; initialized by init_db(), torn down by dispose().
session = None
# Absolute path of app.db; set by init_db().
app_DB_path = None
Base = declarative_base()
# Cache of the book ids from each user's last search (user id -> [book ids]).
searched_ids = {}
def signal_store_user_session(object, user):
    # Receiver for the flask-login user_logged_in signal; arguments are unused.
    store_user_session()
def store_user_session():
    """Persist the current Flask session key for the logged-in user.

    Stores a User_Sessions row (unless one already exists) so that the
    session can later be verified or invalidated server-side.
    """
    if flask_session.get('_user_id', ""):
        try:
            if not check_user_session(flask_session.get('_user_id', ""), flask_session.get('_id', "")):
                user_session = User_Sessions(flask_session.get('_user_id', ""), flask_session.get('_id', ""))
                session.add(user_session)
                session.commit()
                log.debug("Login and store session : " + flask_session.get('_id', ""))
            else:
                log.debug("Found stored session: " + flask_session.get('_id', ""))
        except (exc.OperationalError, exc.InvalidRequestError) as e:
            session.rollback()
            log.exception(e)
    else:
        log.error("No user id in session")
def delete_user_session(user_id, session_key):
    """Delete the stored (user_id, session_key) pair, e.g. on logout."""
    try:
        log.debug("Deleted session_key: " + session_key)
        session.query(User_Sessions).filter(User_Sessions.user_id==user_id,
                                            User_Sessions.session_key==session_key).delete()
        session.commit()
    except (exc.OperationalError, exc.InvalidRequestError) as e:
        # Bug fix: the handler logged an undefined name 'e' (the except clause
        # had no 'as e'), which raised NameError instead of logging the error.
        session.rollback()
        log.exception(e)
def check_user_session(user_id, session_key):
    """Return True if the (user_id, session_key) pair is stored, else False.

    Returns None when the lookup fails with a database error (logged and
    rolled back), which callers treat as "not found".
    """
    try:
        return bool(session.query(User_Sessions).filter(User_Sessions.user_id==user_id,
                                                        User_Sessions.session_key==session_key).one_or_none())
    except (exc.OperationalError, exc.InvalidRequestError) as e:
        # Bug fix: the handler logged an undefined name 'e' (the except clause
        # had no 'as e'), which raised NameError instead of logging the error.
        session.rollback()
        log.exception(e)
# Persist the session key every time a user logs in.
user_logged_in.connect(signal_store_user_session)
def store_ids(result):
    """Cache the ids of the current search result for the logged-in user."""
    searched_ids[current_user.id] = [element.id for element in result]
class UserBase:
    """Mixin with all behavior shared by database users and the anonymous user.

    Subclasses provide the data attributes (role, sidebar_view, view_settings,
    ...); this class only layers accessors on top of them.
    """
    @property
    def is_authenticated(self):
        return self.is_active
    def _has_role(self, role_flag):
        # role is a bitmask of constants.ROLE_* flags.
        return constants.has_flag(self.role, role_flag)
    def role_admin(self):
        return self._has_role(constants.ROLE_ADMIN)
    def role_download(self):
        return self._has_role(constants.ROLE_DOWNLOAD)
    def role_upload(self):
        return self._has_role(constants.ROLE_UPLOAD)
    def role_edit(self):
        return self._has_role(constants.ROLE_EDIT)
    def role_passwd(self):
        return self._has_role(constants.ROLE_PASSWD)
    def role_anonymous(self):
        return self._has_role(constants.ROLE_ANONYMOUS)
    def role_edit_shelfs(self):
        return self._has_role(constants.ROLE_EDIT_SHELFS)
    def role_delete_books(self):
        return self._has_role(constants.ROLE_DELETE_BOOKS)
    def role_viewer(self):
        return self._has_role(constants.ROLE_VIEWER)
    @property
    def is_active(self):
        return True
    @property
    def is_anonymous(self):
        return self.role_anonymous()
    def get_id(self):
        # flask-login requires a unicode user id.
        return str(self.id)
    def filter_language(self):
        return self.default_language
    def check_visibility(self, value):
        # sidebar_view is a bitmask of constants.SIDEBAR_* flags;
        # the "recently added" section is always visible.
        if value == constants.SIDEBAR_RECENT:
            return True
        return constants.has_flag(self.sidebar_view, value)
    def show_detail_random(self):
        return self.check_visibility(constants.DETAIL_RANDOM)
    def list_denied_tags(self):
        # The filters are stored as comma-separated strings.
        mct = self.denied_tags or ""
        return [t.strip() for t in mct.split(",")]
    def list_allowed_tags(self):
        mct = self.allowed_tags or ""
        return [t.strip() for t in mct.split(",")]
    def list_denied_column_values(self):
        mct = self.denied_column_value or ""
        return [t.strip() for t in mct.split(",")]
    def list_allowed_column_values(self):
        mct = self.allowed_column_value or ""
        return [t.strip() for t in mct.split(",")]
    def get_view_property(self, page, prop):
        # view_settings maps page name -> {property -> value}.
        if not self.view_settings.get(page):
            return None
        return self.view_settings[page].get(prop)
    def set_view_property(self, page, prop, value):
        # Store the value and persist immediately via the module-level session.
        if not self.view_settings.get(page):
            self.view_settings[page] = dict()
        self.view_settings[page][prop] = value
        try:
            # In-place JSON mutation is invisible to SQLAlchemy; mark it dirty.
            flag_modified(self, "view_settings")
        except AttributeError:
            pass
        try:
            session.commit()
        except (exc.OperationalError, exc.InvalidRequestError):
            session.rollback()
            # ToDo: Error message
    def __repr__(self):
        return '<User %r>' % self.name
# Baseclass for Users in Calibre-Web, settings which are depending on certain users are stored here. It is derived from
# User Base (all access methods are declared there)
class User(UserBase, Base):
    """Database-backed user account; behavior is inherited from UserBase."""
    __tablename__ = 'user'
    __table_args__ = {'sqlite_autoincrement': True}
    id = Column(Integer, primary_key=True)
    name = Column(String(64), unique=True)
    email = Column(String(120), unique=True, default="")
    # Bitmask of constants.ROLE_* flags (tested via UserBase._has_role).
    role = Column(SmallInteger, default=constants.ROLE_USER)
    password = Column(String)
    kindle_mail = Column(String(120), default="")
    shelf = relationship('Shelf', backref='user', lazy='dynamic', order_by='Shelf.name')
    downloads = relationship('Downloads', backref='user', lazy='dynamic')
    locale = Column(String(2), default="en")
    # Bitmask of constants.SIDEBAR_* flags (tested via UserBase.check_visibility).
    sidebar_view = Column(Integer, default=1)
    default_language = Column(String(3), default="all")
    # Comma-separated filter lists, split by the UserBase.list_* helpers.
    denied_tags = Column(String, default="")
    allowed_tags = Column(String, default="")
    denied_column_value = Column(String, default="")
    allowed_column_value = Column(String, default="")
    remote_auth_token = relationship('RemoteAuthToken', backref='user', lazy='dynamic')
    # Per-page view preferences, mutated in place by UserBase.set_view_property.
    view_settings = Column(JSON, default={})
    kobo_only_shelves_sync = Column(Integer, default=0)
if oauth_support:
    # Only defined when flask-dance is importable (see oauth_support probe above).
    class OAuth(OAuthConsumerMixin, Base):
        # Token storage for flask-dance; links an external identity to a local user.
        provider_user_id = Column(String(256))
        user_id = Column(Integer, ForeignKey(User.id))
        user = relationship(User)
    class OAuthProvider(Base):
        # Admin-configured settings for one external OAuth provider.
        __tablename__ = 'oauthProvider'
        id = Column(Integer, primary_key=True)
        provider_name = Column(String)
        oauth_client_id = Column(String)
        oauth_client_secret = Column(String)
        active = Column(Boolean)
# Class for anonymous user is derived from User base and completly overrides methods and properties for the
# anonymous user
class Anonymous(AnonymousUserMixin, UserBase):
    """Stand-in user object for unauthenticated visitors.

    Mirrors the settings of the database "Guest" account, but keeps the
    per-page view preferences in the Flask session instead of the database.
    """

    def __init__(self):
        self.loadSettings()

    def loadSettings(self):
        # Copy the Guest account's settings onto this transient object.
        data = session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS)\
            .first()  # type: User
        for attr in ('name', 'role', 'id', 'sidebar_view', 'default_language',
                     'locale', 'kindle_mail', 'denied_tags', 'allowed_tags',
                     'denied_column_value', 'allowed_column_value',
                     'view_settings', 'kobo_only_shelves_sync'):
            setattr(self, attr, getattr(data, attr))

    def role_admin(self):
        return False

    @property
    def is_active(self):
        return False

    @property
    def is_anonymous(self):
        return True

    @property
    def is_authenticated(self):
        return False

    def get_view_property(self, page, prop):
        # View settings live in the Flask session for anonymous users.
        if 'view' not in flask_session:
            return None
        page_settings = flask_session['view'].get(page)
        if not page_settings:
            return None
        return page_settings.get(prop)

    def set_view_property(self, page, prop, value):
        if 'view' not in flask_session:
            return None
        if not flask_session['view'].get(page):
            flask_session['view'][page] = dict()
        flask_session['view'][page][prop] = value
        return None
class User_Sessions(Base):
    """Maps a stored Flask session key to the user it belongs to."""
    __tablename__ = 'user_session'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    session_key = Column(String, default="")
    def __init__(self, user_id, session_key):
        self.user_id = user_id
        self.session_key = session_key
# Baseclass representing Shelfs in calibre-web in app.db
class Shelf(Base):
    """A user-created book shelf stored in app.db."""
    __tablename__ = 'shelf'
    id = Column(Integer, primary_key=True)
    uuid = Column(String, default=lambda: str(uuid.uuid4()))
    name = Column(String)
    is_public = Column(Integer, default=0)
    user_id = Column(Integer, ForeignKey('user.id'))
    # Whether this shelf is synchronized to paired Kobo devices.
    kobo_sync = Column(Boolean, default=False)
    books = relationship("BookShelf", backref="ub_shelf", cascade="all, delete-orphan", lazy="dynamic")
    created = Column(DateTime, default=datetime.datetime.utcnow)
    # Also bumped by the before_flush listener when link rows change.
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    def __repr__(self):
        return '<Shelf %d:%r>' % (self.id, self.name)
# Baseclass representing Relationship between books and Shelfs in Calibre-Web in app.db (N:M)
class BookShelf(Base):
    """N:M link row connecting a book to a shelf."""
    __tablename__ = 'book_shelf_link'
    id = Column(Integer, primary_key=True)
    book_id = Column(Integer)
    # Manual sort position of the book inside the shelf.
    order = Column(Integer)
    shelf = Column(Integer, ForeignKey('shelf.id'))
    date_added = Column(DateTime, default=datetime.datetime.utcnow)
    def __repr__(self):
        return '<Book %r>' % self.id
# This table keeps track of deleted Shelves so that deletes can be propagated to any paired Kobo device.
class ShelfArchive(Base):
    """Tombstone for a deleted shelf, so deletions propagate to Kobo devices."""
    __tablename__ = 'shelf_archive'
    id = Column(Integer, primary_key=True)
    uuid = Column(String)
    user_id = Column(Integer, ForeignKey('user.id'))
    last_modified = Column(DateTime, default=datetime.datetime.utcnow)
class ReadBook(Base):
    """Per-user read progress for a book; linked 1:1 to a KoboReadingState."""
    __tablename__ = 'book_read_link'
    # Allowed values for read_status.
    STATUS_UNREAD = 0
    STATUS_FINISHED = 1
    STATUS_IN_PROGRESS = 2
    id = Column(Integer, primary_key=True)
    book_id = Column(Integer, unique=False)
    user_id = Column(Integer, ForeignKey('user.id'), unique=False)
    read_status = Column(Integer, unique=False, default=STATUS_UNREAD, nullable=False)
    kobo_reading_state = relationship("KoboReadingState", uselist=False,
                                      primaryjoin="and_(ReadBook.user_id == foreign(KoboReadingState.user_id), "
                                                  "ReadBook.book_id == foreign(KoboReadingState.book_id))",
                                      cascade="all",
                                      backref=backref("book_read_link",
                                                      uselist=False))
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    last_time_started_reading = Column(DateTime, nullable=True)
    times_started_reading = Column(Integer, default=0, nullable=False)
class Bookmark(Base):
    """Reader position bookmark for one user / book / format combination."""
    __tablename__ = 'bookmark'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    book_id = Column(Integer)
    format = Column(String(collation='NOCASE'))
    bookmark_key = Column(String)
# Baseclass representing books that are archived on the user's Kobo device.
class ArchivedBook(Base):
    """Per-user flag for books archived on the user's Kobo device."""
    __tablename__ = 'archived_book'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    book_id = Column(Integer)
    is_archived = Column(Boolean, unique=False)
    last_modified = Column(DateTime, default=datetime.datetime.utcnow)
# The Kobo ReadingState API keeps track of 4 timestamped entities:
# ReadingState, StatusInfo, Statistics, CurrentBookmark
# Which we map to the following 4 tables:
# KoboReadingState, ReadBook, KoboStatistics and KoboBookmark
class KoboReadingState(Base):
    """Root entity of the Kobo ReadingState API; owns bookmark and statistics."""
    __tablename__ = 'kobo_reading_state'
    id = Column(Integer, primary_key=True, autoincrement=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    book_id = Column(Integer)
    # Both timestamps are bumped by the before_flush listener when children change.
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    priority_timestamp = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    current_bookmark = relationship("KoboBookmark", uselist=False, backref="kobo_reading_state", cascade="all")
    statistics = relationship("KoboStatistics", uselist=False, backref="kobo_reading_state", cascade="all")
class KoboBookmark(Base):
    # Current bookmark/position inside a book, child of KoboReadingState
    # (reachable via the "kobo_reading_state" backref).
    __tablename__ = 'kobo_bookmark'
    id = Column(Integer, primary_key=True)
    kobo_reading_state_id = Column(Integer, ForeignKey('kobo_reading_state.id'))
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    # Location triple as reported by the Kobo API — semantics of the three
    # fields are device-defined; stored verbatim.
    location_source = Column(String)
    location_type = Column(String)
    location_value = Column(String)
    progress_percent = Column(Float)
    content_source_progress_percent = Column(Float)
class KoboStatistics(Base):
    # Reading-time statistics for one KoboReadingState (child row, reachable
    # via the "kobo_reading_state" backref).
    __tablename__ = 'kobo_statistics'
    id = Column(Integer, primary_key=True)
    kobo_reading_state_id = Column(Integer, ForeignKey('kobo_reading_state.id'))
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    remaining_time_minutes = Column(Integer)
    spent_reading_minutes = Column(Integer)
@event.listens_for(Session, 'before_flush')
def receive_before_flush(session, flush_context, instances):
    """Keep parent ``last_modified`` timestamps fresh before each flush.

    Any new/dirty child of a KoboReadingState (ReadBook, KoboStatistics,
    KoboBookmark) bumps its parent's timestamp; any BookShelf link being
    added or removed bumps the owning Shelf's timestamp.
    """
    for pending in itertools.chain(session.new, session.dirty):
        if isinstance(pending, (ReadBook, KoboStatistics, KoboBookmark)) \
                and pending.kobo_reading_state:
            pending.kobo_reading_state.last_modified = datetime.datetime.utcnow()
    # Shelf membership changes (including deletions) touch the shelf itself.
    for pending in itertools.chain(session.new, session.deleted):
        if isinstance(pending, BookShelf):
            pending.ub_shelf.last_modified = datetime.datetime.utcnow()
# Baseclass representing Downloads from calibre-web in app.db
class Downloads(Base):
    # One row per (user, book) download, written by update_download().
    __tablename__ = 'downloads'
    id = Column(Integer, primary_key=True)
    book_id = Column(Integer)  # no ForeignKey — book presumably lives in the external metadata DB
    user_id = Column(Integer, ForeignKey('user.id'))
    def __repr__(self):
        # Fix: the format string was missing the closing '>' that every other
        # __repr__ in this module includes ('<Download %r' -> '<Download %r>').
        return '<Download %r>' % self.book_id
# Baseclass representing allowed domains for registration
class Registration(Base):
    # E-mail-domain rule for self-registration. The migrations seed a default
    # entry ('%.%', allow=1) — presumably an allow-anything wildcard pattern;
    # verify against the code that evaluates these rules.
    __tablename__ = 'registration'
    id = Column(Integer, primary_key=True)
    domain = Column(String)  # domain pattern to match
    allow = Column(Integer)  # 1 = allow matching domains (see migrate_registration_table)
    def __repr__(self):
        return u"<Registration('{0}')>".format(self.domain)
class RemoteAuthToken(Base):
    # Short-lived token for remote login flows. token_type defaults to 0;
    # tokens of type 1 are exempt from expiry cleanup (see clean_database()),
    # so type 1 presumably marks long-lived (e.g. Kobo) tokens — confirm.
    __tablename__ = 'remote_auth_token'
    id = Column(Integer, primary_key=True)
    auth_token = Column(String, unique=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    verified = Column(Boolean, default=False)  # set once the remote side confirms the login
    expiration = Column(DateTime)
    token_type = Column(Integer, default=0)
    def __init__(self):
        # NOTE(review): only 4 random bytes (8 hex chars) of entropy — weak for
        # an authentication token; consider secrets.token_hex() with more bytes.
        self.auth_token = (hexlify(os.urandom(4))).decode('utf-8')
        self.expiration = datetime.datetime.now() + datetime.timedelta(minutes=10)  # 10 min from now
    def __repr__(self):
        return '<Token %r>' % self.id
# Add missing tables during migration of database
def add_missing_tables(engine, session):
    """Create any of calibre-web's own tables that do not exist yet.

    When the ``registration`` table has to be created, a default entry
    ('%.%', allow=1) is inserted into it as well.
    """
    plain_tables = (
        ("book_read_link", ReadBook),
        ("bookmark", Bookmark),
        ("kobo_reading_state", KoboReadingState),
        ("kobo_bookmark", KoboBookmark),
        ("kobo_statistics", KoboStatistics),
        ("archived_book", ArchivedBook),
    )
    for table_name, model in plain_tables:
        if not engine.dialect.has_table(engine.connect(), table_name):
            model.__table__.create(bind=engine)
    # registration additionally needs its default wildcard row.
    if not engine.dialect.has_table(engine.connect(), "registration"):
        Registration.__table__.create(bind=engine)
        with engine.connect() as conn:
            conn.execute("insert into registration (domain, allow) values('%.%',1)")
        session.commit()
# migrate all settings missing in registration table
def migrate_registration_table(engine, session):
    """Bring an older ``registration`` table up to the current schema.

    Adds the missing ``allow`` column (marking existing rows as allowed) and
    seeds the default wildcard entry when the table is empty. Exits the
    process with status 2 when the database cannot be written.
    """
    try:
        # Probing the column raises OperationalError on a pre-'allow' schema.
        session.query(exists().where(Registration.allow)).scalar()
        session.commit()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        with engine.connect() as conn:
            conn.execute("ALTER TABLE registration ADD column 'allow' INTEGER")
            conn.execute("update registration set 'allow' = 1")
        session.commit()
    try:
        # Table exists but may have no content — seed the default entry.
        if not session.query(Registration).count():
            with engine.connect() as conn:
                conn.execute("insert into registration (domain, allow) values('%.%',1)")
            session.commit()
    except exc.OperationalError:  # Database is not writeable
        print('Settings database is not writeable. Exiting...')
        sys.exit(2)
# Remove login capability of user Guest
def migrate_guest_password(engine):
    """Blank any stored password of the 'Guest' account so it cannot log in.

    Exits the process with status 2 when the database is not writeable.
    """
    try:
        with engine.connect() as conn:
            transaction = conn.begin()
            conn.execute(text("UPDATE user SET password='' where name = 'Guest' and password !=''"))
            transaction.commit()
    except exc.OperationalError:
        print('Settings database is not writeable. Exiting...')
        sys.exit(2)
def migrate_shelfs(engine, session):
    """Upgrade the shelf / book_shelf_link tables to the current schema.

    Three independent probes, each adding its columns only when the probe
    query raises OperationalError (column missing):
    1. Shelf.uuid era: adds uuid/created/last_modified/kobo_sync to shelf and
       date_added to book_shelf_link, then backfills values for existing rows.
    2. Shelf.kobo_sync added later on its own.
    3. BookShelf.order for manual shelf ordering.
    """
    try:
        session.query(exists().where(Shelf.uuid)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute("ALTER TABLE shelf ADD column 'uuid' STRING")
            conn.execute("ALTER TABLE shelf ADD column 'created' DATETIME")
            conn.execute("ALTER TABLE shelf ADD column 'last_modified' DATETIME")
            conn.execute("ALTER TABLE book_shelf_link ADD column 'date_added' DATETIME")
            conn.execute("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false")
        # Backfill the new columns for pre-existing rows via the ORM.
        for shelf in session.query(Shelf).all():
            shelf.uuid = str(uuid.uuid4())
            shelf.created = datetime.datetime.now()
            shelf.last_modified = datetime.datetime.now()
        for book_shelf in session.query(BookShelf).all():
            book_shelf.date_added = datetime.datetime.now()
        session.commit()
    try:
        session.query(exists().where(Shelf.kobo_sync)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false")
        session.commit()
    try:
        session.query(exists().where(BookShelf.order)).scalar()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        with engine.connect() as conn:
            conn.execute("ALTER TABLE book_shelf_link ADD column 'order' INTEGER DEFAULT 1")
        session.commit()
def migrate_readBook(engine, session):
    """Upgrade book_read_link from the old boolean ``is_read`` schema.

    Adds read_status (migrating is_read=True rows to status 1) plus the
    timestamp/counter columns, then backfills last_modified for rows that
    still have NULL there.
    """
    try:
        session.query(exists().where(ReadBook.read_status)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute("ALTER TABLE book_read_link ADD column 'read_status' INTEGER DEFAULT 0")
            conn.execute("UPDATE book_read_link SET 'read_status' = 1 WHERE is_read")
            conn.execute("ALTER TABLE book_read_link ADD column 'last_modified' DATETIME")
            conn.execute("ALTER TABLE book_read_link ADD column 'last_time_started_reading' DATETIME")
            conn.execute("ALTER TABLE book_read_link ADD column 'times_started_reading' INTEGER DEFAULT 0")
        session.commit()
    # '== None' is intentional: SQLAlchemy turns it into "IS NULL".
    stale_rows = session.query(ReadBook).filter(ReadBook.last_modified == None).all()  # noqa: E711
    for entry in stale_rows:
        entry.last_modified = datetime.datetime.utcnow()
    session.commit()
def migrate_remoteAuthToken(engine, session):
    """Add the ``token_type`` column to remote_auth_token when missing.

    The probe query raises OperationalError on the old schema; existing
    tokens are then classified as type 0 (regular remote-login tokens).
    """
    try:
        session.query(exists().where(RemoteAuthToken.token_type)).scalar()
        session.commit()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        with engine.connect() as conn:
            conn.execute("ALTER TABLE remote_auth_token ADD column 'token_type' INTEGER DEFAULT 0")
            conn.execute("update remote_auth_token set 'token_type' = 0")
        session.commit()
# Migrate database to current version, has to be updated after every database change. Currently migration from
# everywhere to current should work. Migration is done by checking if relevant columns are existing, and than adding
# rows with SQL commands
def migrate_Database(session):
    """Upgrade the whole app database schema in place, step by step.

    Each step probes for a column/table and only alters the schema when the
    probe fails with OperationalError, so running this on an already-current
    database is a no-op. The steps are order-dependent; keep the sequence.
    """
    engine = session.bind
    add_missing_tables(engine, session)
    migrate_registration_table(engine, session)
    migrate_readBook(engine, session)
    migrate_remoteAuthToken(engine, session)
    migrate_shelfs(engine, session)
    try:
        # 'create' records whether sidebar_view had to be added just now.
        create = False
        session.query(exists().where(User.sidebar_view)).scalar()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        with engine.connect() as conn:
            conn.execute("ALTER TABLE user ADD column `sidebar_view` Integer DEFAULT 1")
        session.commit()
        create = True
    try:
        # If sidebar_view was just created, probe for the legacy per-category
        # flag columns (language_books etc.). When they are gone this SELECT
        # succeeds and nothing more is needed ...
        if create:
            with engine.connect() as conn:
                conn.execute("SELECT language_books FROM user")
            session.commit()
    except exc.OperationalError:
        # ... otherwise compute sidebar_view as a bitmask from the legacy
        # boolean columns using the SIDEBAR_* flag constants.
        with engine.connect() as conn:
            conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
                         "+ series_books * :side_series + category_books * :side_category + hot_books * "
                         ":side_hot + :side_autor + :detail_random)",
                         {'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE,
                          'side_series': constants.SIDEBAR_SERIES, 'side_category': constants.SIDEBAR_CATEGORY,
                          'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
                          'detail_random': constants.DETAIL_RANDOM})
        session.commit()
    try:
        session.query(exists().where(User.denied_tags)).scalar()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        # Tag/column based access restrictions were added in one release.
        with engine.connect() as conn:
            conn.execute("ALTER TABLE user ADD column `denied_tags` String DEFAULT ''")
            conn.execute("ALTER TABLE user ADD column `allowed_tags` String DEFAULT ''")
            conn.execute("ALTER TABLE user ADD column `denied_column_value` String DEFAULT ''")
            conn.execute("ALTER TABLE user ADD column `allowed_column_value` String DEFAULT ''")
        session.commit()
    try:
        session.query(exists().where(User.view_settings)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute("ALTER TABLE user ADD column `view_settings` VARCHAR(10) DEFAULT '{}'")
        session.commit()
    try:
        session.query(exists().where(User.kobo_only_shelves_sync)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute("ALTER TABLE user ADD column `kobo_only_shelves_sync` SMALLINT DEFAULT 0")
        session.commit()
    try:
        # check if name is in User table instead of nickname
        session.query(exists().where(User.name)).scalar()
    except exc.OperationalError:
        # Create new table user_id and copy contents of table user into it
        # (SQLite cannot rename a column in old versions, hence the
        # create/copy/drop/rename dance).
        with engine.connect() as conn:
            conn.execute(text("CREATE TABLE user_id (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                              "name VARCHAR(64),"
                              "email VARCHAR(120),"
                              "role SMALLINT,"
                              "password VARCHAR,"
                              "kindle_mail VARCHAR(120),"
                              "locale VARCHAR(2),"
                              "sidebar_view INTEGER,"
                              "default_language VARCHAR(3),"
                              "denied_tags VARCHAR,"
                              "allowed_tags VARCHAR,"
                              "denied_column_value VARCHAR,"
                              "allowed_column_value VARCHAR,"
                              "view_settings JSON,"
                              "kobo_only_shelves_sync SMALLINT,"
                              "UNIQUE (name),"
                              "UNIQUE (email))"))
            conn.execute(text("INSERT INTO user_id(id, name, email, role, password, kindle_mail,locale,"
                              "sidebar_view, default_language, denied_tags, allowed_tags, denied_column_value, "
                              "allowed_column_value, view_settings, kobo_only_shelves_sync)"
                              "SELECT id, nickname, email, role, password, kindle_mail, locale,"
                              "sidebar_view, default_language, denied_tags, allowed_tags, denied_column_value, "
                              "allowed_column_value, view_settings, kobo_only_shelves_sync FROM user"))
            # delete old user table and rename new user_id table to user:
            conn.execute(text("DROP TABLE user"))
            conn.execute(text("ALTER TABLE user_id RENAME TO user"))
        session.commit()
    # Ensure exactly the anonymous Guest account exists, then disable its login.
    if session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
            is None:
        create_anonymous_user(session)
    migrate_guest_password(engine)
def clean_database(session):
    """Purge expired remote-login tokens; tokens of type 1 are kept forever."""
    # Remove expired remote login tokens
    cutoff = datetime.datetime.now()
    expired = session.query(RemoteAuthToken) \
        .filter(cutoff > RemoteAuthToken.expiration) \
        .filter(RemoteAuthToken.token_type != 1)
    expired.delete()
    session.commit()
# Save downloaded books per user in calibre-web's own database
def update_download(book_id, user_id):
    """Record one download per (user, book) pair; best-effort commit."""
    already_recorded = session.query(Downloads) \
        .filter(Downloads.user_id == user_id) \
        .filter(Downloads.book_id == book_id) \
        .first()
    if already_recorded is None:
        session.add(Downloads(user_id=user_id, book_id=book_id))
        try:
            session.commit()
        except exc.OperationalError:
            # Best effort: a failed write must not break the download itself.
            session.rollback()
# Delete non-existing downloaded books in calibre-web's own database
def delete_download(book_id):
    """Drop every download record referring to the given book id."""
    session.query(Downloads).filter(Downloads.book_id == book_id).delete()
    try:
        session.commit()
    except exc.OperationalError:
        session.rollback()
# Generate user Guest (translated text), as anonymous user, no rights
def create_anonymous_user(session):
    """Insert the anonymous 'Guest' account (no rights, empty password)."""
    guest = User()
    guest.name = "Guest"
    guest.email = 'no@email'
    guest.role = constants.ROLE_ANONYMOUS
    guest.password = ''
    session.add(guest)
    try:
        session.commit()
    except Exception:
        session.rollback()
# Generate User admin with admin123 password, and access to everything
def create_admin_user(session):
    """Insert the default 'admin' account with full rights and default password."""
    admin = User()
    admin.name = "admin"
    admin.role = constants.ADMIN_USER_ROLES
    admin.sidebar_view = constants.ADMIN_USER_SIDEBAR
    admin.password = generate_password_hash(constants.DEFAULT_PASSWORD)
    session.add(admin)
    try:
        session.commit()
    except Exception:
        session.rollback()
def init_db(app_db_path):
    """Create/open the app database, migrate it, and handle CLI password reset.

    Sets the module-level ``session`` and ``app_DB_path`` globals. For a
    fresh database the default admin and Guest accounts are created; for an
    existing one the schema is migrated and expired tokens are purged.
    Exit codes: 0 password changed, 3 bad username / failed change,
    4 empty password.
    """
    # Open session for database connection
    global session
    global app_DB_path
    app_DB_path = app_db_path
    engine = create_engine(u'sqlite:///{0}'.format(app_db_path), echo=False)
    Session = scoped_session(sessionmaker())
    Session.configure(bind=engine)
    session = Session()
    if os.path.exists(app_db_path):
        # Existing DB: create any brand-new tables, then run migrations.
        Base.metadata.create_all(engine)
        migrate_Database(session)
        clean_database(session)
    else:
        # Fresh DB: create the schema plus the two default accounts.
        Base.metadata.create_all(engine)
        create_admin_user(session)
        create_anonymous_user(session)
    # Optional "user:password" credential reset passed on the command line;
    # always terminates the process.
    if cli.user_credentials:
        username, password = cli.user_credentials.split(':', 1)
        user = session.query(User).filter(func.lower(User.name) == username.lower()).first()
        if user:
            if not password:
                print("Empty password is not allowed")
                sys.exit(4)
            user.password = generate_password_hash(password)
            # Empty return value from session_commit() signals success;
            # a non-empty string would signal a failed commit.
            if session_commit() == "":
                print("Password for user '{}' changed".format(username))
                sys.exit(0)
            else:
                print("Failed changing password")
                sys.exit(3)
        else:
            print("Username '{}' not valid, can't change password".format(username))
            sys.exit(3)
def dispose():
    """Tear down the module-level session and its engine's connection pool.

    Both close() and dispose() are best-effort; the global is cleared first
    so no caller can reuse the session while it is being shut down.
    """
    global session
    closing, session = session, None
    if closing:
        try:
            closing.close()
        except Exception:
            pass
        if closing.bind:
            try:
                closing.bind.dispose()
            except Exception:
                pass
def session_commit(success=None):
    """Commit the module-level session, rolling back on failure.

    Args:
        success: optional message logged at info level after a successful commit.

    Returns:
        "" on success, or the exception text on failure. Callers (e.g.
        init_db) compare the result with "" to detect success.

    Fix: the original returned "" unconditionally, so commit failures were
    indistinguishable from success and init_db's failure branch was
    unreachable; now the error text is returned after rollback.
    """
    try:
        session.commit()
        if success:
            log.info(success)
        return ""
    except (exc.OperationalError, exc.InvalidRequestError) as e:
        session.rollback()
        log.debug_or_exception(e)
        return str(e)
| xss | {
"code": [
" log.info(\"Login and store session : \" + flask_session.get('_id', \"\"))",
" log.info(\"Found stored session : \" + flask_session.get('_id', \"\"))",
" log.info(\"Deleted session_key : \" + session_key)"
],
"line_no": [
76,
78,
87
]
} | {
"code": [
" log.debug(\"Login and store session : \" + flask_session.get('_id', \"\"))",
" log.debug(\"Found stored session: \" + flask_session.get('_id', \"\"))",
" log.debug(\"Deleted session_key: \" + session_key)"
],
"line_no": [
76,
78,
87
]
} |
from __future__ import division, print_function, unicode_literals
import os
import sys
import datetime
import itertools
import .uuid
from flask import .session as VAR_92
from binascii import hexlify
from flask_login import .AnonymousUserMixin, VAR_18
from flask_login import .user_logged_in
from contextlib import contextmanager
try:
from flask_dance.consumer.backend.sqla import .OAuthConsumerMixin
VAR_16 = True
except ImportError as e:
try:
from flask_dance.consumer.storage.sqla import .OAuthConsumerMixin
VAR_16 = True
except ImportError as e:
VAR_16 = False
from sqlalchemy import .create_engine, exc, exists, event, text
from sqlalchemy import Column, ForeignKey
from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime, Float, JSON
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.sql.expression import func
try:
from sqlalchemy.orm import declarative_base
except ImportError:
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import backref, relationship, sessionmaker, VAR_83, scoped_session
from werkzeug.security import generate_password_hash
from . import constants, logger, cli
VAR_0 = logger.create()
VAR_1 = None
VAR_2 = None
VAR_3 = declarative_base()
VAR_4 = {}
def FUNC_0(VAR_5, VAR_6):
FUNC_1()
def FUNC_1():
if VAR_92.get('_user_id', ""):
try:
if not FUNC_3(VAR_92.get('_user_id', ""), VAR_92.get('_id', "")):
VAR_94 = CLASS_4(VAR_92.get('_user_id', ""), VAR_92.get('_id', ""))
VAR_1.add(VAR_94)
VAR_1.commit()
VAR_0.info("Login and store VAR_1 : " + VAR_92.get('_id', ""))
else:
VAR_0.info("Found stored VAR_1 : " + VAR_92.get('_id', ""))
except (exc.OperationalError, exc.InvalidRequestError) as e:
VAR_1.rollback()
VAR_0.exception(e)
else:
VAR_0.error("No VAR_6 VAR_23 in session")
def FUNC_2(VAR_7, VAR_8):
try:
VAR_0.info("Deleted VAR_8 : " + VAR_8)
VAR_1.query(CLASS_4).filter(CLASS_4.user_id==VAR_7,
CLASS_4.session_key==VAR_8).delete()
VAR_1.commit()
except (exc.OperationalError, exc.InvalidRequestError):
VAR_1.rollback()
VAR_0.exception(e)
def FUNC_3(VAR_7, VAR_8):
try:
return bool(VAR_1.query(CLASS_4).filter(CLASS_4.user_id==VAR_7,
CLASS_4.session_key==VAR_8).one_or_none())
except (exc.OperationalError, exc.InvalidRequestError):
VAR_1.rollback()
VAR_0.exception(e)
user_logged_in.connect(FUNC_0)
def FUNC_4(VAR_9):
VAR_17 = list()
for element in VAR_9:
VAR_17.append(element.id)
VAR_4[VAR_18.id] = VAR_17
class CLASS_0:
@property
def FUNC_21(self):
return self.is_active
def FUNC_22(self, VAR_19):
return constants.has_flag(self.role, VAR_19)
def FUNC_23(self):
return self._has_role(constants.ROLE_ADMIN)
def FUNC_24(self):
return self._has_role(constants.ROLE_DOWNLOAD)
def FUNC_25(self):
return self._has_role(constants.ROLE_UPLOAD)
def FUNC_26(self):
return self._has_role(constants.ROLE_EDIT)
def FUNC_27(self):
return self._has_role(constants.ROLE_PASSWD)
def FUNC_28(self):
return self._has_role(constants.ROLE_ANONYMOUS)
def FUNC_29(self):
return self._has_role(constants.ROLE_EDIT_SHELFS)
def FUNC_30(self):
return self._has_role(constants.ROLE_DELETE_BOOKS)
def FUNC_31(self):
return self._has_role(constants.ROLE_VIEWER)
@property
def FUNC_32(self):
return True
@property
def FUNC_33(self):
return self.role_anonymous()
def FUNC_34(self):
return str(self.id)
def FUNC_35(self):
return self.default_language
def FUNC_36(self, VAR_20):
if VAR_20 == constants.SIDEBAR_RECENT:
return True
return constants.has_flag(self.sidebar_view, VAR_20)
def FUNC_37(self):
return self.check_visibility(constants.DETAIL_RANDOM)
def FUNC_38(self):
VAR_85 = self.denied_tags or ""
return [t.strip() for t in VAR_85.split(",")]
def FUNC_39(self):
VAR_85 = self.allowed_tags or ""
return [t.strip() for t in VAR_85.split(",")]
def FUNC_40(self):
VAR_85 = self.denied_column_value or ""
return [t.strip() for t in VAR_85.split(",")]
def FUNC_41(self):
VAR_85 = self.allowed_column_value or ""
return [t.strip() for t in VAR_85.split(",")]
def FUNC_42(self, VAR_21, VAR_22):
if not self.view_settings.get(VAR_21):
return None
return self.view_settings[VAR_21].get(VAR_22)
def FUNC_43(self, VAR_21, VAR_22, VAR_20):
if not self.view_settings.get(VAR_21):
self.view_settings[VAR_21] = dict()
self.view_settings[VAR_21][VAR_22] = VAR_20
try:
flag_modified(self, "view_settings")
except AttributeError:
pass
try:
VAR_1.commit()
except (exc.OperationalError, exc.InvalidRequestError):
VAR_1.rollback()
def __repr__(self):
return '<CLASS_1 %r>' % self.name
class CLASS_1(CLASS_0, VAR_3):
__tablename__ = 'user'
__table_args__ = {'sqlite_autoincrement': True}
VAR_23 = Column(Integer, primary_key=True)
VAR_24 = Column(String(64), unique=True)
VAR_25 = Column(String(120), unique=True, default="")
VAR_26 = Column(SmallInteger, default=constants.ROLE_USER)
VAR_27 = Column(String)
VAR_28 = Column(String(120), default="")
VAR_29 = relationship('Shelf', backref='user', lazy='dynamic', order_by='Shelf.name')
VAR_30 = relationship('Downloads', backref='user', lazy='dynamic')
VAR_31 = Column(String(2), default="en")
VAR_32 = Column(Integer, default=1)
VAR_33 = Column(String(3), default="all")
VAR_34 = Column(String, default="")
VAR_35 = Column(String, default="")
VAR_36 = Column(String, default="")
VAR_37 = Column(String, default="")
VAR_38 = relationship('RemoteAuthToken', backref='user', lazy='dynamic')
VAR_39 = Column(JSON, default={})
VAR_40 = Column(Integer, default=0)
if VAR_16:
class CLASS_17(OAuthConsumerMixin, VAR_3):
VAR_86 = Column(String(256))
VAR_7 = Column(Integer, ForeignKey(CLASS_1.id))
VAR_6 = relationship(CLASS_1)
class CLASS_2(VAR_3):
__tablename__ = 'oauthProvider'
VAR_23 = Column(Integer, primary_key=True)
VAR_41 = Column(String)
VAR_42 = Column(String)
VAR_43 = Column(String)
VAR_44 = Column(Boolean)
class CLASS_3(AnonymousUserMixin, CLASS_0):
def __init__(self):
self.loadSettings()
def FUNC_44(self):
VAR_87 = VAR_1.query(CLASS_1).filter(CLASS_1.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS)\
.first() # type: CLASS_1
self.name = VAR_87.name
self.role = VAR_87.role
self.id=VAR_87.id
self.sidebar_view = VAR_87.sidebar_view
self.default_language = VAR_87.default_language
self.locale = VAR_87.locale
self.kindle_mail = VAR_87.kindle_mail
self.denied_tags = VAR_87.denied_tags
self.allowed_tags = VAR_87.allowed_tags
self.denied_column_value = VAR_87.denied_column_value
self.allowed_column_value = VAR_87.allowed_column_value
self.view_settings = VAR_87.view_settings
self.kobo_only_shelves_sync = VAR_87.kobo_only_shelves_sync
def FUNC_23(self):
return False
@property
def FUNC_32(self):
return False
@property
def FUNC_33(self):
return True
@property
def FUNC_21(self):
return False
def FUNC_42(self, VAR_21, VAR_22):
if 'view' in VAR_92:
if not VAR_92['view'].get(VAR_21):
return None
return VAR_92['view'][VAR_21].get(VAR_22)
return None
def FUNC_43(self, VAR_21, VAR_22, VAR_20):
if 'view' in VAR_92:
if not VAR_92['view'].get(VAR_21):
VAR_92['view'][VAR_21] = dict()
VAR_92['view'][VAR_21][VAR_22] = VAR_20
return None
class CLASS_4(VAR_3):
__tablename__ = 'user_session'
VAR_23 = Column(Integer, primary_key=True)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_8 = Column(String, default="")
def __init__(self, VAR_7, VAR_8):
self.user_id = VAR_7
self.session_key = VAR_8
class CLASS_5(VAR_3):
__tablename__ = 'shelf'
VAR_23 = Column(Integer, primary_key=True)
VAR_45 = Column(String, default=lambda: str(VAR_45.uuid4()))
VAR_24 = Column(String)
VAR_46 = Column(Integer, default=0)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_47 = Column(Boolean, default=False)
VAR_48 = relationship("BookShelf", backref="ub_shelf", cascade="all, delete-orphan", lazy="dynamic")
VAR_49 = Column(DateTime, default=datetime.datetime.utcnow)
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
def __repr__(self):
return '<CLASS_5 %d:%r>' % (self.id, self.name)
class CLASS_6(VAR_3):
__tablename__ = 'book_shelf_link'
VAR_23 = Column(Integer, primary_key=True)
VAR_13 = Column(Integer)
VAR_51 = Column(Integer)
VAR_29 = Column(Integer, ForeignKey('shelf.id'))
VAR_52 = Column(DateTime, default=datetime.datetime.utcnow)
def __repr__(self):
return '<Book %r>' % self.id
class CLASS_7(VAR_3):
__tablename__ = 'shelf_archive'
VAR_23 = Column(Integer, primary_key=True)
VAR_45 = Column(String)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow)
class CLASS_8(VAR_3):
__tablename__ = 'book_read_link'
VAR_53 = 0
VAR_54 = 1
VAR_55 = 2
VAR_23 = Column(Integer, primary_key=True)
VAR_13 = Column(Integer, unique=False)
VAR_7 = Column(Integer, ForeignKey('user.id'), unique=False)
VAR_56 = Column(Integer, unique=False, default=VAR_53, nullable=False)
VAR_57 = relationship("KoboReadingState", uselist=False,
primaryjoin="and_(CLASS_8.user_id == foreign(CLASS_11.user_id), "
"ReadBook.book_id == foreign(CLASS_11.book_id))",
cascade="all",
backref=backref("book_read_link",
uselist=False))
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
VAR_58 = Column(DateTime, nullable=True)
VAR_59 = Column(Integer, default=0, nullable=False)
class CLASS_9(VAR_3):
__tablename__ = 'bookmark'
VAR_23 = Column(Integer, primary_key=True)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_13 = Column(Integer)
VAR_60 = Column(String(collation='NOCASE'))
VAR_61 = Column(String)
class CLASS_10(VAR_3):
__tablename__ = 'archived_book'
VAR_23 = Column(Integer, primary_key=True)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_13 = Column(Integer)
VAR_62 = Column(Boolean, unique=False)
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow)
class CLASS_11(VAR_3):
__tablename__ = 'kobo_reading_state'
VAR_23 = Column(Integer, primary_key=True, autoincrement=True)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_13 = Column(Integer)
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
VAR_63 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
VAR_64 = relationship("KoboBookmark", uselist=False, backref="kobo_reading_state", cascade="all")
VAR_65 = relationship("KoboStatistics", uselist=False, backref="kobo_reading_state", cascade="all")
class CLASS_12(VAR_3):
__tablename__ = 'kobo_bookmark'
VAR_23 = Column(Integer, primary_key=True)
VAR_66 = Column(Integer, ForeignKey('kobo_reading_state.id'))
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
VAR_67 = Column(String)
VAR_68 = Column(String)
VAR_69 = Column(String)
VAR_70 = Column(Float)
VAR_71 = Column(Float)
class CLASS_13(VAR_3):
__tablename__ = 'kobo_statistics'
VAR_23 = Column(Integer, primary_key=True)
VAR_66 = Column(Integer, ForeignKey('kobo_reading_state.id'))
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
VAR_72 = Column(Integer)
VAR_73 = Column(Integer)
@event.listens_for(VAR_83, 'before_flush')
def FUNC_5(VAR_1, VAR_10, VAR_11):
for change in itertools.chain(VAR_1.new, VAR_1.dirty):
if isinstance(change, (CLASS_8, CLASS_13, CLASS_12)):
if change.kobo_reading_state:
change.kobo_reading_state.last_modified = datetime.datetime.utcnow()
for change in itertools.chain(VAR_1.new, VAR_1.deleted):
if isinstance(change, CLASS_6):
change.ub_shelf.last_modified = datetime.datetime.utcnow()
class CLASS_14(VAR_3):
__tablename__ = 'downloads'
VAR_23 = Column(Integer, primary_key=True)
VAR_13 = Column(Integer)
VAR_7 = Column(Integer, ForeignKey('user.id'))
def __repr__(self):
return '<Download %r' % self.book_id
class CLASS_15(VAR_3):
__tablename__ = 'registration'
VAR_23 = Column(Integer, primary_key=True)
VAR_74 = Column(String)
VAR_75 = Column(Integer)
def __repr__(self):
return u"<CLASS_15('{0}')>".format(self.domain)
class CLASS_16(VAR_3):
__tablename__ = 'remote_auth_token'
VAR_23 = Column(Integer, primary_key=True)
VAR_76 = Column(String, unique=True)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_77 = Column(Boolean, default=False)
VAR_78 = Column(DateTime)
VAR_79 = Column(Integer, default=0)
def __init__(self):
self.auth_token = (hexlify(os.urandom(4))).decode('utf-8')
self.expiration = datetime.datetime.now() + datetime.timedelta(minutes=10) # 10 min from VAR_81
def __repr__(self):
return '<Token %r>' % self.id
def FUNC_6(VAR_12, VAR_1):
if not VAR_12.dialect.has_table(VAR_12.connect(), "book_read_link"):
CLASS_8.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "bookmark"):
CLASS_9.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "kobo_reading_state"):
CLASS_11.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "kobo_bookmark"):
CLASS_12.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "kobo_statistics"):
CLASS_13.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "archived_book"):
CLASS_10.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "registration"):
CLASS_15.__table__.create(bind=VAR_12)
with VAR_12.connect() as conn:
conn.execute("insert into registration (VAR_74, VAR_75) values('%.%',1)")
VAR_1.commit()
def FUNC_7(VAR_12, VAR_1):
try:
VAR_1.query(exists().where(CLASS_15.allow)).scalar()
VAR_1.commit()
except exc.OperationalError: # Database is not compatible, some columns are missing
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE registration ADD column 'allow' INTEGER")
conn.execute("update registration set 'allow' = 1")
VAR_1.commit()
try:
VAR_88 = VAR_1.query(CLASS_15).count()
if not VAR_88:
with VAR_12.connect() as conn:
conn.execute("insert into registration (VAR_74, VAR_75) values('%.%',1)")
VAR_1.commit()
except exc.OperationalError: # Database is not writeable
print('Settings database is not writeable. Exiting...')
sys.exit(2)
def FUNC_8(VAR_12):
try:
with VAR_12.connect() as conn:
VAR_93 = conn.begin()
conn.execute(text("UPDATE VAR_6 SET VAR_27='' where VAR_24 = 'Guest' and VAR_27 !=''"))
VAR_93.commit()
except exc.OperationalError:
print('Settings database is not writeable. Exiting...')
sys.exit(2)
def FUNC_9(VAR_12, VAR_1):
try:
VAR_1.query(exists().where(CLASS_5.uuid)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_29 ADD column 'uuid' STRING")
conn.execute("ALTER TABLE VAR_29 ADD column 'created' DATETIME")
conn.execute("ALTER TABLE VAR_29 ADD column 'last_modified' DATETIME")
conn.execute("ALTER TABLE book_shelf_link ADD column 'date_added' DATETIME")
conn.execute("ALTER TABLE VAR_29 ADD column 'kobo_sync' BOOLEAN DEFAULT false")
for VAR_29 in VAR_1.query(CLASS_5).all():
VAR_29.uuid = str(VAR_45.uuid4())
VAR_29.created = datetime.datetime.now()
VAR_29.last_modified = datetime.datetime.now()
for book_shelf in VAR_1.query(CLASS_6).all():
book_shelf.date_added = datetime.datetime.now()
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_5.kobo_sync)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_29 ADD column 'kobo_sync' BOOLEAN DEFAULT false")
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_6.order)).scalar()
except exc.OperationalError: # Database is not compatible, some columns are missing
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE book_shelf_link ADD column 'order' INTEGER DEFAULT 1")
VAR_1.commit()
def FUNC_10(VAR_12, VAR_1):
try:
VAR_1.query(exists().where(CLASS_8.read_status)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE book_read_link ADD column 'read_status' INTEGER DEFAULT 0")
conn.execute("UPDATE book_read_link SET 'read_status' = 1 WHERE is_read")
conn.execute("ALTER TABLE book_read_link ADD column 'last_modified' DATETIME")
conn.execute("ALTER TABLE book_read_link ADD column 'last_time_started_reading' DATETIME")
conn.execute("ALTER TABLE book_read_link ADD column 'times_started_reading' INTEGER DEFAULT 0")
VAR_1.commit()
VAR_80 = VAR_1.query(CLASS_8).filter(CLASS_8.last_modified == None).all()
for book in VAR_80:
book.last_modified = datetime.datetime.utcnow()
VAR_1.commit()
def FUNC_11(VAR_12, VAR_1):
try:
VAR_1.query(exists().where(CLASS_16.token_type)).scalar()
VAR_1.commit()
except exc.OperationalError: # Database is not compatible, some columns are missing
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_38 ADD column 'token_type' INTEGER DEFAULT 0")
conn.execute("update VAR_38 set 'token_type' = 0")
VAR_1.commit()
def FUNC_12(VAR_1):
VAR_12 = VAR_1.bind
FUNC_6(VAR_12, VAR_1)
FUNC_7(VAR_12, VAR_1)
FUNC_10(VAR_12, VAR_1)
FUNC_11(VAR_12, VAR_1)
FUNC_9(VAR_12, VAR_1)
try:
VAR_89 = False
VAR_1.query(exists().where(CLASS_1.sidebar_view)).scalar()
except exc.OperationalError: # Database is not compatible, some columns are missing
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_32` Integer DEFAULT 1")
VAR_1.commit()
VAR_89 = True
try:
if VAR_89:
with VAR_12.connect() as conn:
conn.execute("SELECT language_books FROM user")
VAR_1.commit()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("UPDATE VAR_6 SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
"+ series_books * :side_series + category_books * :side_category + hot_books * "
":side_hot + :side_autor + :detail_random)",
{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE,
'side_series': constants.SIDEBAR_SERIES, 'side_category': constants.SIDEBAR_CATEGORY,
'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
'detail_random': constants.DETAIL_RANDOM})
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_1.denied_tags)).scalar()
except exc.OperationalError: # Database is not compatible, some columns are missing
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_34` String DEFAULT ''")
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_35` String DEFAULT ''")
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_36` String DEFAULT ''")
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_37` String DEFAULT ''")
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_1.view_settings)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_39` VARCHAR(10) DEFAULT '{}'")
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_1.kobo_only_shelves_sync)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_40` SMALLINT DEFAULT 0")
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_1.name)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute(text("CREATE TABLE VAR_7 (VAR_23 INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
"name VARCHAR(64),"
"email VARCHAR(120),"
"role SMALLINT,"
"password VARCHAR,"
"kindle_mail VARCHAR(120),"
"locale VARCHAR(2),"
"sidebar_view INTEGER,"
"default_language VARCHAR(3),"
"denied_tags VARCHAR,"
"allowed_tags VARCHAR,"
"denied_column_value VARCHAR,"
"allowed_column_value VARCHAR,"
"view_settings JSON,"
"kobo_only_shelves_sync SMALLINT,"
"UNIQUE (VAR_24),"
"UNIQUE (VAR_25))"))
conn.execute(text("INSERT INTO VAR_7(VAR_23, VAR_24, VAR_25, VAR_26, VAR_27, VAR_28,VAR_31,"
"sidebar_view, VAR_33, VAR_34, VAR_35, VAR_36, "
"allowed_column_value, VAR_39, VAR_40)"
"SELECT VAR_23, nickname, VAR_25, VAR_26, VAR_27, VAR_28, VAR_31,"
"sidebar_view, VAR_33, VAR_34, VAR_35, VAR_36, "
"allowed_column_value, VAR_39, VAR_40 FROM user"))
conn.execute(text("DROP TABLE user"))
conn.execute(text("ALTER TABLE VAR_7 RENAME TO user"))
VAR_1.commit()
if VAR_1.query(CLASS_1).filter(CLASS_1.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
is None:
FUNC_16(VAR_1)
FUNC_8(VAR_12)
def FUNC_13(VAR_1):
VAR_81 = datetime.datetime.now()
VAR_1.query(CLASS_16).filter(VAR_81 > CLASS_16.expiration).\
filter(CLASS_16.token_type != 1).delete()
VAR_1.commit()
def FUNC_14(VAR_13, VAR_7):
VAR_82 = VAR_1.query(CLASS_14).filter(CLASS_14.user_id == VAR_7).filter(CLASS_14.book_id == VAR_13).first()
if not VAR_82:
VAR_90 = CLASS_14(VAR_7=user_id, VAR_13=book_id)
VAR_1.add(VAR_90)
try:
VAR_1.commit()
except exc.OperationalError:
VAR_1.rollback()
def FUNC_15(VAR_13):
VAR_1.query(CLASS_14).filter(VAR_13 == CLASS_14.book_id).delete()
try:
VAR_1.commit()
except exc.OperationalError:
VAR_1.rollback()
def FUNC_16(VAR_1):
VAR_6 = CLASS_1()
VAR_6.name = "Guest"
VAR_6.email = 'no@email'
VAR_6.role = constants.ROLE_ANONYMOUS
VAR_6.password = ''
VAR_1.add(VAR_6)
try:
VAR_1.commit()
except Exception:
VAR_1.rollback()
def FUNC_17(VAR_1):
VAR_6 = CLASS_1()
VAR_6.name = "admin"
VAR_6.role = constants.ADMIN_USER_ROLES
VAR_6.sidebar_view = constants.ADMIN_USER_SIDEBAR
VAR_6.password = generate_password_hash(constants.DEFAULT_PASSWORD)
VAR_1.add(VAR_6)
try:
VAR_1.commit()
except Exception:
VAR_1.rollback()
def FUNC_18(VAR_14):
global VAR_1
global VAR_2
app_DB_path = VAR_14
VAR_12 = create_engine(u'sqlite:///{0}'.format(VAR_14), echo=False)
VAR_83 = scoped_session(sessionmaker())
VAR_83.configure(bind=VAR_12)
VAR_1 = VAR_83()
if os.path.exists(VAR_14):
VAR_3.metadata.create_all(VAR_12)
FUNC_12(VAR_1)
FUNC_13(VAR_1)
else:
VAR_3.metadata.create_all(VAR_12)
FUNC_17(VAR_1)
FUNC_16(VAR_1)
if cli.user_credentials:
VAR_91, VAR_27 = cli.user_credentials.split(':', 1)
VAR_6 = VAR_1.query(CLASS_1).filter(func.lower(CLASS_1.name) == VAR_91.lower()).first()
if VAR_6:
if not VAR_27:
print("Empty VAR_27 is not allowed")
sys.exit(4)
VAR_6.password = generate_password_hash(VAR_27)
if FUNC_20() == "":
print("Password for VAR_6 '{}' changed".format(VAR_91))
sys.exit(0)
else:
print("Failed changing password")
sys.exit(3)
else:
print("Username '{}' not valid, can't change password".format(VAR_91))
sys.exit(3)
def FUNC_19():
global VAR_1
VAR_84 = VAR_1
VAR_1 = None
if VAR_84:
try:
VAR_84.close()
except Exception:
pass
if VAR_84.bind:
try:
VAR_84.bind.dispose()
except Exception:
pass
def FUNC_20(VAR_15=None):
try:
VAR_1.commit()
if VAR_15:
VAR_0.info(VAR_15)
except (exc.OperationalError, exc.InvalidRequestError) as e:
VAR_1.rollback()
VAR_0.debug_or_exception(e)
return ""
|
from __future__ import division, print_function, unicode_literals
import os
import sys
import datetime
import itertools
import .uuid
from flask import .session as VAR_92
from binascii import hexlify
from flask_login import .AnonymousUserMixin, VAR_18
from flask_login import .user_logged_in
from contextlib import contextmanager
try:
from flask_dance.consumer.backend.sqla import .OAuthConsumerMixin
VAR_16 = True
except ImportError as e:
try:
from flask_dance.consumer.storage.sqla import .OAuthConsumerMixin
VAR_16 = True
except ImportError as e:
VAR_16 = False
from sqlalchemy import .create_engine, exc, exists, event, text
from sqlalchemy import Column, ForeignKey
from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime, Float, JSON
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.sql.expression import func
try:
from sqlalchemy.orm import declarative_base
except ImportError:
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import backref, relationship, sessionmaker, VAR_83, scoped_session
from werkzeug.security import generate_password_hash
from . import constants, logger, cli
VAR_0 = logger.create()
VAR_1 = None
VAR_2 = None
VAR_3 = declarative_base()
VAR_4 = {}
def FUNC_0(VAR_5, VAR_6):
FUNC_1()
def FUNC_1():
if VAR_92.get('_user_id', ""):
try:
if not FUNC_3(VAR_92.get('_user_id', ""), VAR_92.get('_id', "")):
VAR_94 = CLASS_4(VAR_92.get('_user_id', ""), VAR_92.get('_id', ""))
VAR_1.add(VAR_94)
VAR_1.commit()
VAR_0.debug("Login and store VAR_1 : " + VAR_92.get('_id', ""))
else:
VAR_0.debug("Found stored VAR_1: " + VAR_92.get('_id', ""))
except (exc.OperationalError, exc.InvalidRequestError) as e:
VAR_1.rollback()
VAR_0.exception(e)
else:
VAR_0.error("No VAR_6 VAR_23 in session")
def FUNC_2(VAR_7, VAR_8):
try:
VAR_0.debug("Deleted VAR_8: " + VAR_8)
VAR_1.query(CLASS_4).filter(CLASS_4.user_id==VAR_7,
CLASS_4.session_key==VAR_8).delete()
VAR_1.commit()
except (exc.OperationalError, exc.InvalidRequestError):
VAR_1.rollback()
VAR_0.exception(e)
def FUNC_3(VAR_7, VAR_8):
try:
return bool(VAR_1.query(CLASS_4).filter(CLASS_4.user_id==VAR_7,
CLASS_4.session_key==VAR_8).one_or_none())
except (exc.OperationalError, exc.InvalidRequestError):
VAR_1.rollback()
VAR_0.exception(e)
user_logged_in.connect(FUNC_0)
def FUNC_4(VAR_9):
VAR_17 = list()
for element in VAR_9:
VAR_17.append(element.id)
VAR_4[VAR_18.id] = VAR_17
class CLASS_0:
@property
def FUNC_21(self):
return self.is_active
def FUNC_22(self, VAR_19):
return constants.has_flag(self.role, VAR_19)
def FUNC_23(self):
return self._has_role(constants.ROLE_ADMIN)
def FUNC_24(self):
return self._has_role(constants.ROLE_DOWNLOAD)
def FUNC_25(self):
return self._has_role(constants.ROLE_UPLOAD)
def FUNC_26(self):
return self._has_role(constants.ROLE_EDIT)
def FUNC_27(self):
return self._has_role(constants.ROLE_PASSWD)
def FUNC_28(self):
return self._has_role(constants.ROLE_ANONYMOUS)
def FUNC_29(self):
return self._has_role(constants.ROLE_EDIT_SHELFS)
def FUNC_30(self):
return self._has_role(constants.ROLE_DELETE_BOOKS)
def FUNC_31(self):
return self._has_role(constants.ROLE_VIEWER)
@property
def FUNC_32(self):
return True
@property
def FUNC_33(self):
return self.role_anonymous()
def FUNC_34(self):
return str(self.id)
def FUNC_35(self):
return self.default_language
def FUNC_36(self, VAR_20):
if VAR_20 == constants.SIDEBAR_RECENT:
return True
return constants.has_flag(self.sidebar_view, VAR_20)
def FUNC_37(self):
return self.check_visibility(constants.DETAIL_RANDOM)
def FUNC_38(self):
VAR_85 = self.denied_tags or ""
return [t.strip() for t in VAR_85.split(",")]
def FUNC_39(self):
VAR_85 = self.allowed_tags or ""
return [t.strip() for t in VAR_85.split(",")]
def FUNC_40(self):
VAR_85 = self.denied_column_value or ""
return [t.strip() for t in VAR_85.split(",")]
def FUNC_41(self):
VAR_85 = self.allowed_column_value or ""
return [t.strip() for t in VAR_85.split(",")]
def FUNC_42(self, VAR_21, VAR_22):
if not self.view_settings.get(VAR_21):
return None
return self.view_settings[VAR_21].get(VAR_22)
def FUNC_43(self, VAR_21, VAR_22, VAR_20):
if not self.view_settings.get(VAR_21):
self.view_settings[VAR_21] = dict()
self.view_settings[VAR_21][VAR_22] = VAR_20
try:
flag_modified(self, "view_settings")
except AttributeError:
pass
try:
VAR_1.commit()
except (exc.OperationalError, exc.InvalidRequestError):
VAR_1.rollback()
def __repr__(self):
return '<CLASS_1 %r>' % self.name
class CLASS_1(CLASS_0, VAR_3):
__tablename__ = 'user'
__table_args__ = {'sqlite_autoincrement': True}
VAR_23 = Column(Integer, primary_key=True)
VAR_24 = Column(String(64), unique=True)
VAR_25 = Column(String(120), unique=True, default="")
VAR_26 = Column(SmallInteger, default=constants.ROLE_USER)
VAR_27 = Column(String)
VAR_28 = Column(String(120), default="")
VAR_29 = relationship('Shelf', backref='user', lazy='dynamic', order_by='Shelf.name')
VAR_30 = relationship('Downloads', backref='user', lazy='dynamic')
VAR_31 = Column(String(2), default="en")
VAR_32 = Column(Integer, default=1)
VAR_33 = Column(String(3), default="all")
VAR_34 = Column(String, default="")
VAR_35 = Column(String, default="")
VAR_36 = Column(String, default="")
VAR_37 = Column(String, default="")
VAR_38 = relationship('RemoteAuthToken', backref='user', lazy='dynamic')
VAR_39 = Column(JSON, default={})
VAR_40 = Column(Integer, default=0)
if VAR_16:
class CLASS_17(OAuthConsumerMixin, VAR_3):
VAR_86 = Column(String(256))
VAR_7 = Column(Integer, ForeignKey(CLASS_1.id))
VAR_6 = relationship(CLASS_1)
class CLASS_2(VAR_3):
__tablename__ = 'oauthProvider'
VAR_23 = Column(Integer, primary_key=True)
VAR_41 = Column(String)
VAR_42 = Column(String)
VAR_43 = Column(String)
VAR_44 = Column(Boolean)
class CLASS_3(AnonymousUserMixin, CLASS_0):
def __init__(self):
self.loadSettings()
def FUNC_44(self):
VAR_87 = VAR_1.query(CLASS_1).filter(CLASS_1.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS)\
.first() # type: CLASS_1
self.name = VAR_87.name
self.role = VAR_87.role
self.id=VAR_87.id
self.sidebar_view = VAR_87.sidebar_view
self.default_language = VAR_87.default_language
self.locale = VAR_87.locale
self.kindle_mail = VAR_87.kindle_mail
self.denied_tags = VAR_87.denied_tags
self.allowed_tags = VAR_87.allowed_tags
self.denied_column_value = VAR_87.denied_column_value
self.allowed_column_value = VAR_87.allowed_column_value
self.view_settings = VAR_87.view_settings
self.kobo_only_shelves_sync = VAR_87.kobo_only_shelves_sync
def FUNC_23(self):
return False
@property
def FUNC_32(self):
return False
@property
def FUNC_33(self):
return True
@property
def FUNC_21(self):
return False
def FUNC_42(self, VAR_21, VAR_22):
if 'view' in VAR_92:
if not VAR_92['view'].get(VAR_21):
return None
return VAR_92['view'][VAR_21].get(VAR_22)
return None
def FUNC_43(self, VAR_21, VAR_22, VAR_20):
if 'view' in VAR_92:
if not VAR_92['view'].get(VAR_21):
VAR_92['view'][VAR_21] = dict()
VAR_92['view'][VAR_21][VAR_22] = VAR_20
return None
class CLASS_4(VAR_3):
__tablename__ = 'user_session'
VAR_23 = Column(Integer, primary_key=True)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_8 = Column(String, default="")
def __init__(self, VAR_7, VAR_8):
self.user_id = VAR_7
self.session_key = VAR_8
class CLASS_5(VAR_3):
__tablename__ = 'shelf'
VAR_23 = Column(Integer, primary_key=True)
VAR_45 = Column(String, default=lambda: str(VAR_45.uuid4()))
VAR_24 = Column(String)
VAR_46 = Column(Integer, default=0)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_47 = Column(Boolean, default=False)
VAR_48 = relationship("BookShelf", backref="ub_shelf", cascade="all, delete-orphan", lazy="dynamic")
VAR_49 = Column(DateTime, default=datetime.datetime.utcnow)
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
def __repr__(self):
return '<CLASS_5 %d:%r>' % (self.id, self.name)
class CLASS_6(VAR_3):
__tablename__ = 'book_shelf_link'
VAR_23 = Column(Integer, primary_key=True)
VAR_13 = Column(Integer)
VAR_51 = Column(Integer)
VAR_29 = Column(Integer, ForeignKey('shelf.id'))
VAR_52 = Column(DateTime, default=datetime.datetime.utcnow)
def __repr__(self):
return '<Book %r>' % self.id
class CLASS_7(VAR_3):
__tablename__ = 'shelf_archive'
VAR_23 = Column(Integer, primary_key=True)
VAR_45 = Column(String)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow)
class CLASS_8(VAR_3):
__tablename__ = 'book_read_link'
VAR_53 = 0
VAR_54 = 1
VAR_55 = 2
VAR_23 = Column(Integer, primary_key=True)
VAR_13 = Column(Integer, unique=False)
VAR_7 = Column(Integer, ForeignKey('user.id'), unique=False)
VAR_56 = Column(Integer, unique=False, default=VAR_53, nullable=False)
VAR_57 = relationship("KoboReadingState", uselist=False,
primaryjoin="and_(CLASS_8.user_id == foreign(CLASS_11.user_id), "
"ReadBook.book_id == foreign(CLASS_11.book_id))",
cascade="all",
backref=backref("book_read_link",
uselist=False))
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
VAR_58 = Column(DateTime, nullable=True)
VAR_59 = Column(Integer, default=0, nullable=False)
class CLASS_9(VAR_3):
__tablename__ = 'bookmark'
VAR_23 = Column(Integer, primary_key=True)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_13 = Column(Integer)
VAR_60 = Column(String(collation='NOCASE'))
VAR_61 = Column(String)
class CLASS_10(VAR_3):
__tablename__ = 'archived_book'
VAR_23 = Column(Integer, primary_key=True)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_13 = Column(Integer)
VAR_62 = Column(Boolean, unique=False)
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow)
class CLASS_11(VAR_3):
__tablename__ = 'kobo_reading_state'
VAR_23 = Column(Integer, primary_key=True, autoincrement=True)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_13 = Column(Integer)
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
VAR_63 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
VAR_64 = relationship("KoboBookmark", uselist=False, backref="kobo_reading_state", cascade="all")
VAR_65 = relationship("KoboStatistics", uselist=False, backref="kobo_reading_state", cascade="all")
class CLASS_12(VAR_3):
__tablename__ = 'kobo_bookmark'
VAR_23 = Column(Integer, primary_key=True)
VAR_66 = Column(Integer, ForeignKey('kobo_reading_state.id'))
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
VAR_67 = Column(String)
VAR_68 = Column(String)
VAR_69 = Column(String)
VAR_70 = Column(Float)
VAR_71 = Column(Float)
class CLASS_13(VAR_3):
__tablename__ = 'kobo_statistics'
VAR_23 = Column(Integer, primary_key=True)
VAR_66 = Column(Integer, ForeignKey('kobo_reading_state.id'))
VAR_50 = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
VAR_72 = Column(Integer)
VAR_73 = Column(Integer)
@event.listens_for(VAR_83, 'before_flush')
def FUNC_5(VAR_1, VAR_10, VAR_11):
for change in itertools.chain(VAR_1.new, VAR_1.dirty):
if isinstance(change, (CLASS_8, CLASS_13, CLASS_12)):
if change.kobo_reading_state:
change.kobo_reading_state.last_modified = datetime.datetime.utcnow()
for change in itertools.chain(VAR_1.new, VAR_1.deleted):
if isinstance(change, CLASS_6):
change.ub_shelf.last_modified = datetime.datetime.utcnow()
class CLASS_14(VAR_3):
__tablename__ = 'downloads'
VAR_23 = Column(Integer, primary_key=True)
VAR_13 = Column(Integer)
VAR_7 = Column(Integer, ForeignKey('user.id'))
def __repr__(self):
return '<Download %r' % self.book_id
class CLASS_15(VAR_3):
__tablename__ = 'registration'
VAR_23 = Column(Integer, primary_key=True)
VAR_74 = Column(String)
VAR_75 = Column(Integer)
def __repr__(self):
return u"<CLASS_15('{0}')>".format(self.domain)
class CLASS_16(VAR_3):
__tablename__ = 'remote_auth_token'
VAR_23 = Column(Integer, primary_key=True)
VAR_76 = Column(String, unique=True)
VAR_7 = Column(Integer, ForeignKey('user.id'))
VAR_77 = Column(Boolean, default=False)
VAR_78 = Column(DateTime)
VAR_79 = Column(Integer, default=0)
def __init__(self):
self.auth_token = (hexlify(os.urandom(4))).decode('utf-8')
self.expiration = datetime.datetime.now() + datetime.timedelta(minutes=10) # 10 min from VAR_81
def __repr__(self):
return '<Token %r>' % self.id
def FUNC_6(VAR_12, VAR_1):
if not VAR_12.dialect.has_table(VAR_12.connect(), "book_read_link"):
CLASS_8.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "bookmark"):
CLASS_9.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "kobo_reading_state"):
CLASS_11.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "kobo_bookmark"):
CLASS_12.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "kobo_statistics"):
CLASS_13.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "archived_book"):
CLASS_10.__table__.create(bind=VAR_12)
if not VAR_12.dialect.has_table(VAR_12.connect(), "registration"):
CLASS_15.__table__.create(bind=VAR_12)
with VAR_12.connect() as conn:
conn.execute("insert into registration (VAR_74, VAR_75) values('%.%',1)")
VAR_1.commit()
def FUNC_7(VAR_12, VAR_1):
try:
VAR_1.query(exists().where(CLASS_15.allow)).scalar()
VAR_1.commit()
except exc.OperationalError: # Database is not compatible, some columns are missing
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE registration ADD column 'allow' INTEGER")
conn.execute("update registration set 'allow' = 1")
VAR_1.commit()
try:
VAR_88 = VAR_1.query(CLASS_15).count()
if not VAR_88:
with VAR_12.connect() as conn:
conn.execute("insert into registration (VAR_74, VAR_75) values('%.%',1)")
VAR_1.commit()
except exc.OperationalError: # Database is not writeable
print('Settings database is not writeable. Exiting...')
sys.exit(2)
def FUNC_8(VAR_12):
try:
with VAR_12.connect() as conn:
VAR_93 = conn.begin()
conn.execute(text("UPDATE VAR_6 SET VAR_27='' where VAR_24 = 'Guest' and VAR_27 !=''"))
VAR_93.commit()
except exc.OperationalError:
print('Settings database is not writeable. Exiting...')
sys.exit(2)
def FUNC_9(VAR_12, VAR_1):
try:
VAR_1.query(exists().where(CLASS_5.uuid)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_29 ADD column 'uuid' STRING")
conn.execute("ALTER TABLE VAR_29 ADD column 'created' DATETIME")
conn.execute("ALTER TABLE VAR_29 ADD column 'last_modified' DATETIME")
conn.execute("ALTER TABLE book_shelf_link ADD column 'date_added' DATETIME")
conn.execute("ALTER TABLE VAR_29 ADD column 'kobo_sync' BOOLEAN DEFAULT false")
for VAR_29 in VAR_1.query(CLASS_5).all():
VAR_29.uuid = str(VAR_45.uuid4())
VAR_29.created = datetime.datetime.now()
VAR_29.last_modified = datetime.datetime.now()
for book_shelf in VAR_1.query(CLASS_6).all():
book_shelf.date_added = datetime.datetime.now()
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_5.kobo_sync)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_29 ADD column 'kobo_sync' BOOLEAN DEFAULT false")
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_6.order)).scalar()
except exc.OperationalError: # Database is not compatible, some columns are missing
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE book_shelf_link ADD column 'order' INTEGER DEFAULT 1")
VAR_1.commit()
def FUNC_10(VAR_12, VAR_1):
try:
VAR_1.query(exists().where(CLASS_8.read_status)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE book_read_link ADD column 'read_status' INTEGER DEFAULT 0")
conn.execute("UPDATE book_read_link SET 'read_status' = 1 WHERE is_read")
conn.execute("ALTER TABLE book_read_link ADD column 'last_modified' DATETIME")
conn.execute("ALTER TABLE book_read_link ADD column 'last_time_started_reading' DATETIME")
conn.execute("ALTER TABLE book_read_link ADD column 'times_started_reading' INTEGER DEFAULT 0")
VAR_1.commit()
VAR_80 = VAR_1.query(CLASS_8).filter(CLASS_8.last_modified == None).all()
for book in VAR_80:
book.last_modified = datetime.datetime.utcnow()
VAR_1.commit()
def FUNC_11(VAR_12, VAR_1):
try:
VAR_1.query(exists().where(CLASS_16.token_type)).scalar()
VAR_1.commit()
except exc.OperationalError: # Database is not compatible, some columns are missing
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_38 ADD column 'token_type' INTEGER DEFAULT 0")
conn.execute("update VAR_38 set 'token_type' = 0")
VAR_1.commit()
def FUNC_12(VAR_1):
VAR_12 = VAR_1.bind
FUNC_6(VAR_12, VAR_1)
FUNC_7(VAR_12, VAR_1)
FUNC_10(VAR_12, VAR_1)
FUNC_11(VAR_12, VAR_1)
FUNC_9(VAR_12, VAR_1)
try:
VAR_89 = False
VAR_1.query(exists().where(CLASS_1.sidebar_view)).scalar()
except exc.OperationalError: # Database is not compatible, some columns are missing
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_32` Integer DEFAULT 1")
VAR_1.commit()
VAR_89 = True
try:
if VAR_89:
with VAR_12.connect() as conn:
conn.execute("SELECT language_books FROM user")
VAR_1.commit()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("UPDATE VAR_6 SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
"+ series_books * :side_series + category_books * :side_category + hot_books * "
":side_hot + :side_autor + :detail_random)",
{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE,
'side_series': constants.SIDEBAR_SERIES, 'side_category': constants.SIDEBAR_CATEGORY,
'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
'detail_random': constants.DETAIL_RANDOM})
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_1.denied_tags)).scalar()
except exc.OperationalError: # Database is not compatible, some columns are missing
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_34` String DEFAULT ''")
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_35` String DEFAULT ''")
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_36` String DEFAULT ''")
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_37` String DEFAULT ''")
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_1.view_settings)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_39` VARCHAR(10) DEFAULT '{}'")
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_1.kobo_only_shelves_sync)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute("ALTER TABLE VAR_6 ADD column `VAR_40` SMALLINT DEFAULT 0")
VAR_1.commit()
try:
VAR_1.query(exists().where(CLASS_1.name)).scalar()
except exc.OperationalError:
with VAR_12.connect() as conn:
conn.execute(text("CREATE TABLE VAR_7 (VAR_23 INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
"name VARCHAR(64),"
"email VARCHAR(120),"
"role SMALLINT,"
"password VARCHAR,"
"kindle_mail VARCHAR(120),"
"locale VARCHAR(2),"
"sidebar_view INTEGER,"
"default_language VARCHAR(3),"
"denied_tags VARCHAR,"
"allowed_tags VARCHAR,"
"denied_column_value VARCHAR,"
"allowed_column_value VARCHAR,"
"view_settings JSON,"
"kobo_only_shelves_sync SMALLINT,"
"UNIQUE (VAR_24),"
"UNIQUE (VAR_25))"))
conn.execute(text("INSERT INTO VAR_7(VAR_23, VAR_24, VAR_25, VAR_26, VAR_27, VAR_28,VAR_31,"
"sidebar_view, VAR_33, VAR_34, VAR_35, VAR_36, "
"allowed_column_value, VAR_39, VAR_40)"
"SELECT VAR_23, nickname, VAR_25, VAR_26, VAR_27, VAR_28, VAR_31,"
"sidebar_view, VAR_33, VAR_34, VAR_35, VAR_36, "
"allowed_column_value, VAR_39, VAR_40 FROM user"))
conn.execute(text("DROP TABLE user"))
conn.execute(text("ALTER TABLE VAR_7 RENAME TO user"))
VAR_1.commit()
if VAR_1.query(CLASS_1).filter(CLASS_1.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
is None:
FUNC_16(VAR_1)
FUNC_8(VAR_12)
def FUNC_13(VAR_1):
VAR_81 = datetime.datetime.now()
VAR_1.query(CLASS_16).filter(VAR_81 > CLASS_16.expiration).\
filter(CLASS_16.token_type != 1).delete()
VAR_1.commit()
def FUNC_14(VAR_13, VAR_7):
VAR_82 = VAR_1.query(CLASS_14).filter(CLASS_14.user_id == VAR_7).filter(CLASS_14.book_id == VAR_13).first()
if not VAR_82:
VAR_90 = CLASS_14(VAR_7=user_id, VAR_13=book_id)
VAR_1.add(VAR_90)
try:
VAR_1.commit()
except exc.OperationalError:
VAR_1.rollback()
def FUNC_15(VAR_13):
VAR_1.query(CLASS_14).filter(VAR_13 == CLASS_14.book_id).delete()
try:
VAR_1.commit()
except exc.OperationalError:
VAR_1.rollback()
def FUNC_16(VAR_1):
VAR_6 = CLASS_1()
VAR_6.name = "Guest"
VAR_6.email = 'no@email'
VAR_6.role = constants.ROLE_ANONYMOUS
VAR_6.password = ''
VAR_1.add(VAR_6)
try:
VAR_1.commit()
except Exception:
VAR_1.rollback()
def FUNC_17(VAR_1):
VAR_6 = CLASS_1()
VAR_6.name = "admin"
VAR_6.role = constants.ADMIN_USER_ROLES
VAR_6.sidebar_view = constants.ADMIN_USER_SIDEBAR
VAR_6.password = generate_password_hash(constants.DEFAULT_PASSWORD)
VAR_1.add(VAR_6)
try:
VAR_1.commit()
except Exception:
VAR_1.rollback()
def FUNC_18(VAR_14):
global VAR_1
global VAR_2
app_DB_path = VAR_14
VAR_12 = create_engine(u'sqlite:///{0}'.format(VAR_14), echo=False)
VAR_83 = scoped_session(sessionmaker())
VAR_83.configure(bind=VAR_12)
VAR_1 = VAR_83()
if os.path.exists(VAR_14):
VAR_3.metadata.create_all(VAR_12)
FUNC_12(VAR_1)
FUNC_13(VAR_1)
else:
VAR_3.metadata.create_all(VAR_12)
FUNC_17(VAR_1)
FUNC_16(VAR_1)
if cli.user_credentials:
VAR_91, VAR_27 = cli.user_credentials.split(':', 1)
VAR_6 = VAR_1.query(CLASS_1).filter(func.lower(CLASS_1.name) == VAR_91.lower()).first()
if VAR_6:
if not VAR_27:
print("Empty VAR_27 is not allowed")
sys.exit(4)
VAR_6.password = generate_password_hash(VAR_27)
if FUNC_20() == "":
print("Password for VAR_6 '{}' changed".format(VAR_91))
sys.exit(0)
else:
print("Failed changing password")
sys.exit(3)
else:
print("Username '{}' not valid, can't change password".format(VAR_91))
sys.exit(3)
def FUNC_19():
global VAR_1
VAR_84 = VAR_1
VAR_1 = None
if VAR_84:
try:
VAR_84.close()
except Exception:
pass
if VAR_84.bind:
try:
VAR_84.bind.dispose()
except Exception:
pass
def FUNC_20(VAR_15=None):
try:
VAR_1.commit()
if VAR_15:
VAR_0.info(VAR_15)
except (exc.OperationalError, exc.InvalidRequestError) as e:
VAR_1.rollback()
VAR_0.debug_or_exception(e)
return ""
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
28,
32,
37,
49,
55,
57,
59,
64,
65,
68,
84,
94,
95,
103,
104,
106,
112,
113,
115,
119,
122,
125,
128,
131,
134,
137,
140,
143,
146,
149,
153,
157,
160,
163,
168,
171,
175,
179,
183,
187,
192,
205,
206,
209,
210,
211,
212,
216,
235,
236,
242,
243,
246,
252,
253,
254,
255,
259,
276,
277,
280,
284,
288,
292,
299,
306,
309,
313,
317,
318,
319,
322,
332,
335,
336,
337,
340,
346,
349,
350,
351,
354,
359,
360,
363,
367,
381,
382,
385,
391,
392,
393,
396,
402,
403,
404,
405,
406,
407,
410,
418,
419,
422,
431,
432,
435,
441,
442,
443,
450,
454,
455,
456,
459,
463,
466,
467,
468,
471,
475,
478,
479,
482,
489,
493,
496,
497,
498,
517,
518,
519,
530,
539,
540,
541,
551,
552,
570,
575,
578,
585,
586,
602,
603,
613,
614,
615,
616,
668,
670,
673,
698,
705,
707,
708,
710,
715,
716,
717,
720,
728,
729,
730,
737,
738,
745,
751,
752,
753,
759,
761,
767,
768,
770,
773,
776,
780,
789,
807,
808,
811,
824,
834
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
28,
32,
37,
49,
55,
57,
59,
64,
65,
68,
84,
94,
95,
103,
104,
106,
112,
113,
115,
119,
122,
125,
128,
131,
134,
137,
140,
143,
146,
149,
153,
157,
160,
163,
168,
171,
175,
179,
183,
187,
192,
205,
206,
209,
210,
211,
212,
216,
235,
236,
242,
243,
246,
252,
253,
254,
255,
259,
276,
277,
280,
284,
288,
292,
299,
306,
309,
313,
317,
318,
319,
322,
332,
335,
336,
337,
340,
346,
349,
350,
351,
354,
359,
360,
363,
367,
381,
382,
385,
391,
392,
393,
396,
402,
403,
404,
405,
406,
407,
410,
418,
419,
422,
431,
432,
435,
441,
442,
443,
450,
454,
455,
456,
459,
463,
466,
467,
468,
471,
475,
478,
479,
482,
489,
493,
496,
497,
498,
517,
518,
519,
530,
539,
540,
541,
551,
552,
570,
575,
578,
585,
586,
602,
603,
613,
614,
615,
616,
668,
670,
673,
698,
705,
707,
708,
710,
715,
716,
717,
720,
728,
729,
730,
737,
738,
745,
751,
752,
753,
759,
761,
767,
768,
770,
773,
776,
780,
789,
807,
808,
811,
824,
834
] |
0CWE-22
| import os
from django.core.files.base import ContentFile
from django.core.files.uploadedfile import SimpleUploadedFile
from s3file.middleware import S3FileMiddleware
from s3file.storages import storage
class TestS3FileMiddleware:
def test_get_files_from_storage(self):
content = b"test_get_files_from_storage"
name = storage.save(
"tmp/s3file/test_get_files_from_storage", ContentFile(content)
)
files = S3FileMiddleware.get_files_from_storage(
[os.path.join(storage.aws_location, name)]
)
file = next(files)
assert file.read() == content
def test_process_request(self, rf):
uploaded_file = SimpleUploadedFile("uploaded_file.txt", b"uploaded")
request = rf.post("/", data={"file": uploaded_file})
S3FileMiddleware(lambda x: None)(request)
assert request.FILES.getlist("file")
assert request.FILES.get("file").read() == b"uploaded"
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
request = rf.post(
"/",
data={
"file": "custom/location/tmp/s3file/s3_file.txt",
"s3file": "file",
},
)
S3FileMiddleware(lambda x: None)(request)
assert request.FILES.getlist("file")
assert request.FILES.get("file").read() == b"s3file"
def test_process_request__multiple_files(self, rf):
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
storage.save("tmp/s3file/s3_other_file.txt", ContentFile(b"other s3file"))
request = rf.post(
"/",
data={
"file": [
"custom/location/tmp/s3file/s3_file.txt",
"custom/location/tmp/s3file/s3_other_file.txt",
],
"s3file": ["file", "other_file"],
},
)
S3FileMiddleware(lambda x: None)(request)
files = request.FILES.getlist("file")
assert files[0].read() == b"s3file"
assert files[1].read() == b"other s3file"
def test_process_request__no_location(self, rf, settings):
settings.AWS_LOCATION = ""
uploaded_file = SimpleUploadedFile("uploaded_file.txt", b"uploaded")
request = rf.post("/", data={"file": uploaded_file})
S3FileMiddleware(lambda x: None)(request)
assert request.FILES.getlist("file")
assert request.FILES.get("file").read() == b"uploaded"
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
request = rf.post(
"/", data={"file": "tmp/s3file/s3_file.txt", "s3file": "file"}
)
S3FileMiddleware(lambda x: None)(request)
assert request.FILES.getlist("file")
assert request.FILES.get("file").read() == b"s3file"
def test_process_request__no_file(self, rf, caplog):
request = rf.post("/", data={"file": "does_not_exist.txt", "s3file": "file"})
S3FileMiddleware(lambda x: None)(request)
assert not request.FILES.getlist("file")
assert "File not found: does_not_exist.txt" in caplog.text
| import os
import pytest
from django.core.exceptions import PermissionDenied, SuspiciousFileOperation
from django.core.files.base import ContentFile
from django.core.files.uploadedfile import SimpleUploadedFile
from s3file.middleware import S3FileMiddleware
from s3file.storages import storage
class TestS3FileMiddleware:
def test_get_files_from_storage(self, freeze_upload_folder):
content = b"test_get_files_from_storage"
name = storage.save(
"tmp/s3file/test_get_files_from_storage", ContentFile(content)
)
files = S3FileMiddleware.get_files_from_storage(
[os.path.join(storage.aws_location, name)],
"tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
)
file = next(files)
assert file.read() == content
def test_process_request(self, freeze_upload_folder, rf):
uploaded_file = SimpleUploadedFile("uploaded_file.txt", b"uploaded")
request = rf.post("/", data={"file": uploaded_file})
S3FileMiddleware(lambda x: None)(request)
assert request.FILES.getlist("file")
assert request.FILES.get("file").read() == b"uploaded"
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
request = rf.post(
"/",
data={
"file": "custom/location/tmp/s3file/s3_file.txt",
"s3file": "file",
"file-s3f-signature": "tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
},
)
S3FileMiddleware(lambda x: None)(request)
assert request.FILES.getlist("file")
assert request.FILES.get("file").read() == b"s3file"
def test_process_request__location_escape(self, freeze_upload_folder, rf):
storage.save("secrets/passwords.txt", ContentFile(b"keep this secret"))
request = rf.post(
"/",
data={
"file": "custom/location/secrets/passwords.txt",
"s3file": "file",
"file-s3f-signature": "tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
},
)
with pytest.raises(PermissionDenied) as e:
S3FileMiddleware(lambda x: None)(request)
assert "Illegal signature!" in str(e.value)
def test_process_request__multiple_files(self, freeze_upload_folder, rf):
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
storage.save("tmp/s3file/s3_other_file.txt", ContentFile(b"other s3file"))
request = rf.post(
"/",
data={
"file": [
"custom/location/tmp/s3file/s3_file.txt",
"custom/location/tmp/s3file/s3_other_file.txt",
],
"file-s3f-signature": "tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
"other_file-s3f-signature": "tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
"s3file": ["file", "other_file"],
},
)
S3FileMiddleware(lambda x: None)(request)
files = request.FILES.getlist("file")
assert files[0].read() == b"s3file"
assert files[1].read() == b"other s3file"
def test_process_request__no_location(self, freeze_upload_folder, rf, settings):
settings.AWS_LOCATION = ""
uploaded_file = SimpleUploadedFile("uploaded_file.txt", b"uploaded")
request = rf.post("/", data={"file": uploaded_file})
S3FileMiddleware(lambda x: None)(request)
assert request.FILES.getlist("file")
assert request.FILES.get("file").read() == b"uploaded"
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
request = rf.post(
"/",
data={
"file": f"tmp/s3file/s3_file.txt",
"s3file": "file",
"file-s3f-signature": "scjzm3N8njBQIVSGEhOchtM0TkGyb2U6OXGLVlRUZhY",
},
)
S3FileMiddleware(lambda x: None)(request)
assert request.FILES.getlist("file")
assert request.FILES.get("file").read() == b"s3file"
def test_process_request__no_file(self, freeze_upload_folder, rf, caplog):
request = rf.post(
"/",
data={
"file": "custom/location/tmp/s3file/does_not_exist.txt",
"s3file": "file",
"file-s3f-signature": "tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
},
)
S3FileMiddleware(lambda x: None)(request)
assert not request.FILES.getlist("file")
assert (
"File not found: custom/location/tmp/s3file/does_not_exist.txt"
in caplog.text
)
def test_process_request__no_signature(self, rf, caplog):
request = rf.post(
"/", data={"file": "tmp/s3file/does_not_exist.txt", "s3file": "file"}
)
with pytest.raises(PermissionDenied) as e:
S3FileMiddleware(lambda x: None)(request)
def test_process_request__wrong_signature(self, rf, caplog):
request = rf.post(
"/",
data={
"file": "tmp/s3file/does_not_exist.txt",
"s3file": "file",
"file-s3f-signature": "fake",
},
)
with pytest.raises(PermissionDenied) as e:
S3FileMiddleware(lambda x: None)(request)
| path_disclosure | {
"code": [
" def test_get_files_from_storage(self):",
" [os.path.join(storage.aws_location, name)]",
" def test_process_request(self, rf):",
" def test_process_request__multiple_files(self, rf):",
" def test_process_request__no_location(self, rf, settings):",
" \"/\", data={\"file\": \"tmp/s3file/s3_file.txt\", \"s3file\": \"file\"}",
" def test_process_request__no_file(self, rf, caplog):",
" request = rf.post(\"/\", data={\"file\": \"does_not_exist.txt\", \"s3file\": \"file\"})",
" assert \"File not found: does_not_exist.txt\" in caplog.text"
],
"line_no": [
11,
17,
22,
41,
59,
69,
75,
76,
79
]
} | {
"code": [
"import pytest",
"from django.core.exceptions import PermissionDenied, SuspiciousFileOperation",
" def test_get_files_from_storage(self, freeze_upload_folder):",
" [os.path.join(storage.aws_location, name)],",
" \"tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc\",",
" def test_process_request(self, freeze_upload_folder, rf):",
" \"file-s3f-signature\": \"tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc\",",
" def test_process_request__location_escape(self, freeze_upload_folder, rf):",
" storage.save(\"secrets/passwords.txt\", ContentFile(b\"keep this secret\"))",
" request = rf.post(",
" \"/\",",
" data={",
" \"file\": \"custom/location/secrets/passwords.txt\",",
" \"s3file\": \"file\",",
" \"file-s3f-signature\": \"tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc\",",
" },",
" )",
" with pytest.raises(PermissionDenied) as e:",
" S3FileMiddleware(lambda x: None)(request)",
" assert \"Illegal signature!\" in str(e.value)",
" def test_process_request__multiple_files(self, freeze_upload_folder, rf):",
" \"file-s3f-signature\": \"tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc\",",
" \"other_file-s3f-signature\": \"tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc\",",
" def test_process_request__no_location(self, freeze_upload_folder, rf, settings):",
" \"/\",",
" data={",
" \"file\": f\"tmp/s3file/s3_file.txt\",",
" \"s3file\": \"file\",",
" \"file-s3f-signature\": \"scjzm3N8njBQIVSGEhOchtM0TkGyb2U6OXGLVlRUZhY\",",
" },",
" def test_process_request__no_file(self, freeze_upload_folder, rf, caplog):",
" request = rf.post(",
" \"/\",",
" data={",
" \"file\": \"custom/location/tmp/s3file/does_not_exist.txt\",",
" \"s3file\": \"file\",",
" \"file-s3f-signature\": \"tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc\",",
" },",
" )",
" assert (",
" \"File not found: custom/location/tmp/s3file/does_not_exist.txt\"",
" in caplog.text",
" )",
" def test_process_request__no_signature(self, rf, caplog):",
" request = rf.post(",
" \"/\", data={\"file\": \"tmp/s3file/does_not_exist.txt\", \"s3file\": \"file\"}",
" )",
" with pytest.raises(PermissionDenied) as e:",
" S3FileMiddleware(lambda x: None)(request)",
" def test_process_request__wrong_signature(self, rf, caplog):",
" request = rf.post(",
" \"/\",",
" data={",
" \"file\": \"tmp/s3file/does_not_exist.txt\",",
" \"s3file\": \"file\",",
" \"file-s3f-signature\": \"fake\",",
" },",
" )",
" with pytest.raises(PermissionDenied) as e:",
" S3FileMiddleware(lambda x: None)(request)"
],
"line_no": [
3,
4,
13,
19,
20,
25,
38,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
59,
69,
70,
79,
89,
90,
91,
92,
93,
94,
100,
101,
102,
103,
104,
105,
106,
107,
108,
111,
112,
113,
114,
116,
117,
118,
119,
120,
121,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133
]
} | import os
from django.core.files.base import ContentFile
from django.core.files.uploadedfile import SimpleUploadedFile
from s3file.middleware import S3FileMiddleware
from s3file.storages import storage
class CLASS_0:
def FUNC_0(self):
VAR_3 = b"test_get_files_from_storage"
VAR_4 = storage.save(
"tmp/s3file/test_get_files_from_storage", ContentFile(VAR_3)
)
VAR_5 = S3FileMiddleware.get_files_from_storage(
[os.path.join(storage.aws_location, VAR_4)]
)
VAR_6 = next(VAR_5)
assert VAR_6.read() == VAR_3
def FUNC_1(self, VAR_0):
VAR_7 = SimpleUploadedFile("uploaded_file.txt", b"uploaded")
VAR_8 = VAR_0.post("/", data={"file": VAR_7})
S3FileMiddleware(lambda x: None)(VAR_8)
assert VAR_8.FILES.getlist("file")
assert VAR_8.FILES.get("file").read() == b"uploaded"
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
VAR_8 = VAR_0.post(
"/",
data={
"file": "custom/location/tmp/s3file/s3_file.txt",
"s3file": "file",
},
)
S3FileMiddleware(lambda x: None)(VAR_8)
assert VAR_8.FILES.getlist("file")
assert VAR_8.FILES.get("file").read() == b"s3file"
def FUNC_2(self, VAR_0):
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
storage.save("tmp/s3file/s3_other_file.txt", ContentFile(b"other s3file"))
VAR_8 = VAR_0.post(
"/",
data={
"file": [
"custom/location/tmp/s3file/s3_file.txt",
"custom/location/tmp/s3file/s3_other_file.txt",
],
"s3file": ["file", "other_file"],
},
)
S3FileMiddleware(lambda x: None)(VAR_8)
VAR_5 = VAR_8.FILES.getlist("file")
assert VAR_5[0].read() == b"s3file"
assert VAR_5[1].read() == b"other s3file"
def FUNC_3(self, VAR_0, VAR_1):
settings.AWS_LOCATION = ""
VAR_7 = SimpleUploadedFile("uploaded_file.txt", b"uploaded")
VAR_8 = VAR_0.post("/", data={"file": VAR_7})
S3FileMiddleware(lambda x: None)(VAR_8)
assert VAR_8.FILES.getlist("file")
assert VAR_8.FILES.get("file").read() == b"uploaded"
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
VAR_8 = VAR_0.post(
"/", data={"file": "tmp/s3file/s3_file.txt", "s3file": "file"}
)
S3FileMiddleware(lambda x: None)(VAR_8)
assert VAR_8.FILES.getlist("file")
assert VAR_8.FILES.get("file").read() == b"s3file"
def FUNC_4(self, VAR_0, VAR_2):
VAR_8 = VAR_0.post("/", data={"file": "does_not_exist.txt", "s3file": "file"})
S3FileMiddleware(lambda x: None)(VAR_8)
assert not VAR_8.FILES.getlist("file")
assert "File not found: does_not_exist.txt" in VAR_2.text
| import os
import pytest
from django.core.exceptions import PermissionDenied, SuspiciousFileOperation
from django.core.files.base import ContentFile
from django.core.files.uploadedfile import SimpleUploadedFile
from s3file.middleware import S3FileMiddleware
from s3file.storages import storage
class CLASS_0:
def FUNC_0(self, VAR_0):
VAR_4 = b"test_get_files_from_storage"
VAR_5 = storage.save(
"tmp/s3file/test_get_files_from_storage", ContentFile(VAR_4)
)
VAR_6 = S3FileMiddleware.get_files_from_storage(
[os.path.join(storage.aws_location, VAR_5)],
"tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
)
VAR_7 = next(VAR_6)
assert VAR_7.read() == VAR_4
def FUNC_1(self, VAR_0, VAR_1):
VAR_8 = SimpleUploadedFile("uploaded_file.txt", b"uploaded")
VAR_9 = VAR_1.post("/", data={"file": VAR_8})
S3FileMiddleware(lambda x: None)(VAR_9)
assert VAR_9.FILES.getlist("file")
assert VAR_9.FILES.get("file").read() == b"uploaded"
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
VAR_9 = VAR_1.post(
"/",
data={
"file": "custom/location/tmp/s3file/s3_file.txt",
"s3file": "file",
"file-s3f-signature": "tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
},
)
S3FileMiddleware(lambda x: None)(VAR_9)
assert VAR_9.FILES.getlist("file")
assert VAR_9.FILES.get("file").read() == b"s3file"
def FUNC_2(self, VAR_0, VAR_1):
storage.save("secrets/passwords.txt", ContentFile(b"keep this secret"))
VAR_9 = VAR_1.post(
"/",
data={
"file": "custom/location/secrets/passwords.txt",
"s3file": "file",
"file-s3f-signature": "tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
},
)
with pytest.raises(PermissionDenied) as e:
S3FileMiddleware(lambda x: None)(VAR_9)
assert "Illegal signature!" in str(e.value)
def FUNC_3(self, VAR_0, VAR_1):
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
storage.save("tmp/s3file/s3_other_file.txt", ContentFile(b"other s3file"))
VAR_9 = VAR_1.post(
"/",
data={
"file": [
"custom/location/tmp/s3file/s3_file.txt",
"custom/location/tmp/s3file/s3_other_file.txt",
],
"file-s3f-signature": "tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
"other_file-s3f-signature": "tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
"s3file": ["file", "other_file"],
},
)
S3FileMiddleware(lambda x: None)(VAR_9)
VAR_6 = VAR_9.FILES.getlist("file")
assert VAR_6[0].read() == b"s3file"
assert VAR_6[1].read() == b"other s3file"
def FUNC_4(self, VAR_0, VAR_1, VAR_2):
settings.AWS_LOCATION = ""
VAR_8 = SimpleUploadedFile("uploaded_file.txt", b"uploaded")
VAR_9 = VAR_1.post("/", data={"file": VAR_8})
S3FileMiddleware(lambda x: None)(VAR_9)
assert VAR_9.FILES.getlist("file")
assert VAR_9.FILES.get("file").read() == b"uploaded"
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
VAR_9 = VAR_1.post(
"/",
data={
"file": f"tmp/s3file/s3_file.txt",
"s3file": "file",
"file-s3f-signature": "scjzm3N8njBQIVSGEhOchtM0TkGyb2U6OXGLVlRUZhY",
},
)
S3FileMiddleware(lambda x: None)(VAR_9)
assert VAR_9.FILES.getlist("file")
assert VAR_9.FILES.get("file").read() == b"s3file"
def FUNC_5(self, VAR_0, VAR_1, VAR_3):
VAR_9 = VAR_1.post(
"/",
data={
"file": "custom/location/tmp/s3file/does_not_exist.txt",
"s3file": "file",
"file-s3f-signature": "tFV9nGZlq9WX1I5Sotit18z1f4C_3lPnj33_zo4LZRc",
},
)
S3FileMiddleware(lambda x: None)(VAR_9)
assert not VAR_9.FILES.getlist("file")
assert (
"File not found: custom/location/tmp/s3file/does_not_exist.txt"
in VAR_3.text
)
def FUNC_6(self, VAR_1, VAR_3):
VAR_9 = VAR_1.post(
"/", data={"file": "tmp/s3file/does_not_exist.txt", "s3file": "file"}
)
with pytest.raises(PermissionDenied) as e:
S3FileMiddleware(lambda x: None)(VAR_9)
def FUNC_7(self, VAR_1, VAR_3):
VAR_9 = VAR_1.post(
"/",
data={
"file": "tmp/s3file/does_not_exist.txt",
"s3file": "file",
"file-s3f-signature": "fake",
},
)
with pytest.raises(PermissionDenied) as e:
S3FileMiddleware(lambda x: None)(VAR_9)
| [
2,
5,
8,
9,
21,
28,
40,
58,
66,
74,
80
] | [
2,
7,
10,
11,
24,
31,
44,
58,
78,
86,
99,
115,
122,
134
] |
0CWE-22
| import configparser
from flask import Flask, render_template, session, request, flash, redirect, url_for, Response, abort, jsonify, send_file
import socket
import os
import random
import copy
from flask_sqlalchemy import SQLAlchemy, Model
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import json
import base64
from collections import namedtuple
from app.common.decorator import return_500_if_errors
scope = ['https://spreadsheets.google.com/feeds',
'https://www.googleapis.com/auth/drive']
hostname = socket.gethostname()
isLocal = None
munhak_rows_data = None
if hostname[:7] == "DESKTOP":
isLocal = True
else:
isLocal = False
app = Flask(__name__)
def update():
gc = gspread.authorize(credentials).open("문학따먹기")
wks = gc.get_worksheet(0)
rows = wks.get_all_values()
print(rows)
try:
data = []
for row in rows[1:]:
row_tuple = namedtuple("Munhak", rows[0])(*row)
row_tuple = row_tuple._replace(keywords=json.loads(row_tuple.keywords))
if row_tuple.is_available == "TRUE":
data.append(row_tuple)
except:
pass
global munhak_rows_data
munhak_rows_data = data
print(data)
# print(munhak_rows)
return
if isLocal:
config = configparser.ConfigParser()
config.read('config.ini')
pg_db_username = config['DEFAULT']['LOCAL_DB_USERNAME']
pg_db_password = config['DEFAULT']['LOCAL_DB_PASSWORD']
pg_db_name = config['DEFAULT']['LOCAL_DB_NAME']
pg_db_hostname = config['DEFAULT']['LOCAL_DB_HOSTNAME']
app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql://{DB_USER}:{DB_PASS}@{DB_ADDR}/{DB_NAME}".format(
DB_USER=pg_db_username,
DB_PASS=pg_db_password,
DB_ADDR=pg_db_hostname,
DB_NAME=pg_db_name)
app.config["SECRET_KEY"] = config['DEFAULT']['SECRET_KEY']
credentials = ServiceAccountCredentials.from_json_keyfile_name(config['DEFAULT']['GOOGLE_CREDENTIALS_PATH'], scope)
else:
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get('DATABASE_URL', None)
app.config["SECRET_KEY"] = os.environ.get('SECRET_KEY', None)
print(os.environ.get('GOOGLE_CREDENTIALS', None))
print(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)))
credentials = ServiceAccountCredentials.from_json_keyfile_dict(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)), scope)
update()
@app.route('/')
def index():
munhak_rows = copy.deepcopy(munhak_rows_data)
data = {
"total_munhak" : len(munhak_rows),
"source_list" : sorted(set([munhak_row.source for munhak_row in munhak_rows]))
}
print(data)
session["quiz_count"] = 0
return render_template("quiz/index.html", data=data)
@app.route("/get-quiz")
def get_quiz():
if "quiz_count" not in session:
session["quiz_count"] = 0
session["total_munhak"] = len(munhak_rows_data)
if "solved_quiz" not in session:
session["solved_quiz"] = []
session["result"] = None
quiz_no = session["quiz_count"] + 1
solved_quiz = session["solved_quiz"]
if "current_munhak" not in session or session["current_munhak"] is None:
# munhak_rows = Munhak.query.filter_by(is_available=True).all()
munhak_rows = copy.deepcopy(munhak_rows_data)
not_solved_munhak_rows = [munhak_row for munhak_row in munhak_rows if munhak_row.munhak_seq not in solved_quiz]
if len(not_solved_munhak_rows) == 0:
session["result"] = True
return redirect(url_for("result"))
correct_munhak_row = random.choice(not_solved_munhak_rows)
for _ in [munhak_row for munhak_row in munhak_rows if munhak_row.title == correct_munhak_row.title]:
munhak_rows.remove(_)
random.shuffle(munhak_rows)
option_munhak_rows = munhak_rows[0:3] + [correct_munhak_row]
random.shuffle(option_munhak_rows)
correct = option_munhak_rows.index(correct_munhak_row)
print(correct)
# correct = random.randrange(0, 4)
#
# answer_row = not_solved_munhak_rows[correct]
#
session["correct"] = correct
hint = random.choice(correct_munhak_row.keywords)
hint = hint.replace("\\", "")
session["current_munhak"] = {
"munhak_seq": correct_munhak_row.munhak_seq,
"source": correct_munhak_row.source,
"category": correct_munhak_row.category,
"hint": hint,
"title": correct_munhak_row.title,
"writer": correct_munhak_row.writer
}
session["options"] = [munhak_row._asdict() for munhak_row in option_munhak_rows]
data = {
"quiz_no": quiz_no,
"type": "객관식",
"category": correct_munhak_row.category,
"hint": hint,
"options": [
f"{munhak_row.writer}, 『{munhak_row.title}』" for munhak_row in option_munhak_rows
],
"total_munhak": len(munhak_rows_data)
}
print(data)
#
return render_template("quiz/quiz.html", data=data)
else:
# print(hint)
data = {
"quiz_no": quiz_no,
"type": "객관식",
"category": session["current_munhak"]["category"],
"hint": session["current_munhak"]["hint"],
"options": [
f"{munhak_row['writer']}, 『{munhak_row['title']}』" for munhak_row in session["options"]
],
"total_munhak": len(munhak_rows_data)
}
print(data)
#
return render_template("quiz/quiz.html", data=data)
@app.route('/quiz')
def quiz():
return render_template("quiz/quiz_container.html")
@app.route("/answer", methods=["GET", "POST"])
def answer():
print(session)
option = request.form.get("option", None)
if option is None or (not type(option) != int):
return abort(400)
option = int(option)
correct = session["correct"]
if correct is None:
return abort(401)
current_munhak = session["current_munhak"]
if current_munhak is None:
return abort(401)
if correct == option:
session["quiz_count"] += 1
session["solved_quiz"].append(current_munhak["munhak_seq"])
session["current_munhak"] = None
# current_munhak = jsonify(current_munhak)
return "success"
else:
if "quiz_count" not in session:
session["quiz_count"] = 0
if "solved_quiz" not in session:
# session["solved_quiz"] = []
session["result"] = False
return "failed", 404
@app.route("/result", methods=["GET", "POST"])
def result():
is_success = session["result"]
data = {
"is_success" : is_success,
"solved_count" : session["quiz_count"],
"correct" : session["correct"],
"current_munhak" : session["current_munhak"]
}
session["quiz_count"] = 0
session["solved_quiz"] = []
session["current_munhak"] = None
print(data)
return render_template("quiz/result.html", data = data)
@app.route('/update')
def update_():
if request.args.get("key", None) != app.config["SECRET_KEY"]:
return "error"
update()
session.clear()
return f"success! {len(munhak_rows_data)}"
@app.route('/images/<path:path>')
def get_image(path):
def get_absolute_path(path):
import os
script_dir = os.path.dirname(__file__) # <-- absolute dir the script is in
rel_path = path
abs_file_path = os.path.join(script_dir, rel_path)
return abs_file_path
return send_file(
get_absolute_path(f"./images/{path}"),
mimetype='image/png',
attachment_filename='snapshot.png',
cache_timeout=0
)
if __name__ == '__main__':
app.run()
| import configparser
from flask import Flask, render_template, session, request, flash, redirect, url_for, Response, abort, jsonify, send_file
from werkzeug.utils import safe_join
import socket
import os
import random
import copy
from flask_sqlalchemy import SQLAlchemy, Model
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import json
import base64
from collections import namedtuple
from app.common.decorator import return_500_if_errors
scope = ['https://spreadsheets.google.com/feeds',
'https://www.googleapis.com/auth/drive']
hostname = socket.gethostname()
isLocal = None
munhak_rows_data = None
if hostname[:7] == "DESKTOP":
isLocal = True
else:
isLocal = False
app = Flask(__name__)
def update():
gc = gspread.authorize(credentials).open("문학따먹기")
wks = gc.get_worksheet(0)
rows = wks.get_all_values()
print(rows)
try:
data = []
for row in rows[1:]:
row_tuple = namedtuple("Munhak", rows[0])(*row)
row_tuple = row_tuple._replace(keywords=json.loads(row_tuple.keywords))
if row_tuple.is_available == "TRUE":
data.append(row_tuple)
except:
pass
global munhak_rows_data
munhak_rows_data = data
print(data)
# print(munhak_rows)
return
if isLocal:
config = configparser.ConfigParser()
config.read('config.ini')
pg_db_username = config['DEFAULT']['LOCAL_DB_USERNAME']
pg_db_password = config['DEFAULT']['LOCAL_DB_PASSWORD']
pg_db_name = config['DEFAULT']['LOCAL_DB_NAME']
pg_db_hostname = config['DEFAULT']['LOCAL_DB_HOSTNAME']
app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql://{DB_USER}:{DB_PASS}@{DB_ADDR}/{DB_NAME}".format(
DB_USER=pg_db_username,
DB_PASS=pg_db_password,
DB_ADDR=pg_db_hostname,
DB_NAME=pg_db_name)
app.config["SECRET_KEY"] = config['DEFAULT']['SECRET_KEY']
credentials = ServiceAccountCredentials.from_json_keyfile_name(config['DEFAULT']['GOOGLE_CREDENTIALS_PATH'], scope)
else:
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get('DATABASE_URL', None)
app.config["SECRET_KEY"] = os.environ.get('SECRET_KEY', None)
print(os.environ.get('GOOGLE_CREDENTIALS', None))
print(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)))
credentials = ServiceAccountCredentials.from_json_keyfile_dict(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)), scope)
update()
@app.route('/')
def index():
munhak_rows = copy.deepcopy(munhak_rows_data)
data = {
"total_munhak" : len(munhak_rows),
"source_list" : sorted(set([munhak_row.source for munhak_row in munhak_rows]))
}
print(data)
session["quiz_count"] = 0
return render_template("quiz/index.html", data=data)
@app.route("/get-quiz")
def get_quiz():
if "quiz_count" not in session:
session["quiz_count"] = 0
session["total_munhak"] = len(munhak_rows_data)
if "solved_quiz" not in session:
session["solved_quiz"] = []
session["result"] = None
quiz_no = session["quiz_count"] + 1
solved_quiz = session["solved_quiz"]
if "current_munhak" not in session or session["current_munhak"] is None:
# munhak_rows = Munhak.query.filter_by(is_available=True).all()
munhak_rows = copy.deepcopy(munhak_rows_data)
not_solved_munhak_rows = [munhak_row for munhak_row in munhak_rows if munhak_row.munhak_seq not in solved_quiz]
if len(not_solved_munhak_rows) == 0:
session["result"] = True
return redirect(url_for("result"))
correct_munhak_row = random.choice(not_solved_munhak_rows)
for _ in [munhak_row for munhak_row in munhak_rows if munhak_row.title == correct_munhak_row.title]:
munhak_rows.remove(_)
random.shuffle(munhak_rows)
option_munhak_rows = munhak_rows[0:3] + [correct_munhak_row]
random.shuffle(option_munhak_rows)
correct = option_munhak_rows.index(correct_munhak_row)
print(correct)
# correct = random.randrange(0, 4)
#
# answer_row = not_solved_munhak_rows[correct]
#
session["correct"] = correct
hint = random.choice(correct_munhak_row.keywords)
hint = hint.replace("\\", "")
session["current_munhak"] = {
"munhak_seq": correct_munhak_row.munhak_seq,
"source": correct_munhak_row.source,
"category": correct_munhak_row.category,
"hint": hint,
"title": correct_munhak_row.title,
"writer": correct_munhak_row.writer
}
session["options"] = [munhak_row._asdict() for munhak_row in option_munhak_rows]
data = {
"quiz_no": quiz_no,
"type": "객관식",
"category": correct_munhak_row.category,
"hint": hint,
"options": [
f"{munhak_row.writer}, 『{munhak_row.title}』" for munhak_row in option_munhak_rows
],
"total_munhak": len(munhak_rows_data)
}
print(data)
#
return render_template("quiz/quiz.html", data=data)
else:
# print(hint)
data = {
"quiz_no": quiz_no,
"type": "객관식",
"category": session["current_munhak"]["category"],
"hint": session["current_munhak"]["hint"],
"options": [
f"{munhak_row['writer']}, 『{munhak_row['title']}』" for munhak_row in session["options"]
],
"total_munhak": len(munhak_rows_data)
}
print(data)
#
return render_template("quiz/quiz.html", data=data)
@app.route('/quiz')
def quiz():
return render_template("quiz/quiz_container.html")
@app.route("/answer", methods=["GET", "POST"])
def answer():
print(session)
option = request.form.get("option", None)
if option is None or (not type(option) != int):
return abort(400)
option = int(option)
correct = session["correct"]
if correct is None:
return abort(401)
current_munhak = session["current_munhak"]
if current_munhak is None:
return abort(401)
if correct == option:
session["quiz_count"] += 1
session["solved_quiz"].append(current_munhak["munhak_seq"])
session["current_munhak"] = None
# current_munhak = jsonify(current_munhak)
return "success"
else:
if "quiz_count" not in session:
session["quiz_count"] = 0
if "solved_quiz" not in session:
# session["solved_quiz"] = []
session["result"] = False
return "failed", 404
@app.route("/result", methods=["GET", "POST"])
def result():
is_success = session["result"]
data = {
"is_success" : is_success,
"solved_count" : session["quiz_count"],
"correct" : session["correct"],
"current_munhak" : session["current_munhak"]
}
session["quiz_count"] = 0
session["solved_quiz"] = []
session["current_munhak"] = None
print(data)
return render_template("quiz/result.html", data = data)
@app.route('/update')
def update_():
if request.args.get("key", None) != app.config["SECRET_KEY"]:
return "error"
update()
session.clear()
return f"success! {len(munhak_rows_data)}"
@app.route('/images/<path:path>')
def get_image(path):
def get_absolute_path(path):
import os
script_dir = os.path.dirname(__file__) # <-- absolute dir the script is in
rel_path = path
abs_file_path = safe_join(script_dir, rel_path)
return abs_file_path
return send_file(
get_absolute_path(f"./images/{path}"),
mimetype='image/png',
attachment_filename='snapshot.png',
cache_timeout=0
)
if __name__ == '__main__':
app.run()
| path_disclosure | {
"code": [
" abs_file_path = os.path.join(script_dir, rel_path)"
],
"line_no": [
270
]
} | {
"code": [
"from werkzeug.utils import safe_join",
" abs_file_path = safe_join(script_dir, rel_path)"
],
"line_no": [
4,
271
]
} | import .configparser
from flask import Flask, render_template, VAR_17, request, flash, redirect, url_for, Response, abort, jsonify, send_file
import socket
import os
import random
import copy
from flask_sqlalchemy import SQLAlchemy, Model
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import json
import base64
from collections import namedtuple
from VAR_4.common.decorator import return_500_if_errors
VAR_0 = ['https://spreadsheets.google.com/feeds',
'https://www.googleapis.com/auth/drive']
VAR_1 = socket.gethostname()
VAR_2 = None
VAR_3 = None
if VAR_1[:7] == "DESKTOP":
VAR_2 = True
else:
VAR_2 = False
VAR_4 = Flask(__name__)
def FUNC_0():
VAR_6 = gspread.authorize(VAR_14).open("문학따먹기")
VAR_7 = VAR_6.get_worksheet(0)
VAR_8 = VAR_7.get_all_values()
print(VAR_8)
try:
VAR_16 = []
for row in VAR_8[1:]:
VAR_31 = namedtuple("Munhak", VAR_8[0])(*row)
VAR_31 = VAR_31._replace(keywords=json.loads(VAR_31.keywords))
if VAR_31.is_available == "TRUE":
VAR_16.append(VAR_31)
except:
pass
global VAR_3
munhak_rows_data = VAR_16
print(VAR_16)
return
if VAR_2:
VAR_9 = configparser.ConfigParser()
VAR_9.read('config.ini')
VAR_10 = VAR_9['DEFAULT']['LOCAL_DB_USERNAME']
VAR_11 = VAR_9['DEFAULT']['LOCAL_DB_PASSWORD']
VAR_12 = VAR_9['DEFAULT']['LOCAL_DB_NAME']
VAR_13 = VAR_9['DEFAULT']['LOCAL_DB_HOSTNAME']
VAR_4.config["SQLALCHEMY_DATABASE_URI"] = "postgresql://{DB_USER}:{DB_PASS}@{DB_ADDR}/{DB_NAME}".format(
DB_USER=VAR_10,
DB_PASS=VAR_11,
DB_ADDR=VAR_13,
DB_NAME=VAR_12)
VAR_4.config["SECRET_KEY"] = VAR_9['DEFAULT']['SECRET_KEY']
VAR_14 = ServiceAccountCredentials.from_json_keyfile_name(VAR_9['DEFAULT']['GOOGLE_CREDENTIALS_PATH'], VAR_0)
else:
VAR_4.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get('DATABASE_URL', None)
VAR_4.config["SECRET_KEY"] = os.environ.get('SECRET_KEY', None)
print(os.environ.get('GOOGLE_CREDENTIALS', None))
print(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)))
VAR_14 = ServiceAccountCredentials.from_json_keyfile_dict(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)), VAR_0)
FUNC_0()
@VAR_4.route('/')
def FUNC_1():
VAR_15 = copy.deepcopy(VAR_3)
VAR_16 = {
"total_munhak" : len(VAR_15),
"source_list" : sorted(set([munhak_row.source for munhak_row in VAR_15]))
}
print(VAR_16)
VAR_17["quiz_count"] = 0
return render_template("quiz/FUNC_1.html", VAR_16=data)
@VAR_4.route("/get-quiz")
def FUNC_2():
if "quiz_count" not in VAR_17:
VAR_17["quiz_count"] = 0
VAR_17["total_munhak"] = len(VAR_3)
if "solved_quiz" not in VAR_17:
VAR_17["solved_quiz"] = []
VAR_17["result"] = None
VAR_18 = VAR_17["quiz_count"] + 1
VAR_19 = VAR_17["solved_quiz"]
if "current_munhak" not in VAR_17 or VAR_17["current_munhak"] is None:
VAR_15 = copy.deepcopy(VAR_3)
VAR_24 = [munhak_row for munhak_row in VAR_15 if munhak_row.munhak_seq not in VAR_19]
if len(VAR_24) == 0:
VAR_17["result"] = True
return redirect(url_for("result"))
VAR_25 = random.choice(VAR_24)
for _ in [munhak_row for munhak_row in VAR_15 if munhak_row.title == VAR_25.title]:
VAR_15.remove(_)
random.shuffle(VAR_15)
VAR_26 = VAR_15[0:3] + [VAR_25]
random.shuffle(VAR_26)
VAR_21 = VAR_26.index(VAR_25)
print(VAR_21)
VAR_17["correct"] = VAR_21
VAR_27 = random.choice(VAR_25.keywords)
VAR_27 = hint.replace("\\", "")
VAR_17["current_munhak"] = {
"munhak_seq": VAR_25.munhak_seq,
"source": VAR_25.source,
"category": VAR_25.category,
"hint": VAR_27,
"title": VAR_25.title,
"writer": VAR_25.writer
}
VAR_17["options"] = [munhak_row._asdict() for munhak_row in VAR_26]
VAR_16 = {
"quiz_no": VAR_18,
"type": "객관식",
"category": VAR_25.category,
"hint": VAR_27,
"options": [
f"{munhak_row.writer}, 『{munhak_row.title}』" for munhak_row in VAR_26
],
"total_munhak": len(VAR_3)
}
print(VAR_16)
return render_template("quiz/FUNC_3.html", VAR_16=data)
else:
VAR_16 = {
"quiz_no": VAR_18,
"type": "객관식",
"category": VAR_17["current_munhak"]["category"],
"hint": VAR_17["current_munhak"]["hint"],
"options": [
f"{munhak_row['writer']}, 『{munhak_row['title']}』" for munhak_row in VAR_17["options"]
],
"total_munhak": len(VAR_3)
}
print(VAR_16)
return render_template("quiz/FUNC_3.html", VAR_16=data)
@VAR_4.route('/quiz')
def FUNC_3():
return render_template("quiz/quiz_container.html")
@VAR_4.route("/answer", methods=["GET", "POST"])
def FUNC_4():
print(VAR_17)
VAR_20 = request.form.get("option", None)
if VAR_20 is None or (not type(VAR_20) != int):
return abort(400)
VAR_20 = int(VAR_20)
VAR_21 = VAR_17["correct"]
if VAR_21 is None:
return abort(401)
VAR_22 = VAR_17["current_munhak"]
if VAR_22 is None:
return abort(401)
if VAR_21 == VAR_20:
VAR_17["quiz_count"] += 1
VAR_17["solved_quiz"].append(VAR_22["munhak_seq"])
VAR_17["current_munhak"] = None
return "success"
else:
if "quiz_count" not in VAR_17:
VAR_17["quiz_count"] = 0
if "solved_quiz" not in VAR_17:
VAR_17["result"] = False
return "failed", 404
@VAR_4.route("/result", methods=["GET", "POST"])
def FUNC_5():
VAR_23 = VAR_17["result"]
VAR_16 = {
"is_success" : VAR_23,
"solved_count" : VAR_17["quiz_count"],
"correct" : VAR_17["correct"],
"current_munhak" : VAR_17["current_munhak"]
}
VAR_17["quiz_count"] = 0
VAR_17["solved_quiz"] = []
VAR_17["current_munhak"] = None
print(VAR_16)
return render_template("quiz/FUNC_5.html", VAR_16 = data)
@VAR_4.route('/update')
def FUNC_6():
if request.args.get("key", None) != VAR_4.config["SECRET_KEY"]:
return "error"
FUNC_0()
VAR_17.clear()
return f"success! {len(VAR_3)}"
@VAR_4.route('/images/<VAR_5:path>')
def FUNC_7(VAR_5):
def FUNC_8(VAR_5):
import os
VAR_28 = os.path.dirname(__file__) # <-- absolute dir the script is in
VAR_29 = VAR_5
VAR_30 = os.path.join(VAR_28, VAR_29)
return VAR_30
return send_file(
FUNC_8(f"./images/{VAR_5}"),
mimetype='image/png',
attachment_filename='snapshot.png',
cache_timeout=0
)
if __name__ == '__main__':
VAR_4.run()
| import .configparser
from flask import Flask, render_template, VAR_17, request, flash, redirect, url_for, Response, abort, jsonify, send_file
from werkzeug.utils import safe_join
import socket
import os
import random
import copy
from flask_sqlalchemy import SQLAlchemy, Model
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import json
import base64
from collections import namedtuple
from VAR_4.common.decorator import return_500_if_errors
VAR_0 = ['https://spreadsheets.google.com/feeds',
'https://www.googleapis.com/auth/drive']
VAR_1 = socket.gethostname()
VAR_2 = None
VAR_3 = None
if VAR_1[:7] == "DESKTOP":
VAR_2 = True
else:
VAR_2 = False
VAR_4 = Flask(__name__)
def FUNC_0():
VAR_6 = gspread.authorize(VAR_14).open("문학따먹기")
VAR_7 = VAR_6.get_worksheet(0)
VAR_8 = VAR_7.get_all_values()
print(VAR_8)
try:
VAR_16 = []
for row in VAR_8[1:]:
VAR_31 = namedtuple("Munhak", VAR_8[0])(*row)
VAR_31 = VAR_31._replace(keywords=json.loads(VAR_31.keywords))
if VAR_31.is_available == "TRUE":
VAR_16.append(VAR_31)
except:
pass
global VAR_3
munhak_rows_data = VAR_16
print(VAR_16)
return
if VAR_2:
VAR_9 = configparser.ConfigParser()
VAR_9.read('config.ini')
VAR_10 = VAR_9['DEFAULT']['LOCAL_DB_USERNAME']
VAR_11 = VAR_9['DEFAULT']['LOCAL_DB_PASSWORD']
VAR_12 = VAR_9['DEFAULT']['LOCAL_DB_NAME']
VAR_13 = VAR_9['DEFAULT']['LOCAL_DB_HOSTNAME']
VAR_4.config["SQLALCHEMY_DATABASE_URI"] = "postgresql://{DB_USER}:{DB_PASS}@{DB_ADDR}/{DB_NAME}".format(
DB_USER=VAR_10,
DB_PASS=VAR_11,
DB_ADDR=VAR_13,
DB_NAME=VAR_12)
VAR_4.config["SECRET_KEY"] = VAR_9['DEFAULT']['SECRET_KEY']
VAR_14 = ServiceAccountCredentials.from_json_keyfile_name(VAR_9['DEFAULT']['GOOGLE_CREDENTIALS_PATH'], VAR_0)
else:
VAR_4.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get('DATABASE_URL', None)
VAR_4.config["SECRET_KEY"] = os.environ.get('SECRET_KEY', None)
print(os.environ.get('GOOGLE_CREDENTIALS', None))
print(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)))
VAR_14 = ServiceAccountCredentials.from_json_keyfile_dict(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)), VAR_0)
FUNC_0()
@VAR_4.route('/')
def FUNC_1():
VAR_15 = copy.deepcopy(VAR_3)
VAR_16 = {
"total_munhak" : len(VAR_15),
"source_list" : sorted(set([munhak_row.source for munhak_row in VAR_15]))
}
print(VAR_16)
VAR_17["quiz_count"] = 0
return render_template("quiz/FUNC_1.html", VAR_16=data)
@VAR_4.route("/get-quiz")
def FUNC_2():
if "quiz_count" not in VAR_17:
VAR_17["quiz_count"] = 0
VAR_17["total_munhak"] = len(VAR_3)
if "solved_quiz" not in VAR_17:
VAR_17["solved_quiz"] = []
VAR_17["result"] = None
VAR_18 = VAR_17["quiz_count"] + 1
VAR_19 = VAR_17["solved_quiz"]
if "current_munhak" not in VAR_17 or VAR_17["current_munhak"] is None:
VAR_15 = copy.deepcopy(VAR_3)
VAR_24 = [munhak_row for munhak_row in VAR_15 if munhak_row.munhak_seq not in VAR_19]
if len(VAR_24) == 0:
VAR_17["result"] = True
return redirect(url_for("result"))
VAR_25 = random.choice(VAR_24)
for _ in [munhak_row for munhak_row in VAR_15 if munhak_row.title == VAR_25.title]:
VAR_15.remove(_)
random.shuffle(VAR_15)
VAR_26 = VAR_15[0:3] + [VAR_25]
random.shuffle(VAR_26)
VAR_21 = VAR_26.index(VAR_25)
print(VAR_21)
VAR_17["correct"] = VAR_21
VAR_27 = random.choice(VAR_25.keywords)
VAR_27 = hint.replace("\\", "")
VAR_17["current_munhak"] = {
"munhak_seq": VAR_25.munhak_seq,
"source": VAR_25.source,
"category": VAR_25.category,
"hint": VAR_27,
"title": VAR_25.title,
"writer": VAR_25.writer
}
VAR_17["options"] = [munhak_row._asdict() for munhak_row in VAR_26]
VAR_16 = {
"quiz_no": VAR_18,
"type": "객관식",
"category": VAR_25.category,
"hint": VAR_27,
"options": [
f"{munhak_row.writer}, 『{munhak_row.title}』" for munhak_row in VAR_26
],
"total_munhak": len(VAR_3)
}
print(VAR_16)
return render_template("quiz/FUNC_3.html", VAR_16=data)
else:
VAR_16 = {
"quiz_no": VAR_18,
"type": "객관식",
"category": VAR_17["current_munhak"]["category"],
"hint": VAR_17["current_munhak"]["hint"],
"options": [
f"{munhak_row['writer']}, 『{munhak_row['title']}』" for munhak_row in VAR_17["options"]
],
"total_munhak": len(VAR_3)
}
print(VAR_16)
return render_template("quiz/FUNC_3.html", VAR_16=data)
@VAR_4.route('/quiz')
def FUNC_3():
return render_template("quiz/quiz_container.html")
@VAR_4.route("/answer", methods=["GET", "POST"])
def FUNC_4():
print(VAR_17)
VAR_20 = request.form.get("option", None)
if VAR_20 is None or (not type(VAR_20) != int):
return abort(400)
VAR_20 = int(VAR_20)
VAR_21 = VAR_17["correct"]
if VAR_21 is None:
return abort(401)
VAR_22 = VAR_17["current_munhak"]
if VAR_22 is None:
return abort(401)
if VAR_21 == VAR_20:
VAR_17["quiz_count"] += 1
VAR_17["solved_quiz"].append(VAR_22["munhak_seq"])
VAR_17["current_munhak"] = None
return "success"
else:
if "quiz_count" not in VAR_17:
VAR_17["quiz_count"] = 0
if "solved_quiz" not in VAR_17:
VAR_17["result"] = False
return "failed", 404
@VAR_4.route("/result", methods=["GET", "POST"])
def FUNC_5():
VAR_23 = VAR_17["result"]
VAR_16 = {
"is_success" : VAR_23,
"solved_count" : VAR_17["quiz_count"],
"correct" : VAR_17["correct"],
"current_munhak" : VAR_17["current_munhak"]
}
VAR_17["quiz_count"] = 0
VAR_17["solved_quiz"] = []
VAR_17["current_munhak"] = None
print(VAR_16)
return render_template("quiz/FUNC_5.html", VAR_16 = data)
@VAR_4.route('/update')
def FUNC_6():
if request.args.get("key", None) != VAR_4.config["SECRET_KEY"]:
return "error"
FUNC_0()
VAR_17.clear()
return f"success! {len(VAR_3)}"
@VAR_4.route('/images/<VAR_5:path>')
def FUNC_7(VAR_5):
def FUNC_8(VAR_5):
import os
VAR_28 = os.path.dirname(__file__) # <-- absolute dir the script is in
VAR_29 = VAR_5
VAR_30 = safe_join(VAR_28, VAR_29)
return VAR_30
return send_file(
FUNC_8(f"./images/{VAR_5}"),
mimetype='image/png',
attachment_filename='snapshot.png',
cache_timeout=0
)
if __name__ == '__main__':
VAR_4.run()
| [
2,
14,
16,
19,
22,
24,
29,
31,
32,
34,
36,
38,
42,
49,
50,
53,
57,
59,
60,
61,
65,
70,
76,
78,
80,
82,
88,
89,
90,
92,
93,
94,
103,
106,
107,
116,
119,
121,
122,
124,
126,
130,
132,
135,
137,
139,
143,
144,
145,
146,
147,
149,
152,
173,
176,
188,
190,
191,
192,
196,
197,
208,
212,
217,
220,
224,
226,
228,
229,
230,
233,
234,
236,
237,
247,
250,
251,
253,
255,
258,
262,
263,
272,
279,
280,
282,
284
] | [
2,
15,
17,
20,
23,
25,
30,
32,
33,
35,
37,
39,
43,
50,
51,
54,
58,
60,
61,
62,
66,
71,
77,
79,
81,
83,
89,
90,
91,
93,
94,
95,
104,
107,
108,
117,
120,
122,
123,
125,
127,
131,
133,
136,
138,
140,
144,
145,
146,
147,
148,
150,
153,
174,
177,
189,
191,
192,
193,
197,
198,
209,
213,
218,
221,
225,
227,
229,
230,
231,
234,
235,
237,
238,
248,
251,
252,
254,
256,
259,
263,
264,
273,
280,
281,
283,
285
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib.parse
from typing import List, Optional
from netaddr import AddrFormatError, IPAddress
from zope.interface import implementer
from twisted.internet import defer
from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
from twisted.internet.interfaces import (
IProtocolFactory,
IReactorCore,
IStreamClientEndpoint,
)
from twisted.web.client import URI, Agent, HTTPConnectionPool
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IAgentEndpointFactory, IBodyProducer
from synapse.crypto.context_factory import FederationPolicyForHTTPS
from synapse.http.federation.srv_resolver import Server, SrvResolver
from synapse.http.federation.well_known_resolver import WellKnownResolver
from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.util import Clock
logger = logging.getLogger(__name__)
@implementer(IAgent)
class MatrixFederationAgent:
"""An Agent-like thing which provides a `request` method which correctly
handles resolving matrix server names when using matrix://. Handles standard
https URIs as normal.
Doesn't implement any retries. (Those are done in MatrixFederationHttpClient.)
Args:
reactor: twisted reactor to use for underlying requests
tls_client_options_factory:
factory to use for fetching client tls options, or none to disable TLS.
user_agent:
The user agent header to use for federation requests.
_srv_resolver:
SrvResolver implementation to use for looking up SRV records. None
to use a default implementation.
_well_known_resolver:
WellKnownResolver to use to perform well-known lookups. None to use a
default implementation.
"""
def __init__(
self,
reactor: IReactorCore,
tls_client_options_factory: Optional[FederationPolicyForHTTPS],
user_agent: bytes,
_srv_resolver: Optional[SrvResolver] = None,
_well_known_resolver: Optional[WellKnownResolver] = None,
):
self._reactor = reactor
self._clock = Clock(reactor)
self._pool = HTTPConnectionPool(reactor)
self._pool.retryAutomatically = False
self._pool.maxPersistentPerHost = 5
self._pool.cachedConnectionTimeout = 2 * 60
self._agent = Agent.usingEndpointFactory(
self._reactor,
MatrixHostnameEndpointFactory(
reactor, tls_client_options_factory, _srv_resolver
),
pool=self._pool,
)
self.user_agent = user_agent
if _well_known_resolver is None:
_well_known_resolver = WellKnownResolver(
self._reactor,
agent=Agent(
self._reactor,
pool=self._pool,
contextFactory=tls_client_options_factory,
),
user_agent=self.user_agent,
)
self._well_known_resolver = _well_known_resolver
@defer.inlineCallbacks
def request(
self,
method: bytes,
uri: bytes,
headers: Optional[Headers] = None,
bodyProducer: Optional[IBodyProducer] = None,
) -> defer.Deferred:
"""
Args:
method: HTTP method: GET/POST/etc
uri: Absolute URI to be retrieved
headers:
HTTP headers to send with the request, or None to send no extra headers.
bodyProducer:
An object which can generate bytes to make up the
body of this request (for example, the properly encoded contents of
a file for a file upload). Or None if the request is to have
no body.
Returns:
Deferred[twisted.web.iweb.IResponse]:
fires when the header of the response has been received (regardless of the
response status code). Fails if there is any problem which prevents that
response from being received (including problems that prevent the request
from being sent).
"""
# We use urlparse as that will set `port` to None if there is no
# explicit port.
parsed_uri = urllib.parse.urlparse(uri)
# There must be a valid hostname.
assert parsed_uri.hostname
# If this is a matrix:// URI check if the server has delegated matrix
# traffic using well-known delegation.
#
# We have to do this here and not in the endpoint as we need to rewrite
# the host header with the delegated server name.
delegated_server = None
if (
parsed_uri.scheme == b"matrix"
and not _is_ip_literal(parsed_uri.hostname)
and not parsed_uri.port
):
well_known_result = yield defer.ensureDeferred(
self._well_known_resolver.get_well_known(parsed_uri.hostname)
)
delegated_server = well_known_result.delegated_server
if delegated_server:
# Ok, the server has delegated matrix traffic to somewhere else, so
# lets rewrite the URL to replace the server with the delegated
# server name.
uri = urllib.parse.urlunparse(
(
parsed_uri.scheme,
delegated_server,
parsed_uri.path,
parsed_uri.params,
parsed_uri.query,
parsed_uri.fragment,
)
)
parsed_uri = urllib.parse.urlparse(uri)
# We need to make sure the host header is set to the netloc of the
# server and that a user-agent is provided.
if headers is None:
headers = Headers()
else:
headers = headers.copy()
if not headers.hasHeader(b"host"):
headers.addRawHeader(b"host", parsed_uri.netloc)
if not headers.hasHeader(b"user-agent"):
headers.addRawHeader(b"user-agent", self.user_agent)
res = yield make_deferred_yieldable(
self._agent.request(method, uri, headers, bodyProducer)
)
return res
@implementer(IAgentEndpointFactory)
class MatrixHostnameEndpointFactory:
"""Factory for MatrixHostnameEndpoint for parsing to an Agent.
"""
def __init__(
self,
reactor: IReactorCore,
tls_client_options_factory: Optional[FederationPolicyForHTTPS],
srv_resolver: Optional[SrvResolver],
):
self._reactor = reactor
self._tls_client_options_factory = tls_client_options_factory
if srv_resolver is None:
srv_resolver = SrvResolver()
self._srv_resolver = srv_resolver
def endpointForURI(self, parsed_uri):
return MatrixHostnameEndpoint(
self._reactor,
self._tls_client_options_factory,
self._srv_resolver,
parsed_uri,
)
@implementer(IStreamClientEndpoint)
class MatrixHostnameEndpoint:
"""An endpoint that resolves matrix:// URLs using Matrix server name
resolution (i.e. via SRV). Does not check for well-known delegation.
Args:
reactor: twisted reactor to use for underlying requests
tls_client_options_factory:
factory to use for fetching client tls options, or none to disable TLS.
srv_resolver: The SRV resolver to use
parsed_uri: The parsed URI that we're wanting to connect to.
"""
def __init__(
self,
reactor: IReactorCore,
tls_client_options_factory: Optional[FederationPolicyForHTTPS],
srv_resolver: SrvResolver,
parsed_uri: URI,
):
self._reactor = reactor
self._parsed_uri = parsed_uri
# set up the TLS connection params
#
# XXX disabling TLS is really only supported here for the benefit of the
# unit tests. We should make the UTs cope with TLS rather than having to make
# the code support the unit tests.
if tls_client_options_factory is None:
self._tls_options = None
else:
self._tls_options = tls_client_options_factory.get_options(
self._parsed_uri.host
)
self._srv_resolver = srv_resolver
def connect(self, protocol_factory: IProtocolFactory) -> defer.Deferred:
"""Implements IStreamClientEndpoint interface
"""
return run_in_background(self._do_connect, protocol_factory)
async def _do_connect(self, protocol_factory: IProtocolFactory) -> None:
first_exception = None
server_list = await self._resolve_server()
for server in server_list:
host = server.host
port = server.port
try:
logger.debug("Connecting to %s:%i", host.decode("ascii"), port)
endpoint = HostnameEndpoint(self._reactor, host, port)
if self._tls_options:
endpoint = wrapClientTLS(self._tls_options, endpoint)
result = await make_deferred_yieldable(
endpoint.connect(protocol_factory)
)
return result
except Exception as e:
logger.info(
"Failed to connect to %s:%i: %s", host.decode("ascii"), port, e
)
if not first_exception:
first_exception = e
# We return the first failure because that's probably the most interesting.
if first_exception:
raise first_exception
# This shouldn't happen as we should always have at least one host/port
# to try and if that doesn't work then we'll have an exception.
raise Exception("Failed to resolve server %r" % (self._parsed_uri.netloc,))
async def _resolve_server(self) -> List[Server]:
"""Resolves the server name to a list of hosts and ports to attempt to
connect to.
"""
if self._parsed_uri.scheme != b"matrix":
return [Server(host=self._parsed_uri.host, port=self._parsed_uri.port)]
# Note: We don't do well-known lookup as that needs to have happened
# before now, due to needing to rewrite the Host header of the HTTP
# request.
# We reparse the URI so that defaultPort is -1 rather than 80
parsed_uri = urllib.parse.urlparse(self._parsed_uri.toBytes())
host = parsed_uri.hostname
port = parsed_uri.port
# If there is an explicit port or the host is an IP address we bypass
# SRV lookups and just use the given host/port.
if port or _is_ip_literal(host):
return [Server(host, port or 8448)]
server_list = await self._srv_resolver.resolve_service(b"_matrix._tcp." + host)
if server_list:
return server_list
# No SRV records, so we fallback to host and 8448
return [Server(host, 8448)]
def _is_ip_literal(host: bytes) -> bool:
"""Test if the given host name is either an IPv4 or IPv6 literal.
Args:
host: The host name to check
Returns:
True if the hostname is an IP address literal.
"""
host_str = host.decode("ascii")
try:
IPAddress(host_str)
return True
except AddrFormatError:
return False
| # -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib.parse
from typing import List, Optional
from netaddr import AddrFormatError, IPAddress, IPSet
from zope.interface import implementer
from twisted.internet import defer
from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
from twisted.internet.interfaces import (
IProtocolFactory,
IReactorCore,
IStreamClientEndpoint,
)
from twisted.web.client import URI, Agent, HTTPConnectionPool
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IAgentEndpointFactory, IBodyProducer
from synapse.crypto.context_factory import FederationPolicyForHTTPS
from synapse.http.client import BlacklistingAgentWrapper
from synapse.http.federation.srv_resolver import Server, SrvResolver
from synapse.http.federation.well_known_resolver import WellKnownResolver
from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.util import Clock
logger = logging.getLogger(__name__)
@implementer(IAgent)
class MatrixFederationAgent:
"""An Agent-like thing which provides a `request` method which correctly
handles resolving matrix server names when using matrix://. Handles standard
https URIs as normal.
Doesn't implement any retries. (Those are done in MatrixFederationHttpClient.)
Args:
reactor: twisted reactor to use for underlying requests
tls_client_options_factory:
factory to use for fetching client tls options, or none to disable TLS.
user_agent:
The user agent header to use for federation requests.
_srv_resolver:
SrvResolver implementation to use for looking up SRV records. None
to use a default implementation.
_well_known_resolver:
WellKnownResolver to use to perform well-known lookups. None to use a
default implementation.
"""
def __init__(
self,
reactor: IReactorCore,
tls_client_options_factory: Optional[FederationPolicyForHTTPS],
user_agent: bytes,
ip_blacklist: IPSet,
_srv_resolver: Optional[SrvResolver] = None,
_well_known_resolver: Optional[WellKnownResolver] = None,
):
self._reactor = reactor
self._clock = Clock(reactor)
self._pool = HTTPConnectionPool(reactor)
self._pool.retryAutomatically = False
self._pool.maxPersistentPerHost = 5
self._pool.cachedConnectionTimeout = 2 * 60
self._agent = Agent.usingEndpointFactory(
self._reactor,
MatrixHostnameEndpointFactory(
reactor, tls_client_options_factory, _srv_resolver
),
pool=self._pool,
)
self.user_agent = user_agent
if _well_known_resolver is None:
# Note that the name resolver has already been wrapped in a
# IPBlacklistingResolver by MatrixFederationHttpClient.
_well_known_resolver = WellKnownResolver(
self._reactor,
agent=BlacklistingAgentWrapper(
Agent(
self._reactor,
pool=self._pool,
contextFactory=tls_client_options_factory,
),
self._reactor,
ip_blacklist=ip_blacklist,
),
user_agent=self.user_agent,
)
self._well_known_resolver = _well_known_resolver
@defer.inlineCallbacks
def request(
self,
method: bytes,
uri: bytes,
headers: Optional[Headers] = None,
bodyProducer: Optional[IBodyProducer] = None,
) -> defer.Deferred:
"""
Args:
method: HTTP method: GET/POST/etc
uri: Absolute URI to be retrieved
headers:
HTTP headers to send with the request, or None to send no extra headers.
bodyProducer:
An object which can generate bytes to make up the
body of this request (for example, the properly encoded contents of
a file for a file upload). Or None if the request is to have
no body.
Returns:
Deferred[twisted.web.iweb.IResponse]:
fires when the header of the response has been received (regardless of the
response status code). Fails if there is any problem which prevents that
response from being received (including problems that prevent the request
from being sent).
"""
# We use urlparse as that will set `port` to None if there is no
# explicit port.
parsed_uri = urllib.parse.urlparse(uri)
# There must be a valid hostname.
assert parsed_uri.hostname
# If this is a matrix:// URI check if the server has delegated matrix
# traffic using well-known delegation.
#
# We have to do this here and not in the endpoint as we need to rewrite
# the host header with the delegated server name.
delegated_server = None
if (
parsed_uri.scheme == b"matrix"
and not _is_ip_literal(parsed_uri.hostname)
and not parsed_uri.port
):
well_known_result = yield defer.ensureDeferred(
self._well_known_resolver.get_well_known(parsed_uri.hostname)
)
delegated_server = well_known_result.delegated_server
if delegated_server:
# Ok, the server has delegated matrix traffic to somewhere else, so
# lets rewrite the URL to replace the server with the delegated
# server name.
uri = urllib.parse.urlunparse(
(
parsed_uri.scheme,
delegated_server,
parsed_uri.path,
parsed_uri.params,
parsed_uri.query,
parsed_uri.fragment,
)
)
parsed_uri = urllib.parse.urlparse(uri)
# We need to make sure the host header is set to the netloc of the
# server and that a user-agent is provided.
if headers is None:
headers = Headers()
else:
headers = headers.copy()
if not headers.hasHeader(b"host"):
headers.addRawHeader(b"host", parsed_uri.netloc)
if not headers.hasHeader(b"user-agent"):
headers.addRawHeader(b"user-agent", self.user_agent)
res = yield make_deferred_yieldable(
self._agent.request(method, uri, headers, bodyProducer)
)
return res
@implementer(IAgentEndpointFactory)
class MatrixHostnameEndpointFactory:
"""Factory for MatrixHostnameEndpoint for parsing to an Agent.
"""
def __init__(
self,
reactor: IReactorCore,
tls_client_options_factory: Optional[FederationPolicyForHTTPS],
srv_resolver: Optional[SrvResolver],
):
self._reactor = reactor
self._tls_client_options_factory = tls_client_options_factory
if srv_resolver is None:
srv_resolver = SrvResolver()
self._srv_resolver = srv_resolver
def endpointForURI(self, parsed_uri):
return MatrixHostnameEndpoint(
self._reactor,
self._tls_client_options_factory,
self._srv_resolver,
parsed_uri,
)
@implementer(IStreamClientEndpoint)
class MatrixHostnameEndpoint:
"""An endpoint that resolves matrix:// URLs using Matrix server name
resolution (i.e. via SRV). Does not check for well-known delegation.
Args:
reactor: twisted reactor to use for underlying requests
tls_client_options_factory:
factory to use for fetching client tls options, or none to disable TLS.
srv_resolver: The SRV resolver to use
parsed_uri: The parsed URI that we're wanting to connect to.
"""
def __init__(
self,
reactor: IReactorCore,
tls_client_options_factory: Optional[FederationPolicyForHTTPS],
srv_resolver: SrvResolver,
parsed_uri: URI,
):
self._reactor = reactor
self._parsed_uri = parsed_uri
# set up the TLS connection params
#
# XXX disabling TLS is really only supported here for the benefit of the
# unit tests. We should make the UTs cope with TLS rather than having to make
# the code support the unit tests.
if tls_client_options_factory is None:
self._tls_options = None
else:
self._tls_options = tls_client_options_factory.get_options(
self._parsed_uri.host
)
self._srv_resolver = srv_resolver
def connect(self, protocol_factory: IProtocolFactory) -> defer.Deferred:
"""Implements IStreamClientEndpoint interface
"""
return run_in_background(self._do_connect, protocol_factory)
async def _do_connect(self, protocol_factory: IProtocolFactory) -> None:
first_exception = None
server_list = await self._resolve_server()
for server in server_list:
host = server.host
port = server.port
try:
logger.debug("Connecting to %s:%i", host.decode("ascii"), port)
endpoint = HostnameEndpoint(self._reactor, host, port)
if self._tls_options:
endpoint = wrapClientTLS(self._tls_options, endpoint)
result = await make_deferred_yieldable(
endpoint.connect(protocol_factory)
)
return result
except Exception as e:
logger.info(
"Failed to connect to %s:%i: %s", host.decode("ascii"), port, e
)
if not first_exception:
first_exception = e
# We return the first failure because that's probably the most interesting.
if first_exception:
raise first_exception
# This shouldn't happen as we should always have at least one host/port
# to try and if that doesn't work then we'll have an exception.
raise Exception("Failed to resolve server %r" % (self._parsed_uri.netloc,))
async def _resolve_server(self) -> List[Server]:
"""Resolves the server name to a list of hosts and ports to attempt to
connect to.
"""
if self._parsed_uri.scheme != b"matrix":
return [Server(host=self._parsed_uri.host, port=self._parsed_uri.port)]
# Note: We don't do well-known lookup as that needs to have happened
# before now, due to needing to rewrite the Host header of the HTTP
# request.
# We reparse the URI so that defaultPort is -1 rather than 80
parsed_uri = urllib.parse.urlparse(self._parsed_uri.toBytes())
host = parsed_uri.hostname
port = parsed_uri.port
# If there is an explicit port or the host is an IP address we bypass
# SRV lookups and just use the given host/port.
if port or _is_ip_literal(host):
return [Server(host, port or 8448)]
server_list = await self._srv_resolver.resolve_service(b"_matrix._tcp." + host)
if server_list:
return server_list
# No SRV records, so we fallback to host and 8448
return [Server(host, 8448)]
def _is_ip_literal(host: bytes) -> bool:
"""Test if the given host name is either an IPv4 or IPv6 literal.
Args:
host: The host name to check
Returns:
True if the hostname is an IP address literal.
"""
host_str = host.decode("ascii")
try:
IPAddress(host_str)
return True
except AddrFormatError:
return False
| open_redirect | {
"code": [
"from netaddr import AddrFormatError, IPAddress",
" agent=Agent(",
" pool=self._pool,",
" contextFactory=tls_client_options_factory,"
],
"line_no": [
19,
95,
97,
98
]
} | {
"code": [
"from netaddr import AddrFormatError, IPAddress, IPSet",
"from synapse.http.client import BlacklistingAgentWrapper",
" ip_blacklist: IPSet,",
" agent=BlacklistingAgentWrapper(",
" Agent(",
" self._reactor,",
" contextFactory=tls_client_options_factory,",
" ip_blacklist=ip_blacklist,"
],
"line_no": [
19,
34,
74,
99,
100,
101,
103,
106
]
} |
import logging
import urllib.parse
from typing import List, Optional
from netaddr import AddrFormatError, IPAddress
from zope.interface import implementer
from twisted.internet import defer
from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
from twisted.internet.interfaces import (
IProtocolFactory,
IReactorCore,
IStreamClientEndpoint,
)
from twisted.web.client import URI, Agent, HTTPConnectionPool
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IAgentEndpointFactory, IBodyProducer
from synapse.crypto.context_factory import FederationPolicyForHTTPS
from synapse.http.federation.srv_resolver import Server, SrvResolver
from synapse.http.federation.well_known_resolver import WellKnownResolver
from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.util import Clock
VAR_0 = logging.getLogger(__name__)
@implementer(IAgent)
class CLASS_0:
def __init__(
self,
VAR_2: IReactorCore,
VAR_3: Optional[FederationPolicyForHTTPS],
VAR_4: bytes,
VAR_5: Optional[SrvResolver] = None,
VAR_6: Optional[WellKnownResolver] = None,
):
self._reactor = VAR_2
self._clock = Clock(VAR_2)
self._pool = HTTPConnectionPool(VAR_2)
self._pool.retryAutomatically = False
self._pool.maxPersistentPerHost = 5
self._pool.cachedConnectionTimeout = 2 * 60
self._agent = Agent.usingEndpointFactory(
self._reactor,
CLASS_1(
VAR_2, VAR_3, VAR_5
),
pool=self._pool,
)
self.user_agent = VAR_4
if VAR_6 is None:
VAR_6 = WellKnownResolver(
self._reactor,
agent=Agent(
self._reactor,
pool=self._pool,
contextFactory=VAR_3,
),
VAR_4=self.user_agent,
)
self._well_known_resolver = VAR_6
@defer.inlineCallbacks
def FUNC_1(
self,
VAR_7: bytes,
VAR_8: bytes,
VAR_9: Optional[Headers] = None,
VAR_10: Optional[IBodyProducer] = None,
) -> defer.Deferred:
VAR_12 = urllib.parse.urlparse(VAR_8)
assert VAR_12.hostname
VAR_15 = None
if (
VAR_12.scheme == b"matrix"
and not FUNC_0(VAR_12.hostname)
and not VAR_12.port
):
VAR_20 = yield defer.ensureDeferred(
self._well_known_resolver.get_well_known(VAR_12.hostname)
)
VAR_15 = VAR_20.delegated_server
if VAR_15:
VAR_8 = urllib.parse.urlunparse(
(
VAR_12.scheme,
VAR_15,
VAR_12.path,
VAR_12.params,
VAR_12.query,
VAR_12.fragment,
)
)
VAR_12 = urllib.parse.urlparse(VAR_8)
if VAR_9 is None:
VAR_9 = Headers()
else:
VAR_9 = VAR_9.copy()
if not VAR_9.hasHeader(b"host"):
VAR_9.addRawHeader(b"host", VAR_12.netloc)
if not VAR_9.hasHeader(b"user-agent"):
VAR_9.addRawHeader(b"user-agent", self.user_agent)
VAR_16 = yield make_deferred_yieldable(
self._agent.request(VAR_7, VAR_8, VAR_9, VAR_10)
)
return VAR_16
@implementer(IAgentEndpointFactory)
class CLASS_1:
def __init__(
self,
VAR_2: IReactorCore,
VAR_3: Optional[FederationPolicyForHTTPS],
VAR_11: Optional[SrvResolver],
):
self._reactor = VAR_2
self._tls_client_options_factory = VAR_3
if VAR_11 is None:
VAR_11 = SrvResolver()
self._srv_resolver = VAR_11
def FUNC_2(self, VAR_12):
return CLASS_2(
self._reactor,
self._tls_client_options_factory,
self._srv_resolver,
VAR_12,
)
@implementer(IStreamClientEndpoint)
class CLASS_2:
def __init__(
self,
VAR_2: IReactorCore,
VAR_3: Optional[FederationPolicyForHTTPS],
VAR_11: SrvResolver,
VAR_12: URI,
):
self._reactor = VAR_2
self._parsed_uri = VAR_12
if VAR_3 is None:
self._tls_options = None
else:
self._tls_options = VAR_3.get_options(
self._parsed_uri.host
)
self._srv_resolver = VAR_11
def FUNC_3(self, VAR_13: IProtocolFactory) -> defer.Deferred:
return run_in_background(self._do_connect, VAR_13)
async def FUNC_4(self, VAR_13: IProtocolFactory) -> None:
VAR_17 = None
VAR_18 = await self._resolve_server()
for server in VAR_18:
VAR_1 = server.host
VAR_19 = server.port
try:
VAR_0.debug("Connecting to %s:%i", VAR_1.decode("ascii"), VAR_19)
VAR_21 = HostnameEndpoint(self._reactor, VAR_1, VAR_19)
if self._tls_options:
VAR_21 = wrapClientTLS(self._tls_options, VAR_21)
VAR_22 = await make_deferred_yieldable(
VAR_21.connect(VAR_13)
)
return VAR_22
except Exception as e:
VAR_0.info(
"Failed to FUNC_3 to %s:%i: %s", VAR_1.decode("ascii"), VAR_19, e
)
if not VAR_17:
first_exception = e
if VAR_17:
raise VAR_17
raise Exception("Failed to resolve server %r" % (self._parsed_uri.netloc,))
async def FUNC_5(self) -> List[Server]:
if self._parsed_uri.scheme != b"matrix":
return [Server(VAR_1=self._parsed_uri.host, VAR_19=self._parsed_uri.port)]
VAR_12 = urllib.parse.urlparse(self._parsed_uri.toBytes())
VAR_1 = VAR_12.hostname
VAR_19 = VAR_12.port
if VAR_19 or FUNC_0(VAR_1):
return [Server(VAR_1, VAR_19 or 8448)]
VAR_18 = await self._srv_resolver.resolve_service(b"_matrix._tcp." + VAR_1)
if VAR_18:
return VAR_18
return [Server(VAR_1, 8448)]
def FUNC_0(VAR_1: bytes) -> bool:
VAR_14 = VAR_1.decode("ascii")
try:
IPAddress(VAR_14)
return True
except AddrFormatError:
return False
|
import logging
import urllib.parse
from typing import List, Optional
from netaddr import AddrFormatError, IPAddress, IPSet
from zope.interface import implementer
from twisted.internet import defer
from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
from twisted.internet.interfaces import (
IProtocolFactory,
IReactorCore,
IStreamClientEndpoint,
)
from twisted.web.client import URI, Agent, HTTPConnectionPool
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IAgentEndpointFactory, IBodyProducer
from synapse.crypto.context_factory import FederationPolicyForHTTPS
from synapse.http.client import BlacklistingAgentWrapper
from synapse.http.federation.srv_resolver import Server, SrvResolver
from synapse.http.federation.well_known_resolver import WellKnownResolver
from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.util import Clock
VAR_0 = logging.getLogger(__name__)
@implementer(IAgent)
class CLASS_0:
def __init__(
self,
VAR_2: IReactorCore,
VAR_3: Optional[FederationPolicyForHTTPS],
VAR_4: bytes,
VAR_5: IPSet,
VAR_6: Optional[SrvResolver] = None,
VAR_7: Optional[WellKnownResolver] = None,
):
self._reactor = VAR_2
self._clock = Clock(VAR_2)
self._pool = HTTPConnectionPool(VAR_2)
self._pool.retryAutomatically = False
self._pool.maxPersistentPerHost = 5
self._pool.cachedConnectionTimeout = 2 * 60
self._agent = Agent.usingEndpointFactory(
self._reactor,
CLASS_1(
VAR_2, VAR_3, VAR_6
),
pool=self._pool,
)
self.user_agent = VAR_4
if VAR_7 is None:
VAR_7 = WellKnownResolver(
self._reactor,
agent=BlacklistingAgentWrapper(
Agent(
self._reactor,
pool=self._pool,
contextFactory=VAR_3,
),
self._reactor,
VAR_5=ip_blacklist,
),
VAR_4=self.user_agent,
)
self._well_known_resolver = VAR_7
@defer.inlineCallbacks
def FUNC_1(
self,
VAR_8: bytes,
VAR_9: bytes,
VAR_10: Optional[Headers] = None,
VAR_11: Optional[IBodyProducer] = None,
) -> defer.Deferred:
VAR_13 = urllib.parse.urlparse(VAR_9)
assert VAR_13.hostname
VAR_16 = None
if (
VAR_13.scheme == b"matrix"
and not FUNC_0(VAR_13.hostname)
and not VAR_13.port
):
VAR_21 = yield defer.ensureDeferred(
self._well_known_resolver.get_well_known(VAR_13.hostname)
)
VAR_16 = VAR_21.delegated_server
if VAR_16:
VAR_9 = urllib.parse.urlunparse(
(
VAR_13.scheme,
VAR_16,
VAR_13.path,
VAR_13.params,
VAR_13.query,
VAR_13.fragment,
)
)
VAR_13 = urllib.parse.urlparse(VAR_9)
if VAR_10 is None:
VAR_10 = Headers()
else:
VAR_10 = VAR_10.copy()
if not VAR_10.hasHeader(b"host"):
VAR_10.addRawHeader(b"host", VAR_13.netloc)
if not VAR_10.hasHeader(b"user-agent"):
VAR_10.addRawHeader(b"user-agent", self.user_agent)
VAR_17 = yield make_deferred_yieldable(
self._agent.request(VAR_8, VAR_9, VAR_10, VAR_11)
)
return VAR_17
@implementer(IAgentEndpointFactory)
class CLASS_1:
def __init__(
self,
VAR_2: IReactorCore,
VAR_3: Optional[FederationPolicyForHTTPS],
VAR_12: Optional[SrvResolver],
):
self._reactor = VAR_2
self._tls_client_options_factory = VAR_3
if VAR_12 is None:
VAR_12 = SrvResolver()
self._srv_resolver = VAR_12
def FUNC_2(self, VAR_13):
return CLASS_2(
self._reactor,
self._tls_client_options_factory,
self._srv_resolver,
VAR_13,
)
@implementer(IStreamClientEndpoint)
class CLASS_2:
def __init__(
self,
VAR_2: IReactorCore,
VAR_3: Optional[FederationPolicyForHTTPS],
VAR_12: SrvResolver,
VAR_13: URI,
):
self._reactor = VAR_2
self._parsed_uri = VAR_13
if VAR_3 is None:
self._tls_options = None
else:
self._tls_options = VAR_3.get_options(
self._parsed_uri.host
)
self._srv_resolver = VAR_12
def FUNC_3(self, VAR_14: IProtocolFactory) -> defer.Deferred:
return run_in_background(self._do_connect, VAR_14)
async def FUNC_4(self, VAR_14: IProtocolFactory) -> None:
VAR_18 = None
VAR_19 = await self._resolve_server()
for server in VAR_19:
VAR_1 = server.host
VAR_20 = server.port
try:
VAR_0.debug("Connecting to %s:%i", VAR_1.decode("ascii"), VAR_20)
VAR_22 = HostnameEndpoint(self._reactor, VAR_1, VAR_20)
if self._tls_options:
VAR_22 = wrapClientTLS(self._tls_options, VAR_22)
VAR_23 = await make_deferred_yieldable(
VAR_22.connect(VAR_14)
)
return VAR_23
except Exception as e:
VAR_0.info(
"Failed to FUNC_3 to %s:%i: %s", VAR_1.decode("ascii"), VAR_20, e
)
if not VAR_18:
first_exception = e
if VAR_18:
raise VAR_18
raise Exception("Failed to resolve server %r" % (self._parsed_uri.netloc,))
async def FUNC_5(self) -> List[Server]:
if self._parsed_uri.scheme != b"matrix":
return [Server(VAR_1=self._parsed_uri.host, VAR_20=self._parsed_uri.port)]
VAR_13 = urllib.parse.urlparse(self._parsed_uri.toBytes())
VAR_1 = VAR_13.hostname
VAR_20 = VAR_13.port
if VAR_20 or FUNC_0(VAR_1):
return [Server(VAR_1, VAR_20 or 8448)]
VAR_19 = await self._srv_resolver.resolve_service(b"_matrix._tcp." + VAR_1)
if VAR_19:
return VAR_19
return [Server(VAR_1, 8448)]
def FUNC_0(VAR_1: bytes) -> bool:
VAR_15 = VAR_1.decode("ascii")
try:
IPAddress(VAR_15)
return True
except AddrFormatError:
return False
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
18,
21,
32,
38,
40,
41,
47,
49,
52,
55,
58,
62,
67,
82,
91,
102,
104,
131,
132,
134,
135,
137,
138,
139,
140,
141,
142,
153,
155,
156,
157,
169,
170,
171,
176,
181,
185,
187,
188,
193,
202,
205,
207,
215,
216,
221,
229,
238,
240,
241,
242,
243,
244,
245,
246,
253,
255,
259,
261,
264,
266,
270,
279,
287,
288,
291,
292,
293,
295,
300,
303,
304,
305,
306,
307,
308,
310,
313,
314,
315,
318,
320,
323,
324,
326,
327,
330,
333,
337,
339,
345,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66,
191,
192,
219,
220,
221,
222,
223,
224,
225,
226,
227,
228,
329,
330,
331,
332,
333,
334,
335,
336,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
257,
258,
297,
298,
299
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
18,
21,
32,
39,
41,
42,
48,
50,
53,
56,
59,
63,
68,
84,
93,
95,
96,
110,
112,
139,
140,
142,
143,
145,
146,
147,
148,
149,
150,
161,
163,
164,
165,
177,
178,
179,
184,
189,
193,
195,
196,
201,
210,
213,
215,
223,
224,
229,
237,
246,
248,
249,
250,
251,
252,
253,
254,
261,
263,
267,
269,
272,
274,
278,
287,
295,
296,
299,
300,
301,
303,
308,
311,
312,
313,
314,
315,
316,
318,
321,
322,
323,
326,
328,
331,
332,
334,
335,
338,
341,
345,
347,
353,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66,
67,
199,
200,
227,
228,
229,
230,
231,
232,
233,
234,
235,
236,
337,
338,
339,
340,
341,
342,
343,
344,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
265,
266,
305,
306,
307
] |
2CWE-601
| # -*- coding: utf-8 -*-
from django.utils import timezone
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib import messages
from django.utils.translation import gettext as _
from spirit.core.utils.views import is_post
from spirit.core.utils.decorators import moderator_required
from spirit.comment.models import Comment
from spirit.topic.models import Topic
@moderator_required
def _moderate(request, pk, field_name, to_value, action=None, message=None):
topic = get_object_or_404(Topic, pk=pk)
if is_post(request):
count = (
Topic.objects
.filter(pk=pk)
.exclude(**{field_name: to_value})
.update(**{
field_name: to_value,
'reindex_at': timezone.now()}))
if count and action is not None:
Comment.create_moderation_action(
user=request.user,
topic=topic,
action=action)
if message is not None:
messages.info(request, message)
return redirect(request.POST.get(
'next', topic.get_absolute_url()))
return render(
request=request,
template_name='spirit/topic/moderate.html',
context={'topic': topic})
def delete(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_removed',
to_value=True,
message=_("The topic has been deleted"))
def undelete(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_removed',
to_value=False,
message=_("The topic has been undeleted"))
def lock(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_closed',
to_value=True,
action=Comment.CLOSED,
message=_("The topic has been locked"))
def unlock(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_closed',
to_value=False,
action=Comment.UNCLOSED,
message=_("The topic has been unlocked"))
def pin(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_pinned',
to_value=True,
action=Comment.PINNED,
message=_("The topic has been pinned"))
def unpin(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_pinned',
to_value=False,
action=Comment.UNPINNED,
message=_("The topic has been unpinned"))
def global_pin(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_globally_pinned',
to_value=True,
action=Comment.PINNED,
message=_("The topic has been globally pinned"))
def global_unpin(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_globally_pinned',
to_value=False,
action=Comment.UNPINNED,
message=_("The topic has been globally unpinned"))
| # -*- coding: utf-8 -*-
from django.utils import timezone
from django.shortcuts import render, get_object_or_404
from django.contrib import messages
from django.utils.translation import gettext as _
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post
from spirit.core.utils.decorators import moderator_required
from spirit.comment.models import Comment
from spirit.topic.models import Topic
@moderator_required
def _moderate(request, pk, field_name, to_value, action=None, message=None):
topic = get_object_or_404(Topic, pk=pk)
if is_post(request):
count = (
Topic.objects
.filter(pk=pk)
.exclude(**{field_name: to_value})
.update(**{
field_name: to_value,
'reindex_at': timezone.now()}))
if count and action is not None:
Comment.create_moderation_action(
user=request.user,
topic=topic,
action=action)
if message is not None:
messages.info(request, message)
return safe_redirect(request, 'next', topic.get_absolute_url(), method='POST')
return render(
request=request,
template_name='spirit/topic/moderate.html',
context={'topic': topic})
def delete(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_removed',
to_value=True,
message=_("The topic has been deleted"))
def undelete(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_removed',
to_value=False,
message=_("The topic has been undeleted"))
def lock(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_closed',
to_value=True,
action=Comment.CLOSED,
message=_("The topic has been locked"))
def unlock(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_closed',
to_value=False,
action=Comment.UNCLOSED,
message=_("The topic has been unlocked"))
def pin(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_pinned',
to_value=True,
action=Comment.PINNED,
message=_("The topic has been pinned"))
def unpin(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_pinned',
to_value=False,
action=Comment.UNPINNED,
message=_("The topic has been unpinned"))
def global_pin(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_globally_pinned',
to_value=True,
action=Comment.PINNED,
message=_("The topic has been globally pinned"))
def global_unpin(request, pk):
return _moderate(
request=request,
pk=pk,
field_name='is_globally_pinned',
to_value=False,
action=Comment.UNPINNED,
message=_("The topic has been globally unpinned"))
| open_redirect | {
"code": [
"from django.shortcuts import render, redirect, get_object_or_404",
" return redirect(request.POST.get(",
" 'next', topic.get_absolute_url()))"
],
"line_no": [
4,
36,
37
]
} | {
"code": [
"from django.shortcuts import render, get_object_or_404",
"from spirit.core.utils.http import safe_redirect",
" return safe_redirect(request, 'next', topic.get_absolute_url(), method='POST')"
],
"line_no": [
4,
8,
37
]
} |
from django.utils import timezone
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib import .messages
from django.utils.translation import gettext as _
from spirit.core.utils.views import is_post
from spirit.core.utils.decorators import moderator_required
from spirit.comment.models import Comment
from spirit.topic.models import Topic
@moderator_required
def FUNC_0(VAR_0, VAR_1, VAR_2, VAR_3, VAR_4=None, VAR_5=None):
VAR_6 = get_object_or_404(Topic, VAR_1=pk)
if is_post(VAR_0):
VAR_7 = (
Topic.objects
.filter(VAR_1=pk)
.exclude(**{VAR_2: VAR_3})
.update(**{
VAR_2: VAR_3,
'reindex_at': timezone.now()}))
if VAR_7 and VAR_4 is not None:
Comment.create_moderation_action(
user=VAR_0.user,
VAR_6=topic,
VAR_4=action)
if VAR_5 is not None:
messages.info(VAR_0, VAR_5)
return redirect(VAR_0.POST.get(
'next', VAR_6.get_absolute_url()))
return render(
VAR_0=request,
template_name='spirit/VAR_6/moderate.html',
context={'topic': VAR_6})
def FUNC_1(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_removed',
VAR_3=True,
VAR_5=_("The VAR_6 has been deleted"))
def FUNC_2(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_removed',
VAR_3=False,
VAR_5=_("The VAR_6 has been undeleted"))
def FUNC_3(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_closed',
VAR_3=True,
VAR_4=Comment.CLOSED,
VAR_5=_("The VAR_6 has been locked"))
def FUNC_4(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_closed',
VAR_3=False,
VAR_4=Comment.UNCLOSED,
VAR_5=_("The VAR_6 has been unlocked"))
def FUNC_5(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_pinned',
VAR_3=True,
VAR_4=Comment.PINNED,
VAR_5=_("The VAR_6 has been pinned"))
def FUNC_6(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_pinned',
VAR_3=False,
VAR_4=Comment.UNPINNED,
VAR_5=_("The VAR_6 has been unpinned"))
def FUNC_7(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_globally_pinned',
VAR_3=True,
VAR_4=Comment.PINNED,
VAR_5=_("The VAR_6 has been globally pinned"))
def FUNC_8(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_globally_pinned',
VAR_3=False,
VAR_4=Comment.UNPINNED,
VAR_5=_("The VAR_6 has been globally unpinned"))
|
from django.utils import timezone
from django.shortcuts import render, get_object_or_404
from django.contrib import .messages
from django.utils.translation import gettext as _
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post
from spirit.core.utils.decorators import moderator_required
from spirit.comment.models import Comment
from spirit.topic.models import Topic
@moderator_required
def FUNC_0(VAR_0, VAR_1, VAR_2, VAR_3, VAR_4=None, VAR_5=None):
VAR_6 = get_object_or_404(Topic, VAR_1=pk)
if is_post(VAR_0):
VAR_7 = (
Topic.objects
.filter(VAR_1=pk)
.exclude(**{VAR_2: VAR_3})
.update(**{
VAR_2: VAR_3,
'reindex_at': timezone.now()}))
if VAR_7 and VAR_4 is not None:
Comment.create_moderation_action(
user=VAR_0.user,
VAR_6=topic,
VAR_4=action)
if VAR_5 is not None:
messages.info(VAR_0, VAR_5)
return safe_redirect(VAR_0, 'next', VAR_6.get_absolute_url(), method='POST')
return render(
VAR_0=request,
template_name='spirit/VAR_6/moderate.html',
context={'topic': VAR_6})
def FUNC_1(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_removed',
VAR_3=True,
VAR_5=_("The VAR_6 has been deleted"))
def FUNC_2(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_removed',
VAR_3=False,
VAR_5=_("The VAR_6 has been undeleted"))
def FUNC_3(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_closed',
VAR_3=True,
VAR_4=Comment.CLOSED,
VAR_5=_("The VAR_6 has been locked"))
def FUNC_4(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_closed',
VAR_3=False,
VAR_4=Comment.UNCLOSED,
VAR_5=_("The VAR_6 has been unlocked"))
def FUNC_5(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_pinned',
VAR_3=True,
VAR_4=Comment.PINNED,
VAR_5=_("The VAR_6 has been pinned"))
def FUNC_6(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_pinned',
VAR_3=False,
VAR_4=Comment.UNPINNED,
VAR_5=_("The VAR_6 has been unpinned"))
def FUNC_7(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_globally_pinned',
VAR_3=True,
VAR_4=Comment.PINNED,
VAR_5=_("The VAR_6 has been globally pinned"))
def FUNC_8(VAR_0, VAR_1):
return FUNC_0(
VAR_0=request,
VAR_1=pk,
VAR_2='is_globally_pinned',
VAR_3=False,
VAR_4=Comment.UNPINNED,
VAR_5=_("The VAR_6 has been globally unpinned"))
| [
1,
2,
7,
12,
13,
17,
26,
32,
35,
38,
43,
44,
52,
53,
61,
62,
71,
72,
81,
82,
91,
92,
101,
102,
111,
112,
121
] | [
1,
2,
7,
13,
14,
18,
27,
33,
36,
38,
43,
44,
52,
53,
61,
62,
71,
72,
81,
82,
91,
92,
101,
102,
111,
112,
121
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
# apetresc, nanu-c, mutschler
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function, unicode_literals
import os
from datetime import datetime
import json
from shutil import copyfile
from uuid import uuid4
# Improve this to check if scholarly is available in a global way, like other pythonic libraries
have_scholar = True
try:
from scholarly import scholarly
except ImportError:
have_scholar = False
pass
from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask import Blueprint, request, flash, redirect, url_for, abort, Markup, Response
from flask_babel import gettext as _
from flask_login import current_user, login_required
from sqlalchemy.exc import OperationalError, IntegrityError
from sqlite3 import OperationalError as sqliteOperationalError
from . import constants, logger, isoLanguages, gdriveutils, uploader, helper
from . import config, get_locale, ub, db
from . import calibre_db
from .services.worker import WorkerThread
from .tasks.upload import TaskUpload
from .render_template import render_title_template
from .usermanagement import login_required_if_no_ano
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
editbook = Blueprint('editbook', __name__)
log = logger.create()
def upload_required(f):
@wraps(f)
def inner(*args, **kwargs):
if current_user.role_upload() or current_user.role_admin():
return f(*args, **kwargs)
abort(403)
return inner
def edit_required(f):
@wraps(f)
def inner(*args, **kwargs):
if current_user.role_edit() or current_user.role_admin():
return f(*args, **kwargs)
abort(403)
return inner
def search_objects_remove(db_book_object, db_type, input_elements):
del_elements = []
for c_elements in db_book_object:
found = False
if db_type == 'languages':
type_elements = c_elements.lang_code
elif db_type == 'custom':
type_elements = c_elements.value
else:
type_elements = c_elements.name
for inp_element in input_elements:
if inp_element.lower() == type_elements.lower():
# if inp_element == type_elements:
found = True
break
# if the element was not found in the new list, add it to remove list
if not found:
del_elements.append(c_elements)
return del_elements
def search_objects_add(db_book_object, db_type, input_elements):
add_elements = []
for inp_element in input_elements:
found = False
for c_elements in db_book_object:
if db_type == 'languages':
type_elements = c_elements.lang_code
elif db_type == 'custom':
type_elements = c_elements.value
else:
type_elements = c_elements.name
if inp_element == type_elements:
found = True
break
if not found:
add_elements.append(inp_element)
return add_elements
def remove_objects(db_book_object, db_session, del_elements):
changed = False
if len(del_elements) > 0:
for del_element in del_elements:
db_book_object.remove(del_element)
changed = True
if len(del_element.books) == 0:
db_session.delete(del_element)
return changed
def add_objects(db_book_object, db_object, db_session, db_type, add_elements):
changed = False
if db_type == 'languages':
db_filter = db_object.lang_code
elif db_type == 'custom':
db_filter = db_object.value
else:
db_filter = db_object.name
for add_element in add_elements:
# check if a element with that name exists
db_element = db_session.query(db_object).filter(db_filter == add_element).first()
# if no element is found add it
# if new_element is None:
if db_type == 'author':
new_element = db_object(add_element, helper.get_sorted_author(add_element.replace('|', ',')), "")
elif db_type == 'series':
new_element = db_object(add_element, add_element)
elif db_type == 'custom':
new_element = db_object(value=add_element)
elif db_type == 'publisher':
new_element = db_object(add_element, None)
else: # db_type should be tag or language
new_element = db_object(add_element)
if db_element is None:
changed = True
db_session.add(new_element)
db_book_object.append(new_element)
else:
db_element = create_objects_for_addition(db_element, add_element, db_type)
changed = True
# add element to book
changed = True
db_book_object.append(db_element)
return changed
def create_objects_for_addition(db_element, add_element, db_type):
if db_type == 'custom':
if db_element.value != add_element:
db_element.value = add_element # ToDo: Before new_element, but this is not plausible
elif db_type == 'languages':
if db_element.lang_code != add_element:
db_element.lang_code = add_element
elif db_type == 'series':
if db_element.name != add_element:
db_element.name = add_element
db_element.sort = add_element
elif db_type == 'author':
if db_element.name != add_element:
db_element.name = add_element
db_element.sort = add_element.replace('|', ',')
elif db_type == 'publisher':
if db_element.name != add_element:
db_element.name = add_element
db_element.sort = None
elif db_element.name != add_element:
db_element.name = add_element
return db_element
# Modifies different Database objects, first check if elements if elements have to be deleted,
# because they are no longer used, than check if elements have to be added to database
def modify_database_object(input_elements, db_book_object, db_object, db_session, db_type):
# passing input_elements not as a list may lead to undesired results
if not isinstance(input_elements, list):
raise TypeError(str(input_elements) + " should be passed as a list")
input_elements = [x for x in input_elements if x != '']
# we have all input element (authors, series, tags) names now
# 1. search for elements to remove
del_elements = search_objects_remove(db_book_object, db_type, input_elements)
# 2. search for elements that need to be added
add_elements = search_objects_add(db_book_object, db_type, input_elements)
# if there are elements to remove, we remove them now
changed = remove_objects(db_book_object, db_session, del_elements)
# if there are elements to add, we add them now!
if len(add_elements) > 0:
changed |= add_objects(db_book_object, db_object, db_session, db_type, add_elements)
return changed
def modify_identifiers(input_identifiers, db_identifiers, db_session):
"""Modify Identifiers to match input information.
input_identifiers is a list of read-to-persist Identifiers objects.
db_identifiers is a list of already persisted list of Identifiers objects."""
changed = False
error = False
input_dict = dict([(identifier.type.lower(), identifier) for identifier in input_identifiers])
if len(input_identifiers) != len(input_dict):
error = True
db_dict = dict([(identifier.type.lower(), identifier) for identifier in db_identifiers ])
# delete db identifiers not present in input or modify them with input val
for identifier_type, identifier in db_dict.items():
if identifier_type not in input_dict.keys():
db_session.delete(identifier)
changed = True
else:
input_identifier = input_dict[identifier_type]
identifier.type = input_identifier.type
identifier.val = input_identifier.val
# add input identifiers not present in db
for identifier_type, identifier in input_dict.items():
if identifier_type not in db_dict.keys():
db_session.add(identifier)
changed = True
return changed, error
@editbook.route("/ajax/delete/<int:book_id>")
@login_required
def delete_book_from_details(book_id):
return Response(delete_book(book_id, "", True), mimetype='application/json')
@editbook.route("/delete/<int:book_id>", defaults={'book_format': ""})
@editbook.route("/delete/<int:book_id>/<string:book_format>")
@login_required
def delete_book_ajax(book_id, book_format):
return delete_book(book_id, book_format, False)
def delete_whole_book(book_id, book):
# delete book from Shelfs, Downloads, Read list
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
ub.delete_download(book_id)
ub.session_commit()
# check if only this book links to:
# author, language, series, tags, custom columns
modify_database_object([u''], book.authors, db.Authors, calibre_db.session, 'author')
modify_database_object([u''], book.tags, db.Tags, calibre_db.session, 'tags')
modify_database_object([u''], book.series, db.Series, calibre_db.session, 'series')
modify_database_object([u''], book.languages, db.Languages, calibre_db.session, 'languages')
modify_database_object([u''], book.publishers, db.Publishers, calibre_db.session, 'publishers')
cc = calibre_db.session.query(db.Custom_Columns). \
filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
for c in cc:
cc_string = "custom_column_" + str(c.id)
if not c.is_multiple:
if len(getattr(book, cc_string)) > 0:
if c.datatype == 'bool' or c.datatype == 'integer' or c.datatype == 'float':
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
log.debug('remove ' + str(c.id))
calibre_db.session.delete(del_cc)
calibre_db.session.commit()
elif c.datatype == 'rating':
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
if len(del_cc.books) == 0:
log.debug('remove ' + str(c.id))
calibre_db.session.delete(del_cc)
calibre_db.session.commit()
else:
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
log.debug('remove ' + str(c.id))
calibre_db.session.delete(del_cc)
calibre_db.session.commit()
else:
modify_database_object([u''], getattr(book, cc_string), db.cc_classes[c.id],
calibre_db.session, 'custom')
calibre_db.session.query(db.Books).filter(db.Books.id == book_id).delete()
def render_delete_book_result(book_format, jsonResponse, warning, book_id):
if book_format:
if jsonResponse:
return json.dumps([warning, {"location": url_for("editbook.edit_book", book_id=book_id),
"type": "success",
"format": book_format,
"message": _('Book Format Successfully Deleted')}])
else:
flash(_('Book Format Successfully Deleted'), category="success")
return redirect(url_for('editbook.edit_book', book_id=book_id))
else:
if jsonResponse:
return json.dumps([warning, {"location": url_for('web.index'),
"type": "success",
"format": book_format,
"message": _('Book Successfully Deleted')}])
else:
flash(_('Book Successfully Deleted'), category="success")
return redirect(url_for('web.index'))
def delete_book(book_id, book_format, jsonResponse):
warning = {}
if current_user.role_delete_books():
book = calibre_db.get_book(book_id)
if book:
try:
result, error = helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
if not result:
if jsonResponse:
return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
"type": "danger",
"format": "",
"message": error}])
else:
flash(error, category="error")
return redirect(url_for('editbook.edit_book', book_id=book_id))
if error:
if jsonResponse:
warning = {"location": url_for("editbook.edit_book", book_id=book_id),
"type": "warning",
"format": "",
"message": error}
else:
flash(error, category="warning")
if not book_format:
delete_whole_book(book_id, book)
else:
calibre_db.session.query(db.Data).filter(db.Data.book == book.id).\
filter(db.Data.format == book_format).delete()
calibre_db.session.commit()
except Exception as ex:
log.debug_or_exception(ex)
calibre_db.session.rollback()
if jsonResponse:
return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
"type": "danger",
"format": "",
"message": ex}])
else:
flash(str(ex), category="error")
return redirect(url_for('editbook.edit_book', book_id=book_id))
else:
# book not found
log.error('Book with id "%s" could not be deleted: not found', book_id)
return render_delete_book_result(book_format, jsonResponse, warning, book_id)
def render_edit_book(book_id):
cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
if not book:
flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
return redirect(url_for("web.index"))
for lang in book.languages:
lang.language_name = isoLanguages.get_language_name(get_locale(), lang.lang_code)
book = calibre_db.order_authors(book)
author_names = []
for authr in book.authors:
author_names.append(authr.name.replace('|', ','))
# Option for showing convertbook button
valid_source_formats=list()
allowed_conversion_formats = list()
kepub_possible=None
if config.config_converterpath:
for file in book.data:
if file.format.lower() in constants.EXTENSIONS_CONVERT_FROM:
valid_source_formats.append(file.format.lower())
if config.config_kepubifypath and 'epub' in [file.format.lower() for file in book.data]:
kepub_possible = True
if not config.config_converterpath:
valid_source_formats.append('epub')
# Determine what formats don't already exist
if config.config_converterpath:
allowed_conversion_formats = constants.EXTENSIONS_CONVERT_TO[:]
for file in book.data:
if file.format.lower() in allowed_conversion_formats:
allowed_conversion_formats.remove(file.format.lower())
if kepub_possible:
allowed_conversion_formats.append('kepub')
return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
title=_(u"edit metadata"), page="editbook",
conversion_formats=allowed_conversion_formats,
config=config,
source_formats=valid_source_formats)
def edit_book_ratings(to_save, book):
    """Apply the rating field from the submitted form to *book*.

    Returns True when the stored rating changed.
    """
    changed = False
    if to_save["rating"].strip():
        previous = book.ratings[0].rating if book.ratings else False
        # Calibre keeps ratings doubled (half-star resolution)
        new_value = int(float(to_save["rating"]) * 2)
        if new_value != previous:
            changed = True
            existing = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating == new_value).first()
            if existing:
                book.ratings.append(existing)
            else:
                book.ratings.append(db.Ratings(rating=new_value))
            if previous:
                book.ratings.remove(book.ratings[0])
    elif book.ratings:
        # empty form field clears an existing rating
        book.ratings.remove(book.ratings[0])
        changed = True
    return changed
def edit_book_tags(tags, book):
    """Sync the comma-separated *tags* string from the form with *book*."""
    # split, trim, and drop duplicate tag names
    tag_names = helper.uniq([tag.strip() for tag in tags.split(',')])
    return modify_database_object(tag_names, book.tags, db.Tags, calibre_db.session, 'tags')
def edit_book_series(series, book):
    """Sync the single series name from the form with *book*."""
    name = series.strip()
    # an empty field means the book belongs to no series
    series_list = [name] if name != '' else []
    return modify_database_object(series_list, book.series, db.Series, calibre_db.session, 'series')
def edit_book_series_index(series_index, book):
    """Store the submitted series index on *book* (default '1').

    Returns True when the value changed; False when it is unchanged or
    not a valid (unsigned decimal) number.
    """
    value = series_index or '1'
    # accept only plain decimals such as "2" or "2.5"
    if not value.replace('.', '', 1).isdigit():
        flash(_("%(seriesindex)s is not a valid number, skipping", seriesindex=value), category="warning")
        return False
    if book.series_index == value:
        return False
    book.series_index = value
    return True
def edit_book_comments(comments, book):
    """Update the book description; returns True when it changed."""
    if book.comments:
        if book.comments[0].text == comments:
            return False
        book.comments[0].text = comments
        return True
    if comments:
        # no stored description yet -> create one
        book.comments.append(db.Comments(text=comments, book=book.id))
        return True
    return False
def edit_book_languages(languages, book, upload=False, invalid=None):
    """Sync the comma-separated language list from the form with *book*.

    :param languages: comma separated language names (codes on upload)
    :param book: the book row to update
    :param upload: True when called from the upload path; accepts raw
                   codes and may remap to the user's filter language
    :param invalid: optional list collecting unknown language names
                    instead of flashing a warning per name
    :return: True when the stored language links changed
    """
    input_languages = languages.split(',')
    unknown_languages = []
    if not upload:
        input_l = isoLanguages.get_language_codes(get_locale(), input_languages, unknown_languages)
    else:
        input_l = isoLanguages.get_valid_language_codes(get_locale(), input_languages, unknown_languages)
    for l in unknown_languages:
        log.error('%s is not a valid language', l)
        if isinstance(invalid, list):
            invalid.append(l)
        else:
            flash(_(u"%(langname)s is not a valid language", langname=l), category="warning")
    # ToDo: Not working correct
    if upload and len(input_l) == 1:
        # If the language of the file is excluded from the users view, it's not imported, to allow the user to view
        # the book it's language is set to the filter language
        if input_l[0] != current_user.filter_language() and current_user.filter_language() != "all":
            input_l[0] = calibre_db.session.query(db.Languages). \
                filter(db.Languages.lang_code == current_user.filter_language()).first().lang_code
    # Remove duplicates
    input_l = helper.uniq(input_l)
    return modify_database_object(input_l, book.languages, db.Languages, calibre_db.session, 'languages')
def edit_book_publisher(publishers, book):
    """Sync the submitted publisher name with *book*; True on change."""
    if publishers:
        name = publishers.rstrip().strip()
        # only touch the database when the publisher actually differs
        if not book.publishers or name != book.publishers[0].name:
            return bool(modify_database_object([name], book.publishers, db.Publishers,
                                               calibre_db.session, 'publisher'))
        return False
    if book.publishers:
        # empty form field clears the existing publisher
        return bool(modify_database_object([], book.publishers, db.Publishers, calibre_db.session, 'publisher'))
    return False
def edit_cc_data_value(book_id, book, c, to_save, cc_db_value, cc_string):
    """Apply a single-valued custom column (int/bool/float/datetime/comments).

    Converts the raw form string in ``to_save[cc_string]`` to the column's
    datatype, then updates, deletes or creates the linked value row.
    Returns ``(changed, to_save)``.
    """
    changed = False
    if to_save[cc_string] == 'None':
        to_save[cc_string] = None
    elif c.datatype == 'bool':
        to_save[cc_string] = 1 if to_save[cc_string] == 'True' else 0
    elif c.datatype == 'comments':
        to_save[cc_string] = Markup(to_save[cc_string]).unescape()
    elif c.datatype == 'datetime':
        try:
            to_save[cc_string] = datetime.strptime(to_save[cc_string], "%Y-%m-%d")
        except ValueError:
            # unparsable date -> fall back to the default publication date
            to_save[cc_string] = db.Books.DEFAULT_PUBDATE

    if to_save[cc_string] != cc_db_value:
        if cc_db_value is not None:
            if to_save[cc_string] is not None:
                # overwrite the existing linked value in place
                setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])
                changed = True
            else:
                # value cleared -> unlink and delete the row
                del_cc = getattr(book, cc_string)[0]
                getattr(book, cc_string).remove(del_cc)
                calibre_db.session.delete(del_cc)
                changed = True
        else:
            # no previous value -> create a fresh row for this book
            cc_class = db.cc_classes[c.id]
            new_cc = cc_class(value=to_save[cc_string], book=book_id)
            calibre_db.session.add(new_cc)
            changed = True
    return changed, to_save
def edit_cc_data_string(book, c, to_save, cc_db_value, cc_string):
    """Apply a string-like custom column (text, enumeration, rating).

    Reuses an existing value row when one with the same value exists,
    otherwise inserts a new one. Returns ``(changed, to_save)``.
    """
    changed = False
    if c.datatype == 'rating':
        # ratings are stored doubled (half-star resolution)
        to_save[cc_string] = str(int(float(to_save[cc_string]) * 2))
    if to_save[cc_string].strip() != cc_db_value:
        if cc_db_value is not None:
            # remove old cc_val
            del_cc = getattr(book, cc_string)[0]
            getattr(book, cc_string).remove(del_cc)
            if len(del_cc.books) == 0:
                # no other book references the old value -> delete the row
                calibre_db.session.delete(del_cc)
                changed = True
        cc_class = db.cc_classes[c.id]
        new_cc = calibre_db.session.query(cc_class).filter(
            cc_class.value == to_save[cc_string].strip()).first()
        # if no cc val is found add it
        if new_cc is None:
            new_cc = cc_class(value=to_save[cc_string].strip())
            calibre_db.session.add(new_cc)
            changed = True
            calibre_db.session.flush()
            # re-read the freshly inserted row so it carries its id
            new_cc = calibre_db.session.query(cc_class).filter(
                cc_class.value == to_save[cc_string].strip()).first()
        # add cc value to book
        getattr(book, cc_string).append(new_cc)
    return changed, to_save
def edit_cc_data(book_id, book, to_save):
    """Apply all custom column fields from the form to *book*.

    Returns True when at least one custom column changed.

    Bug fix: the per-column change flag used to be assigned with
    ``changed, to_save = ...``, overwriting the accumulated value, so a
    change in an earlier column was forgotten whenever a later column
    was unchanged. The flag is now OR-accumulated.
    """
    changed = False
    cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for c in cc:
        cc_string = "custom_column_" + str(c.id)
        if not c.is_multiple:
            if len(getattr(book, cc_string)) > 0:
                cc_db_value = getattr(book, cc_string)[0].value
            else:
                cc_db_value = None
            if to_save[cc_string].strip():
                if c.datatype in ['int', 'bool', 'float', "datetime", "comments"]:
                    col_changed, to_save = edit_cc_data_value(book_id, book, c, to_save, cc_db_value, cc_string)
                else:
                    col_changed, to_save = edit_cc_data_string(book, c, to_save, cc_db_value, cc_string)
                changed |= col_changed
            else:
                if cc_db_value is not None:
                    # remove old cc_val
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    if not del_cc.books or len(del_cc.books) == 0:
                        calibre_db.session.delete(del_cc)
                        changed = True
        else:
            # multi-valued column: handled like a comma separated tag list
            input_tags = to_save[cc_string].split(',')
            input_tags = list(map(lambda it: it.strip(), input_tags))
            changed |= modify_database_object(input_tags,
                                              getattr(book, cc_string),
                                              db.cc_classes[c.id],
                                              calibre_db.session,
                                              'custom')
    return changed
def upload_single_file(request, book, book_id):
    """Handle the per-format upload field of the book edit form.

    Saves the uploaded file into the book's library folder, registers a
    db.Data row for the new format (unless that format already exists)
    and returns the uploader's metadata for merging into the form; on
    error a redirect Response is returned instead. Returns None when no
    file was submitted.
    """
    # Check and handle Uploaded file
    if 'btn-upload-format' in request.files:
        requested_file = request.files['btn-upload-format']
        # check for empty request
        if requested_file.filename != '':
            if not current_user.role_upload():
                abort(403)
            if '.' in requested_file.filename:
                file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
                if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
                    flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=file_ext),
                          category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))
            else:
                flash(_('File to be uploaded must have an extension'), category="error")
                return redirect(url_for('web.show_book', book_id=book.id))

            # NOTE(review): file_ext is derived from the client-supplied
            # filename and becomes part of the target path below; the
            # extension whitelist is the only guard -- confirm it cannot
            # contain path separators (path traversal risk)
            file_name = book.path.rsplit('/', 1)[-1]
            filepath = os.path.normpath(os.path.join(config.config_calibre_dir, book.path))
            saved_filename = os.path.join(filepath, file_name + '.' + file_ext)

            # check if file path exists, otherwise create it, copy file to calibre path and delete temp file
            if not os.path.exists(filepath):
                try:
                    os.makedirs(filepath)
                except OSError:
                    flash(_(u"Failed to create path %(path)s (Permission denied).", path=filepath), category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))
            try:
                requested_file.save(saved_filename)
            except OSError:
                flash(_(u"Failed to store file %(file)s.", file=saved_filename), category="error")
                return redirect(url_for('web.show_book', book_id=book.id))

            file_size = os.path.getsize(saved_filename)
            is_format = calibre_db.get_book_format(book_id, file_ext.upper())

            # Format entry already exists, no need to update the database
            if is_format:
                log.warning('Book format %s already existing', file_ext.upper())
            else:
                try:
                    db_format = db.Data(book_id, file_ext.upper(), file_size, file_name)
                    calibre_db.session.add(db_format)
                    calibre_db.session.commit()
                    calibre_db.update_title_sort(config)
                except (OperationalError, IntegrityError) as e:
                    calibre_db.session.rollback()
                    log.error('Database error: %s', e)
                    flash(_(u"Database error: %(error)s.", error=e), category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))

            # Queue uploader info
            uploadText=_(u"File format %(ext)s added to %(book)s", ext=file_ext.upper(), book=book.title)
            WorkerThread.add(current_user.name, TaskUpload(
                "<a href=\"" + url_for('web.show_book', book_id=book.id) + "\">" + uploadText + "</a>"))

            return uploader.process(
                saved_filename, *os.path.splitext(requested_file.filename),
                rarExecutable=config.config_rarfile_location)
def upload_cover(request, book):
    """Store an uploaded cover image for *book*.

    Returns True on success, False on a failed save (an error is
    flashed), and None when no cover file was submitted at all.
    """
    if 'btn-upload-cover' not in request.files:
        return None
    cover_file = request.files['btn-upload-cover']
    # an empty filename means the field was left blank
    if cover_file.filename == '':
        return None
    if not current_user.role_upload():
        abort(403)
    saved, message = helper.save_cover(cover_file, book.path)
    if saved is True:
        return True
    flash(message, category="error")
    return False
def handle_title_on_edit(book, book_title):
    """Write the submitted title onto *book*; returns True when changed."""
    book_title = book_title.rstrip().strip()
    if book.title == book_title:
        return False
    # an empty title is replaced by the translated "Unknown" placeholder
    if book_title == '':
        book_title = _(u'Unknown')
    book.title = book_title
    return True
def handle_author_on_edit(book, author_name, update_stored=True):
    """Sync the submitted author string ("A & B") with *book*.

    Returns the normalized author-name list and a flag telling whether
    anything was modified. When *update_stored* is True the combined
    author sort string is written back to the book as well.
    """
    # normalize: split on '&', trim, convert "Last, First" to "Last| First",
    # then drop duplicates while keeping order
    authors = helper.uniq([name.strip().replace(',', '|') for name in author_name.split('&')])
    if authors == ['']:
        authors = [_(u'Unknown')]  # prevent empty Author
    change = modify_database_object(authors, book.authors, db.Authors, calibre_db.session, 'author')

    # Build the combined sort string: prefer the sort name already stored
    # in the database, otherwise derive one from the author name
    sort_names = list()
    for name in authors:
        stored = calibre_db.session.query(db.Authors).filter(db.Authors.name == name).first()
        base = stored.sort if stored else helper.get_sorted_author(name)
        sort_names.append(helper.get_sorted_author(base))
    sort_authors = ' & '.join(sort_names)
    if update_stored and book.author_sort != sort_authors:
        book.author_sort = sort_authors
        change = True
    return authors, change
@editbook.route("/admin/book/<int:book_id>", methods=['GET', 'POST'])
@login_required_if_no_ano
@edit_required
def edit_book(book_id):
modif_date = False
# create the function for sorting...
try:
calibre_db.update_title_sort(config)
except sqliteOperationalError as e:
log.debug_or_exception(e)
calibre_db.session.rollback()
# Show form
if request.method != 'POST':
return render_edit_book(book_id)
book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
# Book not found
if not book:
flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
return redirect(url_for("web.index"))
meta = upload_single_file(request, book, book_id)
if upload_cover(request, book) is True:
book.has_cover = 1
modif_date = True
try:
to_save = request.form.to_dict()
merge_metadata(to_save, meta)
# Update book
edited_books_id = None
# handle book title
title_change = handle_title_on_edit(book, to_save["book_title"])
input_authors, authorchange = handle_author_on_edit(book, to_save["author_name"])
if authorchange or title_change:
edited_books_id = book.id
modif_date = True
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
error = False
if edited_books_id:
error = helper.update_dir_stucture(edited_books_id, config.config_calibre_dir, input_authors[0])
if not error:
if "cover_url" in to_save:
if to_save["cover_url"]:
if not current_user.role_upload():
return "", (403)
if to_save["cover_url"].endswith('/static/generic_cover.jpg'):
book.has_cover = 0
else:
result, error = helper.save_cover_from_url(to_save["cover_url"], book.path)
if result is True:
book.has_cover = 1
modif_date = True
else:
flash(error, category="error")
# Add default series_index to book
modif_date |= edit_book_series_index(to_save["series_index"], book)
# Handle book comments/description
modif_date |= edit_book_comments(Markup(to_save['description']).unescape(), book)
# Handle identifiers
input_identifiers = identifier_list(to_save, book)
modification, warning = modify_identifiers(input_identifiers, book.identifiers, calibre_db.session)
if warning:
flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
modif_date |= modification
# Handle book tags
modif_date |= edit_book_tags(to_save['tags'], book)
# Handle book series
modif_date |= edit_book_series(to_save["series"], book)
# handle book publisher
modif_date |= edit_book_publisher(to_save['publisher'], book)
# handle book languages
modif_date |= edit_book_languages(to_save['languages'], book)
# handle book ratings
modif_date |= edit_book_ratings(to_save, book)
# handle cc data
modif_date |= edit_cc_data(book_id, book, to_save)
if to_save["pubdate"]:
try:
book.pubdate = datetime.strptime(to_save["pubdate"], "%Y-%m-%d")
except ValueError:
book.pubdate = db.Books.DEFAULT_PUBDATE
else:
book.pubdate = db.Books.DEFAULT_PUBDATE
if modif_date:
book.last_modified = datetime.utcnow()
calibre_db.session.merge(book)
calibre_db.session.commit()
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
if "detail_view" in to_save:
return redirect(url_for('web.show_book', book_id=book.id))
else:
flash(_("Metadata successfully updated"), category="success")
return render_edit_book(book_id)
else:
calibre_db.session.rollback()
flash(error, category="error")
return render_edit_book(book_id)
except Exception as ex:
log.debug_or_exception(ex)
calibre_db.session.rollback()
flash(_("Error editing book, please check logfile for details"), category="error")
return redirect(url_for('web.show_book', book_id=book.id))
def merge_metadata(to_save, meta):
    """Fill empty form fields in *to_save* from uploaded file metadata *meta*."""
    # the "Unknown" placeholder counts as empty so metadata may override it
    for placeholder_key in ('author_name', 'book_title'):
        if to_save[placeholder_key] == _(u'Unknown'):
            to_save[placeholder_key] = ''
    field_map = (
        ('tags', 'tags'), ('author_name', 'author'), ('series', 'series'),
        ('series_index', 'series_id'), ('languages', 'languages'),
        ('book_title', 'title'))
    for form_field, meta_field in field_map:
        to_save[form_field] = to_save[form_field] or getattr(meta, meta_field, '')
    to_save["description"] = to_save["description"] or Markup(
        getattr(meta, 'description', '')).unescape()
def identifier_list(to_save, book):
    """Build db.Identifiers entries from matching form field pairs.

    The form encodes each identifier as two fields,
    'identifier-type-<n>' and 'identifier-val-<n>'; only complete pairs
    are turned into Identifiers rows.
    """
    type_prefix = 'identifier-type-'
    val_prefix = 'identifier-val-'
    identifiers = []
    for key, id_type in to_save.items():
        if key.startswith(type_prefix):
            value_key = val_prefix + key[len(type_prefix):]
            if value_key in to_save:
                identifiers.append(db.Identifiers(to_save[value_key], id_type, book.id))
    return identifiers
def prepare_authors_on_upload(title, authr):
    """Normalize an upload's author string and resolve the db rows.

    Flashes a warning when a book with the same title/author seems to
    exist already. Returns ``(sort_authors, input_authors, db_author)``:
    the combined sort string, the normalized author-name list, and the
    first author's db.Authors row.
    """
    if title != _(u'Unknown') and authr != _(u'Unknown'):
        entry = calibre_db.check_exists_book(authr, title)
        if entry:
            log.info("Uploaded book probably exists in library")
            flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ")
                  + Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")

    # handle authors
    input_authors = authr.split('&')
    # handle_authors(input_authors)
    input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
    # Remove duplicates in authors list
    input_authors = helper.uniq(input_authors)

    # we have all author names now
    if input_authors == ['']:
        input_authors = [_(u'Unknown')]  # prevent empty Author

    sort_authors_list = list()
    db_author = None
    for inp in input_authors:
        stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
        if not stored_author:
            if not db_author:
                # first unknown author becomes the book's primary author row
                db_author = db.Authors(inp, helper.get_sorted_author(inp), "")
                calibre_db.session.add(db_author)
                calibre_db.session.commit()
            sort_author = helper.get_sorted_author(inp)
        else:
            if not db_author:
                db_author = stored_author
            sort_author = stored_author.sort
        sort_authors_list.append(sort_author)
    sort_authors = ' & '.join(sort_authors_list)
    return sort_authors, input_authors, db_author
def create_book_on_upload(modif_date, meta):
    """Create the db.Books row (plus all relations) for an uploaded file.

    Returns ``(db_book, input_authors, title_dir)``; the session is
    flushed but not committed, so ``db_book.id`` is available to the
    caller while the transaction stays open.
    """
    title = meta.title
    authr = meta.author
    sort_authors, input_authors, db_author = prepare_authors_on_upload(title, authr)

    title_dir = helper.get_valid_filename(title)
    author_dir = helper.get_valid_filename(db_author.name)

    # combine path and normalize path from windows systems
    path = os.path.join(author_dir, title_dir).replace('\\', '/')

    # Calibre adds books with utc as timezone
    db_book = db.Books(title, "", sort_authors, datetime.utcnow(), datetime(101, 1, 1),
                       '1', datetime.utcnow(), path, meta.cover, db_author, [], "")

    modif_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session,
                                         'author')

    # Add series_index to book
    modif_date |= edit_book_series_index(meta.series_id, db_book)

    # add languages
    modif_date |= edit_book_languages(meta.languages, db_book, upload=True)

    # handle tags
    modif_date |= edit_book_tags(meta.tags, db_book)

    # handle publisher
    modif_date |= edit_book_publisher(meta.publisher, db_book)

    # handle series
    modif_date |= edit_book_series(meta.series, db_book)

    # Add file to book
    file_size = os.path.getsize(meta.file_path)
    db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir)
    db_book.data.append(db_data)
    calibre_db.session.add(db_book)

    # flush content, get db_book.id available
    calibre_db.session.flush()
    return db_book, input_authors, title_dir
def file_handling_on_upload(requested_file):
    """Validate an uploaded file's extension and extract its metadata.

    Returns (meta, None) on success, or (None, json Response) when the
    upload must be rejected.
    """
    def _index_response():
        # JSON redirect target used by the upload dialog on every failure
        return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')

    # check if file extension is correct
    if '.' not in requested_file.filename:
        flash(_('File to be uploaded must have an extension'), category="error")
        return None, _index_response()
    file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
    if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
        flash(
            _("File extension '%(ext)s' is not allowed to be uploaded to this server",
              ext=file_ext), category="error")
        return None, _index_response()

    # extract metadata from file
    try:
        meta = uploader.upload(requested_file, config.config_rarfile_location)
    except (IOError, OSError):
        log.error("File %s could not saved to temp dir", requested_file.filename)
        flash(_(u"File %(filename)s could not saved to temp dir",
                filename=requested_file.filename), category="error")
        return None, _index_response()
    return meta, None
def move_coverfile(meta, db_book):
    """Move the extracted cover (or the generic one) into the book folder."""
    # fall back to the bundled generic cover when the upload had none
    if meta.cover:
        source = meta.cover
    else:
        source = os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')
    target = os.path.join(config.config_calibre_dir, db_book.path, "cover.jpg")
    try:
        copyfile(source, target)
        if meta.cover:
            # drop the temporary cover extracted from the upload
            os.unlink(meta.cover)
    except OSError as e:
        log.error("Failed to move cover file %s: %s", target, e)
        flash(_(u"Failed to Move Cover File %(file)s: %(error)s", file=target,
                error=e),
              category="error")
@editbook.route("/upload", methods=["GET", "POST"])
@login_required_if_no_ano
@upload_required
def upload():
if not config.config_uploading:
abort(404)
if request.method == 'POST' and 'btn-upload' in request.files:
for requested_file in request.files.getlist("btn-upload"):
try:
modif_date = False
# create the function for sorting...
calibre_db.update_title_sort(config)
calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
meta, error = file_handling_on_upload(requested_file)
if error:
return error
db_book, input_authors, title_dir = create_book_on_upload(modif_date, meta)
# Comments needs book id therefore only possible after flush
modif_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)
book_id = db_book.id
title = db_book.title
error = helper.update_dir_structure_file(book_id,
config.config_calibre_dir,
input_authors[0],
meta.file_path,
title_dir + meta.extension.lower())
move_coverfile(meta, db_book)
# save data to database, reread data
calibre_db.session.commit()
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
if error:
flash(error, category="error")
uploadText=_(u"File %(file)s uploaded", file=title)
WorkerThread.add(current_user.name, TaskUpload(
"<a href=\"" + url_for('web.show_book', book_id=book_id) + "\">" + uploadText + "</a>"))
if len(request.files.getlist("btn-upload")) < 2:
if current_user.role_edit() or current_user.role_admin():
resp = {"location": url_for('editbook.edit_book', book_id=book_id)}
return Response(json.dumps(resp), mimetype='application/json')
else:
resp = {"location": url_for('web.show_book', book_id=book_id)}
return Response(json.dumps(resp), mimetype='application/json')
except (OperationalError, IntegrityError) as e:
calibre_db.session.rollback()
log.error("Database error: %s", e)
flash(_(u"Database error: %(error)s.", error=e), category="error")
return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
@editbook.route("/admin/book/convert/<int:book_id>", methods=['POST'])
@login_required_if_no_ano
@edit_required
def convert_bookformat(book_id):
# check to see if we have form fields to work with - if not send user back
book_format_from = request.form.get('book_format_from', None)
book_format_to = request.form.get('book_format_to', None)
if (book_format_from is None) or (book_format_to is None):
flash(_(u"Source or destination format for conversion missing"), category="error")
return redirect(url_for('editbook.edit_book', book_id=book_id))
log.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to)
rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(),
book_format_to.upper(), current_user.name)
if rtn is None:
flash(_(u"Book successfully queued for converting to %(book_format)s",
book_format=book_format_to),
category="success")
else:
flash(_(u"There was an error converting this book: %(res)s", res=rtn), category="error")
return redirect(url_for('editbook.edit_book', book_id=book_id))
@editbook.route("/scholarsearch/<query>",methods=['GET'])
@login_required_if_no_ano
@edit_required
def scholar_search(query):
if have_scholar:
scholar_gen = scholarly.search_pubs(' '.join(query.split('+')))
i=0
result = []
for publication in scholar_gen:
del publication['source']
result.append(publication)
i+=1
if(i>=10):
break
return Response(json.dumps(result),mimetype='application/json')
else:
return "[]"
@editbook.route("/ajax/editbooks/<param>", methods=['POST'])
@login_required_if_no_ano
@edit_required
def edit_list_book(param):
vals = request.form.to_dict()
book = calibre_db.get_book(vals['pk'])
ret = ""
if param =='series_index':
edit_book_series_index(vals['value'], book)
ret = Response(json.dumps({'success': True, 'newValue': book.series_index}), mimetype='application/json')
elif param =='tags':
edit_book_tags(vals['value'], book)
ret = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in book.tags])}),
mimetype='application/json')
elif param =='series':
edit_book_series(vals['value'], book)
ret = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in book.series])}),
mimetype='application/json')
elif param =='publishers':
edit_book_publisher(vals['value'], book)
ret = Response(json.dumps({'success': True,
'newValue': ', '.join([publisher.name for publisher in book.publishers])}),
mimetype='application/json')
elif param =='languages':
invalid = list()
edit_book_languages(vals['value'], book, invalid=invalid)
if invalid:
ret = Response(json.dumps({'success': False,
'msg': 'Invalid languages in request: {}'.format(','.join(invalid))}),
mimetype='application/json')
else:
lang_names = list()
for lang in book.languages:
try:
lang_names.append(LC.parse(lang.lang_code).get_language_name(get_locale()))
except UnknownLocaleError:
lang_names.append(_(isoLanguages.get(part3=lang.lang_code).name))
ret = Response(json.dumps({'success': True, 'newValue': ', '.join(lang_names)}),
mimetype='application/json')
elif param =='author_sort':
book.author_sort = vals['value']
ret = Response(json.dumps({'success': True, 'newValue': book.author_sort}),
mimetype='application/json')
elif param == 'title':
sort = book.sort
handle_title_on_edit(book, vals.get('value', ""))
helper.update_dir_stucture(book.id, config.config_calibre_dir)
ret = Response(json.dumps({'success': True, 'newValue': book.title}),
mimetype='application/json')
elif param =='sort':
book.sort = vals['value']
ret = Response(json.dumps({'success': True, 'newValue': book.sort}),
mimetype='application/json')
elif param =='authors':
input_authors, __ = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true")
helper.update_dir_stucture(book.id, config.config_calibre_dir, input_authors[0])
ret = Response(json.dumps({'success': True,
'newValue': ' & '.join([author.replace('|',',') for author in input_authors])}),
mimetype='application/json')
book.last_modified = datetime.utcnow()
try:
calibre_db.session.commit()
# revert change for sort if automatic fields link is deactivated
if param == 'title' and vals.get('checkT') == "false":
book.sort = sort
calibre_db.session.commit()
except (OperationalError, IntegrityError) as e:
calibre_db.session.rollback()
log.error("Database error: %s", e)
return ret
@editbook.route("/ajax/sort_value/<field>/<int:bookid>")
@login_required
def get_sorted_entry(field, bookid):
if field in ['title', 'authors', 'sort', 'author_sort']:
book = calibre_db.get_filtered_book(bookid)
if book:
if field == 'title':
return json.dumps({'sort': book.sort})
elif field == 'authors':
return json.dumps({'author_sort': book.author_sort})
if field == 'sort':
return json.dumps({'sort': book.title})
if field == 'author_sort':
return json.dumps({'author_sort': book.author})
return ""
@editbook.route("/ajax/simulatemerge", methods=['POST'])
@login_required
@edit_required
def simulate_merge_list_book():
vals = request.get_json().get('Merge_books')
if vals:
to_book = calibre_db.get_book(vals[0]).title
vals.pop(0)
if to_book:
for book_id in vals:
from_book = []
from_book.append(calibre_db.get_book(book_id).title)
return json.dumps({'to': to_book, 'from': from_book})
return ""
@editbook.route("/ajax/mergebooks", methods=['POST'])
@login_required
@edit_required
def merge_list_book():
vals = request.get_json().get('Merge_books')
to_file = list()
if vals:
# load all formats from target book
to_book = calibre_db.get_book(vals[0])
vals.pop(0)
if to_book:
for file in to_book.data:
to_file.append(file.format)
to_name = helper.get_valid_filename(to_book.title) + ' - ' + \
helper.get_valid_filename(to_book.authors[0].name)
for book_id in vals:
from_book = calibre_db.get_book(book_id)
if from_book:
for element in from_book.data:
if element.format not in to_file:
# create new data entry with: book_id, book_format, uncompressed_size, name
filepath_new = os.path.normpath(os.path.join(config.config_calibre_dir,
to_book.path,
to_name + "." + element.format.lower()))
filepath_old = os.path.normpath(os.path.join(config.config_calibre_dir,
from_book.path,
element.name + "." + element.format.lower()))
copyfile(filepath_old, filepath_new)
to_book.data.append(db.Data(to_book.id,
element.format,
element.uncompressed_size,
to_name))
delete_book(from_book.id,"", True)
return json.dumps({'success': True})
return ""
@editbook.route("/ajax/xchange", methods=['POST'])
@login_required
@edit_required
def table_xchange_author_title():
vals = request.get_json().get('xchange')
if vals:
for val in vals:
modif_date = False
book = calibre_db.get_book(val)
authors = book.title
entries = calibre_db.order_authors(book)
author_names = []
for authr in entries.authors:
author_names.append(authr.name.replace('|', ','))
title_change = handle_title_on_edit(book, " ".join(author_names))
input_authors, authorchange = handle_author_on_edit(book, authors)
if authorchange or title_change:
edited_books_id = book.id
modif_date = True
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
if edited_books_id:
helper.update_dir_stucture(edited_books_id, config.config_calibre_dir, input_authors[0])
if modif_date:
book.last_modified = datetime.utcnow()
try:
calibre_db.session.commit()
except (OperationalError, IntegrityError) as e:
calibre_db.session.rollback()
log.error("Database error: %s", e)
return json.dumps({'success': False})
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
return json.dumps({'success': True})
return ""
| # -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
# apetresc, nanu-c, mutschler
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function, unicode_literals
import os
from datetime import datetime
import json
from shutil import copyfile
from uuid import uuid4
from lxml.html.clean import clean_html
# Improve this to check if scholarly is available in a global way, like other pythonic libraries
try:
from scholarly import scholarly
have_scholar = True
except ImportError:
have_scholar = False
from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask import Blueprint, request, flash, redirect, url_for, abort, Markup, Response
from flask_babel import gettext as _
from flask_login import current_user, login_required
from sqlalchemy.exc import OperationalError, IntegrityError
from sqlite3 import OperationalError as sqliteOperationalError
from . import constants, logger, isoLanguages, gdriveutils, uploader, helper
from . import config, get_locale, ub, db
from . import calibre_db
from .services.worker import WorkerThread
from .tasks.upload import TaskUpload
from .render_template import render_title_template
from .usermanagement import login_required_if_no_ano
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
# Blueprint carrying all book editing/upload routes; registered by the app
editbook = Blueprint('editbook', __name__)
# module-level logger instance
log = logger.create()
def upload_required(f):
    """View decorator: allow only users with upload (or admin) rights."""
    @wraps(f)
    def inner(*args, **kwargs):
        if not (current_user.role_upload() or current_user.role_admin()):
            abort(403)
        return f(*args, **kwargs)
    return inner
def edit_required(f):
    """View decorator: allow only users with edit (or admin) rights."""
    @wraps(f)
    def inner(*args, **kwargs):
        if not (current_user.role_edit() or current_user.role_admin()):
            abort(403)
        return f(*args, **kwargs)
    return inner
def search_objects_remove(db_book_object, db_type, input_elements):
    """Return the linked elements that no longer appear in *input_elements*.

    Comparison is case-insensitive.  The key attribute depends on *db_type*:
    'languages' -> lang_code, 'custom' -> value, anything else -> name.
    """
    attr = {'languages': 'lang_code', 'custom': 'value'}.get(db_type, 'name')
    wanted = {entry.lower() for entry in input_elements}
    return [element for element in db_book_object
            if getattr(element, attr).lower() not in wanted]
def search_objects_add(db_book_object, db_type, input_elements):
    """Return the entries of *input_elements* not yet linked to the book.

    Unlike search_objects_remove, this comparison is case-SENSITIVE
    (preserved from the original behaviour).
    """
    attr = {'languages': 'lang_code', 'custom': 'value'}.get(db_type, 'name')
    present = {getattr(element, attr) for element in db_book_object}
    return [entry for entry in input_elements if entry not in present]
def remove_objects(db_book_object, db_session, del_elements):
    """Detach *del_elements* from the book's relation list.

    Elements left without any referencing book are deleted from the
    session as well.  Returns True when anything was removed.
    """
    if not del_elements:
        return False
    for orphan_candidate in del_elements:
        db_book_object.remove(orphan_candidate)
        # delete rows no other book references
        if len(orphan_candidate.books) == 0:
            db_session.delete(orphan_candidate)
    return True
def add_objects(db_book_object, db_object, db_session, db_type, add_elements):
    """Link every entry of *add_elements* to the book.

    Reuses an existing row when one matches (updating it via
    create_objects_for_addition), otherwise creates and persists a new
    one.  Returns True when anything was appended.
    """
    changed = False
    if db_type == 'languages':
        db_filter = db_object.lang_code
    elif db_type == 'custom':
        db_filter = db_object.value
    else:
        db_filter = db_object.name
    for add_element in add_elements:
        # look for an existing row with that exact name/value
        db_element = db_session.query(db_object).filter(db_filter == add_element).first()
        if db_element is None:
            # build a fresh row; constructor signature depends on the type
            if db_type == 'author':
                new_element = db_object(add_element, helper.get_sorted_author(add_element.replace('|', ',')), "")
            elif db_type == 'series':
                new_element = db_object(add_element, add_element)
            elif db_type == 'custom':
                new_element = db_object(value=add_element)
            elif db_type == 'publisher':
                new_element = db_object(add_element, None)
            else:  # db_type should be tag or language
                new_element = db_object(add_element)
            db_session.add(new_element)
            db_book_object.append(new_element)
        else:
            db_element = create_objects_for_addition(db_element, add_element, db_type)
            db_book_object.append(db_element)
        changed = True
    return changed
def create_objects_for_addition(db_element, add_element, db_type):
    """Synchronise an existing row's fields with *add_element*; return the row."""
    if db_type == 'custom':
        if db_element.value != add_element:
            db_element.value = add_element  # ToDo: Before new_element, but this is not plausible
        return db_element
    if db_type == 'languages':
        if db_element.lang_code != add_element:
            db_element.lang_code = add_element
        return db_element
    # the remaining types all key on .name; leave the row untouched on match
    if db_element.name == add_element:
        return db_element
    db_element.name = add_element
    if db_type == 'series':
        db_element.sort = add_element
    elif db_type == 'author':
        db_element.sort = add_element.replace('|', ',')
    elif db_type == 'publisher':
        db_element.sort = None
    return db_element
def modify_database_object(input_elements, db_book_object, db_object, db_session, db_type):
    """Reconcile a book's related objects with the user-supplied list.

    First deletes relations that are no longer listed (cleaning up
    now-unused rows), then adds the missing ones.  Returns True if the
    book was changed.

    Raises:
        TypeError: if *input_elements* is not a list.
    """
    if not isinstance(input_elements, list):
        raise TypeError(str(input_elements) + " should be passed as a list")
    # blank form fields arrive as empty strings — drop them
    wanted = [element for element in input_elements if element != '']
    stale = search_objects_remove(db_book_object, db_type, wanted)
    missing = search_objects_add(db_book_object, db_type, wanted)
    changed = remove_objects(db_book_object, db_session, stale)
    if missing:
        changed |= add_objects(db_book_object, db_object, db_session, db_type, missing)
    return changed
def modify_identifiers(input_identifiers, db_identifiers, db_session):
    """Bring persisted identifiers in line with *input_identifiers*.

    input_identifiers is a list of ready-to-persist Identifiers objects;
    db_identifiers holds the already persisted ones.  Returns
    (changed, error) where *error* flags duplicate identifier types
    (case-insensitive) in the input.
    """
    changed = False
    input_dict = {identifier.type.lower(): identifier for identifier in input_identifiers}
    # duplicate types collapse in the dict — report that as an error
    error = len(input_identifiers) != len(input_dict)
    db_dict = {identifier.type.lower(): identifier for identifier in db_identifiers}
    # update persisted identifiers in place, delete those no longer submitted
    for id_type, identifier in db_dict.items():
        if id_type in input_dict:
            source = input_dict[id_type]
            identifier.type = source.type
            identifier.val = source.val
        else:
            db_session.delete(identifier)
            changed = True
    # persist identifiers that are entirely new
    for id_type, identifier in input_dict.items():
        if id_type not in db_dict:
            db_session.add(identifier)
            changed = True
    return changed, error
@editbook.route("/ajax/delete/<int:book_id>")
@login_required
def delete_book_from_details(book_id):
    # AJAX endpoint: delete the whole book (empty format string = all
    # formats) and return delete_book()'s JSON payload.
    return Response(delete_book(book_id, "", True), mimetype='application/json')
@editbook.route("/delete/<int:book_id>", defaults={'book_format': ""})
@editbook.route("/delete/<int:book_id>/<string:book_format>")
@login_required
def delete_book_ajax(book_id, book_format):
    # Redirect-based deletion endpoint; an empty book_format deletes the
    # whole book, otherwise only the named format.
    return delete_book(book_id, book_format, False)
def delete_whole_book(book_id, book):
    """Remove *book* and every record referencing it.

    Clears app-side rows (shelves, downloads, read flags) first, then
    detaches all linked metadata (authors, tags, series, languages,
    publishers, custom columns) so orphaned rows are cleaned up, and
    finally deletes the Books row itself.  The final delete is not
    committed here — the caller commits.
    """
    # delete book from Shelfs, Downloads, Read list
    ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
    ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
    ub.delete_download(book_id)
    ub.session_commit()
    # check if only this book links to:
    # author, language, series, tags, custom columns
    modify_database_object([u''], book.authors, db.Authors, calibre_db.session, 'author')
    modify_database_object([u''], book.tags, db.Tags, calibre_db.session, 'tags')
    modify_database_object([u''], book.series, db.Series, calibre_db.session, 'series')
    modify_database_object([u''], book.languages, db.Languages, calibre_db.session, 'languages')
    modify_database_object([u''], book.publishers, db.Publishers, calibre_db.session, 'publishers')
    cc = calibre_db.session.query(db.Custom_Columns). \
        filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for c in cc:
        cc_string = "custom_column_" + str(c.id)
        if not c.is_multiple:
            if len(getattr(book, cc_string)) > 0:
                if c.datatype == 'bool' or c.datatype == 'integer' or c.datatype == 'float':
                    # scalar types: the value row belongs only to this book
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    log.debug('remove ' + str(c.id))
                    calibre_db.session.delete(del_cc)
                    calibre_db.session.commit()
                elif c.datatype == 'rating':
                    # rating rows are shared: delete only when unreferenced
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    if len(del_cc.books) == 0:
                        log.debug('remove ' + str(c.id))
                        calibre_db.session.delete(del_cc)
                        calibre_db.session.commit()
                else:
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    log.debug('remove ' + str(c.id))
                    calibre_db.session.delete(del_cc)
                    calibre_db.session.commit()
        else:
            # multi-value custom columns behave like tags
            modify_database_object([u''], getattr(book, cc_string), db.cc_classes[c.id],
                                   calibre_db.session, 'custom')
    calibre_db.session.query(db.Books).filter(db.Books.id == book_id).delete()
def render_delete_book_result(book_format, jsonResponse, warning, book_id):
    """Build the success response after a deletion.

    Deleting a single format points back at the edit page; deleting a
    whole book points at the index.  Returns a JSON string when
    *jsonResponse* is set, otherwise flashes and redirects.
    """
    if book_format:
        message = _('Book Format Successfully Deleted')
        target = url_for('editbook.edit_book', book_id=book_id)
    else:
        message = _('Book Successfully Deleted')
        target = url_for('web.index')
    if jsonResponse:
        return json.dumps([warning, {"location": target,
                                     "type": "success",
                                     "format": book_format,
                                     "message": message}])
    flash(message, category="success")
    return redirect(target)
def delete_book(book_id, book_format, jsonResponse):
    """Delete a whole book or a single format of it.

    Requires the delete-books role (silently skips otherwise).  Returns
    either a JSON payload (jsonResponse=True) or a flash+redirect
    response via render_delete_book_result().
    """
    warning = {}
    if current_user.role_delete_books():
        book = calibre_db.get_book(book_id)
        if book:
            try:
                result, error = helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
                if not result:
                    # file-level deletion failed completely: abort with error
                    if jsonResponse:
                        return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
                                            "type": "danger",
                                            "format": "",
                                            "message": error}])
                    else:
                        flash(error, category="error")
                        return redirect(url_for('editbook.edit_book', book_id=book_id))
                if error:
                    # partial failure: report as warning but continue with DB cleanup
                    if jsonResponse:
                        warning = {"location": url_for("editbook.edit_book", book_id=book_id),
                                   "type": "warning",
                                   "format": "",
                                   "message": error}
                    else:
                        flash(error, category="warning")
                if not book_format:
                    delete_whole_book(book_id, book)
                else:
                    # only drop the single format's Data row
                    calibre_db.session.query(db.Data).filter(db.Data.book == book.id).\
                        filter(db.Data.format == book_format).delete()
                calibre_db.session.commit()
            except Exception as ex:
                log.debug_or_exception(ex)
                calibre_db.session.rollback()
                if jsonResponse:
                    return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
                                        "type": "danger",
                                        "format": "",
                                        "message": ex}])
                else:
                    flash(str(ex), category="error")
                    return redirect(url_for('editbook.edit_book', book_id=book_id))
        else:
            # book not found
            log.error('Book with id "%s" could not be deleted: not found', book_id)
    return render_delete_book_result(book_format, jsonResponse, warning, book_id)
def render_edit_book(book_id):
    """Render the metadata edit form for *book_id*.

    Besides the book itself, computes which source formats can be
    converted from and which target formats are still missing, so the
    template can show the convert-book controls.
    """
    cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
    if not book:
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
        return redirect(url_for("web.index"))
    # resolve display names for the book's language codes
    for lang in book.languages:
        lang.language_name = isoLanguages.get_language_name(get_locale(), lang.lang_code)
    book = calibre_db.order_authors(book)
    author_names = []
    for authr in book.authors:
        author_names.append(authr.name.replace('|', ','))
    # Option for showing convertbook button
    valid_source_formats=list()
    allowed_conversion_formats = list()
    kepub_possible=None
    if config.config_converterpath:
        for file in book.data:
            if file.format.lower() in constants.EXTENSIONS_CONVERT_FROM:
                valid_source_formats.append(file.format.lower())
    if config.config_kepubifypath and 'epub' in [file.format.lower() for file in book.data]:
        kepub_possible = True
        if not config.config_converterpath:
            valid_source_formats.append('epub')
    # Determine what formats don't already exist
    if config.config_converterpath:
        allowed_conversion_formats = constants.EXTENSIONS_CONVERT_TO[:]
        for file in book.data:
            if file.format.lower() in allowed_conversion_formats:
                allowed_conversion_formats.remove(file.format.lower())
    if kepub_possible:
        allowed_conversion_formats.append('kepub')
    return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
                                 title=_(u"edit metadata"), page="editbook",
                                 conversion_formats=allowed_conversion_formats,
                                 config=config,
                                 source_formats=valid_source_formats)
def edit_book_ratings(to_save, book):
    """Apply the submitted rating to *book*; return True on change.

    An empty rating field removes the existing rating.
    """
    changed = False
    if to_save["rating"].strip():
        old_rating = False
        if len(book.ratings) > 0:
            old_rating = book.ratings[0].rating
        # form sends half-star values; database stores doubled integers
        ratingx2 = int(float(to_save["rating"]) * 2)
        if ratingx2 != old_rating:
            changed = True
            # reuse an existing Ratings row when one matches
            is_rating = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating == ratingx2).first()
            if is_rating:
                book.ratings.append(is_rating)
            else:
                new_rating = db.Ratings(rating=ratingx2)
                book.ratings.append(new_rating)
            if old_rating:
                # append-then-remove: the old rating sits at index 0
                book.ratings.remove(book.ratings[0])
    else:
        if len(book.ratings) > 0:
            book.ratings.remove(book.ratings[0])
            changed = True
    return changed
def edit_book_tags(tags, book):
    """Apply the comma-separated *tags* string to *book*; return True on change."""
    # split, trim and de-duplicate the submitted tag names
    stripped = [entry.strip() for entry in tags.split(',')]
    unique_tags = helper.uniq(stripped)
    return modify_database_object(unique_tags, book.tags, db.Tags, calibre_db.session, 'tags')
def edit_book_series(series, book):
    """Apply the single submitted *series* name to *book*; return True on change."""
    # an empty/whitespace-only field yields an empty list (series removal)
    cleaned = [entry for entry in [series.strip()] if entry != '']
    return modify_database_object(cleaned, book.series, db.Series, calibre_db.session, 'series')
def edit_book_series_index(series_index, book):
    """Set book.series_index (defaulting to '1'); return True on change.

    Non-numeric input is rejected with a warning flash.
    """
    wanted = series_index or '1'
    # accept plain decimals only (at most one dot)
    if not wanted.replace('.', '', 1).isdigit():
        flash(_("%(seriesindex)s is not a valid number, skipping", seriesindex=wanted), category="warning")
        return False
    if book.series_index == wanted:
        return False
    book.series_index = wanted
    return True
# Handle book comments/description
def edit_book_comments(comments, book):
    """Store *comments* (HTML) as the book description; return True on change."""
    modif_date = False
    if comments:
        # sanitize user-supplied HTML before comparing/storing
        comments = clean_html(comments)
    if len(book.comments):
        if book.comments[0].text != comments:
            # NOTE(review): comments is cleaned a second time here —
            # presumably clean_html is idempotent; confirm before simplifying.
            book.comments[0].text = clean_html(comments)
            modif_date = True
    else:
        if comments:
            book.comments.append(db.Comments(text=comments, book=book.id))
            modif_date = True
    return modif_date
def edit_book_languages(languages, book, upload=False, invalid=None):
    """Apply the comma-separated *languages* string to *book*.

    Returns True on change.  When *invalid* is a list, unknown language
    names are collected there instead of being flashed to the user.
    During upload, a single language hidden by the user's language filter
    is replaced by the filter language so the book stays visible.
    """
    input_languages = languages.split(',')
    unknown_languages = []
    if not upload:
        input_l = isoLanguages.get_language_codes(get_locale(), input_languages, unknown_languages)
    else:
        input_l = isoLanguages.get_valid_language_codes(get_locale(), input_languages, unknown_languages)
    for l in unknown_languages:
        log.error('%s is not a valid language', l)
        if isinstance(invalid, list):
            invalid.append(l)
        else:
            flash(_(u"%(langname)s is not a valid language", langname=l), category="warning")
    # ToDo: Not working correct
    if upload and len(input_l) == 1:
        # If the language of the file is excluded from the users view, it's not imported, to allow the user to view
        # the book it's language is set to the filter language
        if input_l[0] != current_user.filter_language() and current_user.filter_language() != "all":
            input_l[0] = calibre_db.session.query(db.Languages). \
                filter(db.Languages.lang_code == current_user.filter_language()).first().lang_code
    # Remove duplicates
    input_l = helper.uniq(input_l)
    return modify_database_object(input_l, book.languages, db.Languages, calibre_db.session, 'languages')
def edit_book_publisher(publishers, book):
    """Apply the submitted publisher name to *book*; return True on change."""
    if publishers:
        name = publishers.rstrip().strip()
        # only touch the database when the name actually differs
        if not book.publishers or name != book.publishers[0].name:
            return modify_database_object([name], book.publishers, db.Publishers, calibre_db.session,
                                          'publisher')
        return False
    if len(book.publishers):
        # empty field: detach the current publisher
        return modify_database_object([], book.publishers, db.Publishers, calibre_db.session, 'publisher')
    return False
def edit_cc_data_value(book_id, book, c, to_save, cc_db_value, cc_string):
    """Apply a scalar custom-column value (bool/int/float/datetime/comments).

    Normalises the raw form string in to_save[cc_string], then creates,
    updates or deletes the linked row as needed.
    Returns (changed, to_save).
    """
    changed = False
    if to_save[cc_string] == 'None':
        to_save[cc_string] = None
    elif c.datatype == 'bool':
        to_save[cc_string] = 1 if to_save[cc_string] == 'True' else 0
    elif c.datatype == 'comments':
        to_save[cc_string] = Markup(to_save[cc_string]).unescape()
        if to_save[cc_string]:
            to_save[cc_string] = clean_html(to_save[cc_string])
    elif c.datatype == 'datetime':
        try:
            to_save[cc_string] = datetime.strptime(to_save[cc_string], "%Y-%m-%d")
        except ValueError:
            # unparsable date falls back to the calibre default
            to_save[cc_string] = db.Books.DEFAULT_PUBDATE
    if to_save[cc_string] != cc_db_value:
        if cc_db_value is not None:
            if to_save[cc_string] is not None:
                setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])
                changed = True
            else:
                # value cleared: drop the linked row entirely
                del_cc = getattr(book, cc_string)[0]
                getattr(book, cc_string).remove(del_cc)
                calibre_db.session.delete(del_cc)
                changed = True
        else:
            cc_class = db.cc_classes[c.id]
            new_cc = cc_class(value=to_save[cc_string], book=book_id)
            calibre_db.session.add(new_cc)
            changed = True
    return changed, to_save
def edit_cc_data_string(book, c, to_save, cc_db_value, cc_string):
    """Apply a string-like custom-column value (text/enum/rating/series).

    Detaches the old value row (deleting it when unreferenced) and links
    an existing or newly created row for the new value.
    Returns (changed, to_save).
    """
    changed = False
    if c.datatype == 'rating':
        # form sends half-star values; database stores doubled integers
        to_save[cc_string] = str(int(float(to_save[cc_string]) * 2))
    if to_save[cc_string].strip() != cc_db_value:
        if cc_db_value is not None:
            # remove old cc_val
            del_cc = getattr(book, cc_string)[0]
            getattr(book, cc_string).remove(del_cc)
            if len(del_cc.books) == 0:
                calibre_db.session.delete(del_cc)
                changed = True
        cc_class = db.cc_classes[c.id]
        new_cc = calibre_db.session.query(cc_class).filter(
            cc_class.value == to_save[cc_string].strip()).first()
        # if no cc val is found add it
        if new_cc is None:
            new_cc = cc_class(value=to_save[cc_string].strip())
            calibre_db.session.add(new_cc)
            changed = True
            # flush so the fresh row can be re-queried with its id
            calibre_db.session.flush()
            new_cc = calibre_db.session.query(cc_class).filter(
                cc_class.value == to_save[cc_string].strip()).first()
        # add cc value to book
        getattr(book, cc_string).append(new_cc)
    return changed, to_save
def edit_cc_data(book_id, book, to_save):
    """Apply every submitted custom-column value to *book*.

    Dispatches scalar types to edit_cc_data_value(), string-like types to
    edit_cc_data_string(), and multi-value columns through
    modify_database_object().  Returns True when anything changed.
    """
    changed = False
    cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for c in cc:
        cc_string = "custom_column_" + str(c.id)
        if not c.is_multiple:
            if len(getattr(book, cc_string)) > 0:
                cc_db_value = getattr(book, cc_string)[0].value
            else:
                cc_db_value = None
            if to_save[cc_string].strip():
                if c.datatype in ['int', 'bool', 'float', "datetime", "comments"]:
                    changed, to_save = edit_cc_data_value(book_id, book, c, to_save, cc_db_value, cc_string)
                else:
                    changed, to_save = edit_cc_data_string(book, c, to_save, cc_db_value, cc_string)
            else:
                if cc_db_value is not None:
                    # remove old cc_val
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    if not del_cc.books or len(del_cc.books) == 0:
                        calibre_db.session.delete(del_cc)
                        changed = True
        else:
            # multi-value column: treat the field like a comma-separated tag list
            input_tags = to_save[cc_string].split(',')
            input_tags = list(map(lambda it: it.strip(), input_tags))
            changed |= modify_database_object(input_tags,
                                              getattr(book, cc_string),
                                              db.cc_classes[c.id],
                                              calibre_db.session,
                                              'custom')
    return changed
def upload_single_file(request, book, book_id):
    """Attach an uploaded format file ('btn-upload-format') to *book*.

    Validates the extension, stores the file under the book's calibre
    folder, registers a Data row (unless the format already exists) and
    queues an upload notification.  Returns the uploader's metadata on
    success, a redirect response on failure, or None when no file was sent.
    """
    # Check and handle Uploaded file
    if 'btn-upload-format' in request.files:
        requested_file = request.files['btn-upload-format']
        # check for empty request
        if requested_file.filename != '':
            if not current_user.role_upload():
                abort(403)
            if '.' in requested_file.filename:
                file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
                if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
                    flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=file_ext),
                          category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))
            else:
                flash(_('File to be uploaded must have an extension'), category="error")
                return redirect(url_for('web.show_book', book_id=book.id))
            # NOTE(review): file_name and file_ext come from client-controlled
            # filenames and are joined into filesystem paths below; looks like
            # a crafted extension could escape config_calibre_dir (path
            # traversal) — confirm sanitization upstream or harden here.
            file_name = book.path.rsplit('/', 1)[-1]
            filepath = os.path.normpath(os.path.join(config.config_calibre_dir, book.path))
            saved_filename = os.path.join(filepath, file_name + '.' + file_ext)
            # check if file path exists, otherwise create it, copy file to calibre path and delete temp file
            if not os.path.exists(filepath):
                try:
                    os.makedirs(filepath)
                except OSError:
                    flash(_(u"Failed to create path %(path)s (Permission denied).", path=filepath), category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))
            try:
                requested_file.save(saved_filename)
            except OSError:
                flash(_(u"Failed to store file %(file)s.", file=saved_filename), category="error")
                return redirect(url_for('web.show_book', book_id=book.id))
            file_size = os.path.getsize(saved_filename)
            is_format = calibre_db.get_book_format(book_id, file_ext.upper())
            # Format entry already exists, no need to update the database
            if is_format:
                log.warning('Book format %s already existing', file_ext.upper())
            else:
                try:
                    db_format = db.Data(book_id, file_ext.upper(), file_size, file_name)
                    calibre_db.session.add(db_format)
                    calibre_db.session.commit()
                    calibre_db.update_title_sort(config)
                except (OperationalError, IntegrityError) as e:
                    calibre_db.session.rollback()
                    log.error('Database error: %s', e)
                    flash(_(u"Database error: %(error)s.", error=e), category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))
            # Queue uploader info
            uploadText=_(u"File format %(ext)s added to %(book)s", ext=file_ext.upper(), book=book.title)
            WorkerThread.add(current_user.name, TaskUpload(
                "<a href=\"" + url_for('web.show_book', book_id=book.id) + "\">" + uploadText + "</a>"))
            return uploader.process(
                saved_filename, *os.path.splitext(requested_file.filename),
                rarExecutable=config.config_rarfile_location)
def upload_cover(request, book):
    """Handle an uploaded cover image ('btn-upload-cover').

    Returns True on success, False on failure (with an error flash), and
    None when the request contained no cover file.
    """
    if 'btn-upload-cover' not in request.files:
        return None
    requested_file = request.files['btn-upload-cover']
    # empty filename means the field was submitted without a file
    if requested_file.filename == '':
        return None
    if not current_user.role_upload():
        abort(403)
    ret, message = helper.save_cover(requested_file, book.path)
    if ret is True:
        return True
    flash(message, category="error")
    return False
def handle_title_on_edit(book, book_title):
    """Update book.title from the submitted value; return True on change."""
    new_title = book_title.rstrip().strip()
    if book.title == new_title:
        return False
    # never store an empty title — substitute the localized placeholder
    book.title = new_title if new_title else _(u'Unknown')
    return True
def handle_author_on_edit(book, author_name, update_stored=True):
    """Apply the '&'-separated author string to *book*.

    Returns (input_authors, change).  When *update_stored* is True the
    book's author_sort column is refreshed from the computed sort names.
    """
    # handle author(s)
    input_authors = author_name.split('&')
    input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
    # Remove duplicates in authors list
    input_authors = helper.uniq(input_authors)
    # we have all author names now
    if input_authors == ['']:
        input_authors = [_(u'Unknown')]  # prevent empty Author
    change = modify_database_object(input_authors, book.authors, db.Authors, calibre_db.session, 'author')
    # Search for each author if author is in database, if not, author name and sorted author name is generated new
    # everything then is assembled for sorted author field in database
    sort_authors_list = list()
    for inp in input_authors:
        stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
        if not stored_author:
            stored_author = helper.get_sorted_author(inp)
        else:
            stored_author = stored_author.sort
        sort_authors_list.append(helper.get_sorted_author(stored_author))
    sort_authors = ' & '.join(sort_authors_list)
    if book.author_sort != sort_authors and update_stored:
        book.author_sort = sort_authors
        change = True
    return input_authors, change
@editbook.route("/admin/book/<int:book_id>", methods=['GET', 'POST'])
@login_required_if_no_ano
@edit_required
def edit_book(book_id):
    """Show the edit form (GET) or apply submitted metadata changes (POST).

    Handles optional format/cover uploads, title/author changes (with
    on-disk folder rename), identifiers, tags, series, publisher,
    languages, ratings, custom columns and publication date.
    """
    modif_date = False
    # create the function for sorting...
    try:
        calibre_db.update_title_sort(config)
    except sqliteOperationalError as e:
        log.debug_or_exception(e)
        calibre_db.session.rollback()
    # Show form
    if request.method != 'POST':
        return render_edit_book(book_id)
    book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
    # Book not found
    if not book:
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
        return redirect(url_for("web.index"))
    meta = upload_single_file(request, book, book_id)
    if upload_cover(request, book) is True:
        book.has_cover = 1
        modif_date = True
    try:
        to_save = request.form.to_dict()
        merge_metadata(to_save, meta)
        # Update book
        edited_books_id = None
        # handle book title
        title_change = handle_title_on_edit(book, to_save["book_title"])
        input_authors, authorchange = handle_author_on_edit(book, to_save["author_name"])
        if authorchange or title_change:
            edited_books_id = book.id
            modif_date = True
        if config.config_use_google_drive:
            gdriveutils.updateGdriveCalibreFromLocal()
        error = False
        if edited_books_id:
            # title or author changed: rename the on-disk folder structure
            error = helper.update_dir_stucture(edited_books_id, config.config_calibre_dir, input_authors[0])
        if not error:
            if "cover_url" in to_save:
                if to_save["cover_url"]:
                    if not current_user.role_upload():
                        return "", (403)
                    if to_save["cover_url"].endswith('/static/generic_cover.jpg'):
                        book.has_cover = 0
                    else:
                        result, error = helper.save_cover_from_url(to_save["cover_url"], book.path)
                        if result is True:
                            book.has_cover = 1
                            modif_date = True
                        else:
                            flash(error, category="error")
            # Add default series_index to book
            modif_date |= edit_book_series_index(to_save["series_index"], book)
            # Handle book comments/description
            modif_date |= edit_book_comments(Markup(to_save['description']).unescape(), book)
            # Handle identifiers
            input_identifiers = identifier_list(to_save, book)
            modification, warning = modify_identifiers(input_identifiers, book.identifiers, calibre_db.session)
            if warning:
                flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
            modif_date |= modification
            # Handle book tags
            modif_date |= edit_book_tags(to_save['tags'], book)
            # Handle book series
            modif_date |= edit_book_series(to_save["series"], book)
            # handle book publisher
            modif_date |= edit_book_publisher(to_save['publisher'], book)
            # handle book languages
            modif_date |= edit_book_languages(to_save['languages'], book)
            # handle book ratings
            modif_date |= edit_book_ratings(to_save, book)
            # handle cc data
            modif_date |= edit_cc_data(book_id, book, to_save)
            if to_save["pubdate"]:
                try:
                    book.pubdate = datetime.strptime(to_save["pubdate"], "%Y-%m-%d")
                except ValueError:
                    book.pubdate = db.Books.DEFAULT_PUBDATE
            else:
                book.pubdate = db.Books.DEFAULT_PUBDATE
            if modif_date:
                book.last_modified = datetime.utcnow()
            calibre_db.session.merge(book)
            calibre_db.session.commit()
            if config.config_use_google_drive:
                gdriveutils.updateGdriveCalibreFromLocal()
            if "detail_view" in to_save:
                return redirect(url_for('web.show_book', book_id=book.id))
            else:
                flash(_("Metadata successfully updated"), category="success")
                return render_edit_book(book_id)
        else:
            calibre_db.session.rollback()
            flash(error, category="error")
            return render_edit_book(book_id)
    except Exception as ex:
        log.debug_or_exception(ex)
        calibre_db.session.rollback()
        flash(_("Error editing book, please check logfile for details"), category="error")
        return redirect(url_for('web.show_book', book_id=book.id))
def merge_metadata(to_save, meta):
    """Fill empty form fields in *to_save* from uploaded-file metadata *meta*."""
    # a placeholder 'Unknown' counts as empty for author and title
    if to_save['author_name'] == _(u'Unknown'):
        to_save['author_name'] = ''
    if to_save['book_title'] == _(u'Unknown'):
        to_save['book_title'] = ''
    field_map = (
        ('tags', 'tags'),
        ('author_name', 'author'),
        ('series', 'series'),
        ('series_index', 'series_id'),
        ('languages', 'languages'),
        ('book_title', 'title'),
    )
    for form_field, meta_field in field_map:
        to_save[form_field] = to_save[form_field] or getattr(meta, meta_field, '')
    to_save["description"] = to_save["description"] or Markup(
        getattr(meta, 'description', '')).unescape()
def identifier_list(to_save, book):
    """Generate a list of Identifiers from form information.

    The form encodes identifiers as paired fields
    'identifier-type-<n>' / 'identifier-val-<n>'.
    """
    id_type_prefix = 'identifier-type-'
    id_val_prefix = 'identifier-val-'
    identifiers = []
    for field_name, type_value in to_save.items():
        if not field_name.startswith(id_type_prefix):
            continue
        # pair the type field with its matching value field
        val_key = id_val_prefix + field_name[len(id_type_prefix):]
        if val_key in to_save:
            identifiers.append(db.Identifiers(to_save[val_key], type_value, book.id))
    return identifiers
def prepare_authors_on_upload(title, authr):
    """Resolve the author string of an uploaded book.

    Warns when a book with the same title/author seems to exist already,
    normalises and de-duplicates the '&'-separated author names, and
    ensures at least the first author exists as a db.Authors row.
    Returns (sort_authors, input_authors, db_author) where db_author is
    the row for the first author.
    """
    if title != _(u'Unknown') and authr != _(u'Unknown'):
        entry = calibre_db.check_exists_book(authr, title)
        if entry:
            log.info("Uploaded book probably exists in library")
            flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ")
                  + Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")
    # handle authors
    input_authors = authr.split('&')
    # handle_authors(input_authors)
    input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
    # Remove duplicates in authors list
    input_authors = helper.uniq(input_authors)
    # we have all author names now
    if input_authors == ['']:
        input_authors = [_(u'Unknown')]  # prevent empty Author
    sort_authors_list = list()
    db_author = None
    for inp in input_authors:
        stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
        if not stored_author:
            if not db_author:
                # first unseen author becomes the book's primary author row
                db_author = db.Authors(inp, helper.get_sorted_author(inp), "")
                calibre_db.session.add(db_author)
                calibre_db.session.commit()
            sort_author = helper.get_sorted_author(inp)
        else:
            if not db_author:
                db_author = stored_author
            sort_author = stored_author.sort
        sort_authors_list.append(sort_author)
    sort_authors = ' & '.join(sort_authors_list)
    return sort_authors, input_authors, db_author
def create_book_on_upload(modif_date, meta):
    """Create the Books row (plus relations and Data row) for an upload.

    Returns (db_book, input_authors, title_dir); the session is flushed
    (not committed) so db_book.id is available to the caller.
    """
    title = meta.title
    authr = meta.author
    sort_authors, input_authors, db_author = prepare_authors_on_upload(title, authr)
    title_dir = helper.get_valid_filename(title)
    author_dir = helper.get_valid_filename(db_author.name)
    # combine path and normalize path from windows systems
    path = os.path.join(author_dir, title_dir).replace('\\', '/')
    # Calibre adds books with utc as timezone
    db_book = db.Books(title, "", sort_authors, datetime.utcnow(), datetime(101, 1, 1),
                       '1', datetime.utcnow(), path, meta.cover, db_author, [], "")
    modif_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session,
                                         'author')
    # Add series_index to book
    modif_date |= edit_book_series_index(meta.series_id, db_book)
    # add languages
    modif_date |= edit_book_languages(meta.languages, db_book, upload=True)
    # handle tags
    modif_date |= edit_book_tags(meta.tags, db_book)
    # handle publisher
    modif_date |= edit_book_publisher(meta.publisher, db_book)
    # handle series
    modif_date |= edit_book_series(meta.series, db_book)
    # Add file to book
    file_size = os.path.getsize(meta.file_path)
    db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir)
    db_book.data.append(db_data)
    calibre_db.session.add(db_book)
    # flush content, get db_book.id available
    calibre_db.session.flush()
    return db_book, input_authors, title_dir
def file_handling_on_upload(requested_file):
    """Validate an uploaded file and extract its metadata.

    Returns (meta, None) on success, or (None, json Response) on a
    rejected extension / extraction failure.
    """
    # check if file extension is correct
    if '.' in requested_file.filename:
        file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
        if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
            flash(
                _("File extension '%(ext)s' is not allowed to be uploaded to this server",
                  ext=file_ext), category="error")
            return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    else:
        flash(_('File to be uploaded must have an extension'), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    # extract metadata from file
    try:
        meta = uploader.upload(requested_file, config.config_rarfile_location)
    except (IOError, OSError):
        log.error("File %s could not saved to temp dir", requested_file.filename)
        flash(_(u"File %(filename)s could not saved to temp dir",
                filename=requested_file.filename), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    return meta, None
def move_coverfile(meta, db_book):
    """Copy the uploaded (or generic fallback) cover into the book folder.

    The file ends up as cover.jpg next to the book; an uploaded temp
    cover is removed afterwards.  Failures are logged and flashed.
    """
    # fall back to the bundled generic cover when the upload had none
    source_cover = meta.cover or os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')
    destination = os.path.join(config.config_calibre_dir, db_book.path, "cover.jpg")
    try:
        copyfile(source_cover, destination)
        if meta.cover:
            os.unlink(meta.cover)
    except OSError as e:
        log.error("Failed to move cover file %s: %s", destination, e)
        flash(_(u"Failed to Move Cover File %(file)s: %(error)s", file=destination,
                error=e),
              category="error")
@editbook.route("/upload", methods=["GET", "POST"])
@login_required_if_no_ano
@upload_required
def upload():
    """Accept one or more uploaded book files and add them to the library.

    For each file: validate + extract metadata, create the Books row and
    relations, move files/cover into place, commit, and queue an upload
    notification.  Returns a JSON location payload for the client.
    """
    if not config.config_uploading:
        abort(404)
    if request.method == 'POST' and 'btn-upload' in request.files:
        for requested_file in request.files.getlist("btn-upload"):
            try:
                modif_date = False
                # create the function for sorting...
                calibre_db.update_title_sort(config)
                # expose uuid4() to SQLite so inserts can generate book uuids
                calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
                meta, error = file_handling_on_upload(requested_file)
                if error:
                    return error
                db_book, input_authors, title_dir = create_book_on_upload(modif_date, meta)
                # Comments needs book id therefore only possible after flush
                modif_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)
                book_id = db_book.id
                title = db_book.title
                error = helper.update_dir_structure_file(book_id,
                                                         config.config_calibre_dir,
                                                         input_authors[0],
                                                         meta.file_path,
                                                         title_dir + meta.extension.lower())
                move_coverfile(meta, db_book)
                # save data to database, reread data
                calibre_db.session.commit()
                if config.config_use_google_drive:
                    gdriveutils.updateGdriveCalibreFromLocal()
                if error:
                    flash(error, category="error")
                uploadText=_(u"File %(file)s uploaded", file=title)
                WorkerThread.add(current_user.name, TaskUpload(
                    "<a href=\"" + url_for('web.show_book', book_id=book_id) + "\">" + uploadText + "</a>"))
                if len(request.files.getlist("btn-upload")) < 2:
                    # single upload: send the client straight to the book
                    if current_user.role_edit() or current_user.role_admin():
                        resp = {"location": url_for('editbook.edit_book', book_id=book_id)}
                        return Response(json.dumps(resp), mimetype='application/json')
                    else:
                        resp = {"location": url_for('web.show_book', book_id=book_id)}
                        return Response(json.dumps(resp), mimetype='application/json')
            except (OperationalError, IntegrityError) as e:
                calibre_db.session.rollback()
                log.error("Database error: %s", e)
                flash(_(u"Database error: %(error)s.", error=e), category="error")
    return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
@editbook.route("/admin/book/convert/<int:book_id>", methods=['POST'])
@login_required_if_no_ano
@edit_required
def convert_bookformat(book_id):
    """Queue a format conversion for one book and redirect to its edit page."""
    # check to see if we have form fields to work with - if not send user back
    source_fmt = request.form.get('book_format_from', None)
    target_fmt = request.form.get('book_format_to', None)

    if source_fmt is None or target_fmt is None:
        flash(_(u"Source or destination format for conversion missing"), category="error")
        return redirect(url_for('editbook.edit_book', book_id=book_id))

    log.info('converting: book id: %s from: %s to: %s', book_id, source_fmt, target_fmt)
    rtn = helper.convert_book_format(book_id, config.config_calibre_dir, source_fmt.upper(),
                                     target_fmt.upper(), current_user.name)

    # convert_book_format returns None on success, otherwise an error message
    if rtn is None:
        flash(_(u"Book successfully queued for converting to %(book_format)s",
                book_format=target_fmt),
              category="success")
    else:
        flash(_(u"There was an error converting this book: %(res)s", res=rtn), category="error")
    return redirect(url_for('editbook.edit_book', book_id=book_id))
@editbook.route("/scholarsearch/<query>",methods=['GET'])
@login_required_if_no_ano
@edit_required
def scholar_search(query):
    """Return at most 10 Google Scholar results for *query* as a JSON list.

    '+' in the query acts as a word separator. Returns the literal string
    "[]" when the optional scholarly dependency is not installed.
    """
    if not have_scholar:
        return "[]"
    scholar_gen = scholarly.search_pubs(' '.join(query.split('+')))
    result = []
    remaining = 10
    for publication in scholar_gen:
        # drop the non-serializable 'source' entry before JSON encoding
        del publication['source']
        result.append(publication)
        remaining -= 1
        if not remaining:
            # stop without pulling an 11th result from the generator
            break
    return Response(json.dumps(result),mimetype='application/json')
@editbook.route("/ajax/editbooks/<param>", methods=['POST'])
@login_required_if_no_ano
@edit_required
def edit_list_book(param):
    """Inline table edit of a single book field.

    <param> selects the field (series_index, tags, series, publishers,
    languages, author_sort, title, sort, authors); the form carries
    'pk' (book id) and 'value'. Returns a JSON {'success', 'newValue'}
    response suitable for the x-editable table widget.

    NOTE(review): 'value' is stored without HTML sanitization here —
    confirm output escaping happens at render time.
    """
    vals = request.form.to_dict()
    book = calibre_db.get_book(vals['pk'])
    ret = ""
    if param =='series_index':
        edit_book_series_index(vals['value'], book)
        ret = Response(json.dumps({'success': True, 'newValue': book.series_index}), mimetype='application/json')
    elif param =='tags':
        edit_book_tags(vals['value'], book)
        ret = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in book.tags])}),
                       mimetype='application/json')
    elif param =='series':
        edit_book_series(vals['value'], book)
        ret = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in book.series])}),
                       mimetype='application/json')
    elif param =='publishers':
        edit_book_publisher(vals['value'], book)
        ret = Response(json.dumps({'success': True,
                                   'newValue': ', '.join([publisher.name for publisher in book.publishers])}),
                       mimetype='application/json')
    elif param =='languages':
        invalid = list()
        edit_book_languages(vals['value'], book, invalid=invalid)
        # reject the whole edit if any supplied language code is unknown
        if invalid:
            ret = Response(json.dumps({'success': False,
                                       'msg': 'Invalid languages in request: {}'.format(','.join(invalid))}),
                           mimetype='application/json')
        else:
            lang_names = list()
            for lang in book.languages:
                try:
                    lang_names.append(LC.parse(lang.lang_code).get_language_name(get_locale()))
                except UnknownLocaleError:
                    # fall back to the ISO-639-3 English name when babel has no locale
                    lang_names.append(_(isoLanguages.get(part3=lang.lang_code).name))
            ret = Response(json.dumps({'success': True, 'newValue': ', '.join(lang_names)}),
                           mimetype='application/json')
    elif param =='author_sort':
        book.author_sort = vals['value']
        ret = Response(json.dumps({'success': True, 'newValue': book.author_sort}),
                       mimetype='application/json')
    elif param == 'title':
        # remember the auto-computed sort so it can be restored below if the
        # "link title to title-sort" checkbox was unchecked
        sort = book.sort
        handle_title_on_edit(book, vals.get('value', ""))
        helper.update_dir_stucture(book.id, config.config_calibre_dir)
        ret = Response(json.dumps({'success': True, 'newValue': book.title}),
                       mimetype='application/json')
    elif param =='sort':
        book.sort = vals['value']
        ret = Response(json.dumps({'success': True, 'newValue': book.sort}),
                       mimetype='application/json')
    elif param =='authors':
        input_authors, __ = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true")
        helper.update_dir_stucture(book.id, config.config_calibre_dir, input_authors[0])
        ret = Response(json.dumps({'success': True,
                                   'newValue': ' & '.join([author.replace('|',',') for author in input_authors])}),
                       mimetype='application/json')
    book.last_modified = datetime.utcnow()
    try:
        calibre_db.session.commit()
        # revert change for sort if automatic fields link is deactivated
        if param == 'title' and vals.get('checkT') == "false":
            book.sort = sort
            calibre_db.session.commit()
    except (OperationalError, IntegrityError) as e:
        calibre_db.session.rollback()
        log.error("Database error: %s", e)
    return ret
@editbook.route("/ajax/sort_value/<field>/<int:bookid>")
@login_required
def get_sorted_entry(field, bookid):
    """Return the stored sort value for one book field as a JSON string.

    Supported fields: title, authors, sort, author_sort. Returns an empty
    string for unknown fields or when the book is not visible to the user.
    """
    if field in ('title', 'authors', 'sort', 'author_sort'):
        book = calibre_db.get_filtered_book(bookid)
        if book:
            # dispatch table instead of an if/elif ladder; lambdas defer the
            # attribute access until the matching field is selected
            payload = {
                'title': lambda: {'sort': book.sort},
                'authors': lambda: {'author_sort': book.author_sort},
                'sort': lambda: {'sort': book.title},
                'author_sort': lambda: {'author_sort': book.author},
            }[field]()
            return json.dumps(payload)
    return ""
@editbook.route("/ajax/simulatemerge", methods=['POST'])
@login_required
@edit_required
def simulate_merge_list_book():
    """Preview a merge: report the target title and ALL source book titles.

    Expects JSON ``{'Merge_books': [target_id, source_id, ...]}`` and
    returns ``{'to': <target title>, 'from': [<source titles>]}`` — or an
    empty string when no ids were posted.
    """
    vals = request.get_json().get('Merge_books')
    if vals:
        to_book = calibre_db.get_book(vals[0]).title
        vals.pop(0)
        if to_book:
            # BUG FIX: the list was re-created inside the loop, so the preview
            # only ever contained the LAST source book's title. Accumulate
            # across all remaining ids instead.
            from_book = []
            for book_id in vals:
                from_book.append(calibre_db.get_book(book_id).title)
            return json.dumps({'to': to_book, 'from': from_book})
    return ""
@editbook.route("/ajax/mergebooks", methods=['POST'])
@login_required
@edit_required
def merge_list_book():
    """Merge books: copy missing formats from source books into the target,
    then delete the source books.

    Expects JSON {'Merge_books': [target_id, source_id, ...]}. Formats the
    target already has are skipped. Returns {'success': True} JSON, or ""
    when no ids were posted.
    """
    vals = request.get_json().get('Merge_books')
    to_file = list()
    if vals:
        # load all formats from target book
        to_book = calibre_db.get_book(vals[0])
        vals.pop(0)
        if to_book:
            for file in to_book.data:
                to_file.append(file.format)
            # target filename stem: "<title> - <first author>", sanitized
            to_name = helper.get_valid_filename(to_book.title) + ' - ' + \
                      helper.get_valid_filename(to_book.authors[0].name)
            for book_id in vals:
                from_book = calibre_db.get_book(book_id)
                if from_book:
                    for element in from_book.data:
                        if element.format not in to_file:
                            # create new data entry with: book_id, book_format, uncompressed_size, name
                            # NOTE(review): paths are built from DB-stored book
                            # paths/names; presumably those are already sanitized
                            # on write — confirm, since normpath alone does not
                            # prevent traversal outside the library dir.
                            filepath_new = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                         to_book.path,
                                                                         to_name + "." + element.format.lower()))
                            filepath_old = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                         from_book.path,
                                                                         element.name + "." + element.format.lower()))
                            copyfile(filepath_old, filepath_new)
                            to_book.data.append(db.Data(to_book.id,
                                                        element.format,
                                                        element.uncompressed_size,
                                                        to_name))
                    delete_book(from_book.id,"", True)
            return json.dumps({'success': True})
    return ""
@editbook.route("/ajax/xchange", methods=['POST'])
@login_required
@edit_required
def table_xchange_author_title():
    """Swap title and author for each book id posted as JSON {'xchange': [...]}.

    The current title becomes the author string and the ordered author names
    become the new title; the library directory structure is updated when
    something actually changed. Returns {'success': bool} JSON, or "" when
    no ids were posted.
    """
    vals = request.get_json().get('xchange')
    if vals:
        for val in vals:
            modif_date = False
            # BUG FIX: must be initialized per book — it was only assigned in the
            # change branch below, so the "if edited_books_id" check raised
            # UnboundLocalError whenever a book's swap produced no change.
            edited_books_id = None
            book = calibre_db.get_book(val)
            authors = book.title
            entries = calibre_db.order_authors(book)
            author_names = []
            for authr in entries.authors:
                author_names.append(authr.name.replace('|', ','))
            title_change = handle_title_on_edit(book, " ".join(author_names))
            input_authors, authorchange = handle_author_on_edit(book, authors)
            if authorchange or title_change:
                edited_books_id = book.id
                modif_date = True
            if config.config_use_google_drive:
                gdriveutils.updateGdriveCalibreFromLocal()
            if edited_books_id:
                # rename the book's folder/files to match the swapped metadata
                helper.update_dir_stucture(edited_books_id, config.config_calibre_dir, input_authors[0])
            if modif_date:
                book.last_modified = datetime.utcnow()
            try:
                calibre_db.session.commit()
            except (OperationalError, IntegrityError) as e:
                calibre_db.session.rollback()
                log.error("Database error: %s", e)
                return json.dumps({'success': False})
        if config.config_use_google_drive:
            gdriveutils.updateGdriveCalibreFromLocal()
        return json.dumps({'success': True})
    return ""
| xss | {
"code": [
"have_scholar = True",
" pass",
" book.comments[0].text = comments"
],
"line_no": [
31,
36,
464
]
} | {
"code": [
" have_scholar = True",
" if comments:",
" comments = clean_html(comments)",
" book.comments[0].text = clean_html(comments)",
" if to_save[cc_string]:",
" to_save[cc_string] = clean_html(to_save[cc_string])"
],
"line_no": [
34,
463,
464,
467,
521,
522
]
} |
from __future__ import division, print_function, unicode_literals
import os
from datetime import datetime
import json
from shutil import .copyfile
from uuid import uuid4
VAR_0 = True
try:
from scholarly import scholarly
except ImportError:
VAR_0 = False
pass
from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask import Blueprint, VAR_32, flash, redirect, url_for, abort, Markup, Response
from flask_babel import gettext as _
from flask_login import .current_user, login_required
from sqlalchemy.exc import OperationalError, IntegrityError
from sqlite3 import OperationalError as sqliteOperationalError
from . import .constants, logger, isoLanguages, gdriveutils, uploader, helper
from . import .config, get_locale, ub, db
from . import .calibre_db
from .services.worker import WorkerThread
from .tasks.upload import TaskUpload
from .render_template import render_title_template
from .usermanagement import .login_required_if_no_ano
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
VAR_1 = Blueprint('editbook', __name__)
VAR_2 = logger.create()
def FUNC_0(VAR_3):
@wraps(VAR_3)
def FUNC_44(*VAR_46, **VAR_47):
if current_user.role_upload() or current_user.role_admin():
return VAR_3(*VAR_46, **VAR_47)
abort(403)
return FUNC_44
def FUNC_1(VAR_3):
@wraps(VAR_3)
def FUNC_44(*VAR_46, **VAR_47):
if current_user.role_edit() or current_user.role_admin():
return VAR_3(*VAR_46, **VAR_47)
abort(403)
return FUNC_44
def FUNC_2(VAR_4, VAR_5, VAR_6):
VAR_8 = []
for c_elements in VAR_4:
VAR_82 = False
if VAR_5 == 'languages':
VAR_101 = c_elements.lang_code
elif VAR_5 == 'custom':
VAR_101 = c_elements.value
else:
VAR_101 = c_elements.name
for inp_element in VAR_6:
if inp_element.lower() == VAR_101.lower():
VAR_82 = True
break
if not VAR_82:
VAR_8.append(c_elements)
return VAR_8
def FUNC_3(VAR_4, VAR_5, VAR_6):
VAR_10 = []
for inp_element in VAR_6:
VAR_82 = False
for c_elements in VAR_4:
if VAR_5 == 'languages':
VAR_101 = c_elements.lang_code
elif VAR_5 == 'custom':
VAR_101 = c_elements.value
else:
VAR_101 = c_elements.name
if inp_element == VAR_101:
VAR_82 = True
break
if not VAR_82:
VAR_10.append(inp_element)
return VAR_10
def FUNC_4(VAR_4, VAR_7, VAR_8):
VAR_48 = False
if len(VAR_8) > 0:
for del_element in VAR_8:
VAR_4.remove(del_element)
VAR_48 = True
if len(del_element.books) == 0:
VAR_7.delete(del_element)
return VAR_48
def FUNC_5(VAR_4, VAR_9, VAR_7, VAR_5, VAR_10):
VAR_48 = False
if VAR_5 == 'languages':
VAR_83 = VAR_9.lang_code
elif VAR_5 == 'custom':
VAR_83 = VAR_9.value
else:
VAR_83 = VAR_9.name
for VAR_12 in VAR_10:
VAR_11 = VAR_7.query(VAR_9).filter(VAR_83 == VAR_12).first()
if VAR_5 == 'author':
VAR_102 = VAR_9(VAR_12, helper.get_sorted_author(VAR_12.replace('|', ',')), "")
elif VAR_5 == 'series':
VAR_102 = VAR_9(VAR_12, add_element)
elif VAR_5 == 'custom':
VAR_102 = VAR_9(value=VAR_12)
elif VAR_5 == 'publisher':
VAR_102 = VAR_9(VAR_12, None)
else: # VAR_5 should be tag or language
VAR_102 = VAR_9(VAR_12)
if VAR_11 is None:
VAR_48 = True
VAR_7.add(VAR_102)
VAR_4.append(VAR_102)
else:
VAR_11 = FUNC_6(VAR_11, VAR_12, VAR_5)
VAR_48 = True
VAR_48 = True
VAR_4.append(VAR_11)
return VAR_48
def FUNC_6(VAR_11, VAR_12, VAR_5):
if VAR_5 == 'custom':
if VAR_11.value != VAR_12:
VAR_11.value = VAR_12 # ToDo: Before VAR_102, but this is not plausible
elif VAR_5 == 'languages':
if VAR_11.lang_code != VAR_12:
VAR_11.lang_code = VAR_12
elif VAR_5 == 'series':
if VAR_11.name != VAR_12:
VAR_11.name = VAR_12
VAR_11.sort = VAR_12
elif VAR_5 == 'author':
if VAR_11.name != VAR_12:
VAR_11.name = VAR_12
VAR_11.sort = VAR_12.replace('|', ',')
elif VAR_5 == 'publisher':
if VAR_11.name != VAR_12:
VAR_11.name = VAR_12
VAR_11.sort = None
elif VAR_11.name != VAR_12:
VAR_11.name = VAR_12
return VAR_11
def FUNC_7(VAR_6, VAR_4, VAR_9, VAR_7, VAR_5):
if not isinstance(VAR_6, list):
raise TypeError(str(VAR_6) + " should be passed as a list")
VAR_6 = [x for x in VAR_6 if x != '']
VAR_8 = FUNC_2(VAR_4, VAR_5, VAR_6)
VAR_10 = FUNC_3(VAR_4, VAR_5, VAR_6)
VAR_48 = FUNC_4(VAR_4, VAR_7, VAR_8)
if len(VAR_10) > 0:
VAR_48 |= FUNC_5(VAR_4, VAR_9, VAR_7, VAR_5, VAR_10)
return VAR_48
def FUNC_8(VAR_13, VAR_14, VAR_7):
VAR_48 = False
VAR_49 = False
VAR_50 = dict([(identifier.type.lower(), identifier) for identifier in VAR_13])
if len(VAR_13) != len(VAR_50):
VAR_49 = True
VAR_51 = dict([(identifier.type.lower(), identifier) for identifier in VAR_14 ])
for identifier_type, identifier in VAR_51.items():
if identifier_type not in VAR_50.keys():
VAR_7.delete(identifier)
VAR_48 = True
else:
VAR_103 = VAR_50[identifier_type]
identifier.type = VAR_103.type
identifier.val = VAR_103.val
for identifier_type, identifier in VAR_50.items():
if identifier_type not in VAR_51.keys():
VAR_7.add(identifier)
VAR_48 = True
return VAR_48, VAR_49
@VAR_1.route("/ajax/delete/<int:VAR_15>")
@login_required
def FUNC_9(VAR_15):
return Response(FUNC_13(VAR_15, "", True), mimetype='application/json')
@VAR_1.route("/delete/<int:VAR_15>", defaults={'book_format': ""})
@VAR_1.route("/delete/<int:VAR_15>/<string:VAR_16>")
@login_required
def FUNC_10(VAR_15, VAR_16):
return FUNC_13(VAR_15, VAR_16, False)
def FUNC_11(VAR_15, VAR_17):
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == VAR_15).delete()
ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == VAR_15).delete()
ub.delete_download(VAR_15)
ub.session_commit()
FUNC_7([u''], VAR_17.authors, db.Authors, calibre_db.session, 'author')
FUNC_7([u''], VAR_17.tags, db.Tags, calibre_db.session, 'tags')
FUNC_7([u''], VAR_17.series, db.Series, calibre_db.session, 'series')
FUNC_7([u''], VAR_17.languages, db.Languages, calibre_db.session, 'languages')
FUNC_7([u''], VAR_17.publishers, db.Publishers, calibre_db.session, 'publishers')
VAR_52 = calibre_db.session.query(db.Custom_Columns). \
filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
for VAR_29 in VAR_52:
VAR_31 = "custom_column_" + str(VAR_29.id)
if not VAR_29.is_multiple:
if len(getattr(VAR_17, VAR_31)) > 0:
if VAR_29.datatype == 'bool' or VAR_29.datatype == 'integer' or VAR_29.datatype == 'float':
VAR_105 = getattr(VAR_17, VAR_31)[0]
getattr(VAR_17, VAR_31).remove(VAR_105)
VAR_2.debug('remove ' + str(VAR_29.id))
calibre_db.session.delete(VAR_105)
calibre_db.session.commit()
elif VAR_29.datatype == 'rating':
VAR_105 = getattr(VAR_17, VAR_31)[0]
getattr(VAR_17, VAR_31).remove(VAR_105)
if len(VAR_105.books) == 0:
VAR_2.debug('remove ' + str(VAR_29.id))
calibre_db.session.delete(VAR_105)
calibre_db.session.commit()
else:
VAR_105 = getattr(VAR_17, VAR_31)[0]
getattr(VAR_17, VAR_31).remove(VAR_105)
VAR_2.debug('remove ' + str(VAR_29.id))
calibre_db.session.delete(VAR_105)
calibre_db.session.commit()
else:
FUNC_7([u''], getattr(VAR_17, VAR_31), db.cc_classes[VAR_29.id],
calibre_db.session, 'custom')
calibre_db.session.query(db.Books).filter(db.Books.id == VAR_15).delete()
def FUNC_12(VAR_16, VAR_18, VAR_19, VAR_15):
if VAR_16:
if VAR_18:
return json.dumps([VAR_19, {"location": url_for("editbook.edit_book", VAR_15=book_id),
"type": "success",
"format": VAR_16,
"message": _('Book Format Successfully Deleted')}])
else:
flash(_('Book Format Successfully Deleted'), category="success")
return redirect(url_for('editbook.edit_book', VAR_15=book_id))
else:
if VAR_18:
return json.dumps([VAR_19, {"location": url_for('web.index'),
"type": "success",
"format": VAR_16,
"message": _('Book Successfully Deleted')}])
else:
flash(_('Book Successfully Deleted'), category="success")
return redirect(url_for('web.index'))
def FUNC_13(VAR_15, VAR_16, VAR_18):
VAR_19 = {}
if current_user.role_delete_books():
VAR_17 = calibre_db.get_book(VAR_15)
if VAR_17:
try:
VAR_68, VAR_49 = helper.delete_book(VAR_17, config.config_calibre_dir, VAR_16=book_format.upper())
if not VAR_68:
if VAR_18:
return json.dumps([{"location": url_for("editbook.edit_book", VAR_15=book_id),
"type": "danger",
"format": "",
"message": VAR_49}])
else:
flash(VAR_49, category="error")
return redirect(url_for('editbook.edit_book', VAR_15=book_id))
if VAR_49:
if VAR_18:
VAR_19 = {"location": url_for("editbook.edit_book", VAR_15=book_id),
"type": "warning",
"format": "",
"message": VAR_49}
else:
flash(VAR_49, category="warning")
if not VAR_16:
FUNC_11(VAR_15, VAR_17)
else:
calibre_db.session.query(db.Data).filter(db.Data.book == VAR_17.id).\
filter(db.Data.format == VAR_16).delete()
calibre_db.session.commit()
except Exception as ex:
VAR_2.debug_or_exception(ex)
calibre_db.session.rollback()
if VAR_18:
return json.dumps([{"location": url_for("editbook.edit_book", VAR_15=book_id),
"type": "danger",
"format": "",
"message": ex}])
else:
flash(str(ex), category="error")
return redirect(url_for('editbook.edit_book', VAR_15=book_id))
else:
VAR_2.error('Book with id "%s" could not be deleted: not found', VAR_15)
return FUNC_12(VAR_16, VAR_18, VAR_19, VAR_15)
def FUNC_14(VAR_15):
VAR_52 = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
VAR_17 = calibre_db.get_filtered_book(VAR_15, allow_show_archived=True)
if not VAR_17:
flash(_(u"Oops! Selected VAR_17 VAR_37 is unavailable. File does not exist or is not accessible"), category="error")
return redirect(url_for("web.index"))
for lang in VAR_17.languages:
lang.language_name = isoLanguages.get_language_name(get_locale(), lang.lang_code)
VAR_17 = calibre_db.order_authors(VAR_17)
VAR_53 = []
for VAR_38 in VAR_17.authors:
VAR_53.append(VAR_38.name.replace('|', ','))
VAR_54=list()
VAR_55 = list()
VAR_56=None
if config.config_converterpath:
for file in VAR_17.data:
if file.format.lower() in constants.EXTENSIONS_CONVERT_FROM:
VAR_54.append(file.format.lower())
if config.config_kepubifypath and 'epub' in [file.format.lower() for file in VAR_17.data]:
VAR_56 = True
if not config.config_converterpath:
VAR_54.append('epub')
if config.config_converterpath:
VAR_55 = constants.EXTENSIONS_CONVERT_TO[:]
for file in VAR_17.data:
if file.format.lower() in VAR_55:
allowed_conversion_formats.remove(file.format.lower())
if VAR_56:
VAR_55.append('kepub')
return render_title_template('book_edit.html', VAR_17=book, VAR_115=VAR_53, VAR_52=cc,
VAR_37=_(u"edit metadata"), page="editbook",
conversion_formats=VAR_55,
config=config,
source_formats=VAR_54)
def FUNC_15(VAR_20, VAR_17):
VAR_48 = False
if VAR_20["rating"].strip():
VAR_84 = False
if len(VAR_17.ratings) > 0:
VAR_84 = VAR_17.ratings[0].rating
VAR_85 = int(float(VAR_20["rating"]) * 2)
if VAR_85 != VAR_84:
VAR_48 = True
VAR_104 = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating == VAR_85).first()
if VAR_104:
VAR_17.ratings.append(VAR_104)
else:
VAR_117 = db.Ratings(rating=VAR_85)
VAR_17.ratings.append(VAR_117)
if VAR_84:
VAR_17.ratings.remove(VAR_17.ratings[0])
else:
if len(VAR_17.ratings) > 0:
VAR_17.ratings.remove(VAR_17.ratings[0])
VAR_48 = True
return VAR_48
def FUNC_16(VAR_21, VAR_17):
VAR_57 = VAR_21.split(',')
VAR_57 = list(map(lambda it: it.strip(), VAR_57))
VAR_57 = helper.uniq(VAR_57)
return FUNC_7(VAR_57, VAR_17.tags, db.Tags, calibre_db.session, 'tags')
def FUNC_17(VAR_22, VAR_17):
VAR_58 = [VAR_22.strip()]
VAR_58 = [x for x in VAR_58 if x != '']
return FUNC_7(VAR_58, VAR_17.series, db.Series, calibre_db.session, 'series')
def FUNC_18(VAR_23, VAR_17):
VAR_39 = False
VAR_23 = VAR_23 or '1'
if not VAR_23.replace('.', '', 1).isdigit():
flash(_("%(seriesindex)s is not a valid number, skipping", seriesindex=VAR_23), category="warning")
return False
if VAR_17.series_index != VAR_23:
VAR_17.series_index = VAR_23
VAR_39 = True
return VAR_39
def FUNC_19(VAR_24, VAR_17):
VAR_39 = False
if len(VAR_17.comments):
if VAR_17.comments[0].text != VAR_24:
VAR_17.comments[0].text = VAR_24
VAR_39 = True
else:
if VAR_24:
VAR_17.comments.append(db.Comments(text=VAR_24, VAR_17=VAR_17.id))
VAR_39 = True
return VAR_39
def FUNC_20(VAR_25, VAR_17, VAR_26=False, VAR_27=None):
VAR_59 = VAR_25.split(',')
VAR_60 = []
if not VAR_26:
VAR_61 = isoLanguages.get_language_codes(get_locale(), VAR_59, VAR_60)
else:
VAR_61 = isoLanguages.get_valid_language_codes(get_locale(), VAR_59, VAR_60)
for l in VAR_60:
VAR_2.error('%s is not a valid language', l)
if isinstance(VAR_27, list):
VAR_27.append(l)
else:
flash(_(u"%(langname)s is not a valid language", langname=l), category="warning")
if VAR_26 and len(VAR_61) == 1:
if VAR_61[0] != current_user.filter_language() and current_user.filter_language() != "all":
VAR_61[0] = calibre_db.session.query(db.Languages). \
filter(db.Languages.lang_code == current_user.filter_language()).first().lang_code
VAR_61 = helper.uniq(VAR_61)
return FUNC_7(VAR_61, VAR_17.languages, db.Languages, calibre_db.session, 'languages')
def FUNC_21(VAR_28, VAR_17):
VAR_48 = False
if VAR_28:
VAR_86 = VAR_28.rstrip().strip()
if len(VAR_17.publishers) == 0 or (len(VAR_17.publishers) > 0 and VAR_86 != VAR_17.publishers[0].name):
VAR_48 |= FUNC_7([VAR_86], VAR_17.publishers, db.Publishers, calibre_db.session,
'publisher')
elif len(VAR_17.publishers):
VAR_48 |= FUNC_7([], VAR_17.publishers, db.Publishers, calibre_db.session, 'publisher')
return VAR_48
def FUNC_22(VAR_15, VAR_17, VAR_29, VAR_20, VAR_30, VAR_31):
VAR_48 = False
if VAR_20[VAR_31] == 'None':
VAR_20[VAR_31] = None
elif VAR_29.datatype == 'bool':
VAR_20[VAR_31] = 1 if VAR_20[VAR_31] == 'True' else 0
elif VAR_29.datatype == 'comments':
VAR_20[VAR_31] = Markup(VAR_20[VAR_31]).unescape()
elif VAR_29.datatype == 'datetime':
try:
VAR_20[VAR_31] = datetime.strptime(VAR_20[VAR_31], "%Y-%m-%d")
except ValueError:
VAR_20[VAR_31] = db.Books.DEFAULT_PUBDATE
if VAR_20[VAR_31] != VAR_30:
if VAR_30 is not None:
if VAR_20[VAR_31] is not None:
setattr(getattr(VAR_17, VAR_31)[0], 'value', VAR_20[VAR_31])
VAR_48 = True
else:
VAR_105 = getattr(VAR_17, VAR_31)[0]
getattr(VAR_17, VAR_31).remove(VAR_105)
calibre_db.session.delete(VAR_105)
VAR_48 = True
else:
VAR_87 = db.cc_classes[VAR_29.id]
VAR_88 = VAR_87(value=VAR_20[VAR_31], VAR_17=VAR_15)
calibre_db.session.add(VAR_88)
VAR_48 = True
return VAR_48, VAR_20
def FUNC_23(VAR_17, VAR_29, VAR_20, VAR_30, VAR_31):
VAR_48 = False
if VAR_29.datatype == 'rating':
VAR_20[VAR_31] = str(int(float(VAR_20[VAR_31]) * 2))
if VAR_20[VAR_31].strip() != VAR_30:
if VAR_30 is not None:
VAR_105 = getattr(VAR_17, VAR_31)[0]
getattr(VAR_17, VAR_31).remove(VAR_105)
if len(VAR_105.books) == 0:
calibre_db.session.delete(VAR_105)
VAR_48 = True
VAR_87 = db.cc_classes[VAR_29.id]
VAR_88 = calibre_db.session.query(VAR_87).filter(
VAR_87.value == VAR_20[VAR_31].strip()).first()
if VAR_88 is None:
VAR_88 = VAR_87(value=VAR_20[VAR_31].strip())
calibre_db.session.add(VAR_88)
VAR_48 = True
calibre_db.session.flush()
VAR_88 = calibre_db.session.query(VAR_87).filter(
VAR_87.value == VAR_20[VAR_31].strip()).first()
getattr(VAR_17, VAR_31).append(VAR_88)
return VAR_48, VAR_20
def FUNC_24(VAR_15, VAR_17, VAR_20):
VAR_48 = False
VAR_52 = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
for VAR_29 in VAR_52:
VAR_31 = "custom_column_" + str(VAR_29.id)
if not VAR_29.is_multiple:
if len(getattr(VAR_17, VAR_31)) > 0:
VAR_30 = getattr(VAR_17, VAR_31)[0].value
else:
VAR_30 = None
if VAR_20[VAR_31].strip():
if VAR_29.datatype in ['int', 'bool', 'float', "datetime", "comments"]:
VAR_48, VAR_20 = FUNC_22(VAR_15, VAR_17, VAR_29, VAR_20, VAR_30, VAR_31)
else:
VAR_48, VAR_20 = FUNC_23(VAR_17, VAR_29, VAR_20, VAR_30, VAR_31)
else:
if VAR_30 is not None:
VAR_105 = getattr(VAR_17, VAR_31)[0]
getattr(VAR_17, VAR_31).remove(VAR_105)
if not VAR_105.books or len(VAR_105.books) == 0:
calibre_db.session.delete(VAR_105)
VAR_48 = True
else:
VAR_57 = VAR_20[VAR_31].split(',')
VAR_57 = list(map(lambda it: it.strip(), VAR_57))
VAR_48 |= FUNC_7(VAR_57,
getattr(VAR_17, VAR_31),
db.cc_classes[VAR_29.id],
calibre_db.session,
'custom')
return VAR_48
def FUNC_25(VAR_32, VAR_17, VAR_15):
if 'btn-VAR_26-format' in VAR_32.files:
VAR_40 = VAR_32.files['btn-VAR_26-format']
if VAR_40.filename != '':
if not current_user.role_upload():
abort(403)
if '.' in VAR_40.filename:
VAR_96 = VAR_40.filename.rsplit('.', 1)[-1].lower()
if VAR_96 not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=VAR_96),
category="error")
return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
else:
flash(_('File to be uploaded must have an extension'), category="error")
return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
VAR_106 = VAR_17.path.rsplit('/', 1)[-1]
VAR_107 = os.path.normpath(os.path.join(config.config_calibre_dir, VAR_17.path))
VAR_108 = os.path.join(VAR_107, VAR_106 + '.' + VAR_96)
if not os.path.exists(VAR_107):
try:
os.makedirs(VAR_107)
except OSError:
flash(_(u"Failed to create VAR_72 %(path)s (Permission denied).", VAR_72=VAR_107), category="error")
return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
try:
VAR_40.save(VAR_108)
except OSError:
flash(_(u"Failed to store file %(file)s.", file=VAR_108), category="error")
return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
VAR_73 = os.path.getsize(VAR_108)
VAR_109 = calibre_db.get_book_format(VAR_15, VAR_96.upper())
if VAR_109:
VAR_2.warning('Book format %s already existing', VAR_96.upper())
else:
try:
VAR_119 = db.Data(VAR_15, VAR_96.upper(), VAR_73, VAR_106)
calibre_db.session.add(VAR_119)
calibre_db.session.commit()
calibre_db.update_title_sort(config)
except (OperationalError, IntegrityError) as e:
calibre_db.session.rollback()
VAR_2.error('Database VAR_49: %s', e)
flash(_(u"Database VAR_49: %(VAR_49)s.", VAR_49=e), category="error")
return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
VAR_110=_(u"File format %(ext)s added to %(VAR_17)s", ext=VAR_96.upper(), VAR_17=VAR_17.title)
WorkerThread.add(current_user.name, TaskUpload(
"<a href=\"" + url_for('web.show_book', VAR_15=VAR_17.id) + "\">" + VAR_110 + "</a>"))
return uploader.process(
VAR_108, *os.path.splitext(VAR_40.filename),
rarExecutable=config.config_rarfile_location)
def FUNC_26(VAR_32, VAR_17):
if 'btn-VAR_26-cover' in VAR_32.files:
VAR_40 = VAR_32.files['btn-VAR_26-cover']
if VAR_40.filename != '':
if not current_user.role_upload():
abort(403)
VAR_80, VAR_111 = helper.save_cover(VAR_40, VAR_17.path)
if VAR_80 is True:
return True
else:
flash(VAR_111, category="error")
return False
return None
def FUNC_27(VAR_17, VAR_33):
VAR_33 = book_title.rstrip().strip()
if VAR_17.title != VAR_33:
if VAR_33 == '':
VAR_33 = _(u'Unknown')
VAR_17.title = VAR_33
return True
return False
def FUNC_28(VAR_17, VAR_34, VAR_35=True):
VAR_62 = VAR_34.split('&')
VAR_62 = list(map(lambda it: it.strip().replace(',', '|'), VAR_62))
VAR_62 = helper.uniq(VAR_62)
if VAR_62 == ['']:
VAR_62 = [_(u'Unknown')] # prevent empty Author
VAR_63 = FUNC_7(VAR_62, VAR_17.authors, db.Authors, calibre_db.session, 'author')
VAR_64 = list()
for inp in VAR_62:
VAR_89 = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
if not VAR_89:
VAR_89 = helper.get_sorted_author(inp)
else:
VAR_89 = VAR_89.sort
VAR_64.append(helper.get_sorted_author(VAR_89))
VAR_65 = ' & '.join(VAR_64)
if VAR_17.author_sort != VAR_65 and VAR_35:
VAR_17.author_sort = VAR_65
VAR_63 = True
return VAR_62, VAR_63
@VAR_1.route("/admin/VAR_17/<int:VAR_15>", methods=['GET', 'POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_29(VAR_15):
VAR_39 = False
try:
calibre_db.update_title_sort(config)
except sqliteOperationalError as e:
VAR_2.debug_or_exception(e)
calibre_db.session.rollback()
if VAR_32.method != 'POST':
return FUNC_14(VAR_15)
VAR_17 = calibre_db.get_filtered_book(VAR_15, allow_show_archived=True)
if not VAR_17:
flash(_(u"Oops! Selected VAR_17 VAR_37 is unavailable. File does not exist or is not accessible"), category="error")
return redirect(url_for("web.index"))
VAR_36 = FUNC_25(VAR_32, VAR_17, VAR_15)
if FUNC_26(VAR_32, VAR_17) is True:
VAR_17.has_cover = 1
VAR_39 = True
try:
VAR_20 = VAR_32.form.to_dict()
FUNC_30(VAR_20, VAR_36)
VAR_90 = None
VAR_91 = FUNC_27(VAR_17, VAR_20["book_title"])
VAR_62, VAR_92 = FUNC_28(VAR_17, VAR_20["author_name"])
if VAR_92 or VAR_91:
VAR_90 = VAR_17.id
VAR_39 = True
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
VAR_49 = False
if VAR_90:
VAR_49 = helper.update_dir_stucture(VAR_90, config.config_calibre_dir, VAR_62[0])
if not VAR_49:
if "cover_url" in VAR_20:
if VAR_20["cover_url"]:
if not current_user.role_upload():
return "", (403)
if VAR_20["cover_url"].endswith('/static/generic_cover.jpg'):
VAR_17.has_cover = 0
else:
VAR_68, VAR_49 = helper.save_cover_from_url(VAR_20["cover_url"], VAR_17.path)
if VAR_68 is True:
VAR_17.has_cover = 1
VAR_39 = True
else:
flash(VAR_49, category="error")
VAR_39 |= FUNC_18(VAR_20["series_index"], VAR_17)
VAR_39 |= FUNC_19(Markup(VAR_20['description']).unescape(), VAR_17)
VAR_13 = FUNC_31(VAR_20, VAR_17)
VAR_112, VAR_19 = FUNC_8(VAR_13, VAR_17.identifiers, calibre_db.session)
if VAR_19:
flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
VAR_39 |= VAR_112
VAR_39 |= FUNC_16(VAR_20['tags'], VAR_17)
VAR_39 |= FUNC_17(VAR_20["series"], VAR_17)
VAR_39 |= FUNC_21(VAR_20['publisher'], VAR_17)
VAR_39 |= FUNC_20(VAR_20['languages'], VAR_17)
VAR_39 |= FUNC_15(VAR_20, VAR_17)
VAR_39 |= FUNC_24(VAR_15, VAR_17, VAR_20)
if VAR_20["pubdate"]:
try:
VAR_17.pubdate = datetime.strptime(VAR_20["pubdate"], "%Y-%m-%d")
except ValueError:
VAR_17.pubdate = db.Books.DEFAULT_PUBDATE
else:
VAR_17.pubdate = db.Books.DEFAULT_PUBDATE
if VAR_39:
VAR_17.last_modified = datetime.utcnow()
calibre_db.session.merge(VAR_17)
calibre_db.session.commit()
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
if "detail_view" in VAR_20:
return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
else:
flash(_("Metadata successfully updated"), category="success")
return FUNC_14(VAR_15)
else:
calibre_db.session.rollback()
flash(VAR_49, category="error")
return FUNC_14(VAR_15)
except Exception as ex:
VAR_2.debug_or_exception(ex)
calibre_db.session.rollback()
flash(_("Error editing VAR_17, please check logfile for details"), category="error")
return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
def FUNC_30(VAR_20, VAR_36):
    """Fill empty form fields in VAR_20 from the uploaded file's metadata VAR_36.

    'Unknown' placeholders are blanked first so the file metadata can win.
    Mutates VAR_20 in place.
    """
    unknown = _(u'Unknown')
    for placeholder_key in ('author_name', 'book_title'):
        if VAR_20[placeholder_key] == unknown:
            VAR_20[placeholder_key] = ''
    field_map = (
        ('tags', 'tags'),
        ('author_name', 'author'),
        ('series', 'series'),
        ('series_index', 'series_id'),
        ('languages', 'languages'),
        ('book_title', 'title'),
    )
    for form_key, meta_attr in field_map:
        if not VAR_20[form_key]:
            VAR_20[form_key] = getattr(VAR_36, meta_attr, '')
    if not VAR_20["description"]:
        VAR_20["description"] = Markup(getattr(VAR_36, 'description', '')).unescape()
def FUNC_31(VAR_20, VAR_17):
    """Build db.Identifiers rows from paired 'identifier-type-*' / 'identifier-val-*'
    form fields; type fields without a matching value field are skipped."""
    type_prefix = 'identifier-type-'
    val_prefix = 'identifier-val-'
    result = []
    for form_key, id_type in VAR_20.items():
        if not form_key.startswith(type_prefix):
            continue
        val_key = val_prefix + form_key[len(type_prefix):]
        if val_key in VAR_20:
            result.append(db.Identifiers(VAR_20[val_key], id_type, VAR_17.id))
    return result
def FUNC_32(VAR_37, VAR_38):
    """Prepare author records for an uploaded book.

    VAR_37: title string, VAR_38: raw '&'-separated author string.
    Returns (sort-name list joined later by the caller? no — see below):
    (sorted-author list, cleaned author-name list, primary author db row).

    Fixes two undefined-name bugs from inconsistent renaming:
    - the freshly created / found author row was assigned to `db_author`
      while the rest of the code reads `VAR_69` (session.add(None) crash);
    - the duplicate-book flash rendered with `VAR_95=entry` (undefined).
    """
    if VAR_37 != _(u'Unknown') and VAR_38 != _(u'Unknown'):
        VAR_95 = calibre_db.check_exists_book(VAR_38, VAR_37)
        if VAR_95:
            VAR_2.info("Uploaded VAR_17 probably exists in library")
            flash(_(u"Uploaded VAR_17 probably exists in the library, consider to VAR_63 before VAR_26 new: ")
                  + Markup(render_title_template('book_exists_flash.html', VAR_95=VAR_95)), category="warning")
    VAR_62 = VAR_38.split('&')
    VAR_62 = list(map(lambda it: it.strip().replace(',', '|'), VAR_62))
    VAR_62 = helper.uniq(VAR_62)
    if VAR_62 == ['']:
        VAR_62 = [_(u'Unknown')]  # prevent empty Author
    VAR_64 = list()
    VAR_69 = None
    for inp in VAR_62:
        VAR_89 = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
        if not VAR_89:
            if not VAR_69:
                # first unknown author becomes the book's primary author row
                VAR_69 = db.Authors(inp, helper.get_sorted_author(inp), "")
                calibre_db.session.add(VAR_69)
                calibre_db.session.commit()
            VAR_113 = helper.get_sorted_author(inp)
        else:
            if not VAR_69:
                VAR_69 = VAR_89
            VAR_113 = VAR_89.sort
        VAR_64.append(VAR_113)
    VAR_65 = ' & '.join(VAR_64)
    return VAR_65, VAR_62, VAR_69
def FUNC_33(VAR_39, VAR_36):
    # Create the db.Books row (plus one db.Data row) for an uploaded file.
    # VAR_39: running "modified" flag, VAR_36: uploader metadata object.
    # Returns (book row, author-name list, title-derived file stem).
    VAR_37 = VAR_36.title
    VAR_38 = VAR_36.author
    VAR_65, VAR_62, VAR_69 = FUNC_32(VAR_37, VAR_38)
    VAR_70 = helper.get_valid_filename(VAR_37)
    VAR_71 = helper.get_valid_filename(VAR_69.name)
    # library-relative path "<author>/<title>", always with forward slashes
    VAR_72 = os.path.join(VAR_71, VAR_70).replace('\\', '/')
    # datetime(101, 1, 1) is the sentinel "no pubdate" value used here
    VAR_41 = db.Books(VAR_37, "", VAR_65, datetime.utcnow(), datetime(101, 1, 1),
                      '1', datetime.utcnow(), VAR_72, VAR_36.cover, VAR_69, [], "")
    VAR_39 |= FUNC_7(VAR_62, VAR_41.authors, db.Authors, calibre_db.session,
                     'author')
    VAR_39 |= FUNC_18(VAR_36.series_id, VAR_41)
    VAR_39 |= FUNC_20(VAR_36.languages, VAR_41, VAR_26=True)
    VAR_39 |= FUNC_16(VAR_36.tags, VAR_41)
    VAR_39 |= FUNC_21(VAR_36.publisher, VAR_41)
    VAR_39 |= FUNC_17(VAR_36.series, VAR_41)
    VAR_73 = os.path.getsize(VAR_36.file_path)
    VAR_74 = db.Data(VAR_41, VAR_36.extension.upper()[1:], VAR_73, VAR_70)
    VAR_41.data.append(VAR_74)
    calibre_db.session.add(VAR_41)
    # flush (not commit) so VAR_41.id is populated for the caller
    calibre_db.session.flush()
    return VAR_41, VAR_62, VAR_70
def FUNC_34(VAR_40):
    """Validate an uploaded file's extension and hand it to the uploader.

    Returns (meta, None) on success, or (None, JSON error response) when the
    extension is missing/forbidden or the temp save fails.
    """
    if '.' not in VAR_40.filename:
        flash(_('File to be uploaded must have an extension'), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    VAR_96 = VAR_40.filename.rsplit('.', 1)[-1].lower()
    if VAR_96 not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
        flash(
            _("File extension '%(ext)s' is not allowed to be uploaded to this server",
              ext=VAR_96), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    try:
        return uploader.upload(VAR_40, config.config_rarfile_location), None
    except (IOError, OSError):
        VAR_2.error("File %s could not saved to temp dir", VAR_40.filename)
        flash(_(u"File %(filename)s could not saved to temp dir",
                filename=VAR_40.filename), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
def FUNC_35(VAR_36, VAR_41):
    """Copy the uploaded cover (or the generic placeholder) into the new book's folder.

    A temporary uploaded cover file is removed after the copy; failures are
    logged and flashed but not raised.
    """
    VAR_97 = VAR_36.cover or os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')
    VAR_75 = os.path.join(config.config_calibre_dir, VAR_41.path, "cover.jpg")
    try:
        copyfile(VAR_97, VAR_75)
        if VAR_36.cover:
            os.unlink(VAR_36.cover)
    except OSError as e:
        VAR_2.error("Failed to move cover file %s: %s", VAR_75, e)
        flash(_(u"Failed to Move Cover File %(file)s: %(VAR_49)s", file=VAR_75,
                VAR_49=e),
              category="error")
@VAR_1.route("/upload", methods=["GET", "POST"])
@login_required_if_no_ano
@FUNC_0
def VAR_26():
    """Handle one or more uploaded book files and create library entries.

    Fix: three url_for() calls referenced the undefined name `book_id`;
    the new book's id is held in the local VAR_15.
    """
    if not config.config_uploading:
        abort(404)
    if VAR_32.method == 'POST' and 'btn-upload' in VAR_32.files:
        for VAR_40 in VAR_32.files.getlist("btn-upload"):
            try:
                VAR_39 = False
                calibre_db.update_title_sort(config)
                # expose uuid4() to the sqlite connection for default UUIDs
                calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
                VAR_36, VAR_49 = FUNC_34(VAR_40)
                if VAR_49:
                    return VAR_49
                VAR_41, VAR_62, VAR_70 = FUNC_33(VAR_39, VAR_36)
                VAR_39 |= FUNC_19(Markup(VAR_36.description).unescape(), VAR_41)
                VAR_15 = VAR_41.id
                VAR_37 = VAR_41.title
                VAR_49 = helper.update_dir_structure_file(VAR_15,
                                                          config.config_calibre_dir,
                                                          VAR_62[0],
                                                          VAR_36.file_path,
                                                          VAR_70 + VAR_36.extension.lower())
                FUNC_35(VAR_36, VAR_41)
                calibre_db.session.commit()
                if config.config_use_google_drive:
                    gdriveutils.updateGdriveCalibreFromLocal()
                if VAR_49:
                    flash(VAR_49, category="error")
                VAR_110 = _(u"File %(file)s uploaded", file=VAR_37)
                WorkerThread.add(current_user.name, TaskUpload(
                    "<a href=\"" + url_for('web.show_book', VAR_15=VAR_15) + "\">" + VAR_110 + "</a>"))
                # single-file uploads redirect straight to the edit/detail page
                if len(VAR_32.files.getlist("btn-upload")) < 2:
                    if current_user.role_edit() or current_user.role_admin():
                        VAR_120 = {"location": url_for('editbook.edit_book', VAR_15=VAR_15)}
                        return Response(json.dumps(VAR_120), mimetype='application/json')
                    else:
                        VAR_120 = {"location": url_for('web.show_book', VAR_15=VAR_15)}
                        return Response(json.dumps(VAR_120), mimetype='application/json')
            except (OperationalError, IntegrityError) as e:
                calibre_db.session.rollback()
                VAR_2.error("Database VAR_49: %s", e)
                flash(_(u"Database VAR_49: %(VAR_49)s.", VAR_49=e), category="error")
    return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
@VAR_1.route("/admin/VAR_17/convert/<int:VAR_15>", methods=['POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_37(VAR_15):
    """Queue a format conversion for one book and redirect back to its edit page.

    Fix: both redirects referenced the undefined name `book_id` instead of
    the view parameter VAR_15.
    """
    VAR_76 = VAR_32.form.get('book_format_from', None)
    VAR_77 = VAR_32.form.get('book_format_to', None)
    if (VAR_76 is None) or (VAR_77 is None):
        flash(_(u"Source or destination format for conversion missing"), category="error")
        return redirect(url_for('editbook.edit_book', VAR_15=VAR_15))
    VAR_2.info('converting: VAR_17 id: %s from: %s to: %s', VAR_15, VAR_76, VAR_77)
    VAR_78 = helper.convert_book_format(VAR_15, config.config_calibre_dir, VAR_76.upper(),
                                        VAR_77.upper(), current_user.name)
    if VAR_78 is None:
        flash(_(u"Book successfully queued for converting to %(VAR_16)s",
                VAR_16=VAR_77),
              category="success")
    else:
        flash(_(u"There was an VAR_49 converting this VAR_17: %(res)s", res=VAR_78), category="error")
    return redirect(url_for('editbook.edit_book', VAR_15=VAR_15))
@VAR_1.route("/scholarsearch/<VAR_42>",methods=['GET'])
@login_required_if_no_ano
@FUNC_1
def FUNC_38(VAR_42):
    """Search Google Scholar (when the library is installed) for '+'-separated
    terms and return at most the first 10 publications as JSON."""
    if not VAR_0:
        return "[]"
    VAR_98 = scholarly.search_pubs(' '.join(VAR_42.split('+')))
    VAR_68 = []
    for publication in VAR_98:
        del publication['source']  # not JSON-serializable / not needed client-side
        VAR_68.append(publication)
        if len(VAR_68) >= 10:
            break
    return Response(json.dumps(VAR_68), mimetype='application/json')
@VAR_1.route("/ajax/editbooks/<VAR_43>", methods=['POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_39(VAR_43):
    """Inline table edit: update one field (VAR_43) of one book from the AJAX form.

    Fix: the languages branch passed the undefined name `invalid` as the
    VAR_27 keyword; it must pass the local list VAR_27.
    """
    VAR_79 = VAR_32.form.to_dict()
    VAR_17 = calibre_db.get_book(VAR_79['pk'])
    VAR_80 = ""
    if VAR_43 == 'series_index':
        FUNC_18(VAR_79['value'], VAR_17)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_17.series_index}), mimetype='application/json')
    elif VAR_43 == 'tags':
        FUNC_16(VAR_79['value'], VAR_17)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in VAR_17.tags])}),
                          mimetype='application/json')
    elif VAR_43 == 'series':
        FUNC_17(VAR_79['value'], VAR_17)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in VAR_17.series])}),
                          mimetype='application/json')
    elif VAR_43 == 'publishers':
        FUNC_21(VAR_79['value'], VAR_17)
        VAR_80 = Response(json.dumps({'success': True,
                                      'newValue': ', '.join([VAR_86.name for VAR_86 in VAR_17.publishers])}),
                          mimetype='application/json')
    elif VAR_43 == 'languages':
        VAR_27 = list()
        FUNC_20(VAR_79['value'], VAR_17, VAR_27=VAR_27)
        if VAR_27:
            VAR_80 = Response(json.dumps({'success': False,
                                          'msg': 'Invalid VAR_25 in VAR_32: {}'.format(','.join(VAR_27))}),
                              mimetype='application/json')
        else:
            VAR_121 = list()
            for lang in VAR_17.languages:
                try:
                    VAR_121.append(LC.parse(lang.lang_code).get_language_name(get_locale()))
                except UnknownLocaleError:
                    VAR_121.append(_(isoLanguages.get(part3=lang.lang_code).name))
            VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join(VAR_121)}),
                              mimetype='application/json')
    elif VAR_43 == 'author_sort':
        VAR_17.author_sort = VAR_79['value']
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_17.author_sort}),
                          mimetype='application/json')
    elif VAR_43 == 'title':
        VAR_124 = VAR_17.sort
        FUNC_27(VAR_17, VAR_79.get('value', ""))
        helper.update_dir_stucture(VAR_17.id, config.config_calibre_dir)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_17.title}),
                          mimetype='application/json')
    elif VAR_43 == 'sort':
        VAR_17.sort = VAR_79['value']
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_17.sort}),
                          mimetype='application/json')
    elif VAR_43 == 'authors':
        VAR_62, VAR_125 = FUNC_28(VAR_17, VAR_79['value'], VAR_79.get('checkA', None) == "true")
        helper.update_dir_stucture(VAR_17.id, config.config_calibre_dir, VAR_62[0])
        VAR_80 = Response(json.dumps({'success': True,
                                      'newValue': ' & '.join([author.replace('|', ',') for author in VAR_62])}),
                          mimetype='application/json')
    VAR_17.last_modified = datetime.utcnow()
    try:
        calibre_db.session.commit()
        # 'title' edits with checkT == "false" keep the previous sort value
        if VAR_43 == 'title' and VAR_79.get('checkT') == "false":
            VAR_17.sort = VAR_124
            calibre_db.session.commit()
    except (OperationalError, IntegrityError) as e:
        calibre_db.session.rollback()
        VAR_2.error("Database VAR_49: %s", e)
    return VAR_80
@VAR_1.route("/ajax/sort_value/<VAR_44>/<int:VAR_45>")
@login_required
def FUNC_40(VAR_44, VAR_45):
    """Return the stored sort value for one field of one book as JSON ("" otherwise)."""
    if VAR_44 not in ('title', 'authors', 'sort', 'author_sort'):
        return ""
    VAR_17 = calibre_db.get_filtered_book(VAR_45)
    if not VAR_17:
        return ""
    if VAR_44 == 'title':
        return json.dumps({'sort': VAR_17.sort})
    if VAR_44 == 'authors':
        return json.dumps({'author_sort': VAR_17.author_sort})
    if VAR_44 == 'sort':
        return json.dumps({'sort': VAR_17.title})
    # remaining case: VAR_44 == 'author_sort'
    return json.dumps({'author_sort': VAR_17.author})
@VAR_1.route("/ajax/simulatemerge", methods=['POST'])
@login_required
@FUNC_1
def FUNC_41():
    """Preview a merge: report the target book's title and the titles of all
    source books that would be merged into it.

    Fix: the source-title list was re-initialised on every loop iteration, so
    only the last selected book was ever reported; collect all titles instead.
    """
    VAR_79 = VAR_32.get_json().get('Merge_books')
    if VAR_79:
        VAR_100 = calibre_db.get_book(VAR_79[0]).title
        VAR_79.pop(0)
        if VAR_100:
            VAR_118 = []
            for VAR_15 in VAR_79:
                VAR_118.append(calibre_db.get_book(VAR_15).title)
            return json.dumps({'to': VAR_100, 'from': VAR_118})
    return ""
@VAR_1.route("/ajax/mergebooks", methods=['POST'])
@login_required
@FUNC_1
def FUNC_42():
    # Merge the selected books into the first one: copy every file format the
    # target does not have yet into its folder, then delete the source books.
    VAR_79 = VAR_32.get_json().get('Merge_books')
    VAR_81 = list()  # formats already present on the target book
    if VAR_79:
        VAR_100 = calibre_db.get_book(VAR_79[0])  # merge target
        VAR_79.pop(0)
        if VAR_100:
            for file in VAR_100.data:
                VAR_81.append(file.format)
            # target-side file stem: "<title> - <first author>"
            VAR_114 = helper.get_valid_filename(VAR_100.title) + ' - ' + \
                      helper.get_valid_filename(VAR_100.authors[0].name)
            for VAR_15 in VAR_79:
                VAR_118 = calibre_db.get_book(VAR_15)  # source book
                if VAR_118:
                    for element in VAR_118.data:
                        if element.format not in VAR_81:
                            # NOTE(review): paths are built from DB-stored book
                            # paths and file names with os.path.normpath but no
                            # explicit traversal check — confirm these values
                            # are trusted before widening access to this route.
                            VAR_122 = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                    VAR_100.path,
                                                                    VAR_114 + "." + element.format.lower()))
                            VAR_123 = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                    VAR_118.path,
                                                                    element.name + "." + element.format.lower()))
                            copyfile(VAR_123, VAR_122)
                            VAR_100.data.append(db.Data(VAR_100.id,
                                                        element.format,
                                                        element.uncompressed_size,
                                                        VAR_114))
                    # delete the merged-in source book (JSON mode)
                    FUNC_13(VAR_118.id,"", True)
        return json.dumps({'success': True})
    return ""
@VAR_1.route("/ajax/xchange", methods=['POST'])
@login_required
@FUNC_1
def FUNC_43():
    # Swap title and author(s) for each book id posted under 'xchange':
    # the old author string becomes the title and the old title becomes the author.
    VAR_79 = VAR_32.get_json().get('xchange')
    if VAR_79:
        for val in VAR_79:
            VAR_39 = False
            VAR_17 = calibre_db.get_book(val)
            VAR_115 = VAR_17.title
            VAR_116 = calibre_db.order_authors(VAR_17)
            VAR_53 = []
            for VAR_38 in VAR_116.authors:
                VAR_53.append(VAR_38.name.replace('|', ','))
            # set old authors as the new title, and the old title as the author
            VAR_91 = FUNC_27(VAR_17, " ".join(VAR_53))
            VAR_62, VAR_92 = FUNC_28(VAR_17, VAR_115)
            if VAR_92 or VAR_91:
                VAR_90 = VAR_17.id
                VAR_39 = True
                if config.config_use_google_drive:
                    gdriveutils.updateGdriveCalibreFromLocal()
                if VAR_90:
                    # rename the on-disk folder to match the swapped metadata
                    helper.update_dir_stucture(VAR_90, config.config_calibre_dir, VAR_62[0])
                if VAR_39:
                    VAR_17.last_modified = datetime.utcnow()
                try:
                    calibre_db.session.commit()
                except (OperationalError, IntegrityError) as e:
                    calibre_db.session.rollback()
                    VAR_2.error("Database VAR_49: %s", e)
                    return json.dumps({'success': False})
        if config.config_use_google_drive:
            gdriveutils.updateGdriveCalibreFromLocal()
        return json.dumps({'success': True})
    return ""
|
from __future__ import division, print_function, unicode_literals
import os
from datetime import datetime
import json
from shutil import .copyfile
from uuid import uuid4
from lxml.html.clean import .clean_html
try:
from scholarly import scholarly
VAR_45 = True
except ImportError:
VAR_45 = False
from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask import Blueprint, VAR_31, flash, redirect, url_for, abort, Markup, Response
from flask_babel import gettext as _
from flask_login import .current_user, login_required
from sqlalchemy.exc import OperationalError, IntegrityError
from sqlite3 import OperationalError as sqliteOperationalError
from . import .constants, logger, isoLanguages, gdriveutils, uploader, helper
from . import .config, get_locale, ub, db
from . import .calibre_db
from .services.worker import WorkerThread
from .tasks.upload import TaskUpload
from .render_template import render_title_template
from .usermanagement import .login_required_if_no_ano
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
VAR_0 = Blueprint('editbook', __name__)
VAR_1 = logger.create()
def FUNC_0(VAR_2):
    """Decorator: allow the wrapped view only for users with upload or admin rights."""
    @wraps(VAR_2)
    def FUNC_44(*VAR_46, **VAR_47):
        if not (current_user.role_upload() or current_user.role_admin()):
            abort(403)
        return VAR_2(*VAR_46, **VAR_47)
    return FUNC_44
def FUNC_1(VAR_2):
    """Decorator: allow the wrapped view only for users with edit or admin rights."""
    @wraps(VAR_2)
    def FUNC_44(*VAR_46, **VAR_47):
        if not (current_user.role_edit() or current_user.role_admin()):
            abort(403)
        return VAR_2(*VAR_46, **VAR_47)
    return FUNC_44
def FUNC_2(VAR_3, VAR_4, VAR_5):
    """Return the db objects in VAR_3 whose identifying string no longer appears
    (case-insensitively) in the input list VAR_5 — i.e. the ones to remove.

    VAR_4 selects which attribute identifies an element:
    'languages' -> lang_code, 'custom' -> value, otherwise -> name.
    """
    def identifying_string(element):
        if VAR_4 == 'languages':
            return element.lang_code
        if VAR_4 == 'custom':
            return element.value
        return element.name

    to_delete = []
    for element in VAR_3:
        current = identifying_string(element).lower()
        if not any(candidate.lower() == current for candidate in VAR_5):
            to_delete.append(element)
    return to_delete
def FUNC_3(VAR_3, VAR_4, VAR_5):
    """Return the input strings in VAR_5 that have no exact (case-sensitive)
    match among the db objects in VAR_3 — i.e. the ones that must be added.

    VAR_4 selects the identifying attribute, as in FUNC_2.
    """
    if VAR_4 == 'languages':
        existing = [element.lang_code for element in VAR_3]
    elif VAR_4 == 'custom':
        existing = [element.value for element in VAR_3]
    else:
        existing = [element.name for element in VAR_3]
    return [candidate for candidate in VAR_5 if candidate not in existing]
def FUNC_4(VAR_3, VAR_6, VAR_7):
    """Detach the objects in VAR_7 from the book's collection VAR_3 and delete
    any that are now orphaned (linked to no book via `.books`).

    Returns True when at least one element was removed.
    """
    for orphan_candidate in VAR_7:
        VAR_3.remove(orphan_candidate)
        if len(orphan_candidate.books) == 0:
            VAR_6.delete(orphan_candidate)
    return len(VAR_7) > 0
def FUNC_5(VAR_3, VAR_8, VAR_6, VAR_4, VAR_9):
    """Create or link the elements named in VAR_9 into the book collection VAR_3.

    VAR_8 is the mapped db class, VAR_6 the session, VAR_4 the element kind.
    Returns True when anything changed.

    Fixes: the 'series' branch referenced the undefined name `add_element`
    (the loop variable is VAR_11), and `VAR_48 = True` was set twice in the
    already-exists branch.
    """
    VAR_48 = False
    # column used for the existence lookup, depending on element kind
    if VAR_4 == 'languages':
        VAR_83 = VAR_8.lang_code
    elif VAR_4 == 'custom':
        VAR_83 = VAR_8.value
    else:
        VAR_83 = VAR_8.name
    for VAR_11 in VAR_9:
        VAR_10 = VAR_6.query(VAR_8).filter(VAR_83 == VAR_11).first()
        # build a fresh row of the right shape for this element kind
        if VAR_4 == 'author':
            VAR_102 = VAR_8(VAR_11, helper.get_sorted_author(VAR_11.replace('|', ',')), "")
        elif VAR_4 == 'series':
            VAR_102 = VAR_8(VAR_11, VAR_11)
        elif VAR_4 == 'custom':
            VAR_102 = VAR_8(value=VAR_11)
        elif VAR_4 == 'publisher':
            VAR_102 = VAR_8(VAR_11, None)
        else:  # VAR_4 should be tag or language
            VAR_102 = VAR_8(VAR_11)
        if VAR_10 is None:
            VAR_48 = True
            VAR_6.add(VAR_102)
            VAR_3.append(VAR_102)
        else:
            VAR_10 = FUNC_6(VAR_10, VAR_11, VAR_4)
            VAR_48 = True
            VAR_3.append(VAR_10)
    return VAR_48
def FUNC_6(VAR_10, VAR_11, VAR_4):
    """Update an existing db element in place so its identifying field(s) match
    VAR_11, according to the element kind VAR_4. Returns the element."""
    if VAR_4 == 'custom':
        if VAR_10.value != VAR_11:
            VAR_10.value = VAR_11  # ToDo: Before VAR_102, but this is not plausible
        return VAR_10
    if VAR_4 == 'languages':
        if VAR_10.lang_code != VAR_11:
            VAR_10.lang_code = VAR_11
        return VAR_10
    # remaining kinds identify by .name; sort handling differs per kind
    if VAR_10.name != VAR_11:
        VAR_10.name = VAR_11
        if VAR_4 == 'series':
            VAR_10.sort = VAR_11
        elif VAR_4 == 'author':
            VAR_10.sort = VAR_11.replace('|', ',')
        elif VAR_4 == 'publisher':
            VAR_10.sort = None
    return VAR_10
def FUNC_7(VAR_5, VAR_3, VAR_8, VAR_6, VAR_4):
    """Synchronise the book collection VAR_3 with the input string list VAR_5:
    remove stale elements, then create/link the missing ones.

    Raises TypeError when VAR_5 is not a list. Returns True on any change.
    """
    if not isinstance(VAR_5, list):
        raise TypeError(str(VAR_5) + " should be passed as a list")
    cleaned = [entry for entry in VAR_5 if entry != '']
    removals = FUNC_2(VAR_3, VAR_4, cleaned)
    additions = FUNC_3(VAR_3, VAR_4, cleaned)
    VAR_48 = FUNC_4(VAR_3, VAR_6, removals)
    if additions:
        VAR_48 |= FUNC_5(VAR_3, VAR_8, VAR_6, VAR_4, additions)
    return VAR_48
def FUNC_8(VAR_12, VAR_13, VAR_6):
    """Reconcile a book's identifier rows (VAR_13) with the submitted set (VAR_12).

    Identifier types are compared case-insensitively. Existing rows with a
    matching type are updated in place; unmatched existing rows are deleted;
    unmatched submitted rows are added.

    Returns (changed, duplicates_found) where duplicates_found is True when
    the submitted set contained the same type more than once.
    """
    VAR_48 = False
    # lower-cased type -> identifier; duplicate types in the input collapse here
    incoming = {identifier.type.lower(): identifier for identifier in VAR_12}
    VAR_49 = len(VAR_12) != len(incoming)
    existing = {identifier.type.lower(): identifier for identifier in VAR_13}
    for id_type, identifier in existing.items():
        if id_type in incoming:
            replacement = incoming[id_type]
            identifier.type = replacement.type
            identifier.val = replacement.val
        else:
            VAR_6.delete(identifier)
            VAR_48 = True
    for id_type, identifier in incoming.items():
        if id_type not in existing:
            VAR_6.add(identifier)
            VAR_48 = True
    return VAR_48, VAR_49
@VAR_0.route("/ajax/delete/<int:VAR_14>")
@login_required
def FUNC_9(VAR_14):
    """AJAX endpoint: delete the whole book and return the JSON status payload."""
    payload = FUNC_13(VAR_14, "", True)
    return Response(payload, mimetype='application/json')
@VAR_0.route("/delete/<int:VAR_14>", defaults={'VAR_15': ""})
@VAR_0.route("/delete/<int:VAR_14>/<string:VAR_15>")
@login_required
def FUNC_10(VAR_14, VAR_15):
    """Delete a whole book (empty VAR_15) or one format; non-AJAX variant.

    Fix: the defaults mapping used the key 'book_format', which does not match
    the view parameter name VAR_15, so Flask would fail on the format-less route.
    """
    return FUNC_13(VAR_14, VAR_15, False)
def FUNC_11(VAR_14, VAR_16):
    # Remove every trace of a book: app-side rows (shelves, read state,
    # downloads), all linked metadata on the calibre side, custom-column
    # values, and finally the Books row itself. Caller commits/rolls back.
    ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == VAR_14).delete()
    ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == VAR_14).delete()
    ub.delete_download(VAR_14)
    ub.session_commit()
    # syncing against [u''] unlinks every element (and deletes orphans)
    FUNC_7([u''], VAR_16.authors, db.Authors, calibre_db.session, 'author')
    FUNC_7([u''], VAR_16.tags, db.Tags, calibre_db.session, 'tags')
    FUNC_7([u''], VAR_16.series, db.Series, calibre_db.session, 'series')
    FUNC_7([u''], VAR_16.languages, db.Languages, calibre_db.session, 'languages')
    FUNC_7([u''], VAR_16.publishers, db.Publishers, calibre_db.session, 'publishers')
    VAR_52 = calibre_db.session.query(db.Custom_Columns). \
        filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for VAR_28 in VAR_52:
        VAR_30 = "custom_column_" + str(VAR_28.id)
        if not VAR_28.is_multiple:
            if len(getattr(VAR_16, VAR_30)) > 0:
                if VAR_28.datatype == 'bool' or VAR_28.datatype == 'integer' or VAR_28.datatype == 'float':
                    # single-value rows belong to this book only: delete outright
                    VAR_105 = getattr(VAR_16, VAR_30)[0]
                    getattr(VAR_16, VAR_30).remove(VAR_105)
                    VAR_1.debug('remove ' + str(VAR_28.id))
                    calibre_db.session.delete(VAR_105)
                    calibre_db.session.commit()
                elif VAR_28.datatype == 'rating':
                    # rating rows are shared between books: delete only if orphaned
                    VAR_105 = getattr(VAR_16, VAR_30)[0]
                    getattr(VAR_16, VAR_30).remove(VAR_105)
                    if len(VAR_105.books) == 0:
                        VAR_1.debug('remove ' + str(VAR_28.id))
                        calibre_db.session.delete(VAR_105)
                        calibre_db.session.commit()
                else:
                    VAR_105 = getattr(VAR_16, VAR_30)[0]
                    getattr(VAR_16, VAR_30).remove(VAR_105)
                    VAR_1.debug('remove ' + str(VAR_28.id))
                    calibre_db.session.delete(VAR_105)
                    calibre_db.session.commit()
        else:
            FUNC_7([u''], getattr(VAR_16, VAR_30), db.cc_classes[VAR_28.id],
                   calibre_db.session, 'custom')
    calibre_db.session.query(db.Books).filter(db.Books.id == VAR_14).delete()
def FUNC_12(VAR_15, VAR_17, VAR_18, VAR_14):
    """Build the response after a delete: JSON for AJAX callers (VAR_17 True),
    flash + redirect otherwise. VAR_18 carries an optional warning payload.

    Fix: url_for() was called with the undefined name `book_id` instead of
    the parameter VAR_14.
    """
    if VAR_15:
        if VAR_17:
            return json.dumps([VAR_18, {"location": url_for("editbook.edit_book", VAR_14=VAR_14),
                                        "type": "success",
                                        "format": VAR_15,
                                        "message": _('Book Format Successfully Deleted')}])
        else:
            flash(_('Book Format Successfully Deleted'), category="success")
            return redirect(url_for('editbook.edit_book', VAR_14=VAR_14))
    else:
        if VAR_17:
            return json.dumps([VAR_18, {"location": url_for('web.index'),
                                        "type": "success",
                                        "format": VAR_15,
                                        "message": _('Book Successfully Deleted')}])
        else:
            flash(_('Book Successfully Deleted'), category="success")
            return redirect(url_for('web.index'))
def FUNC_13(VAR_14, VAR_15, VAR_17):
    """Delete a whole book (VAR_15 == "") or one format of it.

    VAR_17 selects JSON (AJAX) vs. flash/redirect responses.

    Fixes undefined names from inconsistent renaming: `book_format` (the
    parameter is VAR_15) and `book_id` (the parameter is VAR_14).
    """
    VAR_18 = {}
    if current_user.role_delete_books():
        VAR_16 = calibre_db.get_book(VAR_14)
        if VAR_16:
            try:
                VAR_68, VAR_49 = helper.delete_book(VAR_16, config.config_calibre_dir, VAR_15=VAR_15.upper())
                if not VAR_68:
                    if VAR_17:
                        return json.dumps([{"location": url_for("editbook.edit_book", VAR_14=VAR_14),
                                            "type": "danger",
                                            "format": "",
                                            "message": VAR_49}])
                    else:
                        flash(VAR_49, category="error")
                        return redirect(url_for('editbook.edit_book', VAR_14=VAR_14))
                if VAR_49:
                    # partial success: carry the warning into the final response
                    if VAR_17:
                        VAR_18 = {"location": url_for("editbook.edit_book", VAR_14=VAR_14),
                                  "type": "warning",
                                  "format": "",
                                  "message": VAR_49}
                    else:
                        flash(VAR_49, category="warning")
                if not VAR_15:
                    FUNC_11(VAR_14, VAR_16)
                else:
                    calibre_db.session.query(db.Data).filter(db.Data.book == VAR_16.id).\
                        filter(db.Data.format == VAR_15).delete()
                calibre_db.session.commit()
            except Exception as ex:
                VAR_1.debug_or_exception(ex)
                calibre_db.session.rollback()
                if VAR_17:
                    return json.dumps([{"location": url_for("editbook.edit_book", VAR_14=VAR_14),
                                        "type": "danger",
                                        "format": "",
                                        "message": ex}])
                else:
                    flash(str(ex), category="error")
                    return redirect(url_for('editbook.edit_book', VAR_14=VAR_14))
        else:
            # book not found — fall through to the generic success response
            VAR_1.error('Book with id "%s" could not be deleted: not found', VAR_14)
    return FUNC_12(VAR_15, VAR_17, VAR_18, VAR_14)
def FUNC_14(VAR_14):
    """Render the metadata edit page for one book.

    Fixes undefined names from inconsistent renaming: `allowed_conversion_formats`
    (the local list is VAR_55) and the `book`/`cc` template keyword values
    (locals VAR_16 / VAR_52).
    """
    VAR_52 = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    VAR_16 = calibre_db.get_filtered_book(VAR_14, allow_show_archived=True)
    if not VAR_16:
        flash(_(u"Oops! Selected VAR_16 VAR_36 is unavailable. File does not exist or is not accessible"), category="error")
        return redirect(url_for("web.index"))
    for lang in VAR_16.languages:
        lang.language_name = isoLanguages.get_language_name(get_locale(), lang.lang_code)
    VAR_16 = calibre_db.order_authors(VAR_16)
    VAR_53 = []
    for VAR_37 in VAR_16.authors:
        VAR_53.append(VAR_37.name.replace('|', ','))
    VAR_54 = list()  # formats a conversion can start from
    VAR_55 = list()  # formats a conversion can produce
    VAR_56 = None    # kepubify possible for this book?
    if config.config_converterpath:
        for file in VAR_16.data:
            if file.format.lower() in constants.EXTENSIONS_CONVERT_FROM:
                VAR_54.append(file.format.lower())
    if config.config_kepubifypath and 'epub' in [file.format.lower() for file in VAR_16.data]:
        VAR_56 = True
        if not config.config_converterpath:
            VAR_54.append('epub')
    if config.config_converterpath:
        VAR_55 = constants.EXTENSIONS_CONVERT_TO[:]
        # don't offer formats the book already has
        for file in VAR_16.data:
            if file.format.lower() in VAR_55:
                VAR_55.remove(file.format.lower())
    if VAR_56:
        VAR_55.append('kepub')
    return render_title_template('book_edit.html', VAR_16=VAR_16, VAR_115=VAR_53, VAR_52=VAR_52,
                                 VAR_36=_(u"edit metadata"), page="editbook",
                                 conversion_formats=VAR_55,
                                 config=config,
                                 source_formats=VAR_54)
def FUNC_15(VAR_19, VAR_16):
    # Sync the book's (single) rating from the form value; the stored value is
    # int(float(form) * 2) — presumably a half-star form scale, TODO confirm.
    # Returns True when the rating changed.
    VAR_48 = False
    if VAR_19["rating"].strip():
        VAR_84 = False
        if len(VAR_16.ratings) > 0:
            VAR_84 = VAR_16.ratings[0].rating
        VAR_85 = int(float(VAR_19["rating"]) * 2)
        if VAR_85 != VAR_84:
            VAR_48 = True
            # reuse an existing Ratings row with the same value when possible
            VAR_104 = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating == VAR_85).first()
            if VAR_104:
                VAR_16.ratings.append(VAR_104)
            else:
                VAR_117 = db.Ratings(rating=VAR_85)
                VAR_16.ratings.append(VAR_117)
            if VAR_84:
                # drop the previous rating link (new one was appended above)
                VAR_16.ratings.remove(VAR_16.ratings[0])
    else:
        # empty form field clears any existing rating
        if len(VAR_16.ratings) > 0:
            VAR_16.ratings.remove(VAR_16.ratings[0])
            VAR_48 = True
    return VAR_48
def FUNC_16(VAR_20, VAR_16):
    """Split, trim and de-duplicate the comma-separated tag string, then sync
    the book's tags. Returns True on any change."""
    VAR_57 = helper.uniq([raw.strip() for raw in VAR_20.split(',')])
    return FUNC_7(VAR_57, VAR_16.tags, db.Tags, calibre_db.session, 'tags')
def FUNC_17(VAR_21, VAR_16):
    """Sync the (single) series value; a blank input clears the series."""
    trimmed = VAR_21.strip()
    VAR_58 = [trimmed] if trimmed else []
    return FUNC_7(VAR_58, VAR_16.series, db.Series, calibre_db.session, 'series')
def FUNC_18(VAR_22, VAR_16):
    """Validate and store the series index; empty input defaults to '1' and
    non-numeric input is rejected with a warning flash.

    Returns True when the stored value actually changed.
    """
    candidate = VAR_22 or '1'
    if not candidate.replace('.', '', 1).isdigit():
        flash(_("%(seriesindex)s is not a valid number, skipping", seriesindex=candidate), category="warning")
        return False
    if VAR_16.series_index == candidate:
        return False
    VAR_16.series_index = candidate
    return True
def FUNC_19(VAR_23, VAR_16):
    """Sanitize (clean_html) and store the book description. Returns True on change.

    Fix: the sanitized text was assigned to the unrelated name `comments`, so
    the dirty-check compared the stored text against the raw, unsanitized
    input; sanitize the input once up front and use it consistently.
    """
    VAR_38 = False
    if VAR_23:
        VAR_23 = clean_html(VAR_23)
    if len(VAR_16.comments):
        if VAR_16.comments[0].text != VAR_23:
            VAR_16.comments[0].text = VAR_23
            VAR_38 = True
    else:
        if VAR_23:
            VAR_16.comments.append(db.Comments(text=VAR_23, VAR_16=VAR_16.id))
            VAR_38 = True
    return VAR_38
def FUNC_20(VAR_24, VAR_16, VAR_25=False, VAR_26=None):
    # Sync the book's languages from a comma-separated string.
    # VAR_25: upload mode (uses the permissive code lookup and may substitute
    # the user's filter language); VAR_26: optional list that collects invalid
    # entries instead of flashing them to the UI.
    VAR_59 = VAR_24.split(',')
    VAR_60 = []
    if not VAR_25:
        VAR_61 = isoLanguages.get_language_codes(get_locale(), VAR_59, VAR_60)
    else:
        VAR_61 = isoLanguages.get_valid_language_codes(get_locale(), VAR_59, VAR_60)
    for l in VAR_60:
        VAR_1.error('%s is not a valid language', l)
        if isinstance(VAR_26, list):
            VAR_26.append(l)
        else:
            flash(_(u"%(langname)s is not a valid language", langname=l), category="warning")
    # NOTE(review): in upload mode, a single language differing from the user's
    # filter language is replaced by the filter language — confirm intended.
    if VAR_25 and len(VAR_61) == 1:
        if VAR_61[0] != current_user.filter_language() and current_user.filter_language() != "all":
            VAR_61[0] = calibre_db.session.query(db.Languages). \
                filter(db.Languages.lang_code == current_user.filter_language()).first().lang_code
    VAR_61 = helper.uniq(VAR_61)
    return FUNC_7(VAR_61, VAR_16.languages, db.Languages, calibre_db.session, 'languages')
def FUNC_21(VAR_27, VAR_16):
    """Sync the (single) publisher; blank input removes any existing publisher.

    Returns True on any change."""
    VAR_48 = False
    if VAR_27:
        VAR_86 = VAR_27.rstrip().strip()
        if not VAR_16.publishers or VAR_86 != VAR_16.publishers[0].name:
            VAR_48 |= FUNC_7([VAR_86], VAR_16.publishers, db.Publishers, calibre_db.session,
                             'publisher')
    elif len(VAR_16.publishers):
        VAR_48 |= FUNC_7([], VAR_16.publishers, db.Publishers, calibre_db.session, 'publisher')
    return VAR_48
def FUNC_22(VAR_14, VAR_16, VAR_28, VAR_19, VAR_29, VAR_30):
    # Store a single-value custom column of a "plain" datatype (int/bool/float/
    # datetime/comments). VAR_29 is the current stored value, VAR_30 the
    # "custom_column_<id>" attribute name. Returns (changed, form dict).
    VAR_48 = False
    # normalise the raw form string into the column's native value
    if VAR_19[VAR_30] == 'None':
        VAR_19[VAR_30] = None
    elif VAR_28.datatype == 'bool':
        VAR_19[VAR_30] = 1 if VAR_19[VAR_30] == 'True' else 0
    elif VAR_28.datatype == 'comments':
        VAR_19[VAR_30] = Markup(VAR_19[VAR_30]).unescape()
        if VAR_19[VAR_30]:
            VAR_19[VAR_30] = clean_html(VAR_19[VAR_30])
    elif VAR_28.datatype == 'datetime':
        try:
            VAR_19[VAR_30] = datetime.strptime(VAR_19[VAR_30], "%Y-%m-%d")
        except ValueError:
            VAR_19[VAR_30] = db.Books.DEFAULT_PUBDATE
    if VAR_19[VAR_30] != VAR_29:
        if VAR_29 is not None:
            if VAR_19[VAR_30] is not None:
                # update the existing row in place
                setattr(getattr(VAR_16, VAR_30)[0], 'value', VAR_19[VAR_30])
                VAR_48 = True
            else:
                # new value is None: drop the existing row
                VAR_105 = getattr(VAR_16, VAR_30)[0]
                getattr(VAR_16, VAR_30).remove(VAR_105)
                calibre_db.session.delete(VAR_105)
                VAR_48 = True
        else:
            # no previous value: create a fresh row for this book
            VAR_87 = db.cc_classes[VAR_28.id]
            VAR_88 = VAR_87(value=VAR_19[VAR_30], VAR_16=VAR_14)
            calibre_db.session.add(VAR_88)
            VAR_48 = True
    return VAR_48, VAR_19
def FUNC_23(VAR_16, VAR_28, VAR_19, VAR_29, VAR_30):
    # Store a single-value custom column whose rows are shared between books
    # (e.g. rating/enumeration): unlink the old row (deleting it if orphaned),
    # then find-or-create the row for the new value and link it.
    # Returns (changed, form dict).
    VAR_48 = False
    if VAR_28.datatype == 'rating':
        # form rating is scaled by 2 before storage — TODO confirm form scale
        VAR_19[VAR_30] = str(int(float(VAR_19[VAR_30]) * 2))
    if VAR_19[VAR_30].strip() != VAR_29:
        if VAR_29 is not None:
            VAR_105 = getattr(VAR_16, VAR_30)[0]
            getattr(VAR_16, VAR_30).remove(VAR_105)
            if len(VAR_105.books) == 0:
                calibre_db.session.delete(VAR_105)
                VAR_48 = True
        VAR_87 = db.cc_classes[VAR_28.id]
        VAR_88 = calibre_db.session.query(VAR_87).filter(
            VAR_87.value == VAR_19[VAR_30].strip()).first()
        if VAR_88 is None:
            VAR_88 = VAR_87(value=VAR_19[VAR_30].strip())
            calibre_db.session.add(VAR_88)
            VAR_48 = True
            # flush so the fresh row can be re-queried below
            calibre_db.session.flush()
            VAR_88 = calibre_db.session.query(VAR_87).filter(
                VAR_87.value == VAR_19[VAR_30].strip()).first()
        getattr(VAR_16, VAR_30).append(VAR_88)
    return VAR_48, VAR_19
def FUNC_24(VAR_14, VAR_16, VAR_19):
    # Sync all custom-column values of a book from the edit form VAR_19.
    # Returns a changed flag.
    VAR_48 = False
    VAR_52 = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for VAR_28 in VAR_52:
        VAR_30 = "custom_column_" + str(VAR_28.id)
        if not VAR_28.is_multiple:
            if len(getattr(VAR_16, VAR_30)) > 0:
                VAR_29 = getattr(VAR_16, VAR_30)[0].value
            else:
                VAR_29 = None
            if VAR_19[VAR_30].strip():
                # NOTE(review): VAR_48 is overwritten (not OR-ed) by these two
                # calls, so an earlier column's change can be lost — confirm.
                if VAR_28.datatype in ['int', 'bool', 'float', "datetime", "comments"]:
                    VAR_48, VAR_19 = FUNC_22(VAR_14, VAR_16, VAR_28, VAR_19, VAR_29, VAR_30)
                else:
                    VAR_48, VAR_19 = FUNC_23(VAR_16, VAR_28, VAR_19, VAR_29, VAR_30)
            else:
                # empty form field: unlink (and delete if orphaned) the old value
                if VAR_29 is not None:
                    VAR_105 = getattr(VAR_16, VAR_30)[0]
                    getattr(VAR_16, VAR_30).remove(VAR_105)
                    if not VAR_105.books or len(VAR_105.books) == 0:
                        calibre_db.session.delete(VAR_105)
                        VAR_48 = True
        else:
            # multi-value column: sync like tags
            VAR_57 = VAR_19[VAR_30].split(',')
            VAR_57 = list(map(lambda it: it.strip(), VAR_57))
            VAR_48 |= FUNC_7(VAR_57,
                             getattr(VAR_16, VAR_30),
                             db.cc_classes[VAR_28.id],
                             calibre_db.session,
                             'custom')
    return VAR_48
def FUNC_25(VAR_31, VAR_16, VAR_14):
    # Handle an optional additional-format upload from the edit form: validate
    # the extension, save the file into the book's library folder, register a
    # db.Data row (unless the format exists) and return the uploader's metadata.
    # Returns None implicitly when no file was submitted or a redirect on error.
    if 'btn-VAR_25-format' in VAR_31.files:
        VAR_39 = VAR_31.files['btn-VAR_25-format']
        if VAR_39.filename != '':
            if not current_user.role_upload():
                abort(403)
            if '.' in VAR_39.filename:
                VAR_96 = VAR_39.filename.rsplit('.', 1)[-1].lower()
                if VAR_96 not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
                    flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=VAR_96),
                          category="error")
                    return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
            else:
                flash(_('File to be uploaded must have an extension'), category="error")
                return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
            # target path: <library>/<book.path>/<title stem>.<ext>
            VAR_106 = VAR_16.path.rsplit('/', 1)[-1]
            VAR_107 = os.path.normpath(os.path.join(config.config_calibre_dir, VAR_16.path))
            VAR_108 = os.path.join(VAR_107, VAR_106 + '.' + VAR_96)
            if not os.path.exists(VAR_107):
                try:
                    os.makedirs(VAR_107)
                except OSError:
                    flash(_(u"Failed to create VAR_72 %(path)s (Permission denied).", VAR_72=VAR_107), category="error")
                    return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
            try:
                VAR_39.save(VAR_108)
            except OSError:
                flash(_(u"Failed to store file %(file)s.", file=VAR_108), category="error")
                return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
            VAR_73 = os.path.getsize(VAR_108)
            VAR_109 = calibre_db.get_book_format(VAR_14, VAR_96.upper())
            if VAR_109:
                # format already registered: keep the db untouched, file was overwritten
                VAR_1.warning('Book format %s already existing', VAR_96.upper())
            else:
                try:
                    VAR_119 = db.Data(VAR_14, VAR_96.upper(), VAR_73, VAR_106)
                    calibre_db.session.add(VAR_119)
                    calibre_db.session.commit()
                    calibre_db.update_title_sort(config)
                except (OperationalError, IntegrityError) as e:
                    calibre_db.session.rollback()
                    VAR_1.error('Database VAR_49: %s', e)
                    flash(_(u"Database VAR_49: %(VAR_49)s.", VAR_49=e), category="error")
                    return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
            VAR_110=_(u"File format %(ext)s added to %(VAR_16)s", ext=VAR_96.upper(), VAR_16=VAR_16.title)
            WorkerThread.add(current_user.name, TaskUpload(
                "<a href=\"" + url_for('web.show_book', VAR_14=VAR_16.id) + "\">" + VAR_110 + "</a>"))
            # hand the saved file to the uploader to (re)extract metadata
            return uploader.process(
                VAR_108, *os.path.splitext(VAR_39.filename),
                rarExecutable=config.config_rarfile_location)
def FUNC_26(VAR_31, VAR_16):
    """Handle an optional cover upload from the edit form.

    Returns True on success, False on save failure (with a flashed error),
    None when no cover file was submitted. Aborts 403 without upload rights.
    """
    if 'btn-VAR_25-cover' not in VAR_31.files:
        return None
    VAR_39 = VAR_31.files['btn-VAR_25-cover']
    if VAR_39.filename == '':
        return None
    if not current_user.role_upload():
        abort(403)
    VAR_80, VAR_111 = helper.save_cover(VAR_39, VAR_16.path)
    if VAR_80 is True:
        return True
    flash(VAR_111, category="error")
    return False
def FUNC_27(VAR_16, VAR_32):
    """Store a cleaned-up title on the book; an empty title becomes 'Unknown'.

    Returns True when the title actually changed.

    Fix: the function trimmed the undefined name `book_title` instead of its
    own parameter VAR_32, raising NameError on every call.
    """
    VAR_32 = VAR_32.rstrip().strip()
    if VAR_16.title != VAR_32:
        if VAR_32 == '':
            VAR_32 = _(u'Unknown')
        VAR_16.title = VAR_32
        return True
    return False
def FUNC_28(VAR_16, VAR_33, VAR_34=True):
VAR_62 = VAR_33.split('&')
VAR_62 = list(map(lambda it: it.strip().replace(',', '|'), VAR_62))
VAR_62 = helper.uniq(VAR_62)
if VAR_62 == ['']:
VAR_62 = [_(u'Unknown')] # prevent empty Author
VAR_63 = FUNC_7(VAR_62, VAR_16.authors, db.Authors, calibre_db.session, 'author')
VAR_64 = list()
for inp in VAR_62:
VAR_89 = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
if not VAR_89:
VAR_89 = helper.get_sorted_author(inp)
else:
VAR_89 = VAR_89.sort
VAR_64.append(helper.get_sorted_author(VAR_89))
VAR_65 = ' & '.join(VAR_64)
if VAR_16.author_sort != VAR_65 and VAR_34:
VAR_16.author_sort = VAR_65
VAR_63 = True
return VAR_62, VAR_63
@VAR_0.route("/admin/VAR_16/<int:VAR_14>", methods=['GET', 'POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_29(VAR_14):
VAR_38 = False
try:
calibre_db.update_title_sort(config)
except sqliteOperationalError as e:
VAR_1.debug_or_exception(e)
calibre_db.session.rollback()
if VAR_31.method != 'POST':
return FUNC_14(VAR_14)
VAR_16 = calibre_db.get_filtered_book(VAR_14, allow_show_archived=True)
if not VAR_16:
flash(_(u"Oops! Selected VAR_16 VAR_36 is unavailable. File does not exist or is not accessible"), category="error")
return redirect(url_for("web.index"))
VAR_35 = FUNC_25(VAR_31, VAR_16, VAR_14)
if FUNC_26(VAR_31, VAR_16) is True:
VAR_16.has_cover = 1
VAR_38 = True
try:
VAR_19 = VAR_31.form.to_dict()
FUNC_30(VAR_19, VAR_35)
VAR_90 = None
VAR_91 = FUNC_27(VAR_16, VAR_19["book_title"])
VAR_62, VAR_92 = FUNC_28(VAR_16, VAR_19["author_name"])
if VAR_92 or VAR_91:
VAR_90 = VAR_16.id
VAR_38 = True
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
VAR_49 = False
if VAR_90:
VAR_49 = helper.update_dir_stucture(VAR_90, config.config_calibre_dir, VAR_62[0])
if not VAR_49:
if "cover_url" in VAR_19:
if VAR_19["cover_url"]:
if not current_user.role_upload():
return "", (403)
if VAR_19["cover_url"].endswith('/static/generic_cover.jpg'):
VAR_16.has_cover = 0
else:
VAR_68, VAR_49 = helper.save_cover_from_url(VAR_19["cover_url"], VAR_16.path)
if VAR_68 is True:
VAR_16.has_cover = 1
VAR_38 = True
else:
flash(VAR_49, category="error")
VAR_38 |= FUNC_18(VAR_19["series_index"], VAR_16)
VAR_38 |= FUNC_19(Markup(VAR_19['description']).unescape(), VAR_16)
VAR_12 = FUNC_31(VAR_19, VAR_16)
VAR_112, VAR_18 = FUNC_8(VAR_12, VAR_16.identifiers, calibre_db.session)
if VAR_18:
flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
VAR_38 |= VAR_112
VAR_38 |= FUNC_16(VAR_19['tags'], VAR_16)
VAR_38 |= FUNC_17(VAR_19["series"], VAR_16)
VAR_38 |= FUNC_21(VAR_19['publisher'], VAR_16)
VAR_38 |= FUNC_20(VAR_19['languages'], VAR_16)
VAR_38 |= FUNC_15(VAR_19, VAR_16)
VAR_38 |= FUNC_24(VAR_14, VAR_16, VAR_19)
if VAR_19["pubdate"]:
try:
VAR_16.pubdate = datetime.strptime(VAR_19["pubdate"], "%Y-%m-%d")
except ValueError:
VAR_16.pubdate = db.Books.DEFAULT_PUBDATE
else:
VAR_16.pubdate = db.Books.DEFAULT_PUBDATE
if VAR_38:
VAR_16.last_modified = datetime.utcnow()
calibre_db.session.merge(VAR_16)
calibre_db.session.commit()
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
if "detail_view" in VAR_19:
return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
else:
flash(_("Metadata successfully updated"), category="success")
return FUNC_14(VAR_14)
else:
calibre_db.session.rollback()
flash(VAR_49, category="error")
return FUNC_14(VAR_14)
except Exception as ex:
VAR_1.debug_or_exception(ex)
calibre_db.session.rollback()
flash(_("Error editing VAR_16, please check logfile for details"), category="error")
return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
def FUNC_30(VAR_19, VAR_35):
if VAR_19['author_name'] == _(u'Unknown'):
VAR_19['author_name'] = ''
if VAR_19['book_title'] == _(u'Unknown'):
VAR_19['book_title'] = ''
for VAR_93, m_field in [
('tags', 'tags'), ('author_name', 'author'), ('series', 'series'),
('series_index', 'series_id'), ('languages', 'languages'),
('book_title', 'title')]:
VAR_19[VAR_93] = VAR_19[VAR_93] or getattr(VAR_35, m_field, '')
VAR_19["description"] = VAR_19["description"] or Markup(
getattr(VAR_35, 'description', '')).unescape()
def FUNC_31(VAR_19, VAR_16):
VAR_66 = 'identifier-type-'
VAR_67 = 'identifier-val-'
VAR_68 = []
for type_key, type_value in VAR_19.items():
if not type_key.startswith(VAR_66):
continue
VAR_94 = VAR_67 + type_key[len(VAR_66):]
if VAR_94 not in VAR_19.keys():
continue
VAR_68.append(db.Identifiers(VAR_19[VAR_94], type_value, VAR_16.id))
return VAR_68
def FUNC_32(VAR_36, VAR_37):
if VAR_36 != _(u'Unknown') and VAR_37 != _(u'Unknown'):
VAR_95 = calibre_db.check_exists_book(VAR_37, VAR_36)
if VAR_95:
VAR_1.info("Uploaded VAR_16 probably exists in library")
flash(_(u"Uploaded VAR_16 probably exists in the library, consider to VAR_63 before VAR_25 new: ")
+ Markup(render_title_template('book_exists_flash.html', VAR_95=entry)), category="warning")
VAR_62 = VAR_37.split('&')
VAR_62 = list(map(lambda it: it.strip().replace(',', '|'), VAR_62))
VAR_62 = helper.uniq(VAR_62)
if VAR_62 == ['']:
VAR_62 = [_(u'Unknown')] # prevent empty Author
VAR_64 = list()
VAR_69 = None
for inp in VAR_62:
VAR_89 = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
if not VAR_89:
if not VAR_69:
db_author = db.Authors(inp, helper.get_sorted_author(inp), "")
calibre_db.session.add(VAR_69)
calibre_db.session.commit()
VAR_113 = helper.get_sorted_author(inp)
else:
if not VAR_69:
db_author = VAR_89
VAR_113 = VAR_89.sort
VAR_64.append(VAR_113)
VAR_65 = ' & '.join(VAR_64)
return VAR_65, VAR_62, VAR_69
def FUNC_33(VAR_38, VAR_35):
VAR_36 = VAR_35.title
VAR_37 = VAR_35.author
VAR_65, VAR_62, VAR_69 = FUNC_32(VAR_36, VAR_37)
VAR_70 = helper.get_valid_filename(VAR_36)
VAR_71 = helper.get_valid_filename(VAR_69.name)
VAR_72 = os.path.join(VAR_71, VAR_70).replace('\\', '/')
VAR_40 = db.Books(VAR_36, "", VAR_65, datetime.utcnow(), datetime(101, 1, 1),
'1', datetime.utcnow(), VAR_72, VAR_35.cover, VAR_69, [], "")
VAR_38 |= FUNC_7(VAR_62, VAR_40.authors, db.Authors, calibre_db.session,
'author')
VAR_38 |= FUNC_18(VAR_35.series_id, VAR_40)
VAR_38 |= FUNC_20(VAR_35.languages, VAR_40, VAR_25=True)
VAR_38 |= FUNC_16(VAR_35.tags, VAR_40)
VAR_38 |= FUNC_21(VAR_35.publisher, VAR_40)
VAR_38 |= FUNC_17(VAR_35.series, VAR_40)
VAR_73 = os.path.getsize(VAR_35.file_path)
VAR_74 = db.Data(VAR_40, VAR_35.extension.upper()[1:], VAR_73, VAR_70)
VAR_40.data.append(VAR_74)
calibre_db.session.add(VAR_40)
calibre_db.session.flush()
return VAR_40, VAR_62, VAR_70
def FUNC_34(VAR_39):
if '.' in VAR_39.filename:
VAR_96 = VAR_39.filename.rsplit('.', 1)[-1].lower()
if VAR_96 not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
flash(
_("File extension '%(ext)s' is not allowed to be uploaded to this server",
ext=VAR_96), category="error")
return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
else:
flash(_('File to be uploaded must have an extension'), category="error")
return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
try:
VAR_35 = uploader.upload(VAR_39, config.config_rarfile_location)
except (IOError, OSError):
VAR_1.error("File %s could not saved to temp dir", VAR_39.filename)
flash(_(u"File %(filename)s could not saved to temp dir",
filename=VAR_39.filename), category="error")
return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
return VAR_35, None
def FUNC_35(VAR_35, VAR_40):
if VAR_35.cover:
VAR_97 = VAR_35.cover
else:
VAR_97 = os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')
VAR_75 = os.path.join(config.config_calibre_dir, VAR_40.path, "cover.jpg")
try:
copyfile(VAR_97, VAR_75)
if VAR_35.cover:
os.unlink(VAR_35.cover)
except OSError as e:
VAR_1.error("Failed to move cover file %s: %s", VAR_75, e)
flash(_(u"Failed to Move Cover File %(file)s: %(VAR_49)s", file=VAR_75,
VAR_49=e),
category="error")
@VAR_0.route("/upload", methods=["GET", "POST"])
@login_required_if_no_ano
@FUNC_0
def VAR_25():
if not config.config_uploading:
abort(404)
if VAR_31.method == 'POST' and 'btn-upload' in VAR_31.files:
for VAR_39 in VAR_31.files.getlist("btn-upload"):
try:
VAR_38 = False
calibre_db.update_title_sort(config)
calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
VAR_35, VAR_49 = FUNC_34(VAR_39)
if VAR_49:
return VAR_49
VAR_40, VAR_62, VAR_70 = FUNC_33(VAR_38, VAR_35)
VAR_38 |= FUNC_19(Markup(VAR_35.description).unescape(), VAR_40)
VAR_14 = VAR_40.id
VAR_36 = VAR_40.title
VAR_49 = helper.update_dir_structure_file(VAR_14,
config.config_calibre_dir,
VAR_62[0],
VAR_35.file_path,
VAR_70 + VAR_35.extension.lower())
FUNC_35(VAR_35, VAR_40)
calibre_db.session.commit()
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
if VAR_49:
flash(VAR_49, category="error")
VAR_110=_(u"File %(file)s uploaded", file=VAR_36)
WorkerThread.add(current_user.name, TaskUpload(
"<a href=\"" + url_for('web.show_book', VAR_14=book_id) + "\">" + VAR_110 + "</a>"))
if len(VAR_31.files.getlist("btn-upload")) < 2:
if current_user.role_edit() or current_user.role_admin():
VAR_120 = {"location": url_for('editbook.edit_book', VAR_14=book_id)}
return Response(json.dumps(VAR_120), mimetype='application/json')
else:
VAR_120 = {"location": url_for('web.show_book', VAR_14=book_id)}
return Response(json.dumps(VAR_120), mimetype='application/json')
except (OperationalError, IntegrityError) as e:
calibre_db.session.rollback()
VAR_1.error("Database VAR_49: %s", e)
flash(_(u"Database VAR_49: %(VAR_49)s.", VAR_49=e), category="error")
return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
@VAR_0.route("/admin/VAR_16/convert/<int:VAR_14>", methods=['POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_37(VAR_14):
VAR_76 = VAR_31.form.get('book_format_from', None)
VAR_77 = VAR_31.form.get('book_format_to', None)
if (VAR_76 is None) or (VAR_77 is None):
flash(_(u"Source or destination format for conversion missing"), category="error")
return redirect(url_for('editbook.edit_book', VAR_14=book_id))
VAR_1.info('converting: VAR_16 id: %s from: %s to: %s', VAR_14, VAR_76, VAR_77)
VAR_78 = helper.convert_book_format(VAR_14, config.config_calibre_dir, VAR_76.upper(),
VAR_77.upper(), current_user.name)
if VAR_78 is None:
flash(_(u"Book successfully queued for converting to %(VAR_15)s",
VAR_15=VAR_77),
category="success")
else:
flash(_(u"There was an VAR_49 converting this VAR_16: %(res)s", res=VAR_78), category="error")
return redirect(url_for('editbook.edit_book', VAR_14=book_id))
@VAR_0.route("/scholarsearch/<VAR_41>",methods=['GET'])
@login_required_if_no_ano
@FUNC_1
def FUNC_38(VAR_41):
if VAR_45:
VAR_98 = scholarly.search_pubs(' '.join(VAR_41.split('+')))
VAR_99=0
VAR_68 = []
for publication in VAR_98:
del publication['source']
VAR_68.append(publication)
VAR_99+=1
if(VAR_99>=10):
break
return Response(json.dumps(VAR_68),mimetype='application/json')
else:
return "[]"
@VAR_0.route("/ajax/editbooks/<VAR_42>", methods=['POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_39(VAR_42):
VAR_79 = VAR_31.form.to_dict()
VAR_16 = calibre_db.get_book(VAR_79['pk'])
VAR_80 = ""
if VAR_42 =='series_index':
FUNC_18(VAR_79['value'], VAR_16)
VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_16.series_index}), mimetype='application/json')
elif VAR_42 =='tags':
FUNC_16(VAR_79['value'], VAR_16)
VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in VAR_16.tags])}),
mimetype='application/json')
elif VAR_42 =='series':
FUNC_17(VAR_79['value'], VAR_16)
VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in VAR_16.series])}),
mimetype='application/json')
elif VAR_42 =='publishers':
FUNC_21(VAR_79['value'], VAR_16)
VAR_80 = Response(json.dumps({'success': True,
'newValue': ', '.join([VAR_86.name for VAR_86 in VAR_16.publishers])}),
mimetype='application/json')
elif VAR_42 =='languages':
VAR_26 = list()
FUNC_20(VAR_79['value'], VAR_16, VAR_26=invalid)
if VAR_26:
VAR_80 = Response(json.dumps({'success': False,
'msg': 'Invalid VAR_24 in VAR_31: {}'.format(','.join(VAR_26))}),
mimetype='application/json')
else:
VAR_121 = list()
for lang in VAR_16.languages:
try:
VAR_121.append(LC.parse(lang.lang_code).get_language_name(get_locale()))
except UnknownLocaleError:
VAR_121.append(_(isoLanguages.get(part3=lang.lang_code).name))
VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join(VAR_121)}),
mimetype='application/json')
elif VAR_42 =='author_sort':
VAR_16.author_sort = VAR_79['value']
VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_16.author_sort}),
mimetype='application/json')
elif VAR_42 == 'title':
VAR_124 = VAR_16.sort
FUNC_27(VAR_16, VAR_79.get('value', ""))
helper.update_dir_stucture(VAR_16.id, config.config_calibre_dir)
VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_16.title}),
mimetype='application/json')
elif VAR_42 =='sort':
VAR_16.sort = VAR_79['value']
VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_16.sort}),
mimetype='application/json')
elif VAR_42 =='authors':
VAR_62, VAR_125 = FUNC_28(VAR_16, VAR_79['value'], VAR_79.get('checkA', None) == "true")
helper.update_dir_stucture(VAR_16.id, config.config_calibre_dir, VAR_62[0])
VAR_80 = Response(json.dumps({'success': True,
'newValue': ' & '.join([author.replace('|',',') for author in VAR_62])}),
mimetype='application/json')
VAR_16.last_modified = datetime.utcnow()
try:
calibre_db.session.commit()
if VAR_42 == 'title' and VAR_79.get('checkT') == "false":
VAR_16.sort = VAR_124
calibre_db.session.commit()
except (OperationalError, IntegrityError) as e:
calibre_db.session.rollback()
VAR_1.error("Database VAR_49: %s", e)
return VAR_80
@VAR_0.route("/ajax/sort_value/<VAR_43>/<int:VAR_44>")
@login_required
def FUNC_40(VAR_43, VAR_44):
if VAR_43 in ['title', 'authors', 'sort', 'author_sort']:
VAR_16 = calibre_db.get_filtered_book(VAR_44)
if VAR_16:
if VAR_43 == 'title':
return json.dumps({'sort': VAR_16.sort})
elif VAR_43 == 'authors':
return json.dumps({'author_sort': VAR_16.author_sort})
if VAR_43 == 'sort':
return json.dumps({'sort': VAR_16.title})
if VAR_43 == 'author_sort':
return json.dumps({'author_sort': VAR_16.author})
return ""
@VAR_0.route("/ajax/simulatemerge", methods=['POST'])
@login_required
@FUNC_1
def FUNC_41():
VAR_79 = VAR_31.get_json().get('Merge_books')
if VAR_79:
VAR_100 = calibre_db.get_book(VAR_79[0]).title
VAR_79.pop(0)
if VAR_100:
for VAR_14 in VAR_79:
VAR_118 = []
VAR_118.append(calibre_db.get_book(VAR_14).title)
return json.dumps({'to': VAR_100, 'from': VAR_118})
return ""
@VAR_0.route("/ajax/mergebooks", methods=['POST'])
@login_required
@FUNC_1
def FUNC_42():
VAR_79 = VAR_31.get_json().get('Merge_books')
VAR_81 = list()
if VAR_79:
VAR_100 = calibre_db.get_book(VAR_79[0])
VAR_79.pop(0)
if VAR_100:
for file in VAR_100.data:
VAR_81.append(file.format)
VAR_114 = helper.get_valid_filename(VAR_100.title) + ' - ' + \
helper.get_valid_filename(VAR_100.authors[0].name)
for VAR_14 in VAR_79:
VAR_118 = calibre_db.get_book(VAR_14)
if VAR_118:
for element in VAR_118.data:
if element.format not in VAR_81:
VAR_122 = os.path.normpath(os.path.join(config.config_calibre_dir,
VAR_100.path,
VAR_114 + "." + element.format.lower()))
VAR_123 = os.path.normpath(os.path.join(config.config_calibre_dir,
VAR_118.path,
element.name + "." + element.format.lower()))
copyfile(VAR_123, VAR_122)
VAR_100.data.append(db.Data(VAR_100.id,
element.format,
element.uncompressed_size,
VAR_114))
FUNC_13(VAR_118.id,"", True)
return json.dumps({'success': True})
return ""
@VAR_0.route("/ajax/xchange", methods=['POST'])
@login_required
@FUNC_1
def FUNC_43():
VAR_79 = VAR_31.get_json().get('xchange')
if VAR_79:
for val in VAR_79:
VAR_38 = False
VAR_16 = calibre_db.get_book(val)
VAR_115 = VAR_16.title
VAR_116 = calibre_db.order_authors(VAR_16)
VAR_53 = []
for VAR_37 in VAR_116.authors:
VAR_53.append(VAR_37.name.replace('|', ','))
VAR_91 = FUNC_27(VAR_16, " ".join(VAR_53))
VAR_62, VAR_92 = FUNC_28(VAR_16, VAR_115)
if VAR_92 or VAR_91:
VAR_90 = VAR_16.id
VAR_38 = True
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
if VAR_90:
helper.update_dir_stucture(VAR_90, config.config_calibre_dir, VAR_62[0])
if VAR_38:
VAR_16.last_modified = datetime.utcnow()
try:
calibre_db.session.commit()
except (OperationalError, IntegrityError) as e:
calibre_db.session.rollback()
VAR_1.error("Database VAR_49: %s", e)
return json.dumps({'success': False})
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
return json.dumps({'success': True})
return ""
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
29,
30,
37,
38,
53,
58,
59,
62,
63,
70,
72,
79,
81,
94,
97,
101,
102,
120,
121,
131,
141,
143,
144,
162,
166,
167,
190,
191,
192,
193,
195,
199,
200,
202,
204,
206,
210,
211,
222,
231,
237,
242,
243,
249,
250,
252,
257,
258,
259,
265,
295,
296,
316,
317,
359,
361,
364,
365,
372,
375,
377,
381,
382,
394,
395,
408,
409,
432,
436,
439,
440,
445,
446,
448,
458,
459,
471,
472,
486,
488,
489,
493,
496,
497,
508,
509,
523,
540,
541,
548,
557,
565,
568,
569,
587,
602,
604,
607,
620,
624,
625,
637,
640,
641,
655,
656,
660,
664,
665,
669,
680,
681,
683,
691,
692,
694,
697,
699,
702,
704,
705,
706,
720,
721,
727,
728,
734,
735,
738,
740,
741,
745,
753,
755,
756,
758,
763,
766,
770,
785,
786,
788,
790,
796,
798,
800,
802,
804,
806,
808,
816,
837,
838,
851,
852,
866,
867,
875,
876,
878,
880,
882,
883,
886,
904,
905,
910,
913,
914,
916,
917,
920,
923,
924,
926,
927,
929,
930,
932,
933,
935,
936,
938,
939,
944,
945,
948,
950,
961,
962,
971,
972,
974,
989,
990,
1001,
1004,
1008,
1010,
1011,
1013,
1016,
1022,
1024,
1025,
1027,
1035,
1048,
1053,
1056,
1060,
1064,
1072,
1090,
1153,
1161,
1162,
1178,
1179,
1194,
1195,
1203,
1216,
1231,
1246,
1252,
1255,
1266,
1271,
213,
214,
215,
854
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
30,
31,
37,
52,
57,
58,
59,
60,
63,
64,
71,
73,
80,
82,
95,
98,
102,
103,
121,
122,
132,
142,
144,
145,
163,
167,
168,
191,
192,
193,
194,
196,
200,
201,
203,
205,
207,
211,
212,
223,
232,
238,
243,
244,
250,
251,
253,
258,
259,
260,
266,
296,
297,
317,
318,
360,
362,
365,
366,
373,
376,
378,
382,
383,
395,
396,
409,
410,
433,
437,
440,
441,
446,
447,
449,
459,
460,
474,
475,
489,
491,
492,
496,
499,
500,
511,
512,
528,
545,
546,
553,
562,
570,
573,
574,
592,
607,
609,
612,
625,
629,
630,
642,
645,
646,
660,
661,
665,
669,
670,
674,
685,
686,
688,
696,
697,
699,
702,
704,
707,
709,
710,
711,
725,
726,
732,
733,
739,
740,
743,
745,
746,
750,
758,
760,
761,
763,
768,
771,
775,
790,
791,
793,
795,
801,
803,
805,
807,
809,
811,
813,
821,
842,
843,
856,
857,
871,
872,
880,
881,
883,
885,
887,
888,
891,
909,
910,
915,
918,
919,
921,
922,
925,
928,
929,
931,
932,
934,
935,
937,
938,
940,
941,
943,
944,
949,
950,
953,
955,
966,
967,
976,
977,
979,
994,
995,
1006,
1009,
1013,
1015,
1016,
1018,
1021,
1027,
1029,
1030,
1032,
1040,
1053,
1058,
1061,
1065,
1069,
1077,
1095,
1158,
1166,
1167,
1183,
1184,
1199,
1200,
1208,
1221,
1236,
1251,
1257,
1260,
1271,
1276,
214,
215,
216,
859
] |
0CWE-22
| import collections
import os
import re
import lxml
def englishFromList(items, conjunction="or"):
# Format a list of strings into an English list.
items = list(items)
if len(items) == 1:
return items[0]
if len(items) == 2:
return "{0} {2} {1}".format(items[0], items[1], conjunction)
return "{0}, {2} {1}".format(", ".join(items[:-1]), items[-1], conjunction)
def intersperse(iterable, delimiter):
it = iter(iterable)
yield next(it)
for x in it:
yield delimiter
yield x
def processTextNodes(nodes, regex, replacer):
"""
Takes an array of alternating text/objects,
and runs reSubObject on the text parts,
splicing them into the passed-in array.
Mutates!
"""
for i, node in enumerate(nodes):
# Node list always alternates between text and elements
if i % 2 == 0:
nodes[i : i + 1] = reSubObject(regex, node, replacer)
return nodes
def reSubObject(pattern, string, repl=None):
"""
like re.sub, but replacements don't have to be text;
returns an array of alternating unmatched text and match objects instead.
If repl is specified, it's called with each match object,
and the result then shows up in the array instead.
"""
lastEnd = 0
pieces = []
for match in pattern.finditer(string):
pieces.append(string[lastEnd : match.start()])
if repl:
pieces.append(repl(match))
else:
pieces.append(match)
lastEnd = match.end()
pieces.append(string[lastEnd:])
return pieces
def simplifyText(text):
# Remove anything that's not a name character.
text = text.strip().lower()
# I convert ( to - so foo(bar) becomes foo-bar,
# but then I have to remove () because there's nothing to separate,
# otherwise I get a double-dash in some cases.
text = re.sub(r"\(\)", "", text)
text = re.sub(r"[\s/(,]+", "-", text)
text = re.sub(r"[^a-z0-9_-]", "", text)
text = text.rstrip("-")
return text
def linkTextsFromElement(el):
from ..h import find, textContent
if el.get("data-lt") == "":
return []
elif el.get("data-lt"):
rawText = el.get("data-lt")
if rawText in ["|", "||", "|||"]:
texts = [rawText]
else:
texts = [x.strip() for x in rawText.split("|")]
else:
if el.tag in ("dfn", "a"):
texts = [textContent(el).strip()]
elif el.tag in ("h2", "h3", "h4", "h5", "h6"):
texts = [textContent(find(".content", el)).strip()]
if el.get("data-local-lt"):
localTexts = [x.strip() for x in el.get("data-local-lt").split("|")]
for text in localTexts:
if text in texts:
# lt and local-lt both specify the same thing
raise DuplicatedLinkText(text, texts + localTexts, el)
texts += localTexts
texts = [re.sub(r"\s+", " ", x) for x in texts if x != ""]
return texts
class DuplicatedLinkText(Exception):
def __init__(self, offendingText, allTexts, el):
super().__init__()
self.offendingText = offendingText
self.allTexts = allTexts
self.el = el
def __unicode__(self):
return f"<Text '{self.offendingText}' shows up in both lt and local-lt>"
def firstLinkTextFromElement(el):
try:
texts = linkTextsFromElement(el)
except DuplicatedLinkText as e:
texts = e.allTexts
return texts[0] if len(texts) > 0 else None
def splitForValues(forValues):
"""
Splits a string of 1+ "for" values into an array of individual value.
Respects function args, etc.
Currently, for values are separated by commas.
"""
if forValues is None:
return None
forValues = re.sub(r"\s+", " ", forValues)
return [
value.strip()
for value in re.split(r",(?![^()]*\))", forValues)
if value.strip()
]
def groupFromKey(key, length=2):
"""Generates a filename-safe "group" from a key, of a specified length."""
if key in _groupFromKeyCache:
return _groupFromKeyCache[key]
safeChars = frozenset("abcdefghijklmnopqrstuvwxyz0123456789")
group = ""
for char in key.lower():
if len(group) == length:
_groupFromKeyCache[key] = group
return group
if char in safeChars:
group += char
else:
group = group.ljust(length, "_")
_groupFromKeyCache[key] = group
return group
_groupFromKeyCache = {}
def flatten(arr):
for el in arr:
if (
isinstance(el, collections.Iterable)
and not isinstance(el, str)
and not lxml.etree.iselement(el)
):
yield from flatten(el)
else:
yield el
def scriptPath(*pathSegs):
startPath = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
return os.path.join(startPath, *pathSegs)
def doEvery(s, action, lastTime=None):
# Takes an action every N seconds.
# Pass it the duration and the last time it took the action;
# it returns the time it last took the action
# (possibly just now).
# If you want to take action on first call,
# pass 0 as lastTime;
# otherwise it won't take action until N seconds.
import time
newTime = time.time()
if lastTime is None:
lastTime = newTime
if lastTime == 0 or newTime - lastTime > s:
action()
return newTime
return lastTime
| import collections
import os
import re
import lxml
from .. import constants
from .. import messages
def englishFromList(items, conjunction="or"):
# Format a list of strings into an English list.
items = list(items)
if len(items) == 1:
return items[0]
if len(items) == 2:
return "{0} {2} {1}".format(items[0], items[1], conjunction)
return "{0}, {2} {1}".format(", ".join(items[:-1]), items[-1], conjunction)
def intersperse(iterable, delimiter):
it = iter(iterable)
yield next(it)
for x in it:
yield delimiter
yield x
def processTextNodes(nodes, regex, replacer):
"""
Takes an array of alternating text/objects,
and runs reSubObject on the text parts,
splicing them into the passed-in array.
Mutates!
"""
for i, node in enumerate(nodes):
# Node list always alternates between text and elements
if i % 2 == 0:
nodes[i : i + 1] = reSubObject(regex, node, replacer)
return nodes
def reSubObject(pattern, string, repl=None):
"""
like re.sub, but replacements don't have to be text;
returns an array of alternating unmatched text and match objects instead.
If repl is specified, it's called with each match object,
and the result then shows up in the array instead.
"""
lastEnd = 0
pieces = []
for match in pattern.finditer(string):
pieces.append(string[lastEnd : match.start()])
if repl:
pieces.append(repl(match))
else:
pieces.append(match)
lastEnd = match.end()
pieces.append(string[lastEnd:])
return pieces
def simplifyText(text):
# Remove anything that's not a name character.
text = text.strip().lower()
# I convert ( to - so foo(bar) becomes foo-bar,
# but then I have to remove () because there's nothing to separate,
# otherwise I get a double-dash in some cases.
text = re.sub(r"\(\)", "", text)
text = re.sub(r"[\s/(,]+", "-", text)
text = re.sub(r"[^a-z0-9_-]", "", text)
text = text.rstrip("-")
return text
def linkTextsFromElement(el):
from ..h import find, textContent
if el.get("data-lt") == "":
return []
elif el.get("data-lt"):
rawText = el.get("data-lt")
if rawText in ["|", "||", "|||"]:
texts = [rawText]
else:
texts = [x.strip() for x in rawText.split("|")]
else:
if el.tag in ("dfn", "a"):
texts = [textContent(el).strip()]
elif el.tag in ("h2", "h3", "h4", "h5", "h6"):
texts = [textContent(find(".content", el)).strip()]
if el.get("data-local-lt"):
localTexts = [x.strip() for x in el.get("data-local-lt").split("|")]
for text in localTexts:
if text in texts:
# lt and local-lt both specify the same thing
raise DuplicatedLinkText(text, texts + localTexts, el)
texts += localTexts
texts = [re.sub(r"\s+", " ", x) for x in texts if x != ""]
return texts
class DuplicatedLinkText(Exception):
def __init__(self, offendingText, allTexts, el):
super().__init__()
self.offendingText = offendingText
self.allTexts = allTexts
self.el = el
def __unicode__(self):
return f"<Text '{self.offendingText}' shows up in both lt and local-lt>"
def firstLinkTextFromElement(el):
try:
texts = linkTextsFromElement(el)
except DuplicatedLinkText as e:
texts = e.allTexts
return texts[0] if len(texts) > 0 else None
def splitForValues(forValues):
"""
Splits a string of 1+ "for" values into an array of individual value.
Respects function args, etc.
Currently, for values are separated by commas.
"""
if forValues is None:
return None
forValues = re.sub(r"\s+", " ", forValues)
return [
value.strip()
for value in re.split(r",(?![^()]*\))", forValues)
if value.strip()
]
def groupFromKey(key, length=2):
"""Generates a filename-safe "group" from a key, of a specified length."""
if key in _groupFromKeyCache:
return _groupFromKeyCache[key]
safeChars = frozenset("abcdefghijklmnopqrstuvwxyz0123456789")
group = ""
for char in key.lower():
if len(group) == length:
_groupFromKeyCache[key] = group
return group
if char in safeChars:
group += char
else:
group = group.ljust(length, "_")
_groupFromKeyCache[key] = group
return group
_groupFromKeyCache = {}
def flatten(arr):
for el in arr:
if (
isinstance(el, collections.Iterable)
and not isinstance(el, str)
and not lxml.etree.iselement(el)
):
yield from flatten(el)
else:
yield el
def scriptPath(*pathSegs):
startPath = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
path = os.path.join(startPath, *pathSegs)
return path
def chrootPath(chrootPath, path):
chrootPath = os.path.abspath(chrootPath)
path = os.path.abspath(path)
if not path.startswith(chrootPath):
messages.die(f"Attempted to access a file ({path}) outside the source document's directory ({chrootPath}). See --allow-nonlocal-files.")
raise Exception()
else:
return path
def doEvery(s, action, lastTime=None):
# Takes an action every N seconds.
# Pass it the duration and the last time it took the action;
# it returns the time it last took the action
# (possibly just now).
# If you want to take action on first call,
# pass 0 as lastTime;
# otherwise it won't take action until N seconds.
import time
newTime = time.time()
if lastTime is None:
lastTime = newTime
if lastTime == 0 or newTime - lastTime > s:
action()
return newTime
return lastTime
| path_disclosure | {
"code": [
" return os.path.join(startPath, *pathSegs)"
],
"line_no": [
171
]
} | {
"code": [
"from .. import messages",
" path = os.path.join(startPath, *pathSegs)",
"def chrootPath(chrootPath, path):",
" path = os.path.abspath(path)",
" messages.die(f\"Attempted to access a file ({path}) outside the source document's directory ({chrootPath}). See --allow-nonlocal-files.\")",
" else:",
" return path"
],
"line_no": [
8,
174,
178,
180,
182,
184,
185
]
} | import collections
import os
import re
import lxml
def FUNC_0(VAR_0, VAR_1="or"):
VAR_0 = list(VAR_0)
if len(VAR_0) == 1:
return VAR_0[0]
if len(VAR_0) == 2:
return "{0} {2} {1}".format(VAR_0[0], VAR_0[1], VAR_1)
return "{0}, {2} {1}".format(", ".join(VAR_0[:-1]), VAR_0[-1], VAR_1)
def FUNC_1(VAR_2, VAR_3):
VAR_21 = iter(VAR_2)
yield next(VAR_21)
for x in VAR_21:
yield VAR_3
yield x
def FUNC_2(VAR_4, VAR_5, VAR_6):
for VAR_32, node in enumerate(VAR_4):
if VAR_32 % 2 == 0:
VAR_4[VAR_32 : i + 1] = FUNC_3(VAR_5, node, VAR_6)
return VAR_4
def FUNC_3(VAR_7, VAR_8, VAR_9=None):
VAR_22 = 0
VAR_23 = []
for match in VAR_7.finditer(VAR_8):
VAR_23.append(VAR_8[VAR_22 : match.start()])
if VAR_9:
VAR_23.append(VAR_9(match))
else:
VAR_23.append(match)
VAR_22 = match.end()
VAR_23.append(VAR_8[VAR_22:])
return VAR_23
def FUNC_4(VAR_10):
VAR_10 = VAR_10.strip().lower()
VAR_10 = re.sub(r"\(\)", "", VAR_10)
VAR_10 = re.sub(r"[\VAR_18/(,]+", "-", VAR_10)
VAR_10 = re.sub(r"[^a-z0-9_-]", "", VAR_10)
VAR_10 = VAR_10.rstrip("-")
return VAR_10
def FUNC_5(VAR_11):
from ..h import find, textContent
if VAR_11.get("data-lt") == "":
return []
elif VAR_11.get("data-lt"):
VAR_33 = VAR_11.get("data-lt")
if VAR_33 in ["|", "||", "|||"]:
VAR_24 = [VAR_33]
else:
VAR_24 = [x.strip() for x in VAR_33.split("|")]
else:
if VAR_11.tag in ("dfn", "a"):
VAR_24 = [textContent(VAR_11).strip()]
elif VAR_11.tag in ("h2", "h3", "h4", "h5", "h6"):
VAR_24 = [textContent(find(".content", VAR_11)).strip()]
if VAR_11.get("data-local-lt"):
VAR_31 = [x.strip() for x in VAR_11.get("data-local-lt").split("|")]
for VAR_10 in VAR_31:
if VAR_10 in VAR_24:
raise CLASS_0(VAR_10, VAR_24 + VAR_31, VAR_11)
VAR_24 += VAR_31
VAR_24 = [re.sub(r"\VAR_18+", " ", x) for x in VAR_24 if x != ""]
return VAR_24
class CLASS_0(Exception):
def __init__(self, VAR_25, VAR_26, VAR_11):
super().__init__()
self.offendingText = VAR_25
self.allTexts = VAR_26
self.el = VAR_11
def __unicode__(self):
return f"<Text '{self.offendingText}' shows up in both lt and local-lt>"
def FUNC_6(VAR_11):
try:
VAR_24 = FUNC_5(VAR_11)
except CLASS_0 as e:
VAR_24 = e.allTexts
return VAR_24[0] if len(VAR_24) > 0 else None
def FUNC_7(VAR_12):
if VAR_12 is None:
return None
VAR_12 = re.sub(r"\VAR_18+", " ", VAR_12)
return [
value.strip()
for value in re.split(r",(?![^()]*\))", VAR_12)
if value.strip()
]
def FUNC_8(VAR_13, VAR_14=2):
if VAR_13 in VAR_15:
return VAR_15[VAR_13]
VAR_27 = frozenset("abcdefghijklmnopqrstuvwxyz0123456789")
VAR_28 = ""
for char in VAR_13.lower():
if len(VAR_28) == VAR_14:
VAR_15[VAR_13] = VAR_28
return VAR_28
if char in VAR_27:
VAR_28 += char
else:
VAR_28 = group.ljust(VAR_14, "_")
VAR_15[VAR_13] = VAR_28
return VAR_28
VAR_15 = {}
def FUNC_9(VAR_16):
for VAR_11 in VAR_16:
if (
isinstance(VAR_11, collections.Iterable)
and not isinstance(VAR_11, str)
and not lxml.etree.iselement(VAR_11)
):
yield from FUNC_9(VAR_11)
else:
yield VAR_11
def FUNC_10(*VAR_17):
VAR_29 = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
return os.path.join(VAR_29, *VAR_17)
def FUNC_11(VAR_18, VAR_19, VAR_20=None):
import time
VAR_30 = time.time()
if VAR_20 is None:
VAR_20 = VAR_30
if VAR_20 == 0 or VAR_30 - VAR_20 > VAR_18:
VAR_19()
return VAR_30
return VAR_20
| import collections
import os
import re
import lxml
from .. import constants
from .. import messages
def FUNC_0(VAR_0, VAR_1="or"):
VAR_0 = list(VAR_0)
if len(VAR_0) == 1:
return VAR_0[0]
if len(VAR_0) == 2:
return "{0} {2} {1}".format(VAR_0[0], VAR_0[1], VAR_1)
return "{0}, {2} {1}".format(", ".join(VAR_0[:-1]), VAR_0[-1], VAR_1)
def FUNC_1(VAR_2, VAR_3):
VAR_23 = iter(VAR_2)
yield next(VAR_23)
for x in VAR_23:
yield VAR_3
yield x
def FUNC_2(VAR_4, VAR_5, VAR_6):
for VAR_34, node in enumerate(VAR_4):
if VAR_34 % 2 == 0:
VAR_4[VAR_34 : i + 1] = FUNC_3(VAR_5, node, VAR_6)
return VAR_4
def FUNC_3(VAR_7, VAR_8, VAR_9=None):
VAR_24 = 0
VAR_25 = []
for match in VAR_7.finditer(VAR_8):
VAR_25.append(VAR_8[VAR_24 : match.start()])
if VAR_9:
VAR_25.append(VAR_9(match))
else:
VAR_25.append(match)
VAR_24 = match.end()
VAR_25.append(VAR_8[VAR_24:])
return VAR_25
def FUNC_4(VAR_10):
VAR_10 = VAR_10.strip().lower()
VAR_10 = re.sub(r"\(\)", "", VAR_10)
VAR_10 = re.sub(r"[\VAR_20/(,]+", "-", VAR_10)
VAR_10 = re.sub(r"[^a-z0-9_-]", "", VAR_10)
VAR_10 = VAR_10.rstrip("-")
return VAR_10
def FUNC_5(VAR_11):
from ..h import find, textContent
if VAR_11.get("data-lt") == "":
return []
elif VAR_11.get("data-lt"):
VAR_35 = VAR_11.get("data-lt")
if VAR_35 in ["|", "||", "|||"]:
VAR_26 = [VAR_35]
else:
VAR_26 = [x.strip() for x in VAR_35.split("|")]
else:
if VAR_11.tag in ("dfn", "a"):
VAR_26 = [textContent(VAR_11).strip()]
elif VAR_11.tag in ("h2", "h3", "h4", "h5", "h6"):
VAR_26 = [textContent(find(".content", VAR_11)).strip()]
if VAR_11.get("data-local-lt"):
VAR_33 = [x.strip() for x in VAR_11.get("data-local-lt").split("|")]
for VAR_10 in VAR_33:
if VAR_10 in VAR_26:
raise CLASS_0(VAR_10, VAR_26 + VAR_33, VAR_11)
VAR_26 += VAR_33
VAR_26 = [re.sub(r"\VAR_20+", " ", x) for x in VAR_26 if x != ""]
return VAR_26
class CLASS_0(Exception):
def __init__(self, VAR_27, VAR_28, VAR_11):
super().__init__()
self.offendingText = VAR_27
self.allTexts = VAR_28
self.el = VAR_11
def __unicode__(self):
return f"<Text '{self.offendingText}' shows up in both lt and local-lt>"
def FUNC_6(VAR_11):
try:
VAR_26 = FUNC_5(VAR_11)
except CLASS_0 as e:
VAR_26 = e.allTexts
return VAR_26[0] if len(VAR_26) > 0 else None
def FUNC_7(VAR_12):
if VAR_12 is None:
return None
VAR_12 = re.sub(r"\VAR_20+", " ", VAR_12)
return [
value.strip()
for value in re.split(r",(?![^()]*\))", VAR_12)
if value.strip()
]
def FUNC_8(VAR_13, VAR_14=2):
if VAR_13 in VAR_15:
return VAR_15[VAR_13]
VAR_29 = frozenset("abcdefghijklmnopqrstuvwxyz0123456789")
VAR_30 = ""
for char in VAR_13.lower():
if len(VAR_30) == VAR_14:
VAR_15[VAR_13] = VAR_30
return VAR_30
if char in VAR_29:
VAR_30 += char
else:
VAR_30 = group.ljust(VAR_14, "_")
VAR_15[VAR_13] = VAR_30
return VAR_30
VAR_15 = {}
def FUNC_9(VAR_16):
for VAR_11 in VAR_16:
if (
isinstance(VAR_11, collections.Iterable)
and not isinstance(VAR_11, str)
and not lxml.etree.iselement(VAR_11)
):
yield from FUNC_9(VAR_11)
else:
yield VAR_11
def FUNC_10(*VAR_17):
VAR_31 = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
VAR_19 = os.path.join(VAR_31, *VAR_17)
return VAR_19
def VAR_18(FUNC_11, VAR_19):
VAR_18 = os.path.abspath(VAR_18)
VAR_19 = os.path.abspath(VAR_19)
if not VAR_19.startswith(VAR_18):
messages.die(f"Attempted to access a file ({VAR_19}) outside the source document's directory ({VAR_18}). See --allow-nonlocal-files.")
raise Exception()
else:
return VAR_19
def FUNC_12(VAR_20, VAR_21, VAR_22=None):
import time
VAR_32 = time.time()
if VAR_22 is None:
VAR_22 = VAR_32
if VAR_22 == 0 or VAR_32 - VAR_22 > VAR_20:
VAR_21()
return VAR_32
return VAR_22
| [
4,
6,
7,
9,
16,
17,
24,
25,
34,
38,
39,
58,
59,
61,
63,
64,
65,
71,
72,
75,
93,
96,
99,
100,
107,
110,
111,
118,
119,
134,
135,
152,
153,
155,
156,
167,
168,
172,
173,
175,
176,
177,
178,
179,
180,
181,
183,
191,
27,
28,
29,
30,
31,
32,
41,
42,
43,
44,
45,
46,
121,
122,
123,
124,
125,
137
] | [
4,
6,
9,
10,
12,
19,
20,
27,
28,
37,
41,
42,
61,
62,
64,
66,
67,
68,
74,
75,
78,
96,
99,
102,
103,
110,
113,
114,
121,
122,
137,
138,
155,
156,
158,
159,
170,
171,
176,
177,
186,
187,
188,
190,
191,
192,
193,
194,
195,
196,
198,
206,
30,
31,
32,
33,
34,
35,
44,
45,
46,
47,
48,
49,
124,
125,
126,
127,
128,
140
] |
1CWE-79
| # This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from collections import OrderedDict
from django.core.exceptions import ImproperlyConfigured
from django.http.response import Http404
from shuup.front.basket import get_basket
from shuup.utils.django_compat import reverse
from shuup.utils.importing import load
class CheckoutProcess(object):
horizontal_template = True
def __init__(self, phase_specs, phase_kwargs, view=None):
"""
Initialize this checkout process.
:type phase_specs: list[str]
:type phase_kwargs: dict
:type view: shuup.front.checkout.BaseCheckoutView|None
"""
self.phase_specs = phase_specs
self.phase_kwargs = phase_kwargs
self.view = view
self.request = self.phase_kwargs.get("request")
@property
def phases(self):
"""
:rtype: Iterable[CheckoutPhaseViewMixin]
"""
if not getattr(self, "_phases", None):
self._phases = self._load_phases()
return self._phases
def instantiate_phase_class(self, phase_class, **extra_kwargs):
if not phase_class.identifier: # pragma: no cover
raise ImproperlyConfigured("Error! Phase `%r` has no identifier." % phase_class)
kwargs = {}
kwargs.update(self.phase_kwargs)
kwargs.update(extra_kwargs)
phase = phase_class(checkout_process=self, horizontal_template=self.horizontal_template, **kwargs)
return phase
def _load_phases(self):
phases = OrderedDict()
for phase_spec in self.phase_specs:
phase_class = load(phase_spec)
phase = self.instantiate_phase_class(phase_class)
phases[phase_class.identifier] = phase
# check whether the phase spawns new phases,
# if so, then let's spawn then and add the phases
for spawned_phase in phase.spawn_phases(self):
phases[spawned_phase.identifier] = spawned_phase
return list(phases.values())
def get_current_phase(self, requested_phase_identifier):
found = False
for phase in self.phases:
if phase.is_valid():
phase.process()
if found or not requested_phase_identifier or requested_phase_identifier == phase.identifier:
found = True # We're at or past the requested phase
if not phase.should_skip():
return phase
if not phase.should_skip() and not phase.is_valid(): # A past phase is not valid, that's the current one
return phase
raise Http404("Error! Phase with identifier `%s` not found." % requested_phase_identifier) # pragma: no cover
def _get_next_phase(self, phases, current_phase, target_phase):
found = False
for phase in phases:
if phase.identifier == current_phase.identifier:
# Found the current one, so any valid phase from here on out is the next one
found = True
continue
if found and current_phase.identifier != target_phase.identifier:
return phase
if found and not phase.should_skip():
# Yep, that's the one
return phase
def get_next_phase(self, current_phase, target_phase):
return self._get_next_phase(self.phases, current_phase, target_phase)
def get_previous_phase(self, current_phase, target_phase):
return self._get_next_phase(reversed(self.phases), current_phase, target_phase)
def prepare_current_phase(self, phase_identifier):
current_phase = self.get_current_phase(phase_identifier)
self.add_phase_attributes(current_phase)
self.current_phase = current_phase
return current_phase
def add_phase_attributes(self, target_phase, current_phase=None):
"""
Add phase instance attributes (previous, next, etc) to the given target phase,
using the optional `current_phase` as the current phase for previous and next.
This is exposed as a public API for the benefit of phases that need to do sub-phase
initialization and dispatching, such as method phases.
"""
current_phase = current_phase or target_phase
target_phase.previous_phase = self.get_previous_phase(current_phase, target_phase)
target_phase.next_phase = self.get_next_phase(current_phase, target_phase)
target_phase.phases = self.phases
if current_phase in self.phases:
current_phase_index = self.phases.index(current_phase)
# Set up attributes that are handy for the phase bar in the templates.
for i, phase in enumerate(self.phases):
setattr(phase, "is_past", i > current_phase_index)
setattr(phase, "is_current", phase == current_phase)
setattr(phase, "is_future", i < current_phase_index)
setattr(phase, "is_previous", phase == target_phase.previous_phase)
setattr(phase, "is_next", phase == target_phase.next_phase)
return target_phase
def reset(self):
for phase in self.phases:
phase.reset()
def complete(self):
"""
To be called from a phase (`self.checkout_process.complete()`) when the checkout process is complete.
"""
self.reset()
def get_phase_url(self, phase):
# The self.view is optional for backward compatibility
if not self.view:
url_kwargs = {"phase": phase.identifier}
return reverse("shuup:checkout", kwargs=url_kwargs)
return self.view.get_phase_url(phase)
@property
def basket(self):
"""
The basket used in this checkout process.
:rtype: shuup.front.basket.objects.BaseBasket
"""
return get_basket(self.request)
class VerticalCheckoutProcess(CheckoutProcess):
horizontal_template = False
| # This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from collections import OrderedDict
from django.core.exceptions import ImproperlyConfigured
from django.http.response import Http404
from django.utils.html import escape
from shuup.front.basket import get_basket
from shuup.utils.django_compat import reverse
from shuup.utils.importing import load
class CheckoutProcess(object):
horizontal_template = True
def __init__(self, phase_specs, phase_kwargs, view=None):
"""
Initialize this checkout process.
:type phase_specs: list[str]
:type phase_kwargs: dict
:type view: shuup.front.checkout.BaseCheckoutView|None
"""
self.phase_specs = phase_specs
self.phase_kwargs = phase_kwargs
self.view = view
self.request = self.phase_kwargs.get("request")
@property
def phases(self):
"""
:rtype: Iterable[CheckoutPhaseViewMixin]
"""
if not getattr(self, "_phases", None):
self._phases = self._load_phases()
return self._phases
def instantiate_phase_class(self, phase_class, **extra_kwargs):
if not phase_class.identifier: # pragma: no cover
raise ImproperlyConfigured("Error! Phase `%r` has no identifier." % phase_class)
kwargs = {}
kwargs.update(self.phase_kwargs)
kwargs.update(extra_kwargs)
phase = phase_class(checkout_process=self, horizontal_template=self.horizontal_template, **kwargs)
return phase
def _load_phases(self):
phases = OrderedDict()
for phase_spec in self.phase_specs:
phase_class = load(phase_spec)
phase = self.instantiate_phase_class(phase_class)
phases[phase_class.identifier] = phase
# check whether the phase spawns new phases,
# if so, then let's spawn then and add the phases
for spawned_phase in phase.spawn_phases(self):
phases[spawned_phase.identifier] = spawned_phase
return list(phases.values())
def get_current_phase(self, requested_phase_identifier):
found = False
for phase in self.phases:
if phase.is_valid():
phase.process()
if found or not requested_phase_identifier or requested_phase_identifier == phase.identifier:
found = True # We're at or past the requested phase
if not phase.should_skip():
return phase
if not phase.should_skip() and not phase.is_valid(): # A past phase is not valid, that's the current one
return phase
raise Http404("Error! Phase with identifier `%s` not found." % escape(requested_phase_identifier))
def _get_next_phase(self, phases, current_phase, target_phase):
found = False
for phase in phases:
if phase.identifier == current_phase.identifier:
# Found the current one, so any valid phase from here on out is the next one
found = True
continue
if found and current_phase.identifier != target_phase.identifier:
return phase
if found and not phase.should_skip():
# Yep, that's the one
return phase
def get_next_phase(self, current_phase, target_phase):
return self._get_next_phase(self.phases, current_phase, target_phase)
def get_previous_phase(self, current_phase, target_phase):
return self._get_next_phase(reversed(self.phases), current_phase, target_phase)
def prepare_current_phase(self, phase_identifier):
current_phase = self.get_current_phase(phase_identifier)
self.add_phase_attributes(current_phase)
self.current_phase = current_phase
return current_phase
def add_phase_attributes(self, target_phase, current_phase=None):
"""
Add phase instance attributes (previous, next, etc) to the given target phase,
using the optional `current_phase` as the current phase for previous and next.
This is exposed as a public API for the benefit of phases that need to do sub-phase
initialization and dispatching, such as method phases.
"""
current_phase = current_phase or target_phase
target_phase.previous_phase = self.get_previous_phase(current_phase, target_phase)
target_phase.next_phase = self.get_next_phase(current_phase, target_phase)
target_phase.phases = self.phases
if current_phase in self.phases:
current_phase_index = self.phases.index(current_phase)
# Set up attributes that are handy for the phase bar in the templates.
for i, phase in enumerate(self.phases):
setattr(phase, "is_past", i > current_phase_index)
setattr(phase, "is_current", phase == current_phase)
setattr(phase, "is_future", i < current_phase_index)
setattr(phase, "is_previous", phase == target_phase.previous_phase)
setattr(phase, "is_next", phase == target_phase.next_phase)
return target_phase
def reset(self):
for phase in self.phases:
phase.reset()
def complete(self):
"""
To be called from a phase (`self.checkout_process.complete()`) when the checkout process is complete.
"""
self.reset()
def get_phase_url(self, phase):
# The self.view is optional for backward compatibility
if not self.view:
url_kwargs = {"phase": phase.identifier}
return reverse("shuup:checkout", kwargs=url_kwargs)
return self.view.get_phase_url(phase)
@property
def basket(self):
"""
The basket used in this checkout process.
:rtype: shuup.front.basket.objects.BaseBasket
"""
return get_basket(self.request)
class VerticalCheckoutProcess(CheckoutProcess):
horizontal_template = False
| xss | {
"code": [],
"line_no": []
} | {
"code": [
" raise Http404(\"Error! Phase with identifier `%s` not found.\" % escape(requested_phase_identifier))"
],
"line_no": [
79
]
} |
from __future__ import unicode_literals
from collections import OrderedDict
from django.core.exceptions import ImproperlyConfigured
from django.http.response import Http404
from shuup.front.basket import get_basket
from shuup.utils.django_compat import reverse
from shuup.utils.importing import load
class CLASS_0(object):
VAR_0 = True
def __init__(self, VAR_1, VAR_2, VAR_3=None):
self.phase_specs = VAR_1
self.phase_kwargs = VAR_2
self.view = VAR_3
self.request = self.phase_kwargs.get("request")
@property
def VAR_7(self):
if not getattr(self, "_phases", None):
self._phases = self._load_phases()
return self._phases
def FUNC_1(self, VAR_4, **VAR_5):
if not VAR_4.identifier: # pragma: no cover
raise ImproperlyConfigured("Error! Phase `%r` has no identifier." % VAR_4)
VAR_12 = {}
kwargs.update(self.phase_kwargs)
VAR_12.update(VAR_5)
VAR_11 = VAR_4(checkout_process=self, VAR_0=self.horizontal_template, **VAR_12)
return VAR_11
def FUNC_2(self):
VAR_7 = OrderedDict()
for phase_spec in self.phase_specs:
VAR_4 = load(phase_spec)
VAR_11 = self.instantiate_phase_class(VAR_4)
VAR_7[VAR_4.identifier] = VAR_11
for VAR_16 in VAR_11.spawn_phases(self):
VAR_7[VAR_16.identifier] = VAR_16
return list(VAR_7.values())
def FUNC_3(self, VAR_6):
VAR_13 = False
for VAR_11 in self.phases:
if VAR_11.is_valid():
VAR_11.process()
if VAR_13 or not VAR_6 or VAR_6 == VAR_11.identifier:
VAR_13 = True # We're at or past the requested VAR_11
if not VAR_11.should_skip():
return VAR_11
if not VAR_11.should_skip() and not VAR_11.is_valid(): # A past VAR_11 is not valid, that's the current one
return VAR_11
raise Http404("Error! Phase with identifier `%s` not VAR_13." % VAR_6) # pragma: no cover
def FUNC_4(self, VAR_7, VAR_8, VAR_9):
VAR_13 = False
for VAR_11 in VAR_7:
if VAR_11.identifier == VAR_8.identifier:
VAR_13 = True
continue
if VAR_13 and VAR_8.identifier != VAR_9.identifier:
return VAR_11
if VAR_13 and not VAR_11.should_skip():
return VAR_11
def FUNC_5(self, VAR_8, VAR_9):
return self._get_next_phase(self.phases, VAR_8, VAR_9)
def FUNC_6(self, VAR_8, VAR_9):
return self._get_next_phase(reversed(self.phases), VAR_8, VAR_9)
def FUNC_7(self, VAR_10):
VAR_8 = self.get_current_phase(VAR_10)
self.add_phase_attributes(VAR_8)
self.current_phase = VAR_8
return VAR_8
def FUNC_8(self, VAR_9, VAR_8=None):
VAR_8 = VAR_8 or VAR_9
target_phase.previous_phase = self.get_previous_phase(VAR_8, VAR_9)
target_phase.next_phase = self.get_next_phase(VAR_8, VAR_9)
target_phase.phases = self.phases
if VAR_8 in self.phases:
VAR_14 = self.phases.index(VAR_8)
for i, VAR_11 in enumerate(self.phases):
setattr(VAR_11, "is_past", i > VAR_14)
setattr(VAR_11, "is_current", VAR_11 == VAR_8)
setattr(VAR_11, "is_future", i < VAR_14)
setattr(VAR_11, "is_previous", VAR_11 == VAR_9.previous_phase)
setattr(VAR_11, "is_next", VAR_11 == VAR_9.next_phase)
return VAR_9
def FUNC_9(self):
for VAR_11 in self.phases:
VAR_11.reset()
def FUNC_10(self):
self.reset()
def FUNC_11(self, VAR_11):
if not self.view:
VAR_15 = {"phase": VAR_11.identifier}
return reverse("shuup:checkout", VAR_12=VAR_15)
return self.view.get_phase_url(VAR_11)
@property
def FUNC_12(self):
return get_basket(self.request)
class CLASS_1(CLASS_0):
VAR_0 = False
|
from __future__ import unicode_literals
from collections import OrderedDict
from django.core.exceptions import ImproperlyConfigured
from django.http.response import Http404
from django.utils.html import escape
from shuup.front.basket import get_basket
from shuup.utils.django_compat import reverse
from shuup.utils.importing import load
class CLASS_0(object):
VAR_0 = True
def __init__(self, VAR_1, VAR_2, VAR_3=None):
self.phase_specs = VAR_1
self.phase_kwargs = VAR_2
self.view = VAR_3
self.request = self.phase_kwargs.get("request")
@property
def VAR_7(self):
if not getattr(self, "_phases", None):
self._phases = self._load_phases()
return self._phases
def FUNC_1(self, VAR_4, **VAR_5):
if not VAR_4.identifier: # pragma: no cover
raise ImproperlyConfigured("Error! Phase `%r` has no identifier." % VAR_4)
VAR_12 = {}
kwargs.update(self.phase_kwargs)
VAR_12.update(VAR_5)
VAR_11 = VAR_4(checkout_process=self, VAR_0=self.horizontal_template, **VAR_12)
return VAR_11
def FUNC_2(self):
VAR_7 = OrderedDict()
for phase_spec in self.phase_specs:
VAR_4 = load(phase_spec)
VAR_11 = self.instantiate_phase_class(VAR_4)
VAR_7[VAR_4.identifier] = VAR_11
for VAR_16 in VAR_11.spawn_phases(self):
VAR_7[VAR_16.identifier] = VAR_16
return list(VAR_7.values())
def FUNC_3(self, VAR_6):
VAR_13 = False
for VAR_11 in self.phases:
if VAR_11.is_valid():
VAR_11.process()
if VAR_13 or not VAR_6 or VAR_6 == VAR_11.identifier:
VAR_13 = True # We're at or past the requested VAR_11
if not VAR_11.should_skip():
return VAR_11
if not VAR_11.should_skip() and not VAR_11.is_valid(): # A past VAR_11 is not valid, that's the current one
return VAR_11
raise Http404("Error! Phase with identifier `%s` not VAR_13." % escape(VAR_6))
def FUNC_4(self, VAR_7, VAR_8, VAR_9):
VAR_13 = False
for VAR_11 in VAR_7:
if VAR_11.identifier == VAR_8.identifier:
VAR_13 = True
continue
if VAR_13 and VAR_8.identifier != VAR_9.identifier:
return VAR_11
if VAR_13 and not VAR_11.should_skip():
return VAR_11
def FUNC_5(self, VAR_8, VAR_9):
return self._get_next_phase(self.phases, VAR_8, VAR_9)
def FUNC_6(self, VAR_8, VAR_9):
return self._get_next_phase(reversed(self.phases), VAR_8, VAR_9)
def FUNC_7(self, VAR_10):
VAR_8 = self.get_current_phase(VAR_10)
self.add_phase_attributes(VAR_8)
self.current_phase = VAR_8
return VAR_8
def FUNC_8(self, VAR_9, VAR_8=None):
VAR_8 = VAR_8 or VAR_9
target_phase.previous_phase = self.get_previous_phase(VAR_8, VAR_9)
target_phase.next_phase = self.get_next_phase(VAR_8, VAR_9)
target_phase.phases = self.phases
if VAR_8 in self.phases:
VAR_14 = self.phases.index(VAR_8)
for i, VAR_11 in enumerate(self.phases):
setattr(VAR_11, "is_past", i > VAR_14)
setattr(VAR_11, "is_current", VAR_11 == VAR_8)
setattr(VAR_11, "is_future", i < VAR_14)
setattr(VAR_11, "is_previous", VAR_11 == VAR_9.previous_phase)
setattr(VAR_11, "is_next", VAR_11 == VAR_9.next_phase)
return VAR_9
def FUNC_9(self):
for VAR_11 in self.phases:
VAR_11.reset()
def FUNC_10(self):
self.reset()
def FUNC_11(self, VAR_11):
if not self.view:
VAR_15 = {"phase": VAR_11.identifier}
return reverse("shuup:checkout", VAR_12=VAR_15)
return self.view.get_phase_url(VAR_11)
@property
def FUNC_12(self):
return get_basket(self.request)
class CLASS_1(CLASS_0):
VAR_0 = False
| [
1,
2,
3,
4,
5,
6,
8,
12,
16,
17,
20,
24,
33,
42,
51,
54,
59,
60,
61,
64,
66,
79,
84,
87,
90,
92,
94,
97,
100,
106,
111,
121,
129,
133,
139,
141,
146,
151,
155,
156,
159,
22,
23,
24,
25,
26,
27,
28,
36,
37,
38,
108,
109,
110,
111,
112,
113,
114,
135,
136,
137,
149,
150,
151,
152,
153
] | [
1,
2,
3,
4,
5,
6,
8,
13,
17,
18,
21,
25,
34,
43,
52,
55,
60,
61,
62,
65,
67,
80,
85,
88,
91,
93,
95,
98,
101,
107,
112,
122,
130,
134,
140,
142,
147,
152,
156,
157,
160,
23,
24,
25,
26,
27,
28,
29,
37,
38,
39,
109,
110,
111,
112,
113,
114,
115,
136,
137,
138,
150,
151,
152,
153,
154
] |
2CWE-601
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# webgateway/views.py - django application view handling functions
#
# Copyright (c) 2007-2020 Glencoe Software, Inc. All rights reserved.
#
# This software is distributed under the terms described by the LICENCE file
# you can find at the root of the distribution bundle, which states you are
# free to use it only for non commercial purposes.
# If the file is missing please request a copy by contacting
# jason@glencoesoftware.com.
#
# Author: Carlos Neves <carlos(at)glencoesoftware.com>
import re
import json
import base64
import warnings
from functools import wraps
import omero
import omero.clients
from past.builtins import unicode
from django.http import (
HttpResponse,
HttpResponseBadRequest,
HttpResponseServerError,
JsonResponse,
HttpResponseForbidden,
)
from django.http import (
HttpResponseRedirect,
HttpResponseNotAllowed,
Http404,
StreamingHttpResponse,
HttpResponseNotFound,
)
from django.views.decorators.http import require_POST
from django.views.decorators.debug import sensitive_post_parameters
from django.utils.decorators import method_decorator
from django.core.urlresolvers import reverse, NoReverseMatch
from django.conf import settings
from wsgiref.util import FileWrapper
from omero.rtypes import rlong, unwrap
from omero.constants.namespaces import NSBULKANNOTATIONS
from .util import points_string_to_XY_list, xy_list_to_bbox
from .plategrid import PlateGrid
from omeroweb.version import omeroweb_buildyear as build_year
from .marshal import imageMarshal, shapeMarshal, rgb_int2rgba
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.views.generic import View
from django.shortcuts import render
from omeroweb.webadmin.forms import LoginForm
from omeroweb.decorators import get_client_ip, is_public_user
from omeroweb.webadmin.webadmin_utils import upgradeCheck
try:
from hashlib import md5
except Exception:
from md5 import md5
try:
import long
except ImportError:
long = int
from io import BytesIO
import tempfile
from omero import ApiUsageException
from omero.util.decorators import timeit, TimeIt
from omeroweb.httprsp import HttpJavascriptResponse, HttpJavascriptResponseServerError
from omeroweb.connector import Server
import glob
# from models import StoredConnection
from omeroweb.webgateway.webgateway_cache import (
webgateway_cache,
CacheBase,
webgateway_tempfile,
)
import logging
import os
import traceback
import time
import zipfile
import shutil
from omeroweb.decorators import login_required, ConnCleaningHttpResponse
from omeroweb.connector import Connector
from omeroweb.webgateway.util import zip_archived_files, LUTS_IN_PNG
from omeroweb.webgateway.util import get_longs, getIntOrDefault
cache = CacheBase()
logger = logging.getLogger(__name__)
try:
from PIL import Image
from PIL import ImageDraw
except Exception: # pragma: nocover
try:
import Image
import ImageDraw
except Exception:
logger.error("No Pillow installed")
try:
import numpy
numpyInstalled = True
except ImportError:
logger.error("No numpy installed")
numpyInstalled = False
def index(request):
""" /webgateway/ index placeholder """
return HttpResponse("Welcome to webgateway")
def _safestr(s):
return unicode(s).encode("utf-8")
class UserProxy(object):
"""
Represents the current user of the connection, with methods delegating to
the connection itself.
"""
def __init__(self, blitzcon):
"""
Initialises the User proxy with the L{omero.gateway.BlitzGateway}
connection
@param blitzcon: connection
@type blitzcon: L{omero.gateway.BlitzGateway}
"""
self._blitzcon = blitzcon
self.loggedIn = False
def logIn(self):
""" Sets the loggedIn Flag to True """
self.loggedIn = True
def isAdmin(self):
"""
True if the current user is an admin
@return: True if the current user is an admin
@rtype: Boolean
"""
return self._blitzcon.isAdmin()
def canBeAdmin(self):
"""
True if the current user can be admin
@return: True if the current user can be admin
@rtype: Boolean
"""
return self._blitzcon.canBeAdmin()
def getId(self):
"""
Returns the ID of the current user
@return: User ID
@rtype: Long
"""
return self._blitzcon.getUserId()
def getName(self):
"""
Returns the Name of the current user
@return: User Name
@rtype: String
"""
return self._blitzcon.getUser().omeName
def getFirstName(self):
"""
Returns the first name of the current user
@return: First Name
@rtype: String
"""
return self._blitzcon.getUser().firstName or self.getName()
# def getPreferences (self):
# return self._blitzcon._user.getPreferences()
#
# def getUserObj (self):
# return self._blitzcon._user
#
# class SessionCB (object):
# def _log (self, what, c):
# logger.debug('CONN:%s %s:%d:%s' % (what, c._user, os.getpid(),
# c._sessionUuid))
#
# def create (self, c):
# self._log('create',c)
#
# def join (self, c):
# self._log('join',c)
#
# def close (self, c):
# self._log('close',c)
# _session_cb = SessionCB()
def _split_channel_info(rchannels):
"""
Splits the request query channel information for images into a sequence of
channels, window ranges and channel colors.
@param rchannels: The request string with channel info. E.g
1|100:505$0000FF,-2,3|620:3879$FF0000
@type rchannels: String
@return: E.g. [1, -2, 3] [[100.0, 505.0], (None, None), [620.0,
3879.0]] [u'0000FF', None, u'FF0000']
@rtype: tuple of 3 lists
"""
channels = []
windows = []
colors = []
for chan in rchannels.split(","):
# chan 1|12:1386r$0000FF
chan = chan.split("|", 1)
# chan ['1', '12:1386r$0000FF']
t = chan[0].strip()
# t = '1'
color = None
# Not normally used...
if t.find("$") >= 0:
t, color = t.split("$")
try:
channels.append(int(t))
ch_window = (None, None)
if len(chan) > 1:
t = chan[1].strip()
# t = '12:1386r$0000FF'
if t.find("$") >= 0:
t, color = t.split("$", 1)
# color = '0000FF'
# t = 12:1386
t = t.split(":")
if len(t) == 2:
try:
ch_window = [float(x) for x in t]
except ValueError:
pass
windows.append(ch_window)
colors.append(color)
except ValueError:
pass
logger.debug(str(channels) + "," + str(windows) + "," + str(colors))
return channels, windows, colors
def getImgDetailsFromReq(request, as_string=False):
"""
Break the GET information from the request object into details on how
to render the image.
The following keys are recognized:
z - Z axis position
t - T axis position
q - Quality set (0,0..1,0)
m - Model (g for greyscale, c for color)
p - Projection (see blitz_gateway.ImageWrapper.PROJECTIONS for keys)
x - X position (for now based on top/left offset on the browser window)
y - Y position (same as above)
c - a comma separated list of channels to be rendered (start index 1)
- format for each entry [-]ID[|wndst:wndend][#HEXCOLOR][,...]
zm - the zoom setting (as a percentual value)
@param request: http request with keys above
@param as_string: If True, return a string representation of the
rendering details
@return: A dict or String representation of rendering details
above.
@rtype: Dict or String
"""
r = request.GET
rv = {}
for k in ("z", "t", "q", "m", "zm", "x", "y", "p"):
if k in r:
rv[k] = r[k]
if "c" in r:
rv["c"] = []
ci = _split_channel_info(r["c"])
logger.debug(ci)
for i in range(len(ci[0])):
# a = abs channel, i = channel, s = window start, e = window end,
# c = color
rv["c"].append(
{
"a": abs(ci[0][i]),
"i": ci[0][i],
"s": ci[1][i][0],
"e": ci[1][i][1],
"c": ci[2][i],
}
)
if as_string:
return "&".join(["%s=%s" % (x[0], x[1]) for x in rv.items()])
return rv
@login_required()
def render_birds_eye_view(request, iid, size=None, conn=None, **kwargs):
"""
Returns an HttpResponse wrapped jpeg with the rendered bird's eye view
for image 'iid'. We now use a thumbnail for performance. #10626
@param request: http request
@param iid: Image ID
@param conn: L{omero.gateway.BlitzGateway} connection
@param size: Maximum size of the longest side of the resulting
bird's eye view.
@return: http response containing jpeg
"""
return render_thumbnail(request, iid, w=size, **kwargs)
def _render_thumbnail(request, iid, w=None, h=None, conn=None, _defcb=None, **kwargs):
"""
Returns a jpeg with the rendered thumbnail for image 'iid'
@param request: http request
@param iid: Image ID
@param w: Thumbnail max width. 96 by default
@param h: Thumbnail max height
@return: http response containing jpeg
"""
server_id = request.session["connector"].server_id
server_settings = request.session.get("server_settings", {}).get("browser", {})
defaultSize = server_settings.get("thumb_default_size", 96)
direct = True
if w is None:
size = (defaultSize,)
else:
if h is None:
size = (int(w),)
else:
size = (int(w), int(h))
if size == (defaultSize,):
direct = False
user_id = conn.getUserId()
z = getIntOrDefault(request, "z", None)
t = getIntOrDefault(request, "t", None)
rdefId = getIntOrDefault(request, "rdefId", None)
# TODO - cache handles rdefId
jpeg_data = webgateway_cache.getThumb(request, server_id, user_id, iid, size)
if jpeg_data is None:
prevent_cache = False
img = conn.getObject("Image", iid)
if img is None:
logger.debug("(b)Image %s not found..." % (str(iid)))
if _defcb:
jpeg_data = _defcb(size=size)
prevent_cache = True
else:
raise Http404("Failed to render thumbnail")
else:
jpeg_data = img.getThumbnail(
size=size, direct=direct, rdefId=rdefId, z=z, t=t
)
if jpeg_data is None:
logger.debug("(c)Image %s not found..." % (str(iid)))
if _defcb:
jpeg_data = _defcb(size=size)
prevent_cache = True
else:
raise Http404("Failed to render thumbnail")
else:
prevent_cache = img._thumbInProgress
if not prevent_cache:
webgateway_cache.setThumb(request, server_id, user_id, iid, jpeg_data, size)
else:
pass
return jpeg_data
@login_required()
def render_thumbnail(request, iid, w=None, h=None, conn=None, _defcb=None, **kwargs):
    """
    Returns an HttpResponse wrapped jpeg with the rendered thumbnail for image
    'iid'

    @param request:     http request
    @param iid:         Image ID
    @param w:           Thumbnail max width. 96 by default
    @param h:           Thumbnail max height
    @return:            http response containing jpeg
    """
    thumb_jpeg = _render_thumbnail(
        request=request, iid=iid, w=w, h=h, conn=conn, _defcb=_defcb, **kwargs
    )
    return HttpResponse(thumb_jpeg, content_type="image/jpeg")
@login_required()
def render_roi_thumbnail(request, roiId, w=None, h=None, conn=None, **kwargs):
    """
    For the given ROI, choose the shape to render (first time-point, mid
    z-section) then render a region around that shape, scale to width and
    height (or default size) and draw the shape on to the region

    @param request:     http request
    @param roiId:       ROI ID
    @param w:           unused here; thumbnail size is decided by
                        get_shape_thumbnail
    @param h:           unused
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping jpeg (from get_shape_thumbnail)
    @raise Http404:     if the ROI, its image or its shapes cannot be found
    """
    server_id = request.session["connector"].server_id
    # need to find the z indices of the first shape in T
    result = conn.getRoiService().findByRoi(long(roiId), None, conn.SERVICE_OPTS)
    if result is None or result.rois is None or len(result.rois) == 0:
        raise Http404
    # NOTE: imageId/shapes are reassigned on every iteration, so the values
    # from the LAST roi in the result are the ones used below
    for roi in result.rois:
        imageId = roi.image.id.val
        shapes = roi.copyShapes()
        # drop any null entries in the shape list
        shapes = [s for s in shapes if s is not None]
    if len(shapes) == 0:
        raise Http404("No Shapes found for ROI %s" % roiId)
    pi = _get_prepared_image(request, imageId, server_id=server_id, conn=conn)
    if pi is None:
        raise Http404
    image, compress_quality = pi
    shape = None
    # if only single shape, use it...
    if len(shapes) == 1:
        shape = shapes[0]
    else:
        default_t = image.getDefaultT()
        default_z = image.getDefaultZ()
        # find shapes on default Z/T plane
        def_shapes = [
            s
            for s in shapes
            if unwrap(s.getTheT()) is None or unwrap(s.getTheT()) == default_t
        ]
        if len(def_shapes) == 1:
            shape = def_shapes[0]
        else:
            # narrow further: keep only shapes on the default Z plane
            def_shapes = [
                s
                for s in def_shapes
                if unwrap(s.getTheZ()) is None or unwrap(s.getTheZ()) == default_z
            ]
            if len(def_shapes) > 0:
                shape = def_shapes[0]
    # otherwise pick first shape
    if shape is None and len(shapes) > 0:
        shape = shapes[0]
    return get_shape_thumbnail(request, conn, image, shape, compress_quality)
@login_required()
def render_shape_thumbnail(request, shapeId, w=None, h=None, conn=None, **kwargs):
    """
    For the given Shape, redner a region around that shape, scale to width and
    height (or default size) and draw the shape on to the region.

    @param request:     http request
    @param shapeId:     Shape ID
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping jpeg (from get_shape_thumbnail)
    @raise Http404:     if the Shape or its image cannot be found
    """
    server_id = request.session["connector"].server_id
    # Load the Shape (with its ROI fetched) so we can find the parent image
    params = omero.sys.Parameters()
    params.map = {"id": rlong(shapeId)}
    query = "select s from Shape s join fetch s.roi where s.id = :id"
    shape = conn.getQueryService().findByQuery(query, params, conn.SERVICE_OPTS)
    if shape is None:
        raise Http404
    imageId = shape.roi.image.id.val
    prepared = _get_prepared_image(request, imageId, server_id=server_id, conn=conn)
    if prepared is None:
        raise Http404
    image, compress_quality = prepared
    return get_shape_thumbnail(request, conn, image, shape, compress_quality)
def get_shape_thumbnail(request, conn, image, s, compress_quality):
    """
    Render a region around the specified Shape, scale to width and height (or
    default size) and draw the shape on to the region. Returns an HttpResponse
    wrapping the jpeg data.

    @param request:             http request (?color=f00 etc. selects the
                                outline colour)
    @param conn:                L{omero.gateway.BlitzGateway} connection
    @param image:               ImageWrapper
    @param s:                   omero.model.Shape
    @param compress_quality:    compression quality for the rendered region
    @return:                    http response wrapping jpeg
    @raise Http404:             if the shape type is not supported
    """
    MAX_WIDTH = 250
    color = request.GET.get("color", "fff")
    colours = {
        "f00": (255, 0, 0),
        "0f0": (0, 255, 0),
        "00f": (0, 0, 255),
        "ff0": (255, 255, 0),
        "fff": (255, 255, 255),
        "000": (0, 0, 0),
    }
    lineColour = colours["f00"]
    if color in colours:
        lineColour = colours[color]
    # used for padding if we go outside the image area
    bg_color = (221, 221, 221)
    bBox = None  # bounding box: (x, y, w, h)
    shape = {}
    theT = unwrap(s.getTheT())
    theT = theT if theT is not None else image.getDefaultT()
    theZ = unwrap(s.getTheZ())
    theZ = theZ if theZ is not None else image.getDefaultZ()
    if type(s) == omero.model.RectangleI:
        shape["type"] = "Rectangle"
        shape["x"] = s.getX().getValue()
        shape["y"] = s.getY().getValue()
        shape["width"] = s.getWidth().getValue()
        shape["height"] = s.getHeight().getValue()
        bBox = (shape["x"], shape["y"], shape["width"], shape["height"])
    elif type(s) == omero.model.MaskI:
        shape["type"] = "Mask"
        shape["x"] = s.getX().getValue()
        shape["y"] = s.getY().getValue()
        shape["width"] = s.getWidth().getValue()
        shape["height"] = s.getHeight().getValue()
        bBox = (shape["x"], shape["y"], shape["width"], shape["height"])
        # TODO: support for mask
    elif type(s) == omero.model.EllipseI:
        shape["type"] = "Ellipse"
        shape["x"] = int(s.getX().getValue())
        shape["y"] = int(s.getY().getValue())
        shape["radiusX"] = int(s.getRadiusX().getValue())
        shape["radiusY"] = int(s.getRadiusY().getValue())
        bBox = (
            shape["x"] - shape["radiusX"],
            shape["y"] - shape["radiusY"],
            2 * shape["radiusX"],
            2 * shape["radiusY"],
        )
    elif type(s) == omero.model.PolylineI:
        shape["type"] = "PolyLine"
        shape["xyList"] = points_string_to_XY_list(s.getPoints().getValue())
        bBox = xy_list_to_bbox(shape["xyList"])
    elif type(s) == omero.model.LineI:
        shape["type"] = "Line"
        shape["x1"] = int(s.getX1().getValue())
        shape["x2"] = int(s.getX2().getValue())
        shape["y1"] = int(s.getY1().getValue())
        shape["y2"] = int(s.getY2().getValue())
        x = min(shape["x1"], shape["x2"])
        y = min(shape["y1"], shape["y2"])
        bBox = (
            x,
            y,
            max(shape["x1"], shape["x2"]) - x,
            max(shape["y1"], shape["y2"]) - y,
        )
    elif type(s) == omero.model.PointI:
        shape["type"] = "Point"
        shape["x"] = s.getX().getValue()
        shape["y"] = s.getY().getValue()
        bBox = (shape["x"] - 50, shape["y"] - 50, 100, 100)
    elif type(s) == omero.model.PolygonI:
        shape["type"] = "Polygon"
        shape["xyList"] = points_string_to_XY_list(s.getPoints().getValue())
        bBox = xy_list_to_bbox(shape["xyList"])
    elif type(s) == omero.model.LabelI:
        shape["type"] = "Label"
        shape["x"] = s.getX().getValue()
        shape["y"] = s.getY().getValue()
        bBox = (shape["x"] - 50, shape["y"] - 50, 100, 100)
    else:
        logger.debug("Shape type not supported: %s" % str(type(s)))
        # FIX: previously execution fell through with bBox = None and
        # crashed below with a TypeError when unpacking it. Fail cleanly
        # with a 404 instead.
        raise Http404("Shape type not supported: %s" % str(type(s)))

    # we want to render a region larger than the bounding box
    x, y, w, h = bBox
    # make the aspect ratio (w/h) = 3/2
    requiredWidth = max(w, h * 3 // 2)
    requiredHeight = requiredWidth * 2 // 3
    # make the rendered region 1.5 times larger than the bounding box
    newW = int(requiredWidth * 1.5)
    newH = int(requiredHeight * 1.5)
    # Don't want the region to be smaller than the thumbnail dimensions
    if newW < MAX_WIDTH:
        newW = MAX_WIDTH
        newH = newW * 2 // 3
    # Don't want the region to be bigger than a 'Big Image'!

    def getConfigValue(key):
        # best-effort config lookup; returns None on failure
        try:
            return conn.getConfigService().getConfigValue(key)
        except Exception:
            logger.warn(
                "webgateway: get_shape_thumbnail() could not get"
                " Config-Value for %s" % key
            )
            pass

    max_plane_width = getConfigValue("omero.pixeldata.max_plane_width")
    max_plane_height = getConfigValue("omero.pixeldata.max_plane_height")
    if (
        max_plane_width is None
        or max_plane_height is None
        or (newW > int(max_plane_width))
        or (newH > int(max_plane_height))
    ):
        # generate dummy image to return
        dummy = Image.new("RGB", (MAX_WIDTH, MAX_WIDTH * 2 // 3), bg_color)
        draw = ImageDraw.Draw(dummy)
        draw.text((10, 30), "Shape too large to \ngenerate thumbnail", fill=(255, 0, 0))
        rv = BytesIO()
        dummy.save(rv, "jpeg", quality=90)
        return HttpResponse(rv.getvalue(), content_type="image/jpeg")

    xOffset = (newW - w) // 2
    yOffset = (newH - h) // 2
    newX = int(x - xOffset)
    newY = int(y - yOffset)
    # Need to check if any part of our region is outside the image. (assume
    # that SOME of the region is within the image!)
    sizeX = image.getSizeX()
    sizeY = image.getSizeY()
    left_xs, right_xs, top_xs, bottom_xs = 0, 0, 0, 0
    if newX < 0:
        newW = newW + newX
        left_xs = abs(newX)
        newX = 0
    if newY < 0:
        newH = newH + newY
        top_xs = abs(newY)
        newY = 0
    if newW + newX > sizeX:
        right_xs = (newW + newX) - sizeX
        newW = newW - right_xs
    if newH + newY > sizeY:
        bottom_xs = (newH + newY) - sizeY
        newH = newH - bottom_xs
    # now we should be getting the correct region
    jpeg_data = image.renderJpegRegion(
        theZ, theT, newX, newY, newW, newH, level=None, compression=compress_quality
    )
    img = Image.open(BytesIO(jpeg_data))
    # add back on the xs we were forced to trim
    if left_xs != 0 or right_xs != 0 or top_xs != 0 or bottom_xs != 0:
        jpg_w, jpg_h = img.size
        xs_w = jpg_w + right_xs + left_xs
        xs_h = jpg_h + bottom_xs + top_xs
        xs_image = Image.new("RGB", (xs_w, xs_h), bg_color)
        xs_image.paste(img, (left_xs, top_xs))
        img = xs_image
    # we have our full-sized region. Need to resize to thumbnail.
    current_w, current_h = img.size
    factor = float(MAX_WIDTH) / current_w
    resizeH = int(current_h * factor)
    img = img.resize((MAX_WIDTH, resizeH))
    draw = ImageDraw.Draw(img)
    if shape["type"] == "Rectangle":
        rectX = int(xOffset * factor)
        rectY = int(yOffset * factor)
        rectW = int((w + xOffset) * factor)
        rectH = int((h + yOffset) * factor)
        draw.rectangle((rectX, rectY, rectW, rectH), outline=lineColour)
        # hack to get line width of 2
        draw.rectangle((rectX - 1, rectY - 1, rectW + 1, rectH + 1), outline=lineColour)
    elif shape["type"] == "Line":
        lineX1 = (shape["x1"] - newX + left_xs) * factor
        lineX2 = (shape["x2"] - newX + left_xs) * factor
        lineY1 = (shape["y1"] - newY + top_xs) * factor
        lineY2 = (shape["y2"] - newY + top_xs) * factor
        draw.line((lineX1, lineY1, lineX2, lineY2), fill=lineColour, width=2)
    elif shape["type"] == "Ellipse":
        rectX = int(xOffset * factor)
        rectY = int(yOffset * factor)
        rectW = int((w + xOffset) * factor)
        rectH = int((h + yOffset) * factor)
        draw.ellipse((rectX, rectY, rectW, rectH), outline=lineColour)
        # hack to get line width of 2
        draw.ellipse((rectX - 1, rectY - 1, rectW + 1, rectH + 1), outline=lineColour)
    elif shape["type"] == "Point":
        point_radius = 2
        rectX = (MAX_WIDTH // 2) - point_radius
        rectY = int(resizeH // 2) - point_radius
        rectW = rectX + (point_radius * 2)
        rectH = rectY + (point_radius * 2)
        draw.ellipse((rectX, rectY, rectW, rectH), outline=lineColour)
        # hack to get line width of 2
        draw.ellipse((rectX - 1, rectY - 1, rectW + 1, rectH + 1), outline=lineColour)
    elif "xyList" in shape:
        # resizedXY = [(int(x*factor), int(y*factor))
        #              for (x,y) in shape['xyList']]
        def resizeXY(xy):
            # map image-space coords to thumbnail-space coords
            x, y = xy
            return (
                int((x - newX + left_xs) * factor),
                int((y - newY + top_xs) * factor),
            )

        resizedXY = [resizeXY(xy) for xy in shape["xyList"]]
        # doesn't support 'width' of line
        # draw.polygon(resizedXY, outline=lineColour)
        x2 = y2 = None
        for line in range(1, len(resizedXY)):
            x1, y1 = resizedXY[line - 1]
            x2, y2 = resizedXY[line]
            draw.line((x1, y1, x2, y2), fill=lineColour, width=2)
        start_x, start_y = resizedXY[0]
        if shape["type"] != "PolyLine":
            # Seems possible to have Polygon with only 1 point!
            if x2 is None:
                x2 = start_x + 1  # This will create a visible dot
            if y2 is None:
                y2 = start_y + 1
            draw.line((x2, y2, start_x, start_y), fill=lineColour, width=2)
    rv = BytesIO()
    compression = 0.9
    try:
        img.save(rv, "jpeg", quality=int(compression * 100))
        jpeg = rv.getvalue()
    finally:
        rv.close()
    return HttpResponse(jpeg, content_type="image/jpeg")
@login_required()
def render_shape_mask(request, shapeId, conn=None, **kwargs):
    """
    Returns mask as a png (supports transparency)

    @param request:     http request
    @param shapeId:     Shape ID of an omero.model.Mask shape
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping png
    @raise Http404:     if the Shape cannot be found
    """
    if not numpyInstalled:
        raise NotImplementedError("numpy not installed")
    params = omero.sys.Parameters()
    params.map = {"id": rlong(shapeId)}
    shape = conn.getQueryService().findByQuery(
        "select s from Shape s where s.id = :id", params, conn.SERVICE_OPTS
    )
    if shape is None:
        raise Http404("Shape ID: %s not found" % shapeId)
    width = int(shape.getWidth().getValue())
    height = int(shape.getHeight().getValue())
    color = unwrap(shape.getFillColor())
    # default fill is opaque yellow
    fill = (255, 255, 0, 255)
    if color is not None:
        color = rgb_int2rgba(color)
        fill = (color[0], color[1], color[2], int(color[3] * 255))
    mask_packed = shape.getBytes()
    # convert bytearray into something we can use
    # FIX: numpy.fromstring is deprecated (and removed in numpy >= 1.24)
    # for binary data; numpy.frombuffer is the supported equivalent.
    intarray = numpy.frombuffer(mask_packed, dtype=numpy.uint8)
    binarray = numpy.unpackbits(intarray)
    # Couldn't get the 'proper' way of doing this to work,
    # TODO: look at this again later. Faster than simple way below:
    # E.g. takes ~2 seconds for 1984 x 1984 mask
    # pixels = ""
    # steps = len(binarray) / 8
    # for i in range(steps):
    #     b = binarray[i*8: (i+1)*8]
    #     pixels += struct.pack("8B", b[0], b[1], b[2], b[3], b[4],
    #                           b[5], b[6], b[7])
    # for b in binarray:
    #     pixels += struct.pack("1B", b)
    # im = Image.frombytes("1", size=(width, height), data=pixels)

    # Simple approach - Just set each pixel in turn
    # E.g. takes ~12 seconds for 1984 x 1984 mask with most pixels '1'
    # Or ~5 seconds for same size mask with most pixels "0"
    img = Image.new("RGBA", size=(width, height), color=(0, 0, 0, 0))
    x = 0
    y = 0
    for pix in binarray:
        if pix == 1:
            img.putpixel((x, y), fill)
        x += 1
        if x > width - 1:
            x = 0
            y += 1
    rv = BytesIO()
    # return a png (supports transparency)
    img.save(rv, "png", quality=int(100))
    png = rv.getvalue()
    return HttpResponse(png, content_type="image/png")
def _get_signature_from_request(request):
"""
returns a string that identifies this image, along with the settings
passed on the request.
Useful for using as img identifier key, for prepared image.
@param request: http request
@return: String
"""
r = request.GET
rv = r.get("m", "_") + r.get("p", "_") + r.get("c", "_") + r.get("q", "_")
return rv
def _get_maps_enabled(request, name, sizeC=0):
    """
    Parses 'maps' query string from request.
    Returns a list (one entry per channel) of True/False/None 'enabled'
    flags for the named codomain map, or None if 'maps' is absent or the
    json is invalid.

    @param request:     dict-like request parameters
    @param name:        name of the map to look up in each channel entry
    @param sizeC:       minimum number of channels to report
    @return:            List of booleans/None, or None
    """
    if "maps" not in request:
        return None
    map_json = request["maps"]
    codomains = []
    try:
        # If coming from request string, need to load -> json
        if isinstance(map_json, (unicode, str)):
            map_json = json.loads(map_json)
        channel_count = max(len(map_json), sizeC)
        for idx in range(channel_count):
            enabled = None
            if idx < len(map_json):
                entry = map_json[idx].get(name)
                # If None, no change to saved status
                if entry is not None:
                    enabled = entry.get("enabled") in (True, "true")
            codomains.append(enabled)
    except Exception:
        logger.debug("Invalid json for query ?maps=%s" % map_json)
        codomains = None
    return codomains
def _get_prepared_image(
    request, iid, server_id=None, conn=None, saveDefs=False, retry=True
):
    """
    Fetches the Image object for image 'iid' and prepares it according to the
    request query, setting the channels, rendering model and projection
    arguments. The compression level is parsed and returned too.
    For parameters in request, see L{getImgDetailsFromReq}

    @param request:     http request
    @param iid:         Image ID
    @param server_id:   Server ID (unused here; kept for caller signature)
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @param saveDefs:    Try to save the rendering settings, default z and t.
    @param retry:       Try an extra attempt at this method
    @return:            Tuple (L{omero.gateway.ImageWrapper} image, quality)
                        or None if the image is not found
    """
    r = request.GET
    logger.debug(
        "Preparing Image:%r saveDefs=%r "
        "retry=%r request=%r conn=%s" % (iid, saveDefs, retry, r, str(conn))
    )
    img = conn.getObject("Image", iid)
    if img is None:
        return
    invert_flags = None
    if "maps" in r:
        reverses = _get_maps_enabled(r, "reverse", img.getSizeC())
        # 'reverse' is now deprecated (5.4.0). Also check for 'invert'
        invert_flags = _get_maps_enabled(r, "inverted", img.getSizeC())
        # invert is True if 'invert' OR 'reverse' is enabled
        if reverses is not None and invert_flags is not None:
            # per channel: prefer the 'inverted' flag, fall back to 'reverse'
            invert_flags = [
                z[0] if z[0] is not None else z[1] for z in zip(invert_flags, reverses)
            ]
        try:
            # quantization maps (just applied, not saved at the moment)
            qm = [m.get("quantization") for m in json.loads(r["maps"])]
            img.setQuantizationMaps(qm)
        except Exception:
            logger.debug("Failed to set quantization maps")
    if "c" in r:
        logger.debug("c=" + r["c"])
        activechannels, windows, colors = _split_channel_info(r["c"])
        allchannels = range(1, img.getSizeC() + 1)
        # If saving, apply to all channels
        if saveDefs and not img.setActiveChannels(
            allchannels, windows, colors, invert_flags
        ):
            logger.debug("Something bad happened while setting the active channels...")
        # Save the active/inactive state of the channels
        if not img.setActiveChannels(activechannels, windows, colors, invert_flags):
            logger.debug("Something bad happened while setting the active channels...")
    if r.get("m", None) == "g":
        img.setGreyscaleRenderingModel()
    elif r.get("m", None) == "c":
        img.setColorRenderingModel()
    # projection 'intmax' OR 'intmax|5:25'
    p = r.get("p", None)
    pStart, pEnd = None, None
    if p is not None and len(p.split("|")) > 1:
        p, startEnd = p.split("|", 1)
        try:
            # parse e.g. '5:25' into projection range start/end
            pStart, pEnd = [int(s) for s in startEnd.split(":")]
        except ValueError:
            pass
    img.setProjection(p)
    img.setProjectionRange(pStart, pEnd)
    img.setInvertedAxis(bool(r.get("ia", "0") == "1"))
    compress_quality = r.get("q", None)
    if saveDefs:
        # short-circuit idiom: only set default Z/T when present in request;
        # request values are 1-based, the gateway expects 0-based indexes
        "z" in r and img.setDefaultZ(long(r["z"]) - 1)
        "t" in r and img.setDefaultT(long(r["t"]) - 1)
        img.saveDefaults()
    return (img, compress_quality)
@login_required()
def render_image_region(request, iid, z, t, conn=None, **kwargs):
    """
    Returns a jpeg of the OMERO image, rendering only a region specified in
    query string as region=x,y,width,height. E.g. region=0,512,256,256
    Rendering settings can be specified in the request parameters.

    @param request:     http request
    @param iid:         image ID
    @param z:           Z index
    @param t:           T index
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping jpeg
    """
    server_id = request.session["connector"].server_id
    # if the region=x,y,w,h is not parsed correctly to give 4 ints then we
    # simply provide whole image plane.
    # alternatively, could return a 404?
    # if h == None:
    #     return render_image(request, iid, z, t, server_id=None, _conn=None,
    #                         **kwargs)
    pi = _get_prepared_image(request, iid, server_id=server_id, conn=conn)
    if pi is None:
        raise Http404
    img, compress_quality = pi
    tile = request.GET.get("tile", None)
    region = request.GET.get("region", None)
    level = None
    if tile:
        try:
            img._prepareRenderingEngine()
            w, h = img._re.getTileSize()
            levels = img._re.getResolutionLevels() - 1
            # tile query param is "level,col,row[,width,height]"
            zxyt = tile.split(",")
            # if tile size is given respect it
            if len(zxyt) > 4:
                tile_size = [int(zxyt[3]), int(zxyt[4])]
                tile_defaults = [w, h]
                max_tile_length = 1024
                try:
                    max_tile_length = int(
                        conn.getConfigService().getConfigValue(
                            "omero.pixeldata.max_tile_length"
                        )
                    )
                except Exception:
                    pass
                for i, tile_length in enumerate(tile_size):
                    # use default tile size if <= 0
                    if tile_length <= 0:
                        tile_size[i] = tile_defaults[i]
                    # allow no bigger than max_tile_length
                    if tile_length > max_tile_length:
                        tile_size[i] = max_tile_length
                w, h = tile_size
            v = int(zxyt[0])
            if v < 0:
                msg = "Invalid resolution level %s < 0" % v
                logger.debug(msg, exc_info=True)
                return HttpResponseBadRequest(msg)
            if levels == 0:  # non pyramid file
                if v > 0:
                    msg = "Invalid resolution level %s, non pyramid file" % v
                    logger.debug(msg, exc_info=True)
                    return HttpResponseBadRequest(msg)
                else:
                    level = None
            else:
                # requested level is inverted relative to the rendering
                # engine's numbering (engine level 0 = lowest resolution)
                level = levels - v
                if level < 0:
                    msg = (
                        "Invalid resolution level, \
                        %s > number of available levels %s "
                        % (v, levels)
                    )
                    logger.debug(msg, exc_info=True)
                    return HttpResponseBadRequest(msg)
            # convert tile column/row into pixel offsets
            x = int(zxyt[1]) * w
            y = int(zxyt[2]) * h
        except Exception:
            msg = "malformed tile argument, tile=%s" % tile
            logger.debug(msg, exc_info=True)
            return HttpResponseBadRequest(msg)
    elif region:
        try:
            # region query param is "x,y,width,height" in pixels
            xywh = region.split(",")
            x = int(xywh[0])
            y = int(xywh[1])
            w = int(xywh[2])
            h = int(xywh[3])
        except Exception:
            msg = "malformed region argument, region=%s" % region
            logger.debug(msg, exc_info=True)
            return HttpResponseBadRequest(msg)
    else:
        return HttpResponseBadRequest("tile or region argument required")
    # region details in request are used as key for caching.
    jpeg_data = webgateway_cache.getImage(request, server_id, img, z, t)
    if jpeg_data is None:
        jpeg_data = img.renderJpegRegion(
            z, t, x, y, w, h, level=level, compression=compress_quality
        )
        if jpeg_data is None:
            raise Http404
        webgateway_cache.setImage(request, server_id, img, z, t, jpeg_data)
    rsp = HttpResponse(jpeg_data, content_type="image/jpeg")
    return rsp
@login_required()
def render_image(request, iid, z=None, t=None, conn=None, **kwargs):
    """
    Renders the image with id {{iid}} at {{z}} and {{t}} as jpeg.
    Many options are available from the request dict. See
    L{getImgDetailsFromReq} for list.
    I am assuming a single Pixels object on image with image-Id='iid'. May be
    wrong

    @param request:     http request
    @param iid:         image ID
    @param z:           Z index
    @param t:           T index
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping jpeg
    """
    server_id = request.session["connector"].server_id
    pi = _get_prepared_image(request, iid, server_id=server_id, conn=conn)
    if pi is None:
        raise Http404
    img, compress_quality = pi
    jpeg_data = webgateway_cache.getImage(request, server_id, img, z, t)
    if jpeg_data is None:
        jpeg_data = img.renderJpeg(z, t, compression=compress_quality)
        if jpeg_data is None:
            raise Http404
        webgateway_cache.setImage(request, server_id, img, z, t, jpeg_data)

    format = request.GET.get("format", "jpeg")
    rsp = HttpResponse(jpeg_data, content_type="image/jpeg")
    if "download" in kwargs and kwargs["download"]:
        if format == "png":
            # convert jpeg data to png...
            i = Image.open(BytesIO(jpeg_data))
            output = BytesIO()
            i.save(output, "png")
            jpeg_data = output.getvalue()
            output.close()
            rsp = HttpResponse(jpeg_data, content_type="image/png")
        elif format == "tif":
            # convert jpeg data to TIFF
            i = Image.open(BytesIO(jpeg_data))
            output = BytesIO()
            i.save(output, "tiff")
            jpeg_data = output.getvalue()
            output.close()
            rsp = HttpResponse(jpeg_data, content_type="image/tiff")
        fileName = img.getName()
        try:
            fileName = fileName.decode("utf8")
        except AttributeError:
            pass  # python 3
        fileName = fileName.replace(",", ".").replace(" ", "_")
        # FIX: image names are user-controlled; drop any path components and
        # header-breaking characters so a malicious name (e.g. "../../x" or
        # one containing CR/LF) cannot inject headers or traverse paths when
        # the browser saves the download.
        fileName = fileName.replace("\\", "/").split("/")[-1]
        fileName = "".join(c for c in fileName if c not in '\r\n"')
        rsp["Content-Type"] = "application/force-download"
        rsp["Content-Length"] = len(jpeg_data)
        rsp["Content-Disposition"] = "attachment; filename=%s.%s" % (fileName, format)
    return rsp
@login_required()
def render_ome_tiff(request, ctx, cid, conn=None, **kwargs):
    """
    Renders the OME-TIFF representation of the image(s) with id cid in ctx
    (i)mage, (d)ataset, or (p)roject.
    For multiple images export, images that require pixels pyramid (big
    images) will be silently skipped.
    If exporting a single big image or if all images in a multple image export
    are big, a 404 will be triggered.
    A request parameter dryrun can be passed to return the count of images
    that would actually be exported.

    @param request:     http request
    @param ctx:         'p' or 'd' or 'i'
    @param cid:         Project, Dataset or Image ID
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping the tiff (or zip for multiple
                        files), or redirect to temp file/zip
                        if dryrun is True, returns count of images that would
                        be exported
    """

    def _sanitize_name(name):
        # FIX: object names are user-supplied and are used to build
        # temp-file paths and ZIP entry names below. Strip path separators
        # so a name like "../../evil" cannot traverse out of the download
        # directory when written or unarchived (CWE-22).
        return name.replace("/", "_").replace("\\", "_")

    server_id = request.session["connector"].server_id
    imgs = []
    if ctx == "p":
        obj = conn.getObject("Project", cid)
        if obj is None:
            raise Http404
        for d in obj.listChildren():
            imgs.extend(list(d.listChildren()))
        name = obj.getName()
    elif ctx == "d":
        obj = conn.getObject("Dataset", cid)
        if obj is None:
            raise Http404
        imgs.extend(list(obj.listChildren()))
        selection = list(filter(None, request.GET.get("selection", "").split(",")))
        if len(selection) > 0:
            logger.debug(selection)
            logger.debug(imgs)
            imgs = [x for x in imgs if str(x.getId()) in selection]
            logger.debug(imgs)
            if len(imgs) == 0:
                raise Http404
        name = "%s-%s" % (obj.getParent().getName(), obj.getName())
    elif ctx == "w":
        obj = conn.getObject("Well", cid)
        if obj is None:
            raise Http404
        imgs.extend([x.getImage() for x in obj.listChildren()])
        plate = obj.getParent()
        coord = "%s%s" % (
            plate.getRowLabels()[obj.row],
            plate.getColumnLabels()[obj.column],
        )
        name = "%s-%s-%s" % (plate.getParent().getName(), plate.getName(), coord)
    else:
        obj = conn.getObject("Image", cid)
        if obj is None:
            raise Http404
        imgs.append(obj)

    # big images (requiring a pixels pyramid) cannot be exported
    imgs = [x for x in imgs if not x.requiresPixelsPyramid()]

    if request.GET.get("dryrun", False):
        # just report how many images would be exported
        rv = json.dumps(len(imgs))
        c = request.GET.get("callback", None)
        if c is not None and not kwargs.get("_internal", False):
            rv = "%s(%s)" % (c, rv)
        return HttpJavascriptResponse(rv)
    if len(imgs) == 0:
        raise Http404
    if len(imgs) == 1:
        # single image: serve one .ome.tiff
        obj = imgs[0]
        key = (
            "_".join((str(x.getId()) for x in obj.getAncestry()))
            + "_"
            + str(obj.getId())
            + "_ome_tiff"
        )
        # total name len <= 255, 9 is for .ome.tiff
        fnamemax = 255 - len(str(obj.getId())) - 10
        objname = _sanitize_name(obj.getName())[:fnamemax]
        fpath, rpath, fobj = webgateway_tempfile.new(
            str(obj.getId()) + "-" + objname + ".ome.tiff", key=key
        )
        if fobj is True:
            # already exists
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + rpath
            )
        tiff_data = webgateway_cache.getOmeTiffImage(request, server_id, imgs[0])
        if tiff_data is None:
            try:
                tiff_data = imgs[0].exportOmeTiff()
            except Exception:
                logger.debug("Failed to export image (2)", exc_info=True)
                tiff_data = None
            if tiff_data is None:
                webgateway_tempfile.abort(fpath)
                raise Http404
            webgateway_cache.setOmeTiffImage(request, server_id, imgs[0], tiff_data)
        if fobj is None:
            rsp = HttpResponse(tiff_data, content_type="image/tiff")
            rsp["Content-Disposition"] = 'attachment; filename="%s.ome.tiff"' % (
                str(obj.getId()) + "-" + objname
            )
            rsp["Content-Length"] = len(tiff_data)
            return rsp
        else:
            fobj.write(tiff_data)
            fobj.close()
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + rpath
            )
    else:
        # multiple images: bundle them into a zip
        try:
            img_ids = "+".join((str(x.getId()) for x in imgs)).encode("utf-8")
            key = (
                "_".join((str(x.getId()) for x in imgs[0].getAncestry()))
                + "_"
                + md5(img_ids).hexdigest()
                + "_ome_tiff_zip"
            )
            name = _sanitize_name(name)
            fpath, rpath, fobj = webgateway_tempfile.new(name + ".zip", key=key)
            if fobj is True:
                return HttpResponseRedirect(
                    settings.STATIC_URL + "webgateway/tfiles/" + rpath
                )
            logger.debug(fpath)
            if fobj is None:
                fobj = BytesIO()
            zobj = zipfile.ZipFile(fobj, "w", zipfile.ZIP_STORED)
            for obj in imgs:
                tiff_data = webgateway_cache.getOmeTiffImage(request, server_id, obj)
                if tiff_data is None:
                    tiff_data = obj.exportOmeTiff()
                    if tiff_data is None:
                        continue
                    webgateway_cache.setOmeTiffImage(request, server_id, obj, tiff_data)
                # While ZIP itself doesn't have the 255 char limit for
                # filenames, the FS where these get unarchived might, so trim
                # names
                # total name len <= 255, 9 is for .ome.tiff
                fnamemax = 255 - len(str(obj.getId())) - 10
                objname = _sanitize_name(obj.getName())[:fnamemax]
                zobj.writestr(str(obj.getId()) + "-" + objname + ".ome.tiff", tiff_data)
            zobj.close()
            if fpath is None:
                zip_data = fobj.getvalue()
                rsp = HttpResponse(zip_data, content_type="application/zip")
                rsp["Content-Disposition"] = 'attachment; filename="%s.zip"' % name
                rsp["Content-Length"] = len(zip_data)
                return rsp
        except Exception:
            logger.debug(traceback.format_exc())
            raise
        return HttpResponseRedirect(settings.STATIC_URL + "webgateway/tfiles/" + rpath)
@login_required()
def render_movie(request, iid, axis, pos, conn=None, **kwargs):
    """
    Renders a movie from the image with id iid

    @param request:     http request
    @param iid:         Image ID
    @param axis:        Movie frames are along 'z' or 't' dimension. String
    @param pos:         The T index (for z axis) or Z index (for t axis)
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping the file, or redirect to temp
                        file
    """
    server_id = request.session["connector"].server_id
    try:
        # Prepare a filename we'll use for temp cache, and check if file is
        # already there
        opts = {}
        opts["format"] = "video/" + request.GET.get("format", "quicktime")
        opts["fps"] = int(request.GET.get("fps", 4))
        opts["minsize"] = (512, 512, "Black")
        ext = ".avi"
        key = "%s-%s-%s-%d-%s-%s" % (
            iid,
            axis,
            pos,
            opts["fps"],
            _get_signature_from_request(request),
            request.GET.get("format", "quicktime"),
        )

        pos = int(pos)
        pi = _get_prepared_image(request, iid, server_id=server_id, conn=conn)
        if pi is None:
            raise Http404
        img, compress_quality = pi

        # FIX: image names are user-controlled; strip path separators so
        # the temp-file name cannot escape the temp directory (CWE-22).
        movie_name = img.getName().replace("/", "_").replace("\\", "_")
        fpath, rpath, fobj = webgateway_tempfile.new(movie_name + ext, key=key)
        # FIX: pass values as lazy logging args; the old
        # logger.debug(fpath, rpath, fobj) treated rpath/fobj as %-format
        # args for a message with no placeholders.
        logger.debug("%s %s %s", fpath, rpath, fobj)
        if fobj is True:
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + rpath
            )
            # os.path.join(rpath, img.getName() + ext))

        if "optsCB" in kwargs:
            opts.update(kwargs["optsCB"](img))
        opts.update(kwargs.get("opts", {}))
        logger.debug(
            "rendering movie for img %s with axis %s, pos %i and opts %s"
            % (iid, axis, pos, opts)
        )
        # fpath, rpath = webgateway_tempfile.newdir()
        if fpath is None:
            fo, fn = tempfile.mkstemp()
        else:
            fn = fpath  # os.path.join(fpath, img.getName())
        if axis.lower() == "z":
            dext, mimetype = img.createMovie(
                fn, 0, img.getSizeZ() - 1, pos - 1, pos - 1, opts
            )
        else:
            dext, mimetype = img.createMovie(
                fn, pos - 1, pos - 1, 0, img.getSizeT() - 1, opts
            )
        if dext is None and mimetype is None:
            # createMovie is currently only available on 4.1_custom
            # https://trac.openmicroscopy.org/ome/ticket/3857
            raise Http404
        if fpath is None:
            # FIX: read the movie back in binary mode (text mode cannot
            # decode arbitrary video bytes on python 3) and always release
            # the mkstemp descriptor and remove the temp file.
            try:
                with open(fn, "rb") as movie_file:
                    movie = movie_file.read()
            finally:
                os.close(fo)
                os.remove(fn)
            rsp = HttpResponse(movie, content_type=mimetype)
            rsp["Content-Disposition"] = 'attachment; filename="%s"' % (
                movie_name + ext
            )
            rsp["Content-Length"] = len(movie)
            return rsp
        else:
            fobj.close()
            # shutil.move(fn, fn + ext)
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + rpath
            )
            # os.path.join(rpath, img.getName() + ext))
    except Exception:
        logger.debug(traceback.format_exc())
        raise
@login_required()
def render_split_channel(request, iid, z, t, conn=None, **kwargs):
    """
    Renders a split channel view of the image with id {{iid}} at {{z}} and
    {{t}} as jpeg.
    Many options are available from the request dict.
    Requires Pillow to be installed on the server.

    @param request:     http request
    @param iid:         Image ID
    @param z:           Z index
    @param t:           T index
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping a jpeg
    """
    server_id = request.session["connector"].server_id
    prepared = _get_prepared_image(request, iid, server_id=server_id, conn=conn)
    if prepared is None:
        raise Http404
    img, compress_quality = prepared
    # fall back to 0.9 compression when the requested quality is missing,
    # unparseable-as-truthy, or zero
    if compress_quality:
        compress_quality = float(compress_quality) or 0.9
    else:
        compress_quality = 0.9
    jpeg_data = webgateway_cache.getSplitChannelImage(request, server_id, img, z, t)
    if jpeg_data is None:
        jpeg_data = img.renderSplitChannel(z, t, compression=compress_quality)
        if jpeg_data is None:
            raise Http404
        webgateway_cache.setSplitChannelImage(request, server_id, img, z, t, jpeg_data)
    return HttpResponse(jpeg_data, content_type="image/jpeg")
def debug(f):
    """
    Decorator for adding debugging functionality to methods.
    Recognised ?debug= values: 'slow' (sleep 5s), 'fail' (raise Http404),
    'error' (raise AttributeError).

    @param f:       The function to wrap
    @return:        The wrapped function
    """

    @wraps(f)
    def wrap(request, *args, **kwargs):
        debug_flags = request.GET.getlist("debug")
        if "slow" in debug_flags:
            time.sleep(5)
        if "fail" in debug_flags:
            raise Http404
        if "error" in debug_flags:
            raise AttributeError("Debug requested error")
        return f(request, *args, **kwargs)

    return wrap
def jsonp(f):
    """
    Decorator for adding connection debugging and returning function result as
    json, depending on values in kwargs

    @param f:       The function to wrap
    @return:        The wrapped function, which will return json
    """

    @wraps(f)
    def wrap(request, *args, **kwargs):
        logger.debug("jsonp")
        try:
            # ensure wrapped views always receive a server_id kwarg,
            # defaulting to the session's connector
            server_id = kwargs.get("server_id", None)
            if server_id is None and request.session.get("connector"):
                server_id = request.session["connector"].server_id
            kwargs["server_id"] = server_id
            rv = f(request, *args, **kwargs)
            if kwargs.get("_raw", False):
                # caller wants the unserialised return value
                return rv
            if isinstance(rv, HttpResponse):
                # view already built a full response; pass it through
                return rv
            c = request.GET.get("callback", None)
            if c is not None and not kwargs.get("_internal", False):
                # JSONP: wrap the json payload in the requested callback
                rv = json.dumps(rv)
                rv = "%s(%s)" % (c, rv)
                # mimetype for JSONP is application/javascript
                return HttpJavascriptResponse(rv)
            if kwargs.get("_internal", False):
                return rv
            # mimetype for JSON is application/json
            # NB: To support old api E.g. /get_rois_json/
            # We need to support lists
            safe = type(rv) is dict
            return JsonResponse(rv, safe=safe)
        except Exception as ex:
            # Default status is 500 'server error'
            # But we try to handle all 'expected' errors appropriately
            # TODO: handle omero.ConcurrencyException
            status = 500
            if isinstance(ex, omero.SecurityViolation):
                status = 403
            elif isinstance(ex, omero.ApiUsageException):
                status = 400
            trace = traceback.format_exc()
            logger.debug(trace)
            if kwargs.get("_raw", False) or kwargs.get("_internal", False):
                # internal/raw callers handle errors themselves
                raise
            return JsonResponse(
                {"message": str(ex), "stacktrace": trace}, status=status
            )

    return wrap
@debug
@login_required()
def render_row_plot(request, iid, z, t, y, conn=None, w=1, **kwargs):
    """
    Renders the line plot for the image with id {{iid}} at {{z}} and {{t}} as
    gif with transparent background.
    Many options are available from the request dict.
    I am assuming a single Pixels object on image with Image ID='iid'. May be
    wrong
    TODO: cache

    @param request:     http request
    @param iid:         Image ID
    @param z:           Z index
    @param t:           T index
    @param y:           Y position of row to measure
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @param w:           Line width
    @return:            http response wrapping a gif
    """
    w = w or 1
    prepared = _get_prepared_image(request, iid, conn=conn)
    if prepared is None:
        raise Http404
    img, compress_quality = prepared
    try:
        gif_data = img.renderRowLinePlotGif(int(z), int(t), int(y), int(w))
    except Exception:
        logger.debug("a", exc_info=True)
        raise
    if gif_data is None:
        raise Http404
    return HttpResponse(gif_data, content_type="image/gif")
@debug
@login_required()
def render_col_plot(request, iid, z, t, x, w=1, conn=None, **kwargs):
    """
    Render a vertical line plot for the image {{iid}} at plane {{z}}/{{t}},
    returned as a gif with transparent background.
    Many options are available from the request dict.
    I am assuming a single Pixels object on image with id='iid'. May be wrong
    TODO: cache

    @param request:     http request
    @param iid:         Image ID
    @param z:           Z index
    @param t:           T index
    @param x:           X position of column to measure
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @param w:           Line width
    @return:            http response wrapping a gif
    """
    w = w or 1  # default line width of 1 pixel
    prepared = _get_prepared_image(request, iid, conn=conn)
    if prepared is None:
        raise Http404
    image, _compress_quality = prepared
    gif_data = image.renderColLinePlotGif(int(z), int(t), int(x), int(w))
    if gif_data is None:
        raise Http404
    return HttpResponse(gif_data, content_type="image/gif")
@login_required()
@jsonp
def imageData_json(request, conn=None, _internal=False, **kwargs):
    """
    Get a dict with image information
    TODO: cache

    @param request:     http request
    @param conn:        L{omero.gateway.BlitzGateway}
    @param _internal:   TODO: ?
    @return:            Dict
    """
    image_id = kwargs["iid"]
    marshal_key = kwargs.get("key", None)
    image = conn.getObject("Image", image_id)
    if image is None:
        # public users get 403 so they can attempt a real login; others 404
        if is_public_user(request):
            return HttpResponseForbidden()
        return HttpResponseNotFound("Image:%s not found" % image_id)
    if request.GET.get("getDefaults") == "true":
        image.resetDefaults(save=False)
    return imageMarshal(image, key=marshal_key, request=request)
@login_required()
@jsonp
def wellData_json(request, conn=None, _internal=False, **kwargs):
    """
    Get a dict with well information
    TODO: cache

    @param request:     http request
    @param conn:        L{omero.gateway.BlitzGateway}
    @param _internal:   TODO: ?
    @return:            Dict
    """
    well_id = kwargs["wid"]
    well = conn.getObject("Well", well_id)
    if well is None:
        return HttpJavascriptResponseServerError('""')
    prefix = kwargs.get("thumbprefix", "webgateway_render_thumbnail")

    def default_thumb_url(iid):
        # reverse() the configured thumbnail view for each child image
        return reverse(prefix, args=(iid,))

    xtra = {"thumbUrlPrefix": kwargs.get("urlprefix", default_thumb_url)}
    return well.simpleMarshal(xtra=xtra)
@login_required()
@jsonp
def plateGrid_json(request, pid, field=0, conn=None, **kwargs):
    """
    Returns a dict of plate grid metadata for the plate {{pid}} at the given
    acquisition field, using (and populating) the webgateway json cache.

    @param request:     http request; optional 'size' param sets thumb size
    @param pid:         Plate ID
    @param field:       Acquisition field index (defaults to 0)
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            Dict of plate grid metadata
    @raise Http404:     if the plate is not found
    """
    try:
        field = long(field or 0)
    except ValueError:
        field = 0
    prefix = kwargs.get("thumbprefix", "webgateway_render_thumbnail")
    thumbsize = getIntOrDefault(request, "size", None)
    logger.debug(thumbsize)
    server_id = kwargs["server_id"]

    def get_thumb_url(iid):
        if thumbsize is not None:
            return reverse(prefix, args=(iid, thumbsize))
        return reverse(prefix, args=(iid,))

    plateGrid = PlateGrid(conn, pid, field, kwargs.get("urlprefix", get_thumb_url))
    plate = plateGrid.plate
    if plate is None:
        # Fix: previously the Http404 *class* was returned, which the jsonp
        # wrapper cannot serialize; raising produces a proper 404 response.
        raise Http404
    cache_key = "plategrid-%d-%s" % (field, thumbsize)
    rv = webgateway_cache.getJson(request, server_id, plate, cache_key)
    if rv is None:
        rv = plateGrid.metadata
        webgateway_cache.setJson(request, server_id, plate, json.dumps(rv), cache_key)
    else:
        rv = json.loads(rv)
    return rv
@login_required()
@jsonp
def get_thumbnails_json(request, w=None, conn=None, **kwargs):
    """
    Returns base64 encoded jpeg with the rendered thumbnail for images
    'id'

    @param request:     http request
    @param w:           Thumbnail max width. 96 by default
    @return:            http response containing base64 encoded thumbnails
    """
    browser_settings = request.session.get("server_settings", {}).get("browser", {})
    if w is None:
        w = browser_settings.get("thumb_default_size", 96)
    image_ids = list(set(get_longs(request, "id")))  # drop duplicate ids
    # A single ID can be delegated straight to getThumbnail()
    if len(image_ids) == 1:
        iid = image_ids[0]
        try:
            data = _render_thumbnail(request, iid, w=w, conn=conn)
            return {
                iid: "data:image/jpeg;base64,%s"
                % base64.b64encode(data).decode("utf-8")
            }
        except Exception:
            return {iid: None}
    logger.debug("Image ids: %r" % image_ids)
    if len(image_ids) > settings.THUMBNAILS_BATCH:
        return HttpJavascriptResponseServerError(
            "Max %s thumbnails at a time." % settings.THUMBNAILS_BATCH
        )
    thumbnails = conn.getThumbnailSet([rlong(i) for i in image_ids], w)
    rv = {iid: None for iid in image_ids}
    for iid in image_ids:
        try:
            jpeg = thumbnails[iid]
            if len(jpeg) > 0:
                # replace thumbnail urls by base64 encoded image
                rv[iid] = "data:image/jpeg;base64,%s" % base64.b64encode(jpeg).decode(
                    "utf-8"
                )
        except KeyError:
            logger.error("Thumbnail not available. (img id: %d)" % iid)
        except Exception:
            logger.error(traceback.format_exc())
    return rv
@login_required()
@jsonp
def get_thumbnail_json(request, iid, w=None, h=None, conn=None, _defcb=None, **kwargs):
    """
    Returns an HttpResponse base64 encoded jpeg with the rendered thumbnail
    for image 'iid'

    @param request:     http request
    @param iid:         Image ID
    @param w:           Thumbnail max width. 96 by default
    @param h:           Thumbnail max height
    @return:            http response containing base64 encoded thumbnail
    """
    jpeg_data = _render_thumbnail(
        request=request, iid=iid, w=w, h=h, conn=conn, _defcb=_defcb, **kwargs
    )
    encoded = base64.b64encode(jpeg_data).decode("utf-8")
    return "data:image/jpeg;base64,%s" % encoded
@login_required()
@jsonp
def listImages_json(request, did, conn=None, **kwargs):
    """
    lists all Images in a Dataset, as json
    TODO: cache

    @param request:     http request
    @param did:         Dataset ID
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            list of image json.
    """
    dataset = conn.getObject("Dataset", did)
    if dataset is None:
        return HttpJavascriptResponseServerError('""')
    prefix = kwargs.get("thumbprefix", "webgateway_render_thumbnail")

    def default_thumb_url(iid):
        return reverse(prefix, args=(iid,))

    xtra = {
        "thumbUrlPrefix": kwargs.get("urlprefix", default_thumb_url),
        "tiled": request.GET.get("tiled", False),
    }
    return [child.simpleMarshal(xtra=xtra) for child in dataset.listChildren()]
@login_required()
@jsonp
def listWellImages_json(request, did, conn=None, **kwargs):
    """
    lists all Images in a Well, as json
    TODO: cache

    @param request:     http request
    @param did:         Well ID
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            list of image json.
    """
    well = conn.getObject("Well", did)
    acq = getIntOrDefault(request, "run", None)
    if well is None:
        return HttpJavascriptResponseServerError('""')
    prefix = kwargs.get("thumbprefix", "webgateway_render_thumbnail")

    def default_thumb_url(iid):
        return reverse(prefix, args=(iid,))

    xtra = {"thumbUrlPrefix": kwargs.get("urlprefix", default_thumb_url)}

    def marshal_pos(ws):
        # well-sample stage position, only for the axes that are set
        pos = {}
        for axis, length in (["x", ws.getPosX()], ["y", ws.getPosY()]):
            if length is not None:
                pos[axis] = {"value": length.getValue(), "unit": str(length.getUnit())}
        return pos

    well_images = []
    for ws in well.listChildren():
        # optionally restrict to a single acquisition 'run'
        if (
            acq is not None
            and ws.plateAcquisition is not None
            and ws.plateAcquisition.id.val != acq
        ):
            continue
        img = ws.getImage()
        if img is None:
            continue
        marshalled = img.simpleMarshal(xtra=xtra)
        pos = marshal_pos(ws)
        if len(pos.keys()) > 0:
            marshalled["position"] = pos
        well_images.append(marshalled)
    return well_images
@login_required()
@jsonp
def listDatasets_json(request, pid, conn=None, **kwargs):
    """
    lists all Datasets in a Project, as json
    TODO: cache

    @param request:     http request
    @param pid:         Project ID
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            list of dataset json.
    """
    project = conn.getObject("Project", pid)
    if project is None:
        return HttpJavascriptResponse("[]")
    return [
        dataset.simpleMarshal(xtra={"childCount": 0})
        for dataset in project.listChildren()
    ]
@login_required()
@jsonp
def datasetDetail_json(request, did, conn=None, **kwargs):
    """
    return json encoded details for a dataset
    TODO: cache
    """
    return conn.getObject("Dataset", did).simpleMarshal()
@login_required()
@jsonp
def listProjects_json(request, conn=None, **kwargs):
    """
    lists all Projects, as json
    TODO: cache

    @param request:     http request
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            list of project json.
    """
    return [
        {"id": pr.id, "name": pr.name, "description": pr.description or ""}
        for pr in conn.listProjects()
    ]
@login_required()
@jsonp
def projectDetail_json(request, pid, conn=None, **kwargs):
    """
    grab details from one specific project
    TODO: cache

    @param request:     http request
    @param pid:         Project ID
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            project details as dict.
    """
    project = conn.getObject("Project", pid)
    return project.simpleMarshal()
@jsonp
def open_with_options(request, **kwargs):
    """
    Make the settings.OPEN_WITH available via JSON
    """
    viewers = []
    for ow in settings.OPEN_WITH:
        if len(ow) < 2:
            continue
        viewer = {"id": ow[0]}
        try:
            viewer["url"] = reverse(ow[1])
        except NoReverseMatch:
            # not a named view - keep the raw url string
            viewer["url"] = ow[1]
        # try non-essential parameters...
        # NB: Need supported_objects OR script_url to enable plugin
        try:
            if len(ow) > 2:
                options = ow[2]
                if "supported_objects" in options:
                    viewer["supported_objects"] = options["supported_objects"]
                if "target" in options:
                    viewer["target"] = options["target"]
                if "script_url" in options:
                    # absolute urls are used verbatim...
                    if options["script_url"].startswith("http"):
                        viewer["script_url"] = options["script_url"]
                    else:
                        # ...otherwise, assume within static
                        viewer["script_url"] = static(options["script_url"])
                if "label" in options:
                    viewer["label"] = options["label"]
        except Exception:
            # ignore invalid params
            pass
        viewers.append(viewer)
    return {"open_with_options": viewers}
def searchOptFromRequest(request):
    """
    Returns a dict of options for searching, based on
    parameters in the http request
    Request keys include:
    - ctx: (http request) 'imgs' to search only images
    - text: (http request) the actual text phrase
    - start: starting index (0 based) for result
    - limit: nr of results to retuen (0 == unlimited)
    - author:
    - grabData:
    - parents:

    @param request:     http request
    @return:            Dict of options
    """
    try:
        params = request.GET
        opts = {
            "search": unicode(params.get("text", "")).encode("utf8"),
            "ctx": params.get("ctx", ""),
            "grabData": bool(params.get("grabData", False)),
            "parents": bool(params.get("parents", False)),
            "start": int(params.get("start", 0)),
            "limit": int(params.get("limit", 0)),
            "key": params.get("key", None),
        }
        author = params.get("author", "")
        if author:
            opts["search"] += " author:" + author
        return opts
    except Exception:
        # malformed parameters: log and fall back to an empty option set
        logger.error(traceback.format_exc())
        return {}
@TimeIt(logging.INFO)
@login_required()
@jsonp
def search_json(request, conn=None, **kwargs):
    """
    Search for objects in blitz.
    Returns json encoded list of marshalled objects found by the search query
    Request keys include:
    - text: The text to search for
    - ctx: (http request) 'imgs' to search only images
    - text: (http request) the actual text phrase
    - start: starting index (0 based) for result
    - limit: nr of results to retuen (0 == unlimited)
    - author:
    - grabData:
    - parents:

    @param request:     http request
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            json search results
    TODO: cache
    """
    server_id = request.session["connector"].server_id
    opts = searchOptFromRequest(request)
    rv = []
    logger.debug("searchObjects(%s)" % (opts["search"]))
    # search returns blitz_connector wrapper objects

    def urlprefix(iid):
        return reverse("webgateway_render_thumbnail", args=(iid,))

    xtra = {"thumbUrlPrefix": kwargs.get("urlprefix", urlprefix)}
    try:
        if opts["ctx"] == "imgs":
            sr = conn.searchObjects(["image"], opts["search"], conn.SERVICE_OPTS)
        else:
            # searches P/D/I
            sr = conn.searchObjects(None, opts["search"], conn.SERVICE_OPTS)
    except ApiUsageException:
        # e.g. wildcard-only or otherwise unparsable query text
        return HttpJavascriptResponseServerError('"parse exception"')

    def marshal():
        # Marshals the search results; when 'grabData' is set for an image
        # search, full image data is fetched for the [start, start+limit)
        # window of results, otherwise a simple marshal of every hit is used.
        rv = []
        if opts["grabData"] and opts["ctx"] == "imgs":
            # clamp the window to the actual number of results
            bottom = min(opts["start"], len(sr) - 1)
            if opts["limit"] == 0:
                top = len(sr)
            else:
                top = min(len(sr), bottom + opts["limit"])
            for i in range(bottom, top):
                e = sr[i]
                # for e in sr:
                try:
                    rv.append(
                        imageData_json(
                            request,
                            server_id,
                            iid=e.id,
                            key=opts["key"],
                            conn=conn,
                            _internal=True,
                        )
                    )
                except AttributeError as x:
                    # skip results that can't be marshalled as image data
                    logger.debug(
                        "(iid %i) ignoring Attribute Error: %s" % (e.id, str(x))
                    )
                    pass
                except omero.ServerError as x:
                    logger.debug("(iid %i) ignoring Server Error: %s" % (e.id, str(x)))
            return rv
        else:
            return [x.simpleMarshal(xtra=xtra, parents=opts["parents"]) for x in sr]

    rv = timeit(marshal)()
    logger.debug(rv)
    return rv
@require_POST
@login_required()
def save_image_rdef_json(request, iid, conn=None, **kwargs):
    """
    Requests that the rendering defs passed in the request be set as the
    default for this image.
    Rendering defs in request listed at L{getImgDetailsFromReq}
    TODO: jsonp

    @param request:     http request
    @param iid:         Image ID
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            http response 'true' or 'false'
    """
    server_id = request.session["connector"].server_id
    prepared = _get_prepared_image(
        request, iid, server_id=server_id, conn=conn, saveDefs=True
    )
    if prepared is None:
        json_data = "false"
    else:
        image = prepared[0]
        user_id = image._conn.getEventContext().userId
        # cached renderings are now stale - drop them and regenerate the thumb
        webgateway_cache.invalidateObject(server_id, user_id, image)
        image.getThumbnail()
        json_data = "true"
    callback = request.GET.get("callback", None)
    if callback:
        json_data = "%s(%s)" % (callback, json_data)
    return HttpJavascriptResponse(json_data)
@login_required()
@jsonp
def listLuts_json(request, conn=None, **kwargs):
    """
    Lists lookup tables 'LUTs' availble for rendering
    This list is dynamic and will change if users add LUTs to their server.
    We include 'png_index' which is the index of each LUT within the
    static/webgateway/img/luts_10.png or -1 if LUT is not found.
    """
    scriptService = conn.getScriptService()
    rv = []
    for lut in scriptService.getScriptsByMimetype("text/x-lut"):
        lutsrc = lut.path.val + lut.name.val
        try:
            png_index = LUTS_IN_PNG.index(lutsrc)
        except ValueError:
            # LUT not baked into the static png sprite
            png_index = -1
        rv.append(
            {
                "id": lut.id.val,
                "path": lut.path.val,
                "name": lut.name.val,
                "size": unwrap(lut.size),
                "png_index": png_index,
            }
        )
    rv.sort(key=lambda x: x["name"].lower())
    return {"luts": rv, "png_luts": LUTS_IN_PNG}
@login_required()
def list_compatible_imgs_json(request, iid, conn=None, **kwargs):
    """
    Lists the images on the same project that would be viable targets for
    copying rendering settings.
    TODO: change method to:
    list_compatible_imgs_json (request, iid, server_id=None, conn=None,
    **kwargs):

    @param request:     http request
    @param iid:         Image ID
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            json list of image IDs
    """
    json_data = "false"
    r = request.GET
    img = conn.getObject("Image", iid) if conn is not None else None

    if img is not None:
        # gather every image in the same project
        imgs = []
        for ds in img.getProject().listChildren():
            imgs.extend(ds.listChildren())
        # properties that must match for applySettingsToImages to succeed
        img_ptype = img.getPrimaryPixels().getPixelsType().getValue()
        img_ccount = img.getSizeC()
        img_ew = sorted(x.getLabel() for x in img.getChannels())

        def compat(i):
            # never offer the source image itself
            if long(i.getId()) == long(iid):
                return False
            pp = i.getPrimaryPixels()
            if (
                pp is None
                or i.getPrimaryPixels().getPixelsType().getValue() != img_ptype
                or i.getSizeC() != img_ccount
            ):
                return False
            # channel labels must match (order-insensitive)
            if sorted(x.getLabel() for x in i.getChannels()) != img_ew:
                return False
            return True

        imgs = filter(compat, imgs)
        json_data = json.dumps([x.getId() for x in imgs])

    if r.get("callback", None):
        json_data = "%s(%s)" % (r["callback"], json_data)
    return HttpJavascriptResponse(json_data)
@require_POST
@login_required()
@jsonp
def reset_rdef_json(request, toOwners=False, conn=None, **kwargs):
    """
    Simply takes request 'to_type' and 'toids' and
    delegates to Rendering Settings service to reset
    settings accordings.

    @param toOwners:    if True, default to the owner's settings.
    """
    post_data = request.POST
    toids = post_data.getlist("toids")
    to_type = str(post_data.get("to_type", "image")).title()
    if to_type == "Acquisition":
        to_type = "PlateAcquisition"
    if not toids:
        raise Http404(
            "Need to specify objects in request, E.g."
            " ?totype=dataset&toids=1&toids=2"
        )
    toids = [int(oid) for oid in toids]
    rss = conn.getRenderingSettingsService()
    # look the first target up across all groups, then pin its group context
    conn.SERVICE_OPTS.setOmeroGroup("-1")
    first = conn.getObject(to_type, toids[0])
    if first is not None:
        conn.SERVICE_OPTS.setOmeroGroup(first.getDetails().group.id.val)
    if toOwners:
        return rss.resetDefaultsByOwnerInSet(to_type, toids, conn.SERVICE_OPTS)
    return rss.resetDefaultsInSet(to_type, toids, conn.SERVICE_OPTS)
@login_required()
@jsonp
def copy_image_rdef_json(request, conn=None, **kwargs):
    """
    If 'fromid' is in request, copy the image ID to session,
    for applying later using this same method.
    If list of 'toids' is in request, paste the image ID from the session
    to the specified images.
    If 'fromid' AND 'toids' are in the reqest, we simply
    apply settings and don't save anything to request.
    If 'to_type' is in request, this can be 'dataset', 'plate', 'acquisition'
    Returns json dict of Boolean:[Image-IDs] for images that have successfully
    had the rendering settings applied, or not.

    @param request:     http request
    @param server_id:
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            json dict of Boolean:[Image-IDs]
    """
    server_id = request.session["connector"].server_id
    json_data = False
    fromid = request.GET.get("fromid", None)
    toids = request.POST.getlist("toids")
    to_type = str(request.POST.get("to_type", "image"))
    rdef = None
    if to_type not in ("dataset", "plate", "acquisition"):
        to_type = "Image"  # default is image
    # Only 'fromid' is given, simply save to session
    if fromid is not None and len(toids) == 0:
        request.session.modified = True
        request.session["fromid"] = fromid
        # a saved 'rdef' would shadow the new 'fromid', so clear it
        if request.session.get("rdef") is not None:
            del request.session["rdef"]
        return True
    # If we've got an rdef encoded in request instead of ImageId...
    r = request.GET or request.POST
    if r.get("c") is not None:
        # make a map of settings we need
        rdef = {"c": str(r.get("c"))}  # channels
        if r.get("maps"):
            try:
                rdef["maps"] = json.loads(r.get("maps"))
            except Exception:
                pass
        if r.get("pixel_range"):
            rdef["pixel_range"] = str(r.get("pixel_range"))
        if r.get("m"):
            rdef["m"] = str(r.get("m"))  # model (grey)
        if r.get("z"):
            rdef["z"] = str(r.get("z"))  # z & t pos
        if r.get("t"):
            rdef["t"] = str(r.get("t"))
        imageId = request.GET.get("imageId", request.POST.get("imageId", None))
        if imageId:
            rdef["imageId"] = int(imageId)
        # GET requests only *store* the rdef in the session for later pasting
        if request.method == "GET":
            request.session.modified = True
            request.session["rdef"] = rdef
            # remove any previous rdef we may have via 'fromId'
            if request.session.get("fromid") is not None:
                del request.session["fromid"]
            return True
    # Check session for 'fromid'
    if fromid is None:
        fromid = request.session.get("fromid", None)

    # maybe these pair of methods should be on ImageWrapper??
    def getRenderingSettings(image):
        # Snapshot the image's current rendering settings as an rdef dict,
        # so they can be restored after a temporary apply below.
        rv = {}
        chs = []
        maps = []
        for i, ch in enumerate(image.getChannels()):
            act = "" if ch.isActive() else "-"
            start = ch.getWindowStart()
            end = ch.getWindowEnd()
            color = ch.getLut()
            maps.append({"inverted": {"enabled": ch.isInverted()}})
            if not color or len(color) == 0:
                color = ch.getColor().getHtml()
            chs.append("%s%s|%s:%s$%s" % (act, i + 1, start, end, color))
        rv["c"] = ",".join(chs)
        rv["maps"] = maps
        rv["m"] = "g" if image.isGreyscaleRenderingModel() else "c"
        rv["z"] = image.getDefaultZ() + 1
        rv["t"] = image.getDefaultT() + 1
        return rv

    def applyRenderingSettings(image, rdef):
        # Apply an rdef dict to the image and save as its default settings.
        invert_flags = _get_maps_enabled(rdef, "inverted", image.getSizeC())
        channels, windows, colors = _split_channel_info(rdef["c"])
        # also prepares _re
        image.setActiveChannels(channels, windows, colors, invert_flags)
        if rdef["m"] == "g":
            image.setGreyscaleRenderingModel()
        else:
            image.setColorRenderingModel()
        if "z" in rdef:
            image._re.setDefaultZ(long(rdef["z"]) - 1)
        if "t" in rdef:
            image._re.setDefaultT(long(rdef["t"]) - 1)
        image.saveDefaults()

    # Use rdef from above or previously saved one...
    if rdef is None:
        rdef = request.session.get("rdef")
    if request.method == "POST":
        originalSettings = None
        fromImage = None
        if fromid is None:
            # if we have rdef, save to source image, then use that image as
            # 'fromId', then revert.
            if rdef is not None and len(toids) > 0:
                fromImage = conn.getObject("Image", rdef["imageId"])
                if fromImage is not None:
                    # copy orig settings
                    originalSettings = getRenderingSettings(fromImage)
                    applyRenderingSettings(fromImage, rdef)
                    fromid = fromImage.getId()
        # If we have both, apply settings...
        try:
            fromid = long(fromid)
            toids = [long(x) for x in toids]
        except TypeError:
            fromid = None
        except ValueError:
            fromid = None
        if fromid is not None and len(toids) > 0:
            fromimg = conn.getObject("Image", fromid)
            userid = fromimg.getOwner().getId()
            json_data = conn.applySettingsToSet(fromid, to_type, toids)
            if json_data and True in json_data:
                # drop cached renderings of every image that was updated
                for iid in json_data[True]:
                    img = conn.getObject("Image", iid)
                    img is not None and webgateway_cache.invalidateObject(
                        server_id, userid, img
                    )
        # finally - if we temporarily saved rdef to original image, revert
        # if we're sure that from-image is not in the target set (Dataset etc)
        if to_type == "Image" and fromid not in toids:
            if originalSettings is not None and fromImage is not None:
                applyRenderingSettings(fromImage, originalSettings)
        return json_data
    else:
        return HttpResponseNotAllowed(["POST"])
@login_required()
@jsonp
def get_image_rdef_json(request, conn=None, **kwargs):
    """
    Gets any 'rdef' dict from the request.session and
    returns it as json
    """
    rdef = request.session.get("rdef")
    image = None
    if rdef is None:
        fromid = request.session.get("fromid", None)
        if fromid is not None:
            # We only have an Image to copy rdefs from
            image = conn.getObject("Image", fromid)
        if image is not None:
            # build an rdef dict from the source image's marshalled settings
            rv = imageMarshal(image, request=request)
            channel_specs = []
            map_settings = []
            for idx, ch in enumerate(rv["channels"], start=1):
                act = ch["active"] and str(idx) or "-%s" % idx
                color = ch.get("lut") or ch["color"]
                channel_specs.append(
                    "%s|%s:%s$%s"
                    % (act, ch["window"]["start"], ch["window"]["end"], color)
                )
                map_settings.append(
                    {
                        "inverted": {"enabled": ch["inverted"]},
                        "quantization": {
                            "coefficient": ch["coefficient"],
                            "family": ch["family"],
                        },
                    }
                )
            rdef = {
                "c": ",".join(channel_specs),
                "m": rv["rdefs"]["model"],
                "pixel_range": "%s:%s" % (rv["pixel_range"][0], rv["pixel_range"][1]),
                "maps": map_settings,
            }
    return {"rdef": rdef}
@login_required()
def full_viewer(request, iid, conn=None, **kwargs):
    """
    This view is responsible for showing the omero_image template
    Image rendering options in request are used in the display page. See
    L{getImgDetailsFromReq}.

    @param request:     http request.
    @param iid:         Image ID
    @param conn:        L{omero.gateway.BlitzGateway}
    @param **kwargs:    Can be used to specify the html 'template' for
                        rendering
    @return:            html page of image and metadata
    """
    server_id = request.session["connector"].server_id
    server_name = Server.get(server_id).server
    rid = getImgDetailsFromReq(request)
    # per-server viewer settings stored in the session at login
    server_settings = request.session.get("server_settings", {}).get("viewer", {})
    interpolate = server_settings.get("interpolate_pixels", True)
    roiLimit = server_settings.get("roi_limit", 2000)
    try:
        image = conn.getObject("Image", iid)
        if image is None:
            logger.debug("(a)Image %s not found..." % (str(iid)))
            raise Http404
        # social-sharing metadata (Open Graph / Twitter cards) is optional
        # and configured per server name in settings
        opengraph = None
        twitter = None
        image_preview = None
        page_url = None
        if hasattr(settings, "SHARING_OPENGRAPH"):
            opengraph = settings.SHARING_OPENGRAPH.get(server_name)
            logger.debug("Open Graph enabled: %s", opengraph)
        if hasattr(settings, "SHARING_TWITTER"):
            twitter = settings.SHARING_TWITTER.get(server_name)
            logger.debug("Twitter enabled: %s", twitter)
        if opengraph or twitter:
            # absolute urls are required for sharing previews
            urlargs = {"iid": iid}
            prefix = kwargs.get("thumbprefix", "webgateway_render_thumbnail")
            image_preview = request.build_absolute_uri(reverse(prefix, kwargs=urlargs))
            page_url = request.build_absolute_uri(
                reverse("webgateway_full_viewer", kwargs=urlargs)
            )
        d = {
            "blitzcon": conn,
            "image": image,
            "opts": rid,
            "interpolate": interpolate,
            "build_year": build_year,
            "roiLimit": roiLimit,
            "roiCount": image.getROICount(),
            "viewport_server": kwargs.get(
                # remove any trailing slash
                "viewport_server",
                reverse("webgateway"),
            ).rstrip("/"),
            "opengraph": opengraph,
            "twitter": twitter,
            "image_preview": image_preview,
            "page_url": page_url,
            "object": "image:%i" % int(iid),
        }
        template = kwargs.get("template", "webgateway/viewport/omero_image.html")
        rsp = render(request, template, d)
    except omero.SecurityViolation:
        # image exists but user may not view it - respond as "not found"
        logger.warn("SecurityViolation in Image:%s", iid)
        logger.warn(traceback.format_exc())
        raise Http404
    return HttpResponse(rsp)
@login_required()
def download_as(request, iid=None, conn=None, **kwargs):
    """
    Downloads the image as a single jpeg/png/tiff or as a zip (if more than
    one image)

    @param request:     http request; 'format', 'image', 'well', 'index' and
                        'zipname' GET params are read
    @param iid:         Image ID (optional - ids may come from request instead)
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            http response: the rendered file, or a zip of files
    """
    format = request.GET.get("format", "png")
    if format not in ("jpeg", "png", "tif"):
        # unknown formats silently fall back to png
        format = "png"

    imgIds = []
    wellIds = []
    if iid is None:
        imgIds = request.GET.getlist("image")
        if len(imgIds) == 0:
            wellIds = request.GET.getlist("well")
            if len(wellIds) == 0:
                return HttpResponseServerError(
                    "No images or wells specified in request."
                    " Use ?image=123 or ?well=123"
                )
    else:
        imgIds = [iid]

    images = []
    if imgIds:
        images = list(conn.getObjects("Image", imgIds))
    elif wellIds:
        # 'index' selects which well-sample (field) of each well to use
        try:
            index = int(request.GET.get("index", 0))
        except ValueError:
            index = 0
        for w in conn.getObjects("Well", wellIds):
            images.append(w.getWellSample(index).image())
    if len(images) == 0:
        msg = "Cannot download as %s. Images (ids: %s) not found." % (format, imgIds)
        logger.debug(msg)
        return HttpResponseServerError(msg)

    if len(images) == 1:
        # single image: render and return it directly (no zip)
        jpeg_data = images[0].renderJpeg()
        if jpeg_data is None:
            raise Http404
        # NOTE(review): 'mimetype' is the pre-Django-1.7 kwarg name - other
        # responses in this file use 'content_type'; confirm this path works
        rsp = HttpResponse(jpeg_data, mimetype="image/jpeg")
        rsp["Content-Length"] = len(jpeg_data)
        rsp["Content-Disposition"] = "attachment; filename=%s.jpg" % (
            images[0].getName().replace(" ", "_")
        )
    else:
        # multiple images: render each to a temp dir, then zip them up
        temp = tempfile.NamedTemporaryFile(suffix=".download_as")

        def makeImageName(originalName, extension, folder_name):
            # basename() strips any directory components from the image name,
            # preventing path traversal out of the temp zip dir
            name = os.path.basename(originalName)
            imgName = "%s.%s" % (name, extension)
            imgName = os.path.join(folder_name, imgName)
            # check we don't overwrite existing file
            i = 1
            name = imgName[: -(len(extension) + 1)]
            while os.path.exists(imgName):
                imgName = "%s_(%d).%s" % (name, i, extension)
                i += 1
            return imgName

        try:
            temp_zip_dir = tempfile.mkdtemp()
            logger.debug("download_as dir: %s" % temp_zip_dir)
            try:
                for img in images:
                    z = t = None
                    try:
                        pilImg = img.renderImage(z, t)
                        imgPathName = makeImageName(img.getName(), format, temp_zip_dir)
                        pilImg.save(imgPathName)
                    finally:
                        # Close RenderingEngine
                        img._re.close()
                # create zip
                zip_file = zipfile.ZipFile(temp, "w", zipfile.ZIP_DEFLATED)
                try:
                    a_files = os.path.join(temp_zip_dir, "*")
                    for name in glob.glob(a_files):
                        zip_file.write(name, os.path.basename(name))
                finally:
                    zip_file.close()
            finally:
                # always remove the scratch dir, even on render failure
                shutil.rmtree(temp_zip_dir, ignore_errors=True)

            zipName = request.GET.get("zipname", "Download_as_%s" % format)
            zipName = zipName.replace(" ", "_")
            if not zipName.endswith(".zip"):
                zipName = "%s.zip" % zipName

            # return the zip or single file
            rsp = StreamingHttpResponse(FileWrapper(temp))
            rsp["Content-Length"] = temp.tell()
            rsp["Content-Disposition"] = "attachment; filename=%s" % zipName
            temp.seek(0)
        except Exception:
            temp.close()
            stack = traceback.format_exc()
            logger.error(stack)
            return HttpResponseServerError("Cannot download file (id:%s)" % iid)

    rsp["Content-Type"] = "application/force-download"
    return rsp
@login_required(doConnectionCleanup=False)
def archived_files(request, iid=None, conn=None, **kwargs):
    """
    Downloads the archived file(s) as a single file or as a zip (if more than
    one file)

    Connection cleanup is deferred (doConnectionCleanup=False) because the
    response streams file chunks after this view returns; the
    ConnCleaningHttpResponse closes the connection when streaming finishes.

    @param request:     http request; 'image', 'well', 'index' and 'zipname'
                        GET params are read
    @param iid:         Image ID (optional - ids may come from request instead)
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            http response streaming the file or zip
    """
    imgIds = []
    wellIds = []
    imgIds = request.GET.getlist("image")
    wellIds = request.GET.getlist("well")
    if iid is None:
        if len(imgIds) == 0 and len(wellIds) == 0:
            return HttpResponseServerError(
                "No images or wells specified in request."
                " Use ?image=123 or ?well=123"
            )
    else:
        imgIds = [iid]

    images = list()
    wells = list()
    if imgIds:
        images = list(conn.getObjects("Image", imgIds))
    elif wellIds:
        # 'index' selects which well-sample (field) of each well to use
        try:
            index = int(request.GET.get("index", 0))
        except ValueError:
            index = 0
        wells = conn.getObjects("Well", wellIds)
        for w in wells:
            images.append(w.getWellSample(index).image())
    if len(images) == 0:
        message = (
            "Cannot download archived file because Images not "
            "found (ids: %s)" % (imgIds)
        )
        logger.debug(message)
        return HttpResponseServerError(message)

    # Test permissions on images and weels
    for ob in wells:
        if hasattr(ob, "canDownload"):
            if not ob.canDownload():
                return HttpResponseNotFound()

    for ob in images:
        well = None
        try:
            # for well-sample images, permissions come from the parent well
            well = ob.getParent().getParent()
        except Exception:
            # no well parent: fall back to the image's own permission
            if hasattr(ob, "canDownload"):
                if not ob.canDownload():
                    return HttpResponseNotFound()
        else:
            if well and isinstance(well, omero.gateway.WellWrapper):
                if hasattr(well, "canDownload"):
                    if not well.canDownload():
                        return HttpResponseNotFound()
    # make list of all files, removing duplicates
    fileMap = {}
    for image in images:
        for f in image.getImportedImageFiles():
            fileMap[f.getId()] = f
    files = list(fileMap.values())

    if len(files) == 0:
        message = (
            "Tried downloading archived files from image with no" " files archived."
        )
        logger.debug(message)
        return HttpResponseServerError(message)

    if len(files) == 1:
        # single file: stream it directly without zipping
        orig_file = files[0]
        rsp = ConnCleaningHttpResponse(
            orig_file.getFileInChunks(buf=settings.CHUNK_SIZE)
        )
        rsp.conn = conn
        rsp["Content-Length"] = orig_file.getSize()
        # ',' in name causes duplicate headers
        fname = orig_file.getName().replace(" ", "_").replace(",", ".")
        rsp["Content-Disposition"] = "attachment; filename=%s" % (fname)
    else:
        # refuse over-large zips up front rather than timing out mid-stream
        total_size = sum(f.size for f in files)
        if total_size > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
            message = (
                "Total size of files %d is larger than %d. "
                "Try requesting fewer files."
                % (total_size, settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE)
            )
            logger.warn(message)
            return HttpResponseForbidden(message)

        temp = tempfile.NamedTemporaryFile(suffix=".archive")
        zipName = request.GET.get("zipname", image.getName())

        try:
            zipName = zip_archived_files(images, temp, zipName, buf=settings.CHUNK_SIZE)

            # return the zip or single file
            archivedFile_data = FileWrapper(temp)
            rsp = ConnCleaningHttpResponse(archivedFile_data)
            rsp.conn = conn
            rsp["Content-Length"] = temp.tell()
            rsp["Content-Disposition"] = "attachment; filename=%s" % zipName
            temp.seek(0)
        except Exception:
            temp.close()
            message = "Cannot download file (id:%s)" % (iid)
            logger.error(message, exc_info=True)
            return HttpResponseServerError(message)

    rsp["Content-Type"] = "application/force-download"
    return rsp
@login_required()
@jsonp
def original_file_paths(request, iid, conn=None, **kwargs):
    """
    Get a list of path/name strings for original files associated with the
    image
    """
    image = conn.getObject("Image", iid)
    if image is None:
        raise Http404
    file_paths = image.getImportedImageFilePaths()
    return {
        "repo": file_paths["server_paths"],
        "client": file_paths["client_paths"],
    }
@login_required()
@jsonp
def get_shape_json(request, roiId, shapeId, conn=None, **kwargs):
    """Return a single marshalled Shape of the given ROI as json."""
    roi_id = int(roiId)
    shape_id = int(shapeId)
    # ids are cast to int above, so %d interpolation is injection-safe
    query = (
        "select shape from Roi as roi "
        "join roi.shapes as shape "
        "where roi.id = %d and shape.id = %d" % (roi_id, shape_id)
    )
    shape = conn.getQueryService().findByQuery(query, None)
    logger.debug("Shape: %r" % shape)
    if shape is None:
        logger.debug("No such shape: %r" % shape_id)
        raise Http404
    return JsonResponse(shapeMarshal(shape))
@login_required()
@jsonp
def get_rois_json(request, imageId, conn=None, **kwargs):
    """
    Returns json data of the ROIs in the specified image.
    """
    roiService = conn.getRoiService()
    result = roiService.findByImage(long(imageId), None, conn.SERVICE_OPTS)
    rois = []
    for r in result.rois:
        # marshal every shape of the ROI (None entries can occur)
        shapes = [shapeMarshal(s) for s in r.copyShapes() if s is not None]
        # sort shapes by Z, then T.
        shapes.sort(key=lambda x: "%03d%03d" % (x.get("theZ", -1), x.get("theT", -1)))
        rois.append({"id": r.getId().getValue(), "shapes": shapes})
    # sort by ID - same as in measurement tool.
    rois.sort(key=lambda x: x["id"])
    return rois
@login_required()
def histogram_json(request, iid, theC, conn=None, **kwargs):
    """
    Returns a histogram for a single channel as a list of
    256 values as json
    """
    image = conn.getObject("Image", iid)
    if image is None:
        raise Http404
    maxW, maxH = conn.getMaxPlaneSize()
    if (image.getSizeX() * image.getSizeY()) > (maxW * maxH):
        msg = "Histogram not supported for 'big' images (over %s * %s pixels)" % (
            maxW,
            maxH,
        )
        return JsonResponse({"error": msg})
    theZ = int(request.GET.get("theZ", 0))
    theT = int(request.GET.get("theT", 0))
    theC = int(theC)
    binCount = int(request.GET.get("bins", 256))
    # TODO: handle projection when supported by OMERO
    data = image.getHistogram([theC], binCount, theZ=theZ, theT=theT)
    return JsonResponse({"data": data[theC]})
@login_required(isAdmin=True)
@jsonp
def su(request, user, conn=None, **kwargs):
    """
    If current user is admin, switch the session to a new connection owned by
    'user' (puts the new session ID in the request.session)
    Return False if not possible

    @param request:     http request.
    @param user:        Username of new connection owner
    @param conn:        L{omero.gateway.BlitzGateway}
    @param **kwargs:    Can be used to specify the html 'template' for
                        rendering
    @return:            Boolean
    """
    if request.method == "POST":
        # Switch to the 'system' group so we may create a session for
        # another user
        conn.setGroupNameForSession("system")
        connector = request.session["connector"]
        # Build a fresh Connector rather than mutating the stored one
        connector = Connector(connector.server_id, connector.is_secure)
        session = conn.getSessionService().getSession(conn._sessionUuid)
        ttl = session.getTimeToIdle().val
        # suConn() creates the new session owned by 'user'; keep its uuid
        connector.omero_session_key = conn.suConn(user, ttl=ttl)._sessionUuid
        request.session["connector"] = connector
        # Restore the group and drop our own (admin) connection
        conn.revertGroupForSession()
        conn.close()
        return True
    else:
        # GET: show a confirmation form that POSTs back to this view
        context = {
            "url": reverse("webgateway_su", args=[user]),
            "submit": "Do you want to su to %s" % user,
        }
        template = "webgateway/base/includes/post_form.html"
        return render(request, template, context)
def _annotations(request, objtype, objid, conn=None, **kwargs):
    # Deprecated shim kept for backwards compatibility: warns, then
    # forwards everything to _bulk_file_annotations().
    warnings.warn("Deprecated. Use _bulk_file_annotations()", DeprecationWarning)
    return _bulk_file_annotations(request, objtype, objid, conn, **kwargs)
def _bulk_file_annotations(request, objtype, objid, conn=None, **kwargs):
    """
    Retrieve Bulk FileAnnotations for object specified by object type and
    identifier, optionally traversing the object model graph.
    Returns a dictionary containing annotations in the NSBULKANNOTATIONS
    namespace (or with mimetype 'OMERO.tables') if successful, otherwise
    returns error information.
    If the graph has multiple parents, we return annotations from all parents.

    Example: /annotations/Plate/1/
             retrieves annotations for plate with identifier 1
    Example: /annotations/Plate.wells/1/
             retrieves annotations for plate that contains well with
             identifier 1
    Example: /annotations/Screen.plateLinks.child.wells/22/
             retrieves annotations for screen that contains plate with
             well with identifier 22

    @param request:     http request.
    @param objtype:     Type of target object, or type of target object
                        followed by a dot-separated list of properties to
                        resolve
    @param objid:       Identifier of target object, or identifier of object
                        reached by resolving given properties
    @param conn:        L{omero.gateway.BlitzGateway}
    @param **kwargs:    unused
    @return:            A dictionary with key 'error' with an error message or
                        with key 'data' containing an array of dictionaries
                        with keys 'id' and 'file' of the retrieved annotations
    """
    q = conn.getQueryService()
    # If more than one objtype is specified, use all in query to
    # traverse object model graph
    # Example: /annotations/Plate/wells/1/
    #          retrieves annotations from Plate that contains Well 1
    objtype = objtype.split(".")
    params = omero.sys.ParametersI()
    params.addId(objid)
    params.addString("ns", NSBULKANNOTATIONS)
    params.addString("mt", "OMERO.tables")
    # Build the HQL chain: obj0 is the root type, each subsequent property
    # in objtype joins one step further along the graph.
    query = "select obj0 from %s obj0\n" % objtype[0]
    for i, t in enumerate(objtype[1:]):
        query += "join fetch obj%d.%s obj%d\n" % (i, t, i + 1)
    # The id filter applies to the LAST object in the chain; annotations
    # are fetched from the root (obj0).
    query += """
        left outer join fetch obj0.annotationLinks links
        left outer join fetch links.child as f
        left outer join fetch links.parent
        left outer join fetch f.file
        join fetch links.details.owner
        join fetch links.details.creationEvent
        where obj%d.id=:id and
        (f.ns=:ns or f.file.mimetype=:mt)""" % (
        len(objtype) - 1
    )
    # Query across all groups so annotations aren't hidden by group context
    ctx = conn.createServiceOptsDict()
    ctx.setOmeroGroup("-1")
    try:
        objs = q.findAllByQuery(query, params, ctx)
    except omero.QueryException:
        return dict(error="%s cannot be queried" % objtype, query=query)
    data = []
    # Process all annotations from all objects...
    links = [link for obj in objs for link in obj.copyAnnotationLinks()]
    for link in links:
        annotation = link.child
        # Only FileAnnotations are of interest here
        if not isinstance(annotation, omero.model.FileAnnotation):
            continue
        owner = annotation.details.owner
        ownerName = "%s %s" % (unwrap(owner.firstName), unwrap(owner.lastName))
        addedBy = link.details.owner
        addedByName = "%s %s" % (unwrap(addedBy.firstName), unwrap(addedBy.lastName))
        data.append(
            dict(
                id=annotation.id.val,
                file=annotation.file.id.val,
                parentType=objtype[0],
                parentId=link.parent.id.val,
                owner=ownerName,
                addedBy=addedByName,
                addedOn=unwrap(link.details.creationEvent._time),
            )
        )
    return dict(data=data)
# Public URL endpoint: _bulk_file_annotations wrapped with auth + JSONP.
annotations = login_required()(jsonp(_bulk_file_annotations))
def _table_query(request, fileid, conn=None, query=None, lazy=False, **kwargs):
    """
    Query a table specified by fileid.
    Returns a dictionary with the query result if successful, error
    information otherwise.

    @param request:     http request; querystring must contain key 'query'
                        with query to be executed, or '*' to retrieve all
                        rows. If query is in the format word-number, e.g.
                        "Well-7", it will be run as (word==number), e.g.
                        "(Well==7)". This is supported to allow more
                        readable query strings.
    @param fileid:      Numeric identifier of file containing the table
    @param query:       The table query. If None, use
                        request.GET.get('query'). E.g. '*' to return all
                        rows. If in the form 'colname-1', query will be
                        (colname==1)
    @param lazy:        If True, instead of returning a 'rows' list,
                        'lazy_rows' will be a generator. Each gen.next()
                        will return a list of row data AND the 'table'
                        returned MUST be closed.
    @param conn:        L{omero.gateway.BlitzGateway}
    @param **kwargs:    offset, limit
    @return:            A dictionary with key 'error' with an error message
                        or with key 'data' containing a dictionary with keys
                        'columns' (an array of column names) and 'rows'
                        (an array of rows, each an array of values)
    """
    if query is None:
        query = request.GET.get("query")
    if not query:
        return dict(error="Must specify query parameter, use * to retrieve all")
    col_names = request.GET.getlist("col_names")
    # Query across all groups
    ctx = conn.createServiceOptsDict()
    ctx.setOmeroGroup("-1")
    r = conn.getSharedResources()
    t = r.openTable(omero.model.OriginalFileI(fileid), ctx)
    if not t:
        return dict(error="Table %s not found" % fileid)
    try:
        cols = t.getHeaders()
        col_indices = range(len(cols))
        if col_names:
            # restrict the returned columns to those requested, preserving
            # the order in which col_names were given
            enumerated_columns = (
                [(i, j) for (i, j) in enumerate(cols) if j.name in col_names]
                if col_names
                else [(i, j) for (i, j) in enumerate(cols)]
            )
            cols = []
            col_indices = []
            for col_name in col_names:
                for (i, j) in enumerated_columns:
                    if col_name == j.name:
                        col_indices.append(i)
                        cols.append(j)
                        break
        rows = t.getNumberOfRows()
        # offset/limit may come from kwargs or fall back to the querystring
        offset = kwargs.get("offset", 0)
        limit = kwargs.get("limit", None)
        if not offset:
            offset = int(request.GET.get("offset", 0))
        if not limit:
            limit = (
                int(request.GET.get("limit"))
                if request.GET.get("limit") is not None
                else None
            )
        range_start = offset
        # NOTE(review): range_size only honours a 'limit' passed via kwargs;
        # a limit supplied in request.GET is reported in 'meta' but not
        # applied to the returned range — confirm this is intended.
        range_size = kwargs.get("limit", rows)
        range_end = min(rows, range_start + range_size)
        if query == "*":
            hits = range(range_start, range_end)
            totalCount = rows
        else:
            # rewrite shorthand "Well-7" as the real query "(Well==7)"
            match = re.match(r"^(\w+)-(\d+)", query)
            if match:
                query = "(%s==%s)" % (match.group(1), match.group(2))
            try:
                logger.info(query)
                hits = t.getWhereList(query, None, 0, rows, 1)
                totalCount = len(hits)
                # paginate the hits
                hits = hits[range_start:range_end]
            except Exception:
                return dict(error="Error executing query: %s" % query)

        def row_generator(table, h):
            # hits are all consecutive rows - can load them in batches
            idx = 0
            batch = 1000
            while idx < len(h):
                batch = min(batch, len(h) - idx)
                res = table.slice(col_indices, h[idx : idx + batch])
                idx += batch
                # yield a list of rows
                yield [
                    [col.values[row] for col in res.columns]
                    for row in range(0, len(res.rowNumbers))
                ]

        row_gen = row_generator(t, hits)
        rsp_data = {
            "data": {
                "column_types": [col.__class__.__name__ for col in cols],
                "columns": [col.name for col in cols],
            },
            "meta": {
                "rowCount": rows,
                "totalCount": totalCount,
                "limit": limit,
                "offset": offset,
            },
        }
        if not lazy:
            row_data = []
            # Use the generator to add all rows in batches
            for rows in list(row_gen):
                row_data.extend(rows)
            rsp_data["data"]["rows"] = row_data
        else:
            # caller takes ownership of the open table and MUST close it
            rsp_data["data"]["lazy_rows"] = row_gen
            rsp_data["table"] = t
        return rsp_data
    finally:
        if not lazy:
            t.close()
# Public URL endpoint: _table_query wrapped with auth + JSONP.
table_query = login_required()(jsonp(_table_query))
def _table_metadata(request, fileid, conn=None, query=None, lazy=False, **kwargs):
    """
    Return metadata for an OMERO.table: column names, descriptions and
    types, plus the total row count.

    @param fileid:  Numeric identifier of the OriginalFile holding the table
    @return:        dict with 'columns' and 'totalCount', or 'error'
    """
    # Query across all groups
    ctx = conn.createServiceOptsDict()
    ctx.setOmeroGroup("-1")
    resources = conn.getSharedResources()
    table = resources.openTable(omero.model.OriginalFileI(fileid), ctx)
    if not table:
        return dict(error="Table %s not found" % fileid)
    try:
        headers = table.getHeaders()
        row_count = table.getNumberOfRows()
        columns = []
        for col in headers:
            columns.append(
                {
                    "name": col.name,
                    "description": col.description,
                    "type": col.__class__.__name__,
                }
            )
        return {"columns": columns, "totalCount": row_count}
    finally:
        if not lazy:
            table.close()
# Public URL endpoint: _table_metadata wrapped with auth + JSONP.
table_metadata = login_required()(jsonp(_table_metadata))
@login_required()
@jsonp
def object_table_query(request, objtype, objid, conn=None, **kwargs):
    """
    Query bulk annotations table attached to an object specified by
    object type and identifier, optionally traversing object model graph.
    Returns a dictionary with query result if successful, error information
    otherwise

    Example: /table/Plate/1/query/?query=*
             queries bulk annotations table for plate with identifier 1
    Example: /table/Plate.wells/1/query/?query=*
             queries bulk annotations table for plate that contains well with
             identifier 1
    Example: /table/Screen.plateLinks.child.wells/22/query/?query=Well-22
             queries bulk annotations table for screen that contains plate
             with well with identifier 22

    @param request:     http request.
    @param objtype:     Type of target object, or type of target object
                        followed by a slash-separated list of properties to
                        resolve
    @param objid:       Identifier of target object, or identifier of object
                        reached by resolving given properties
    @param conn:        L{omero.gateway.BlitzGateway}
    @param **kwargs:    unused
    @return:            A dictionary with key 'error' with an error message
                        or with key 'data' containing a dictionary with keys
                        'columns' (an array of column names) and 'rows'
                        (an array of rows, each an array of values)
    """
    # First locate the bulk-annotation FileAnnotations on the object
    a = _bulk_file_annotations(request, objtype, objid, conn, **kwargs)
    if "error" in a:
        return a
    if len(a["data"]) < 1:
        return dict(error="Could not retrieve bulk annotations table")
    # multiple bulk annotations files could be attached, use the most recent
    # one (= the one with the highest identifier)
    fileId = 0
    ann = None
    annList = sorted(a["data"], key=lambda x: x["file"], reverse=True)
    tableData = None
    # Try each candidate file (newest first) until one queries cleanly
    for annotation in annList:
        tableData = _table_query(request, annotation["file"], conn, **kwargs)
        if "error" not in tableData:
            ann = annotation
            fileId = annotation["file"]
            break
    if ann is None:
        return dict(
            error=tableData.get(
                "error", "Could not retrieve matching bulk annotation table"
            )
        )
    # Enrich the table payload with provenance from the chosen annotation
    tableData["id"] = fileId
    tableData["annId"] = ann["id"]
    tableData["owner"] = ann["owner"]
    tableData["addedBy"] = ann["addedBy"]
    tableData["parentType"] = ann["parentType"]
    tableData["parentId"] = ann["parentId"]
    tableData["addedOn"] = ann["addedOn"]
    return tableData
class LoginView(View):
    """Webgateway Login - Subclassed by WebclientLoginView."""

    form_class = LoginForm
    useragent = "OMERO.webapi"

    @method_decorator(sensitive_post_parameters("password", "csrfmiddlewaretoken"))
    def dispatch(self, *args, **kwargs):
        """Wrap other methods to add decorators."""
        return super(LoginView, self).dispatch(*args, **kwargs)

    def get(self, request, api_version=None):
        """Simply return a message to say GET not supported."""
        # Login is POST-only: respond 405 Method Not Allowed
        return JsonResponse(
            {"message": ("POST only with username, password, " "server and csrftoken")},
            status=405,
        )

    def handle_logged_in(self, request, conn, connector):
        """Return a response for successful login."""
        c = conn.getEventContext()
        ctx = {}
        # Copy only the attributes this server's EventContext provides
        for a in [
            "sessionId",
            "sessionUuid",
            "userId",
            "userName",
            "groupId",
            "groupName",
            "isAdmin",
            "eventId",
            "eventType",
            "memberOfGroups",
            "leaderOfGroups",
        ]:
            if hasattr(c, a):
                ctx[a] = getattr(c, a)
        return JsonResponse({"success": True, "eventContext": ctx})

    def handle_not_logged_in(self, request, error=None, form=None):
        """
        Return a response for failed login.
        Reason for failure may be due to server 'error' or because
        of form validation errors.

        @param request:     http request
        @param error:       Error message
        @param form:        Instance of Login Form, populated with data
        """
        if error is None and form is not None:
            # If no error from server, maybe form wasn't valid
            formErrors = []
            for field in form:
                for e in field.errors:
                    formErrors.append("%s: %s" % (field.label, e))
            error = " ".join(formErrors)
        elif error is None:
            # Just in case no error or invalid form is given
            error = "Login failed. Reason unknown."
        return JsonResponse({"message": error}, status=403)

    def post(self, request, api_version=None):
        """
        Here we handle the main login logic, creating a connection to OMERO.
        and store that on the request.session OR handling login failures
        """
        error = None
        form = self.form_class(request.POST.copy())
        if form.is_valid():
            username = form.cleaned_data["username"]
            password = form.cleaned_data["password"]
            server_id = form.cleaned_data["server"]
            is_secure = settings.SECURE
            connector = Connector(server_id, is_secure)
            # TODO: version check should be done on the low level, see #5983
            compatible = True
            if settings.CHECK_VERSION:
                compatible = connector.check_version(self.useragent)
            if (
                server_id is not None
                and username is not None
                and password is not None
                and compatible
            ):
                conn = connector.create_connection(
                    self.useragent, username, password, userip=get_client_ip(request)
                )
                if conn is not None:
                    try:
                        request.session["connector"] = connector
                        # UpgradeCheck URL should be loaded from the server or
                        # loaded omero.web.upgrades.url allows to customize web
                        # only
                        try:
                            upgrades_url = settings.UPGRADES_URL
                        except Exception:
                            upgrades_url = conn.getUpgradesUrl()
                        upgradeCheck(url=upgrades_url)
                        return self.handle_logged_in(request, conn, connector)
                    finally:
                        # The session key is stored; this request's own
                        # connection is no longer needed
                        conn.close(hard=False)
            # Once here, we are not logged in...
            # Need correct error message
            if not connector.is_server_up(self.useragent):
                error = "Server is not responding," " please contact administrator."
            elif not settings.CHECK_VERSION:
                error = (
                    "Connection not available, please check your"
                    " credentials and version compatibility."
                )
            else:
                if not compatible:
                    error = (
                        "Client version does not match server,"
                        " please contact administrator."
                    )
                else:
                    error = settings.LOGIN_INCORRECT_CREDENTIALS_TEXT
        return self.handle_not_logged_in(request, error, form)
@login_required()
@jsonp
def get_image_rdefs_json(request, img_id=None, conn=None, **kwargs):
    """
    Retrieve every rendering definition for the image with id `img_id`.

    Example: /get_image_rdefs_json/1 returns all rdefs for image 1.

    @param request:     http request.
    @param img_id:      the id of the image in question
    @param conn:        L{omero.gateway.BlitzGateway}
    @param **kwargs:    unused
    @return:            {'rdefs': [...]} on success, {'error': ...} otherwise
    """
    try:
        image = conn.getObject("Image", img_id)
        if image is None:
            return {"error": "No image with id " + str(img_id)}
        return {"rdefs": image.getAllRenderingDefs()}
    except Exception:
        # Log the traceback server-side; return a generic error to the client
        logger.debug(traceback.format_exc())
        return {"error": "Failed to retrieve rdefs"}
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# webgateway/views.py - django application view handling functions
#
# Copyright (c) 2007-2020 Glencoe Software, Inc. All rights reserved.
#
# This software is distributed under the terms described by the LICENCE file
# you can find at the root of the distribution bundle, which states you are
# free to use it only for non commercial purposes.
# If the file is missing please request a copy by contacting
# jason@glencoesoftware.com.
#
# Author: Carlos Neves <carlos(at)glencoesoftware.com>
import re
import json
import base64
import warnings
from functools import wraps
import omero
import omero.clients
from past.builtins import unicode
from django.http import (
HttpResponse,
HttpResponseBadRequest,
HttpResponseServerError,
JsonResponse,
HttpResponseForbidden,
)
from django.http import (
HttpResponseRedirect,
HttpResponseNotAllowed,
Http404,
StreamingHttpResponse,
HttpResponseNotFound,
)
from django.views.decorators.http import require_POST
from django.views.decorators.debug import sensitive_post_parameters
from django.utils.decorators import method_decorator
from django.core.urlresolvers import reverse, NoReverseMatch
from django.conf import settings
from wsgiref.util import FileWrapper
from omero.rtypes import rlong, unwrap
from omero.constants.namespaces import NSBULKANNOTATIONS
from .util import points_string_to_XY_list, xy_list_to_bbox
from .plategrid import PlateGrid
from omeroweb.version import omeroweb_buildyear as build_year
from .marshal import imageMarshal, shapeMarshal, rgb_int2rgba
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.views.generic import View
from django.shortcuts import render
from omeroweb.webadmin.forms import LoginForm
from omeroweb.decorators import get_client_ip, is_public_user
from omeroweb.webadmin.webadmin_utils import upgradeCheck
try:
from hashlib import md5
except Exception:
from md5 import md5
try:
import long
except ImportError:
long = int
from io import BytesIO
import tempfile
from omero import ApiUsageException
from omero.util.decorators import timeit, TimeIt
from omeroweb.httprsp import HttpJavascriptResponse, HttpJavascriptResponseServerError
from omeroweb.connector import Server
import glob
# from models import StoredConnection
from omeroweb.webgateway.webgateway_cache import (
webgateway_cache,
CacheBase,
webgateway_tempfile,
)
import logging
import os
import traceback
import time
import zipfile
import shutil
from omeroweb.decorators import login_required, ConnCleaningHttpResponse
from omeroweb.connector import Connector
from omeroweb.webgateway.util import zip_archived_files, LUTS_IN_PNG
from omeroweb.webgateway.util import get_longs, getIntOrDefault
cache = CacheBase()
logger = logging.getLogger(__name__)
try:
from PIL import Image
from PIL import ImageDraw
except Exception: # pragma: nocover
try:
import Image
import ImageDraw
except Exception:
logger.error("No Pillow installed")
try:
import numpy
numpyInstalled = True
except ImportError:
logger.error("No numpy installed")
numpyInstalled = False
def index(request):
    """ /webgateway/ index placeholder """
    return HttpResponse("Welcome to webgateway")
def _safestr(s):
    """Coerce any value to UTF-8 encoded bytes via its unicode form."""
    return unicode(s).encode("utf-8")
class UserProxy(object):
    """
    Thin proxy for the current user of a connection.

    Delegates user-related queries to the wrapped
    L{omero.gateway.BlitzGateway} connection and tracks login state via
    the ``loggedIn`` flag.
    """

    def __init__(self, blitzcon):
        """
        Wrap the given connection.

        @param blitzcon:    connection to delegate to
        @type blitzcon:     L{omero.gateway.BlitzGateway}
        """
        self._blitzcon = blitzcon
        self.loggedIn = False

    def logIn(self):
        """Mark this user as logged in."""
        self.loggedIn = True

    def isAdmin(self):
        """
        Whether the current user is an admin.

        @rtype: Boolean
        """
        return self._blitzcon.isAdmin()

    def canBeAdmin(self):
        """
        Whether the current user can be admin.

        @rtype: Boolean
        """
        return self._blitzcon.canBeAdmin()

    def getId(self):
        """
        ID of the current user.

        @rtype: Long
        """
        return self._blitzcon.getUserId()

    def getName(self):
        """
        OMERO login name of the current user.

        @rtype: String
        """
        return self._blitzcon.getUser().omeName

    def getFirstName(self):
        """
        First name of the current user, falling back to the login name.

        @rtype: String
        """
        return self._blitzcon.getUser().firstName or self.getName()
# def getPreferences (self):
# return self._blitzcon._user.getPreferences()
#
# def getUserObj (self):
# return self._blitzcon._user
#
# class SessionCB (object):
# def _log (self, what, c):
# logger.debug('CONN:%s %s:%d:%s' % (what, c._user, os.getpid(),
# c._sessionUuid))
#
# def create (self, c):
# self._log('create',c)
#
# def join (self, c):
# self._log('join',c)
#
# def close (self, c):
# self._log('close',c)
# _session_cb = SessionCB()
def _split_channel_info(rchannels):
"""
Splits the request query channel information for images into a sequence of
channels, window ranges and channel colors.
@param rchannels: The request string with channel info. E.g
1|100:505$0000FF,-2,3|620:3879$FF0000
@type rchannels: String
@return: E.g. [1, -2, 3] [[100.0, 505.0], (None, None), [620.0,
3879.0]] [u'0000FF', None, u'FF0000']
@rtype: tuple of 3 lists
"""
channels = []
windows = []
colors = []
for chan in rchannels.split(","):
# chan 1|12:1386r$0000FF
chan = chan.split("|", 1)
# chan ['1', '12:1386r$0000FF']
t = chan[0].strip()
# t = '1'
color = None
# Not normally used...
if t.find("$") >= 0:
t, color = t.split("$")
try:
channels.append(int(t))
ch_window = (None, None)
if len(chan) > 1:
t = chan[1].strip()
# t = '12:1386r$0000FF'
if t.find("$") >= 0:
t, color = t.split("$", 1)
# color = '0000FF'
# t = 12:1386
t = t.split(":")
if len(t) == 2:
try:
ch_window = [float(x) for x in t]
except ValueError:
pass
windows.append(ch_window)
colors.append(color)
except ValueError:
pass
logger.debug(str(channels) + "," + str(windows) + "," + str(colors))
return channels, windows, colors
def getImgDetailsFromReq(request, as_string=False):
    """
    Break the GET information from the request object into details on how
    to render the image.

    The following keys are recognized:
    z - Z axis position
    t - T axis position
    q - Quality set (0,0..1,0)
    m - Model (g for greyscale, c for color)
    p - Projection (see blitz_gateway.ImageWrapper.PROJECTIONS for keys)
    x - X position (for now based on top/left offset on the browser window)
    y - Y position (same as above)
    c - a comma separated list of channels to be rendered (start index 1)
      - format for each entry [-]ID[|wndst:wndend][#HEXCOLOR][,...]
    zm - the zoom setting (as a percentual value)

    @param request:     http request with keys above
    @param as_string:   If True, return a string representation of the
                        rendering details
    @return:            A dict or String representation of rendering details
                        above.
    @rtype:             Dict or String
    """
    params = request.GET
    details = {}
    for key in ("z", "t", "q", "m", "zm", "x", "y", "p"):
        if key in params:
            details[key] = params[key]
    if "c" in params:
        details["c"] = []
        ci = _split_channel_info(params["c"])
        logger.debug(ci)
        for i in range(len(ci[0])):
            # a = abs channel, i = channel, s = window start, e = window end,
            # c = color
            details["c"].append(
                {
                    "a": abs(ci[0][i]),
                    "i": ci[0][i],
                    "s": ci[1][i][0],
                    "e": ci[1][i][1],
                    "c": ci[2][i],
                }
            )
    if as_string:
        return "&".join(["%s=%s" % (k, v) for k, v in details.items()])
    return details
@login_required()
def render_birds_eye_view(request, iid, size=None, conn=None, **kwargs):
    """
    Returns an HttpResponse wrapped jpeg with the rendered bird's eye view
    for image 'iid'. We now use a thumbnail for performance. #10626

    @param request:     http request
    @param iid:         Image ID
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @param size:        Maximum size of the longest side of the resulting
                        bird's eye view.
    @return:            http response containing jpeg
    """
    # Delegates to render_thumbnail, which obtains its own connection via
    # its @login_required decorator; 'conn' is deliberately not forwarded.
    return render_thumbnail(request, iid, w=size, **kwargs)
def _render_thumbnail(request, iid, w=None, h=None, conn=None, _defcb=None, **kwargs):
    """
    Returns a jpeg with the rendered thumbnail for image 'iid'

    @param request:     http request
    @param iid:         Image ID
    @param w:           Thumbnail max width. 96 by default
    @param h:           Thumbnail max height
    @param _defcb:      Optional callback producing fallback jpeg data
                        (called as _defcb(size=size)) when the image or its
                        thumbnail cannot be loaded
    @return:            jpeg data (bytes)
    """
    server_id = request.session["connector"].server_id
    server_settings = request.session.get("server_settings", {}).get("browser", {})
    defaultSize = server_settings.get("thumb_default_size", 96)
    direct = True
    if w is None:
        size = (defaultSize,)
    else:
        if h is None:
            size = (int(w),)
        else:
            size = (int(w), int(h))
    # Only default-sized thumbnails use the server-side thumbnail store
    # (direct=False); other sizes are rendered on the fly
    if size == (defaultSize,):
        direct = False
    user_id = conn.getUserId()
    z = getIntOrDefault(request, "z", None)
    t = getIntOrDefault(request, "t", None)
    rdefId = getIntOrDefault(request, "rdefId", None)
    # TODO - cache handles rdefId
    # Check the webgateway cache first; render only on a miss
    jpeg_data = webgateway_cache.getThumb(request, server_id, user_id, iid, size)
    if jpeg_data is None:
        prevent_cache = False
        img = conn.getObject("Image", iid)
        if img is None:
            logger.debug("(b)Image %s not found..." % (str(iid)))
            if _defcb:
                # fallback placeholder must not be cached
                jpeg_data = _defcb(size=size)
                prevent_cache = True
            else:
                raise Http404("Failed to render thumbnail")
        else:
            jpeg_data = img.getThumbnail(
                size=size, direct=direct, rdefId=rdefId, z=z, t=t
            )
            if jpeg_data is None:
                logger.debug("(c)Image %s not found..." % (str(iid)))
                if _defcb:
                    jpeg_data = _defcb(size=size)
                    prevent_cache = True
                else:
                    raise Http404("Failed to render thumbnail")
            else:
                # don't cache while the server is still computing the thumb
                prevent_cache = img._thumbInProgress
        if not prevent_cache:
            webgateway_cache.setThumb(request, server_id, user_id, iid, jpeg_data, size)
    else:
        pass
    return jpeg_data
@login_required()
def render_thumbnail(request, iid, w=None, h=None, conn=None, _defcb=None, **kwargs):
    """
    Return an HttpResponse wrapping the rendered jpeg thumbnail for image
    'iid'.

    @param request:     http request
    @param iid:         Image ID
    @param w:           Thumbnail max width. 96 by default
    @param h:           Thumbnail max height
    @return:            http response containing jpeg
    """
    jpeg_data = _render_thumbnail(
        request=request, iid=iid, w=w, h=h, conn=conn, _defcb=_defcb, **kwargs
    )
    return HttpResponse(jpeg_data, content_type="image/jpeg")
@login_required()
def render_roi_thumbnail(request, roiId, w=None, h=None, conn=None, **kwargs):
    """
    For the given ROI, choose the shape to render (first time-point, mid
    z-section) then render a region around that shape, scale to width and
    height (or default size) and draw the shape on to the region
    """
    server_id = request.session["connector"].server_id
    # need to find the z indices of the first shape in T
    result = conn.getRoiService().findByRoi(long(roiId), None, conn.SERVICE_OPTS)
    if result is None or result.rois is None or len(result.rois) == 0:
        raise Http404
    for roi in result.rois:
        imageId = roi.image.id.val
        shapes = roi.copyShapes()
        shapes = [s for s in shapes if s is not None]
    # NOTE(review): the loop above keeps imageId/shapes from the LAST roi
    # only; presumably findByRoi returns a single ROI here — confirm.
    if len(shapes) == 0:
        raise Http404("No Shapes found for ROI %s" % roiId)
    pi = _get_prepared_image(request, imageId, server_id=server_id, conn=conn)
    if pi is None:
        raise Http404
    image, compress_quality = pi
    shape = None
    # if only single shape, use it...
    if len(shapes) == 1:
        shape = shapes[0]
    else:
        default_t = image.getDefaultT()
        default_z = image.getDefaultZ()
        # find shapes on default Z/T plane
        def_shapes = [
            s
            for s in shapes
            if unwrap(s.getTheT()) is None or unwrap(s.getTheT()) == default_t
        ]
        if len(def_shapes) == 1:
            shape = def_shapes[0]
        else:
            # narrow further by default Z
            def_shapes = [
                s
                for s in def_shapes
                if unwrap(s.getTheZ()) is None or unwrap(s.getTheZ()) == default_z
            ]
            if len(def_shapes) > 0:
                shape = def_shapes[0]
        # otherwise pick first shape
        if shape is None and len(shapes) > 0:
            shape = shapes[0]
    return get_shape_thumbnail(request, conn, image, shape, compress_quality)
@login_required()
def render_shape_thumbnail(request, shapeId, w=None, h=None, conn=None, **kwargs):
    """
    For the given Shape, render a region around that shape, scale to width
    and height (or default size) and draw the shape on to the region.
    """
    server_id = request.session["connector"].server_id
    # Load the shape with its ROI so we can reach the parent image
    params = omero.sys.Parameters()
    params.map = {"id": rlong(shapeId)}
    shape = conn.getQueryService().findByQuery(
        "select s from Shape s join fetch s.roi where s.id = :id",
        params,
        conn.SERVICE_OPTS,
    )
    if shape is None:
        raise Http404
    imageId = shape.roi.image.id.val
    pi = _get_prepared_image(request, imageId, server_id=server_id, conn=conn)
    if pi is None:
        raise Http404
    image, compress_quality = pi
    return get_shape_thumbnail(request, conn, image, shape, compress_quality)
def get_shape_thumbnail(request, conn, image, s, compress_quality):
"""
Render a region around the specified Shape, scale to width and height (or
default size) and draw the shape on to the region. Returns jpeg data.
@param image: ImageWrapper
@param s: omero.model.Shape
"""
MAX_WIDTH = 250
color = request.GET.get("color", "fff")
colours = {
"f00": (255, 0, 0),
"0f0": (0, 255, 0),
"00f": (0, 0, 255),
"ff0": (255, 255, 0),
"fff": (255, 255, 255),
"000": (0, 0, 0),
}
lineColour = colours["f00"]
if color in colours:
lineColour = colours[color]
# used for padding if we go outside the image area
bg_color = (221, 221, 221)
bBox = None # bounding box: (x, y, w, h)
shape = {}
theT = unwrap(s.getTheT())
theT = theT if theT is not None else image.getDefaultT()
theZ = unwrap(s.getTheZ())
theZ = theZ if theZ is not None else image.getDefaultZ()
if type(s) == omero.model.RectangleI:
shape["type"] = "Rectangle"
shape["x"] = s.getX().getValue()
shape["y"] = s.getY().getValue()
shape["width"] = s.getWidth().getValue()
shape["height"] = s.getHeight().getValue()
bBox = (shape["x"], shape["y"], shape["width"], shape["height"])
elif type(s) == omero.model.MaskI:
shape["type"] = "Mask"
shape["x"] = s.getX().getValue()
shape["y"] = s.getY().getValue()
shape["width"] = s.getWidth().getValue()
shape["height"] = s.getHeight().getValue()
bBox = (shape["x"], shape["y"], shape["width"], shape["height"])
# TODO: support for mask
elif type(s) == omero.model.EllipseI:
shape["type"] = "Ellipse"
shape["x"] = int(s.getX().getValue())
shape["y"] = int(s.getY().getValue())
shape["radiusX"] = int(s.getRadiusX().getValue())
shape["radiusY"] = int(s.getRadiusY().getValue())
bBox = (
shape["x"] - shape["radiusX"],
shape["y"] - shape["radiusY"],
2 * shape["radiusX"],
2 * shape["radiusY"],
)
elif type(s) == omero.model.PolylineI:
shape["type"] = "PolyLine"
shape["xyList"] = points_string_to_XY_list(s.getPoints().getValue())
bBox = xy_list_to_bbox(shape["xyList"])
elif type(s) == omero.model.LineI:
shape["type"] = "Line"
shape["x1"] = int(s.getX1().getValue())
shape["x2"] = int(s.getX2().getValue())
shape["y1"] = int(s.getY1().getValue())
shape["y2"] = int(s.getY2().getValue())
x = min(shape["x1"], shape["x2"])
y = min(shape["y1"], shape["y2"])
bBox = (
x,
y,
max(shape["x1"], shape["x2"]) - x,
max(shape["y1"], shape["y2"]) - y,
)
elif type(s) == omero.model.PointI:
shape["type"] = "Point"
shape["x"] = s.getX().getValue()
shape["y"] = s.getY().getValue()
bBox = (shape["x"] - 50, shape["y"] - 50, 100, 100)
elif type(s) == omero.model.PolygonI:
shape["type"] = "Polygon"
shape["xyList"] = points_string_to_XY_list(s.getPoints().getValue())
bBox = xy_list_to_bbox(shape["xyList"])
elif type(s) == omero.model.LabelI:
shape["type"] = "Label"
shape["x"] = s.getX().getValue()
shape["y"] = s.getY().getValue()
bBox = (shape["x"] - 50, shape["y"] - 50, 100, 100)
else:
logger.debug("Shape type not supported: %s" % str(type(s)))
# we want to render a region larger than the bounding box
x, y, w, h = bBox
# make the aspect ratio (w/h) = 3/2
requiredWidth = max(w, h * 3 // 2)
requiredHeight = requiredWidth * 2 // 3
# make the rendered region 1.5 times larger than the bounding box
newW = int(requiredWidth * 1.5)
newH = int(requiredHeight * 1.5)
# Don't want the region to be smaller than the thumbnail dimensions
if newW < MAX_WIDTH:
newW = MAX_WIDTH
newH = newW * 2 // 3
# Don't want the region to be bigger than a 'Big Image'!
def getConfigValue(key):
try:
return conn.getConfigService().getConfigValue(key)
except Exception:
logger.warn(
"webgateway: get_shape_thumbnail() could not get"
" Config-Value for %s" % key
)
pass
max_plane_width = getConfigValue("omero.pixeldata.max_plane_width")
max_plane_height = getConfigValue("omero.pixeldata.max_plane_height")
if (
max_plane_width is None
or max_plane_height is None
or (newW > int(max_plane_width))
or (newH > int(max_plane_height))
):
# generate dummy image to return
dummy = Image.new("RGB", (MAX_WIDTH, MAX_WIDTH * 2 // 3), bg_color)
draw = ImageDraw.Draw(dummy)
draw.text((10, 30), "Shape too large to \ngenerate thumbnail", fill=(255, 0, 0))
rv = BytesIO()
dummy.save(rv, "jpeg", quality=90)
return HttpResponse(rv.getvalue(), content_type="image/jpeg")
xOffset = (newW - w) // 2
yOffset = (newH - h) // 2
newX = int(x - xOffset)
newY = int(y - yOffset)
# Need to check if any part of our region is outside the image. (assume
# that SOME of the region is within the image!)
sizeX = image.getSizeX()
sizeY = image.getSizeY()
left_xs, right_xs, top_xs, bottom_xs = 0, 0, 0, 0
if newX < 0:
newW = newW + newX
left_xs = abs(newX)
newX = 0
if newY < 0:
newH = newH + newY
top_xs = abs(newY)
newY = 0
if newW + newX > sizeX:
right_xs = (newW + newX) - sizeX
newW = newW - right_xs
if newH + newY > sizeY:
bottom_xs = (newH + newY) - sizeY
newH = newH - bottom_xs
# now we should be getting the correct region
jpeg_data = image.renderJpegRegion(
theZ, theT, newX, newY, newW, newH, level=None, compression=compress_quality
)
img = Image.open(BytesIO(jpeg_data))
# add back on the xs we were forced to trim
if left_xs != 0 or right_xs != 0 or top_xs != 0 or bottom_xs != 0:
jpg_w, jpg_h = img.size
xs_w = jpg_w + right_xs + left_xs
xs_h = jpg_h + bottom_xs + top_xs
xs_image = Image.new("RGB", (xs_w, xs_h), bg_color)
xs_image.paste(img, (left_xs, top_xs))
img = xs_image
# we have our full-sized region. Need to resize to thumbnail.
current_w, current_h = img.size
factor = float(MAX_WIDTH) / current_w
resizeH = int(current_h * factor)
img = img.resize((MAX_WIDTH, resizeH))
draw = ImageDraw.Draw(img)
if shape["type"] == "Rectangle":
rectX = int(xOffset * factor)
rectY = int(yOffset * factor)
rectW = int((w + xOffset) * factor)
rectH = int((h + yOffset) * factor)
draw.rectangle((rectX, rectY, rectW, rectH), outline=lineColour)
# hack to get line width of 2
draw.rectangle((rectX - 1, rectY - 1, rectW + 1, rectH + 1), outline=lineColour)
elif shape["type"] == "Line":
lineX1 = (shape["x1"] - newX + left_xs) * factor
lineX2 = (shape["x2"] - newX + left_xs) * factor
lineY1 = (shape["y1"] - newY + top_xs) * factor
lineY2 = (shape["y2"] - newY + top_xs) * factor
draw.line((lineX1, lineY1, lineX2, lineY2), fill=lineColour, width=2)
elif shape["type"] == "Ellipse":
rectX = int(xOffset * factor)
rectY = int(yOffset * factor)
rectW = int((w + xOffset) * factor)
rectH = int((h + yOffset) * factor)
draw.ellipse((rectX, rectY, rectW, rectH), outline=lineColour)
# hack to get line width of 2
draw.ellipse((rectX - 1, rectY - 1, rectW + 1, rectH + 1), outline=lineColour)
elif shape["type"] == "Point":
point_radius = 2
rectX = (MAX_WIDTH // 2) - point_radius
rectY = int(resizeH // 2) - point_radius
rectW = rectX + (point_radius * 2)
rectH = rectY + (point_radius * 2)
draw.ellipse((rectX, rectY, rectW, rectH), outline=lineColour)
# hack to get line width of 2
draw.ellipse((rectX - 1, rectY - 1, rectW + 1, rectH + 1), outline=lineColour)
elif "xyList" in shape:
# resizedXY = [(int(x*factor), int(y*factor))
# for (x,y) in shape['xyList']]
def resizeXY(xy):
x, y = xy
return (
int((x - newX + left_xs) * factor),
int((y - newY + top_xs) * factor),
)
resizedXY = [resizeXY(xy) for xy in shape["xyList"]]
# doesn't support 'width' of line
# draw.polygon(resizedXY, outline=lineColour)
x2 = y2 = None
for line in range(1, len(resizedXY)):
x1, y1 = resizedXY[line - 1]
x2, y2 = resizedXY[line]
draw.line((x1, y1, x2, y2), fill=lineColour, width=2)
start_x, start_y = resizedXY[0]
if shape["type"] != "PolyLine":
# Seems possible to have Polygon with only 1 point!
if x2 is None:
x2 = start_x + 1 # This will create a visible dot
if y2 is None:
y2 = start_y + 1
draw.line((x2, y2, start_x, start_y), fill=lineColour, width=2)
rv = BytesIO()
compression = 0.9
try:
img.save(rv, "jpeg", quality=int(compression * 100))
jpeg = rv.getvalue()
finally:
rv.close()
return HttpResponse(jpeg, content_type="image/jpeg")
@login_required()
def render_shape_mask(request, shapeId, conn=None, **kwargs):
    """
    Returns the Mask of shape 'shapeId' as a png (supports transparency).

    The mask bytes are a bit-packed 2D mask; each set bit becomes a filled
    pixel using the shape's fill color (default semi-opaque yellow).

    @param request:     http request
    @param shapeId:     Shape (Mask) ID
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping a png
    @raise Http404:     if no Shape with this ID is found
    """
    if not numpyInstalled:
        raise NotImplementedError("numpy not installed")
    params = omero.sys.Parameters()
    params.map = {"id": rlong(shapeId)}
    shape = conn.getQueryService().findByQuery(
        "select s from Shape s where s.id = :id", params, conn.SERVICE_OPTS
    )
    if shape is None:
        raise Http404("Shape ID: %s not found" % shapeId)
    width = int(shape.getWidth().getValue())
    height = int(shape.getHeight().getValue())
    color = unwrap(shape.getFillColor())
    fill = (255, 255, 0, 255)
    if color is not None:
        color = rgb_int2rgba(color)
        fill = (color[0], color[1], color[2], int(color[3] * 255))
    mask_packed = shape.getBytes()
    # convert bytearray into something we can use.
    # Fix: numpy.fromstring is deprecated (and removed in newer numpy) for
    # binary data - numpy.frombuffer is the supported, zero-copy equivalent.
    intarray = numpy.frombuffer(mask_packed, dtype=numpy.uint8)
    binarray = numpy.unpackbits(intarray)
    # Couldn't get the 'proper' way of doing this to work,
    # TODO: look at this again later. Faster than simple way below:
    # E.g. takes ~2 seconds for 1984 x 1984 mask
    # pixels = ""
    # steps = len(binarray) / 8
    # for i in range(steps):
    #     b = binarray[i*8: (i+1)*8]
    #     pixels += struct.pack("8B", b[0], b[1], b[2], b[3], b[4],
    #                           b[5], b[6], b[7])
    # for b in binarray:
    #     pixels += struct.pack("1B", b)
    # im = Image.frombytes("1", size=(width, height), data=pixels)

    # Simple approach - Just set each pixel in turn
    # E.g. takes ~12 seconds for 1984 x 1984 mask with most pixels '1'
    # Or ~5 seconds for same size mask with most pixels "0"
    img = Image.new("RGBA", size=(width, height), color=(0, 0, 0, 0))
    # Bits are packed row-major: walk x along each row, wrapping to next y
    x = 0
    y = 0
    for pix in binarray:
        if pix == 1:
            img.putpixel((x, y), fill)
        x += 1
        if x > width - 1:
            x = 0
            y += 1
    rv = BytesIO()
    # return a png (supports transparency)
    img.save(rv, "png", quality=int(100))
    png = rv.getvalue()
    return HttpResponse(png, content_type="image/png")
def _get_signature_from_request(request):
"""
returns a string that identifies this image, along with the settings
passed on the request.
Useful for using as img identifier key, for prepared image.
@param request: http request
@return: String
"""
r = request.GET
rv = r.get("m", "_") + r.get("p", "_") + r.get("c", "_") + r.get("q", "_")
return rv
def _get_maps_enabled(request, name, sizeC=0):
"""
Parses 'maps' query string from request
"""
codomains = None
if "maps" in request:
map_json = request["maps"]
codomains = []
try:
# If coming from request string, need to load -> json
if isinstance(map_json, (unicode, str)):
map_json = json.loads(map_json)
sizeC = max(len(map_json), sizeC)
for c in range(sizeC):
enabled = None
if len(map_json) > c:
m = map_json[c].get(name)
# If None, no change to saved status
if m is not None:
enabled = m.get("enabled") in (True, "true")
codomains.append(enabled)
except Exception:
logger.debug("Invalid json for query ?maps=%s" % map_json)
codomains = None
return codomains
def _get_prepared_image(
    request, iid, server_id=None, conn=None, saveDefs=False, retry=True
):
    """
    Fetches the Image object for image 'iid' and prepares it according to the
    request query, setting the channels, rendering model and projection
    arguments. The compression level is parsed and returned too.
    For parameters in request, see L{getImgDetailsFromReq}

    @param request:     http request
    @param iid:         Image ID
    @param server_id:   Server ID (accepted for caller compatibility; not
                        read in this function body)
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @param saveDefs:    Try to save the rendering settings, default z and t.
    @param retry:       Try an extra attempt at this method
    @return:            Tuple (L{omero.gateway.ImageWrapper} image, quality)
                        or None if the Image cannot be loaded
    """
    r = request.GET
    logger.debug(
        "Preparing Image:%r saveDefs=%r "
        "retry=%r request=%r conn=%s" % (iid, saveDefs, retry, r, str(conn))
    )
    img = conn.getObject("Image", iid)
    if img is None:
        # Implicit None return signals "not found" to callers
        return
    invert_flags = None
    if "maps" in r:
        # Per-channel codomain maps from the ?maps= json
        reverses = _get_maps_enabled(r, "reverse", img.getSizeC())
        # 'reverse' is now deprecated (5.4.0). Also check for 'invert'
        invert_flags = _get_maps_enabled(r, "inverted", img.getSizeC())
        # invert is True if 'invert' OR 'reverse' is enabled
        if reverses is not None and invert_flags is not None:
            invert_flags = [
                z[0] if z[0] is not None else z[1] for z in zip(invert_flags, reverses)
            ]
        try:
            # quantization maps (just applied, not saved at the moment)
            qm = [m.get("quantization") for m in json.loads(r["maps"])]
            img.setQuantizationMaps(qm)
        except Exception:
            # Best-effort: invalid maps json just skips quantization
            logger.debug("Failed to set quantization maps")
    if "c" in r:
        logger.debug("c=" + r["c"])
        activechannels, windows, colors = _split_channel_info(r["c"])
        allchannels = range(1, img.getSizeC() + 1)
        # If saving, apply to all channels
        if saveDefs and not img.setActiveChannels(
            allchannels, windows, colors, invert_flags
        ):
            logger.debug("Something bad happened while setting the active channels...")
        # Save the active/inactive state of the channels
        if not img.setActiveChannels(activechannels, windows, colors, invert_flags):
            logger.debug("Something bad happened while setting the active channels...")
    # Rendering model: 'g' = greyscale, 'c' = color
    if r.get("m", None) == "g":
        img.setGreyscaleRenderingModel()
    elif r.get("m", None) == "c":
        img.setColorRenderingModel()
    # projection 'intmax' OR 'intmax|5:25'
    p = r.get("p", None)
    pStart, pEnd = None, None
    if p is not None and len(p.split("|")) > 1:
        p, startEnd = p.split("|", 1)
        try:
            # E.g. '5:25' -> projection z-range (ignored if malformed)
            pStart, pEnd = [int(s) for s in startEnd.split(":")]
        except ValueError:
            pass
    img.setProjection(p)
    img.setProjectionRange(pStart, pEnd)
    img.setInvertedAxis(bool(r.get("ia", "0") == "1"))
    compress_quality = r.get("q", None)
    if saveDefs:
        # NB: short-circuit 'and' used as a one-line conditional.
        # NOTE(review): `long` presumably comes from a py2/3 compat import
        # at the top of this file - confirm.
        "z" in r and img.setDefaultZ(long(r["z"]) - 1)
        "t" in r and img.setDefaultT(long(r["t"]) - 1)
        img.saveDefaults()
    return (img, compress_quality)
@login_required()
def render_image_region(request, iid, z, t, conn=None, **kwargs):
    """
    Returns a jpeg of the OMERO image, rendering only a region specified in
    query string as region=x,y,width,height. E.g. region=0,512,256,256
    Rendering settings can be specified in the request parameters.

    Alternatively a tile may be requested as tile=level,col,row[,w,h],
    where 'level' counts up from the most zoomed-out resolution.

    @param request:     http request
    @param iid:         image ID
    @param z:           Z index
    @param t:           T index
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping jpeg, or a 400 response for
                        malformed tile/region arguments
    """
    server_id = request.session["connector"].server_id

    # if the region=x,y,w,h is not parsed correctly to give 4 ints then we
    # simply provide whole image plane.
    # alternatively, could return a 404?
    # if h == None:
    #     return render_image(request, iid, z, t, server_id=None, _conn=None,
    #     **kwargs)
    pi = _get_prepared_image(request, iid, server_id=server_id, conn=conn)
    if pi is None:
        raise Http404
    img, compress_quality = pi

    tile = request.GET.get("tile", None)
    region = request.GET.get("region", None)
    level = None

    if tile:
        try:
            img._prepareRenderingEngine()
            w, h = img._re.getTileSize()
            levels = img._re.getResolutionLevels() - 1

            zxyt = tile.split(",")

            # if tile size is given respect it
            if len(zxyt) > 4:
                tile_size = [int(zxyt[3]), int(zxyt[4])]
                tile_defaults = [w, h]
                max_tile_length = 1024
                try:
                    max_tile_length = int(
                        conn.getConfigService().getConfigValue(
                            "omero.pixeldata.max_tile_length"
                        )
                    )
                except Exception:
                    # fall back to the 1024 default above
                    pass
                for i, tile_length in enumerate(tile_size):
                    # use default tile size if <= 0
                    if tile_length <= 0:
                        tile_size[i] = tile_defaults[i]
                    # allow no bigger than max_tile_length
                    if tile_length > max_tile_length:
                        tile_size[i] = max_tile_length
                w, h = tile_size
            # requested zoom level (0 = most zoomed out)
            v = int(zxyt[0])
            if v < 0:
                msg = "Invalid resolution level %s < 0" % v
                logger.debug(msg, exc_info=True)
                return HttpResponseBadRequest(msg)

            if levels == 0:  # non pyramid file
                if v > 0:
                    msg = "Invalid resolution level %s, non pyramid file" % v
                    logger.debug(msg, exc_info=True)
                    return HttpResponseBadRequest(msg)
                else:
                    level = None
            else:
                # rendering engine counts levels the opposite way round
                level = levels - v
                if level < 0:
                    msg = (
                        "Invalid resolution level, \
                        %s > number of available levels %s "
                        % (v, levels)
                    )
                    logger.debug(msg, exc_info=True)
                    return HttpResponseBadRequest(msg)
            # tile col/row -> pixel offsets
            x = int(zxyt[1]) * w
            y = int(zxyt[2]) * h
        except Exception:
            msg = "malformed tile argument, tile=%s" % tile
            logger.debug(msg, exc_info=True)
            return HttpResponseBadRequest(msg)
    elif region:
        try:
            xywh = region.split(",")
            x = int(xywh[0])
            y = int(xywh[1])
            w = int(xywh[2])
            h = int(xywh[3])
        except Exception:
            msg = "malformed region argument, region=%s" % region
            logger.debug(msg, exc_info=True)
            return HttpResponseBadRequest(msg)
    else:
        return HttpResponseBadRequest("tile or region argument required")

    # region details in request are used as key for caching.
    jpeg_data = webgateway_cache.getImage(request, server_id, img, z, t)
    if jpeg_data is None:
        jpeg_data = img.renderJpegRegion(
            z, t, x, y, w, h, level=level, compression=compress_quality
        )
        if jpeg_data is None:
            raise Http404
        webgateway_cache.setImage(request, server_id, img, z, t, jpeg_data)

    rsp = HttpResponse(jpeg_data, content_type="image/jpeg")
    return rsp
@login_required()
def render_image(request, iid, z=None, t=None, conn=None, **kwargs):
    """
    Renders the image with id {{iid}} at {{z}} and {{t}} as jpeg.
    Many options are available from the request dict. See
    L{getImgDetailsFromReq} for list.
    I am assuming a single Pixels object on image with image-Id='iid'. May be
    wrong

    If kwargs['download'] is set, the response is marked as a download and
    may be converted to png or tiff via the 'format' query parameter.

    @param request:     http request
    @param iid:         image ID
    @param z:           Z index
    @param t:           T index
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping jpeg (or png/tiff download)
    """
    server_id = request.session["connector"].server_id

    pi = _get_prepared_image(request, iid, server_id=server_id, conn=conn)
    if pi is None:
        raise Http404
    img, compress_quality = pi

    jpeg_data = webgateway_cache.getImage(request, server_id, img, z, t)
    if jpeg_data is None:
        jpeg_data = img.renderJpeg(z, t, compression=compress_quality)
        if jpeg_data is None:
            raise Http404
        webgateway_cache.setImage(request, server_id, img, z, t, jpeg_data)

    # NB: 'format' shadows the builtin; only used locally here
    format = request.GET.get("format", "jpeg")
    rsp = HttpResponse(jpeg_data, content_type="image/jpeg")
    if "download" in kwargs and kwargs["download"]:
        if format == "png":
            # convert jpeg data to png...
            i = Image.open(BytesIO(jpeg_data))
            output = BytesIO()
            i.save(output, "png")
            jpeg_data = output.getvalue()
            output.close()
            rsp = HttpResponse(jpeg_data, content_type="image/png")
        elif format == "tif":
            # convert jpeg data to TIFF
            i = Image.open(BytesIO(jpeg_data))
            output = BytesIO()
            i.save(output, "tiff")
            jpeg_data = output.getvalue()
            output.close()
            rsp = HttpResponse(jpeg_data, content_type="image/tiff")
        fileName = img.getName()
        try:
            fileName = fileName.decode("utf8")
        except AttributeError:
            pass  # python 3
        # sanitise the name for use in the Content-Disposition header
        fileName = fileName.replace(",", ".").replace(" ", "_")
        rsp["Content-Type"] = "application/force-download"
        rsp["Content-Length"] = len(jpeg_data)
        rsp["Content-Disposition"] = "attachment; filename=%s.%s" % (fileName, format)
    return rsp
@login_required()
def render_ome_tiff(request, ctx, cid, conn=None, **kwargs):
    """
    Renders the OME-TIFF representation of the image(s) with id cid in ctx
    (i)mage, (d)ataset, or (p)roject.
    For multiple images export, images that require pixels pyramid (big
    images) will be silently skipped.
    If exporting a single big image or if all images in a multple image export
    are big, a 404 will be triggered.
    A request parameter dryrun can be passed to return the count of images
    that would actually be exported.

    @param request:     http request
    @param ctx:         'p' or 'd' or 'i'
    @param cid:         Project, Dataset or Image ID
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping the tiff (or zip for multiple
                        files), or redirect to temp file/zip
                        if dryrun is True, returns count of images that would
                        be exported
    """
    server_id = request.session["connector"].server_id
    imgs = []
    if ctx == "p":
        # all images of all datasets in the project
        obj = conn.getObject("Project", cid)
        if obj is None:
            raise Http404
        for d in obj.listChildren():
            imgs.extend(list(d.listChildren()))
        name = obj.getName()
    elif ctx == "d":
        obj = conn.getObject("Dataset", cid)
        if obj is None:
            raise Http404
        imgs.extend(list(obj.listChildren()))
        # optional ?selection=id1,id2 narrows the export
        selection = list(filter(None, request.GET.get("selection", "").split(",")))
        if len(selection) > 0:
            logger.debug(selection)
            logger.debug(imgs)
            imgs = [x for x in imgs if str(x.getId()) in selection]
            logger.debug(imgs)
            if len(imgs) == 0:
                raise Http404
        name = "%s-%s" % (obj.getParent().getName(), obj.getName())
    elif ctx == "w":
        obj = conn.getObject("Well", cid)
        if obj is None:
            raise Http404
        imgs.extend([x.getImage() for x in obj.listChildren()])
        plate = obj.getParent()
        # e.g. 'A1' from row/column labels
        coord = "%s%s" % (
            plate.getRowLabels()[obj.row],
            plate.getColumnLabels()[obj.column],
        )
        name = "%s-%s-%s" % (plate.getParent().getName(), plate.getName(), coord)
    else:
        obj = conn.getObject("Image", cid)
        if obj is None:
            raise Http404
        imgs.append(obj)

    # big images (pyramids) cannot be exported as OME-TIFF
    imgs = [x for x in imgs if not x.requiresPixelsPyramid()]

    if request.GET.get("dryrun", False):
        # only report how many images would be exported (JSONP-aware)
        rv = json.dumps(len(imgs))
        c = request.GET.get("callback", None)
        if c is not None and not kwargs.get("_internal", False):
            rv = "%s(%s)" % (c, rv)
        return HttpJavascriptResponse(rv)
    if len(imgs) == 0:
        raise Http404
    if len(imgs) == 1:
        # single-image export: plain .ome.tiff
        obj = imgs[0]
        key = (
            "_".join((str(x.getId()) for x in obj.getAncestry()))
            + "_"
            + str(obj.getId())
            + "_ome_tiff"
        )
        # total name len <= 255, 9 is for .ome.tiff
        fnamemax = 255 - len(str(obj.getId())) - 10
        objname = obj.getName()[:fnamemax]
        fpath, rpath, fobj = webgateway_tempfile.new(
            str(obj.getId()) + "-" + objname + ".ome.tiff", key=key
        )
        if fobj is True:
            # already exists
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + rpath
            )
        tiff_data = webgateway_cache.getOmeTiffImage(request, server_id, imgs[0])
        if tiff_data is None:
            try:
                tiff_data = imgs[0].exportOmeTiff()
            except Exception:
                logger.debug("Failed to export image (2)", exc_info=True)
                tiff_data = None
            if tiff_data is None:
                webgateway_tempfile.abort(fpath)
                raise Http404
            webgateway_cache.setOmeTiffImage(request, server_id, imgs[0], tiff_data)
        if fobj is None:
            # no temp-file support: stream the data directly
            rsp = HttpResponse(tiff_data, content_type="image/tiff")
            rsp["Content-Disposition"] = 'attachment; filename="%s.ome.tiff"' % (
                str(obj.getId()) + "-" + objname
            )
            rsp["Content-Length"] = len(tiff_data)
            return rsp
        else:
            fobj.write(tiff_data)
            fobj.close()
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + rpath
            )
    else:
        # multi-image export: zip of .ome.tiff files
        try:
            img_ids = "+".join((str(x.getId()) for x in imgs)).encode("utf-8")
            key = (
                "_".join((str(x.getId()) for x in imgs[0].getAncestry()))
                + "_"
                + md5(img_ids).hexdigest()
                + "_ome_tiff_zip"
            )
            fpath, rpath, fobj = webgateway_tempfile.new(name + ".zip", key=key)
            if fobj is True:
                return HttpResponseRedirect(
                    settings.STATIC_URL + "webgateway/tfiles/" + rpath
                )
            logger.debug(fpath)
            if fobj is None:
                fobj = BytesIO()
            zobj = zipfile.ZipFile(fobj, "w", zipfile.ZIP_STORED)
            for obj in imgs:
                tiff_data = webgateway_cache.getOmeTiffImage(request, server_id, obj)
                if tiff_data is None:
                    tiff_data = obj.exportOmeTiff()
                    if tiff_data is None:
                        continue
                    webgateway_cache.setOmeTiffImage(request, server_id, obj, tiff_data)
                # While ZIP itself doesn't have the 255 char limit for
                # filenames, the FS where these get unarchived might, so trim
                # names
                # total name len <= 255, 9 is for .ome.tiff
                fnamemax = 255 - len(str(obj.getId())) - 10
                objname = obj.getName()[:fnamemax]
                zobj.writestr(str(obj.getId()) + "-" + objname + ".ome.tiff", tiff_data)
            zobj.close()
            if fpath is None:
                zip_data = fobj.getvalue()
                rsp = HttpResponse(zip_data, content_type="application/zip")
                rsp["Content-Disposition"] = 'attachment; filename="%s.zip"' % name
                rsp["Content-Length"] = len(zip_data)
                return rsp
        except Exception:
            logger.debug(traceback.format_exc())
            raise
        return HttpResponseRedirect(settings.STATIC_URL + "webgateway/tfiles/" + rpath)
@login_required()
def render_movie(request, iid, axis, pos, conn=None, **kwargs):
    """
    Renders a movie from the image with id iid

    @param request:     http request
    @param iid:         Image ID
    @param axis:        Movie frames are along 'z' or 't' dimension. String
    @param pos:         The T index (for z axis) or Z index (for t axis)
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping the file, or redirect to temp
                        file
    """
    server_id = request.session["connector"].server_id
    try:
        # Prepare a filename we'll use for temp cache, and check if file is
        # already there
        opts = {}
        opts["format"] = "video/" + request.GET.get("format", "quicktime")
        opts["fps"] = int(request.GET.get("fps", 4))
        opts["minsize"] = (512, 512, "Black")
        ext = ".avi"
        key = "%s-%s-%s-%d-%s-%s" % (
            iid,
            axis,
            pos,
            opts["fps"],
            _get_signature_from_request(request),
            request.GET.get("format", "quicktime"),
        )

        pos = int(pos)
        pi = _get_prepared_image(request, iid, server_id=server_id, conn=conn)
        if pi is None:
            raise Http404
        img, compress_quality = pi

        fpath, rpath, fobj = webgateway_tempfile.new(img.getName() + ext, key=key)
        logger.debug(fpath, rpath, fobj)
        if fobj is True:
            # already cached: redirect to the static temp file
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + rpath
            )
            # os.path.join(rpath, img.getName() + ext))

        if "optsCB" in kwargs:
            opts.update(kwargs["optsCB"](img))
        opts.update(kwargs.get("opts", {}))
        logger.debug(
            "rendering movie for img %s with axis %s, pos %i and opts %s"
            % (iid, axis, pos, opts)
        )
        # fpath, rpath = webgateway_tempfile.newdir()
        if fpath is None:
            # no temp-file support: render to an anonymous temp file
            fo, fn = tempfile.mkstemp()
        else:
            fn = fpath  # os.path.join(fpath, img.getName())
        if axis.lower() == "z":
            dext, mimetype = img.createMovie(
                fn, 0, img.getSizeZ() - 1, pos - 1, pos - 1, opts
            )
        else:
            dext, mimetype = img.createMovie(
                fn, pos - 1, pos - 1, 0, img.getSizeT() - 1, opts
            )
        if dext is None and mimetype is None:
            # createMovie is currently only available on 4.1_custom
            # https://trac.openmicroscopy.org/ome/ticket/3857
            raise Http404

        if fpath is None:
            # Fix: read the movie bytes in BINARY mode - the previous
            # open(fn).read() used text mode (UnicodeDecodeError on
            # Python 3 for video data) and leaked the file handle.
            with open(fn, "rb") as movie_file:
                movie = movie_file.read()
            os.close(fo)
            rsp = HttpResponse(movie, content_type=mimetype)
            rsp["Content-Disposition"] = 'attachment; filename="%s"' % (
                img.getName() + ext
            )
            rsp["Content-Length"] = len(movie)
            return rsp
        else:
            fobj.close()
            # shutil.move(fn, fn + ext)
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + rpath
            )
            # os.path.join(rpath, img.getName() + ext))
    except Exception:
        logger.debug(traceback.format_exc())
        raise
@login_required()
def render_split_channel(request, iid, z, t, conn=None, **kwargs):
    """
    Renders a split channel view of the image with id {{iid}} at {{z}} and
    {{t}} as jpeg.
    Many options are available from the request dict.
    Requires Pillow to be installed on the server.

    @param request:     http request
    @param iid:         Image ID
    @param z:           Z index
    @param t:           T index
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            http response wrapping a jpeg
    """
    server_id = request.session["connector"].server_id
    prepared = _get_prepared_image(request, iid, server_id=server_id, conn=conn)
    if prepared is None:
        raise Http404
    img, compress_quality = prepared
    # Default to 0.9 when the requested quality is missing or zero
    # (mirrors the legacy `x and y or z` idiom: "0" also maps to 0.9)
    if compress_quality:
        compress_quality = float(compress_quality) or 0.9
    else:
        compress_quality = 0.9
    jpeg_data = webgateway_cache.getSplitChannelImage(request, server_id, img, z, t)
    if jpeg_data is None:
        jpeg_data = img.renderSplitChannel(z, t, compression=compress_quality)
        if jpeg_data is None:
            raise Http404
        webgateway_cache.setSplitChannelImage(request, server_id, img, z, t, jpeg_data)
    return HttpResponse(jpeg_data, content_type="image/jpeg")
def debug(f):
    """
    Decorator for adding debugging functionality to methods.

    The wrapper inspects the 'debug' query parameter list: 'slow' sleeps
    for 5 seconds, 'fail' raises Http404 and 'error' raises an
    AttributeError before the wrapped view is called.

    @param f:       The function to wrap
    @return:        The wrapped function
    """

    @wraps(f)
    def wrapped(request, *args, **kwargs):
        flags = request.GET.getlist("debug")
        if "slow" in flags:
            time.sleep(5)
        if "fail" in flags:
            raise Http404
        if "error" in flags:
            raise AttributeError("Debug requested error")
        return f(request, *args, **kwargs)

    return wrapped
def jsonp(f):
    """
    Decorator for adding connection debugging and returning function result as
    json, depending on values in kwargs

    Recognised kwargs: '_raw' returns the value unwrapped, '_internal'
    skips JSONP/JsonResponse wrapping. A 'callback' query parameter
    triggers JSONP output. Expected OMERO errors are mapped to HTTP
    status codes (SecurityViolation -> 403, ApiUsageException -> 400,
    anything else -> 500).

    @param f:       The function to wrap
    @return:        The wrapped function, which will return json
    """

    @wraps(f)
    def wrap(request, *args, **kwargs):
        logger.debug("jsonp")
        try:
            # propagate the session's server_id to the wrapped view
            server_id = kwargs.get("server_id", None)
            if server_id is None and request.session.get("connector"):
                server_id = request.session["connector"].server_id
            kwargs["server_id"] = server_id
            rv = f(request, *args, **kwargs)
            if kwargs.get("_raw", False):
                return rv
            if isinstance(rv, HttpResponse):
                # view already built a full response - pass it through
                return rv
            c = request.GET.get("callback", None)
            if c is not None and not kwargs.get("_internal", False):
                # JSONP: wrap the json payload in the requested callback
                rv = json.dumps(rv)
                rv = "%s(%s)" % (c, rv)
                # mimetype for JSONP is application/javascript
                return HttpJavascriptResponse(rv)
            if kwargs.get("_internal", False):
                return rv
            # mimetype for JSON is application/json
            # NB: To support old api E.g. /get_rois_json/
            # We need to support lists
            safe = type(rv) is dict
            return JsonResponse(rv, safe=safe)
        except Exception as ex:
            # Default status is 500 'server error'
            # But we try to handle all 'expected' errors appropriately
            # TODO: handle omero.ConcurrencyException
            status = 500
            if isinstance(ex, omero.SecurityViolation):
                status = 403
            elif isinstance(ex, omero.ApiUsageException):
                status = 400
            trace = traceback.format_exc()
            logger.debug(trace)
            if kwargs.get("_raw", False) or kwargs.get("_internal", False):
                raise
            return JsonResponse(
                {"message": str(ex), "stacktrace": trace}, status=status
            )

    return wrap
@debug
@login_required()
def render_row_plot(request, iid, z, t, y, conn=None, w=1, **kwargs):
    """
    Renders the line plot for the image with id {{iid}} at {{z}} and {{t}} as
    gif with transparent background.
    Many options are available from the request dict.
    I am assuming a single Pixels object on image with Image ID='iid'. May be
    wrong
    TODO: cache

    @param request:     http request
    @param iid:         Image ID
    @param z:           Z index
    @param t:           T index
    @param y:           Y position of row to measure
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @param w:           Line width
    @return:            http response wrapping a gif
    """
    w = w or 1
    prepared = _get_prepared_image(request, iid, conn=conn)
    if prepared is None:
        raise Http404
    img, _compress_quality = prepared
    try:
        gif_data = img.renderRowLinePlotGif(int(z), int(t), int(y), int(w))
    except Exception:
        logger.debug("a", exc_info=True)
        raise
    if gif_data is None:
        raise Http404
    return HttpResponse(gif_data, content_type="image/gif")
@debug
@login_required()
def render_col_plot(request, iid, z, t, x, w=1, conn=None, **kwargs):
    """
    Renders the line plot for the image with id {{iid}} at {{z}} and {{t}} as
    gif with transparent background.
    Many options are available from the request dict.
    I am assuming a single Pixels object on image with id='iid'. May be wrong
    TODO: cache

    @param request:     http request
    @param iid:         Image ID
    @param z:           Z index
    @param t:           T index
    @param x:           X position of column to measure
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @param w:           Line width
    @return:            http response wrapping a gif
    """
    w = w or 1
    prepared = _get_prepared_image(request, iid, conn=conn)
    if prepared is None:
        raise Http404
    img, _compress_quality = prepared
    gif_data = img.renderColLinePlotGif(int(z), int(t), int(x), int(w))
    if gif_data is None:
        raise Http404
    return HttpResponse(gif_data, content_type="image/gif")
@login_required()
@jsonp
def imageData_json(request, conn=None, _internal=False, **kwargs):
    """
    Get a dict with image information
    TODO: cache

    @param request:     http request
    @param conn:        L{omero.gateway.BlitzGateway}
    @param _internal:   TODO: ?
    @return:            Dict
    """
    image_id = kwargs["iid"]
    key = kwargs.get("key", None)
    image = conn.getObject("Image", image_id)
    if image is None:
        # distinguish "not visible to anonymous user" from "not there"
        if is_public_user(request):
            # 403 - Should try logging in
            return HttpResponseForbidden()
        return HttpResponseNotFound("Image:%s not found" % image_id)
    if request.GET.get("getDefaults") == "true":
        image.resetDefaults(save=False)
    return imageMarshal(image, key=key, request=request)
@login_required()
@jsonp
def wellData_json(request, conn=None, _internal=False, **kwargs):
    """
    Get a dict with image information
    TODO: cache

    @param request:     http request
    @param conn:        L{omero.gateway.BlitzGateway}
    @param _internal:   TODO: ?
    @return:            Dict
    """
    well = conn.getObject("Well", kwargs["wid"])
    if well is None:
        return HttpJavascriptResponseServerError('""')
    thumbprefix = kwargs.get("thumbprefix", "webgateway_render_thumbnail")

    def urlprefix(iid):
        # default thumbnail url builder, overridable via kwargs
        return reverse(thumbprefix, args=(iid,))

    xtra = {"thumbUrlPrefix": kwargs.get("urlprefix", urlprefix)}
    return well.simpleMarshal(xtra=xtra)
@login_required()
@jsonp
def plateGrid_json(request, pid, field=0, conn=None, **kwargs):
    """
    Returns a dict of plate grid metadata (with thumbnail urls) for the
    plate 'pid' at acquisition field 'field', using the json cache where
    possible.

    @param request:     http request
    @param pid:         Plate ID
    @param field:       Acquisition field index (defaults to 0)
    @param conn:        L{omero.gateway.BlitzGateway} connection
    @return:            Dict of plate grid metadata
    @raise Http404:     if the Plate cannot be loaded
    """
    try:
        field = long(field or 0)
    except ValueError:
        field = 0
    prefix = kwargs.get("thumbprefix", "webgateway_render_thumbnail")
    thumbsize = getIntOrDefault(request, "size", None)
    logger.debug(thumbsize)
    server_id = kwargs["server_id"]

    def get_thumb_url(iid):
        # include the requested thumbnail size in the url if given
        if thumbsize is not None:
            return reverse(prefix, args=(iid, thumbsize))
        return reverse(prefix, args=(iid,))

    plateGrid = PlateGrid(conn, pid, field, kwargs.get("urlprefix", get_thumb_url))

    plate = plateGrid.plate
    if plate is None:
        # Fix: previously this *returned* the Http404 class instead of
        # raising it - the @jsonp wrapper would then try (and fail) to
        # serialize a class as json. Raising gives a proper 404.
        raise Http404

    cache_key = "plategrid-%d-%s" % (field, thumbsize)
    rv = webgateway_cache.getJson(request, server_id, plate, cache_key)

    if rv is None:
        rv = plateGrid.metadata
        webgateway_cache.setJson(request, server_id, plate, json.dumps(rv), cache_key)
    else:
        rv = json.loads(rv)
    return rv
@login_required()
@jsonp
def get_thumbnails_json(request, w=None, conn=None, **kwargs):
    """
    Returns base64 encoded jpeg with the rendered thumbnail for images
    'id'

    @param request:     http request
    @param w:           Thumbnail max width. 96 by default
    @return:            http response containing base64 encoded thumbnails
    """
    browser_settings = request.session.get("server_settings", {}).get("browser", {})
    if w is None:
        w = browser_settings.get("thumb_default_size", 96)
    image_ids = list(set(get_longs(request, "id")))  # remove any duplicates
    # If we only have a single ID, simply use getThumbnail()
    if len(image_ids) == 1:
        iid = image_ids[0]
        try:
            data = _render_thumbnail(request, iid, w=w, conn=conn)
            return {
                iid: "data:image/jpeg;base64,%s"
                % base64.b64encode(data).decode("utf-8")
            }
        except Exception:
            return {iid: None}
    logger.debug("Image ids: %r" % image_ids)
    if len(image_ids) > settings.THUMBNAILS_BATCH:
        return HttpJavascriptResponseServerError(
            "Max %s thumbnails at a time." % settings.THUMBNAILS_BATCH
        )
    thumbnails = conn.getThumbnailSet([rlong(i) for i in image_ids], w)
    rv = {}
    for i in image_ids:
        rv[i] = None
        try:
            t = thumbnails[i]
            if len(t) > 0:
                # replace thumbnail urls by base64 encoded image
                rv[i] = "data:image/jpeg;base64,%s" % base64.b64encode(t).decode(
                    "utf-8"
                )
        except KeyError:
            logger.error("Thumbnail not available. (img id: %d)" % i)
        except Exception:
            logger.error(traceback.format_exc())
    return rv
@login_required()
@jsonp
def get_thumbnail_json(request, iid, w=None, h=None, conn=None, _defcb=None, **kwargs):
    """
    Returns an HttpResponse base64 encoded jpeg with the rendered thumbnail
    for image 'iid'

    @param request:     http request
    @param iid:         Image ID
    @param w:           Thumbnail max width. 96 by default
    @param h:           Thumbnail max height
    @return:            http response containing base64 encoded thumbnail
    """
    jpeg_data = _render_thumbnail(
        request=request, iid=iid, w=w, h=h, conn=conn, _defcb=_defcb, **kwargs
    )
    encoded = base64.b64encode(jpeg_data).decode("utf-8")
    return "data:image/jpeg;base64,%s" % encoded
@login_required()
@jsonp
def listImages_json(request, did, conn=None, **kwargs):
    """
    lists all Images in a Dataset, as json
    TODO: cache

    @param request:     http request
    @param did:         Dataset ID
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            list of image json.
    """
    dataset = conn.getObject("Dataset", did)
    if dataset is None:
        return HttpJavascriptResponseServerError('""')
    thumbprefix = kwargs.get("thumbprefix", "webgateway_render_thumbnail")

    def urlprefix(iid):
        # default thumbnail url builder, overridable via kwargs
        return reverse(thumbprefix, args=(iid,))

    xtra = {
        "thumbUrlPrefix": kwargs.get("urlprefix", urlprefix),
        "tiled": request.GET.get("tiled", False),
    }
    return [child.simpleMarshal(xtra=xtra) for child in dataset.listChildren()]
@login_required()
@jsonp
def listWellImages_json(request, did, conn=None, **kwargs):
    """
    lists all Images in a Well, as json
    TODO: cache

    @param request:     http request
    @param did:         Well ID
    @param conn:        L{omero.gateway.BlitzGateway}
    @return:            list of image json.
    """
    well = conn.getObject("Well", did)
    acq = getIntOrDefault(request, "run", None)
    if well is None:
        return HttpJavascriptResponseServerError('""')
    thumbprefix = kwargs.get("thumbprefix", "webgateway_render_thumbnail")

    def urlprefix(iid):
        # default thumbnail url builder, overridable via kwargs
        return reverse(thumbprefix, args=(iid,))

    xtra = {"thumbUrlPrefix": kwargs.get("urlprefix", urlprefix)}

    def marshal_pos(ws):
        # Well-sample stage position for each axis that is set
        pos = {}
        for axis, value in (("x", ws.getPosX()), ("y", ws.getPosY())):
            if value is not None:
                pos[axis] = {"value": value.getValue(), "unit": str(value.getUnit())}
        return pos

    marshalled = []
    for ws in well.listChildren():
        # optionally filter by acquisition 'run'
        if (
            acq is not None
            and ws.plateAcquisition is not None
            and ws.plateAcquisition.id.val != acq
        ):
            continue
        img = ws.getImage()
        if img is None:
            continue
        m = img.simpleMarshal(xtra=xtra)
        pos = marshal_pos(ws)
        if pos:
            m["position"] = pos
        marshalled.append(m)
    return marshalled
@login_required()
@jsonp
def listDatasets_json(request, pid, conn=None, **kwargs):
    """
    Lists all Datasets in a Project, as json.

    TODO: cache

    @param request: http request
    @param pid: Project ID
    @param conn: L{omero.gateway.BlitzGateway}
    @return: list of dataset json.
    """
    project = conn.getObject("Project", pid)
    if project is None:
        return HttpJavascriptResponse("[]")
    marshal_opts = {"childCount": 0}
    return [ds.simpleMarshal(xtra=marshal_opts) for ds in project.listChildren()]
@login_required()
@jsonp
def datasetDetail_json(request, did, conn=None, **kwargs):
    """
    Returns json encoded details for a single dataset.

    TODO: cache
    """
    dataset = conn.getObject("Dataset", did)
    return dataset.simpleMarshal()
@login_required()
@jsonp
def listProjects_json(request, conn=None, **kwargs):
    """
    Lists all Projects, as json.

    TODO: cache

    @param request: http request
    @param conn: L{omero.gateway.BlitzGateway}
    @return: list of project json.
    """
    return [
        {"id": project.id, "name": project.name, "description": project.description or ""}
        for project in conn.listProjects()
    ]
@login_required()
@jsonp
def projectDetail_json(request, pid, conn=None, **kwargs):
    """
    Grab details from one specific project.

    TODO: cache

    @param request: http request
    @param pid: Project ID
    @param conn: L{omero.gateway.BlitzGateway}
    @return: project details as dict.
    """
    project = conn.getObject("Project", pid)
    return project.simpleMarshal()
@jsonp
def open_with_options(request, **kwargs):
    """
    Make the settings.OPEN_WITH available via JSON.
    Each entry becomes a viewer dict with 'id' and 'url' keys plus the
    optional 'supported_objects', 'target', 'script_url' and 'label' keys.
    """
    open_with = settings.OPEN_WITH
    viewers = []
    for ow in open_with:
        # a config entry needs at least (id, url) to be usable
        if len(ow) < 2:
            continue
        viewer = {}
        viewer["id"] = ow[0]
        try:
            # second item may be a named Django view...
            viewer["url"] = reverse(ow[1])
        except NoReverseMatch:
            # ...or a literal URL used as-is
            viewer["url"] = ow[1]
        # try non-essential parameters...
        # NB: Need supported_objects OR script_url to enable plugin
        try:
            if len(ow) > 2:
                if "supported_objects" in ow[2]:
                    viewer["supported_objects"] = ow[2]["supported_objects"]
                if "target" in ow[2]:
                    viewer["target"] = ow[2]["target"]
                if "script_url" in ow[2]:
                    # If we have an absolute url, use it...
                    if ow[2]["script_url"].startswith("http"):
                        viewer["script_url"] = ow[2]["script_url"]
                    else:
                        # ...otherwise, assume within static
                        viewer["script_url"] = static(ow[2]["script_url"])
                if "label" in ow[2]:
                    viewer["label"] = ow[2]["label"]
        except Exception:
            # ignore invalid params
            pass
        viewers.append(viewer)
    return {"open_with_options": viewers}
def searchOptFromRequest(request):
    """
    Returns a dict of options for searching, based on
    parameters in the http request
    Request keys include:
        - ctx: (http request) 'imgs' to search only images
        - text: (http request) the actual text phrase
        - start: starting index (0 based) for result
        - limit: nr of results to retuen (0 == unlimited)
        - author:
        - grabData:
        - parents:
    @param request: http request
    @return: Dict of options, or {} on any parse error
    """
    try:
        r = request.GET
        # NOTE(review): 'unicode' is a Py2 builtin; presumably supplied here
        # by a Py2/3 compat import at the top of this module — confirm
        opts = {
            "search": unicode(r.get("text", "")).encode("utf8"),
            "ctx": r.get("ctx", ""),
            "grabData": not not r.get("grabData", False),
            "parents": not not bool(r.get("parents", False)),
            "start": int(r.get("start", 0)),
            "limit": int(r.get("limit", 0)),
            "key": r.get("key", None),
        }
        author = r.get("author", "")
        if author:
            # append author as a lucene-style field qualifier
            opts["search"] += " author:" + author
        return opts
    except Exception:
        # bad int params etc: log and fall back to empty options
        logger.error(traceback.format_exc())
        return {}
@TimeIt(logging.INFO)
@login_required()
@jsonp
def search_json(request, conn=None, **kwargs):
    """
    Search for objects in blitz.
    Returns json encoded list of marshalled objects found by the search query
    Request keys include:
        - text: The text to search for
        - ctx: (http request) 'imgs' to search only images
        - text: (http request) the actual text phrase
        - start: starting index (0 based) for result
        - limit: nr of results to retuen (0 == unlimited)
        - author:
        - grabData:
        - parents:
    @param request: http request
    @param conn: L{omero.gateway.BlitzGateway}
    @return: json search results
    TODO: cache
    """
    server_id = request.session["connector"].server_id
    opts = searchOptFromRequest(request)
    rv = []
    logger.debug("searchObjects(%s)" % (opts["search"]))
    # search returns blitz_connector wrapper objects
    def urlprefix(iid):
        return reverse("webgateway_render_thumbnail", args=(iid,))
    xtra = {"thumbUrlPrefix": kwargs.get("urlprefix", urlprefix)}
    try:
        if opts["ctx"] == "imgs":
            sr = conn.searchObjects(["image"], opts["search"], conn.SERVICE_OPTS)
        else:
            # searches P/D/I
            sr = conn.searchObjects(None, opts["search"], conn.SERVICE_OPTS)
    except ApiUsageException:
        # e.g. unparsable lucene query text
        return HttpJavascriptResponseServerError('"parse exception"')
    def marshal():
        # marshals either full image data (grabData) or a simple summary
        rv = []
        if opts["grabData"] and opts["ctx"] == "imgs":
            # paginate hits using 'start'/'limit' before marshalling
            bottom = min(opts["start"], len(sr) - 1)
            if opts["limit"] == 0:
                top = len(sr)
            else:
                top = min(len(sr), bottom + opts["limit"])
            for i in range(bottom, top):
                e = sr[i]
                # for e in sr:
                try:
                    rv.append(
                        imageData_json(
                            request,
                            server_id,
                            iid=e.id,
                            key=opts["key"],
                            conn=conn,
                            _internal=True,
                        )
                    )
                except AttributeError as x:
                    # skip hits that can't be marshalled; keep the rest
                    logger.debug(
                        "(iid %i) ignoring Attribute Error: %s" % (e.id, str(x))
                    )
                    pass
                except omero.ServerError as x:
                    logger.debug("(iid %i) ignoring Server Error: %s" % (e.id, str(x)))
            return rv
        else:
            return [x.simpleMarshal(xtra=xtra, parents=opts["parents"]) for x in sr]
    rv = timeit(marshal)()
    logger.debug(rv)
    return rv
@require_POST
@login_required()
def save_image_rdef_json(request, iid, conn=None, **kwargs):
    """
    Requests that the rendering defs passed in the request be set as the
    default for this image.
    Rendering defs in request listed at L{getImgDetailsFromReq}
    TODO: jsonp
    @param request: http request
    @param iid: Image ID
    @param conn: L{omero.gateway.BlitzGateway}
    @return: http response 'true' or 'false'
    """
    server_id = request.session["connector"].server_id
    # saveDefs=True causes the rendering settings to be persisted
    pi = _get_prepared_image(
        request, iid, server_id=server_id, conn=conn, saveDefs=True
    )
    if pi is None:
        json_data = "false"
    else:
        # drop stale cached renderings for this image, then warm the
        # thumbnail with the new settings
        user_id = pi[0]._conn.getEventContext().userId
        webgateway_cache.invalidateObject(server_id, user_id, pi[0])
        pi[0].getThumbnail()
        json_data = "true"
    # optional JSONP-style callback wrapping
    if request.GET.get("callback", None):
        json_data = "%s(%s)" % (request.GET["callback"], json_data)
    return HttpJavascriptResponse(json_data)
@login_required()
@jsonp
def listLuts_json(request, conn=None, **kwargs):
    """
    Lists lookup tables 'LUTs' availble for rendering.

    This list is dynamic and will change if users add LUTs to their server.
    We include 'png_index' which is the index of each LUT within the
    static/webgateway/img/luts_10.png or -1 if LUT is not found.
    """
    script_service = conn.getScriptService()
    rv = []
    for lut in script_service.getScriptsByMimetype("text/x-lut"):
        lutsrc = lut.path.val + lut.name.val
        if lutsrc in LUTS_IN_PNG:
            png_index = LUTS_IN_PNG.index(lutsrc)
        else:
            png_index = -1
        rv.append(
            {
                "id": lut.id.val,
                "path": lut.path.val,
                "name": lut.name.val,
                "size": unwrap(lut.size),
                "png_index": png_index,
            }
        )
    rv.sort(key=lambda entry: entry["name"].lower())
    return {"luts": rv, "png_luts": LUTS_IN_PNG}
@login_required()
def list_compatible_imgs_json(request, iid, conn=None, **kwargs):
    """
    Lists the images on the same project that would be viable targets for
    copying rendering settings.
    TODO: change method to:
    list_compatible_imgs_json (request, iid, server_id=None, conn=None,
    **kwargs):
    @param request: http request
    @param iid: Image ID
    @param conn: L{omero.gateway.BlitzGateway}
    @return: json list of image IDs
    """
    json_data = "false"
    r = request.GET
    if conn is None:
        img = None
    else:
        img = conn.getObject("Image", iid)
    if img is not None:
        # List all images in project
        imgs = []
        for ds in img.getProject().listChildren():
            imgs.extend(ds.listChildren())
        # Filter the ones that would pass the applySettingsToImages call
        img_ptype = img.getPrimaryPixels().getPixelsType().getValue()
        img_ccount = img.getSizeC()
        img_ew = [x.getLabel() for x in img.getChannels()]
        img_ew.sort()
        def compat(i):
            # compatible == same pixels type, channel count and
            # (sorted) channel labels; excludes the source image itself
            # NOTE(review): 'long' is a Py2 builtin; presumably supplied by
            # a compat import at module top — confirm
            if long(i.getId()) == long(iid):
                return False
            pp = i.getPrimaryPixels()
            if (
                pp is None
                or i.getPrimaryPixels().getPixelsType().getValue() != img_ptype
                or i.getSizeC() != img_ccount
            ):
                return False
            ew = [x.getLabel() for x in i.getChannels()]
            ew.sort()
            if ew != img_ew:
                return False
            return True
        imgs = filter(compat, imgs)
        json_data = json.dumps([x.getId() for x in imgs])
    # optional JSONP-style callback wrapping
    if r.get("callback", None):
        json_data = "%s(%s)" % (r["callback"], json_data)
    return HttpJavascriptResponse(json_data)
@require_POST
@login_required()
@jsonp
def reset_rdef_json(request, toOwners=False, conn=None, **kwargs):
    """
    Simply takes request 'to_type' and 'toids' and
    delegates to Rendering Settings service to reset
    settings accordings.
    @param toOwners: if True, default to the owner's settings.
    """
    r = request.POST
    toids = r.getlist("toids")
    to_type = str(r.get("to_type", "image"))
    # normalise to the OMERO model class name, e.g. 'image' -> 'Image'
    to_type = to_type.title()
    if to_type == "Acquisition":
        to_type = "PlateAcquisition"
    if len(toids) == 0:
        raise Http404(
            "Need to specify objects in request, E.g."
            " ?totype=dataset&toids=1&toids=2"
        )
    toids = [int(id) for id in toids]
    rss = conn.getRenderingSettingsService()
    # get the first object and set the group to match
    conn.SERVICE_OPTS.setOmeroGroup("-1")
    o = conn.getObject(to_type, toids[0])
    if o is not None:
        gid = o.getDetails().group.id.val
        conn.SERVICE_OPTS.setOmeroGroup(gid)
    if toOwners:
        rv = rss.resetDefaultsByOwnerInSet(to_type, toids, conn.SERVICE_OPTS)
    else:
        rv = rss.resetDefaultsInSet(to_type, toids, conn.SERVICE_OPTS)
    return rv
@login_required()
@jsonp
def copy_image_rdef_json(request, conn=None, **kwargs):
    """
    If 'fromid' is in request, copy the image ID to session,
    for applying later using this same method.
    If list of 'toids' is in request, paste the image ID from the session
    to the specified images.
    If 'fromid' AND 'toids' are in the reqest, we simply
    apply settings and don't save anything to request.
    If 'to_type' is in request, this can be 'dataset', 'plate', 'acquisition'
    Returns json dict of Boolean:[Image-IDs] for images that have successfully
    had the rendering settings applied, or not.
    @param request: http request
    @param server_id:
    @param conn: L{omero.gateway.BlitzGateway}
    @return: json dict of Boolean:[Image-IDs]
    """
    server_id = request.session["connector"].server_id
    json_data = False
    fromid = request.GET.get("fromid", None)
    toids = request.POST.getlist("toids")
    to_type = str(request.POST.get("to_type", "image"))
    rdef = None
    if to_type not in ("dataset", "plate", "acquisition"):
        to_type = "Image"  # default is image
    # Only 'fromid' is given, simply save to session
    if fromid is not None and len(toids) == 0:
        request.session.modified = True
        request.session["fromid"] = fromid
        # 'fromid' and 'rdef' are mutually exclusive in the session
        if request.session.get("rdef") is not None:
            del request.session["rdef"]
        return True
    # If we've got an rdef encoded in request instead of ImageId...
    r = request.GET or request.POST
    if r.get("c") is not None:
        # make a map of settings we need
        rdef = {"c": str(r.get("c"))}  # channels
        if r.get("maps"):
            try:
                rdef["maps"] = json.loads(r.get("maps"))
            except Exception:
                # invalid 'maps' json is silently ignored
                pass
        if r.get("pixel_range"):
            rdef["pixel_range"] = str(r.get("pixel_range"))
        if r.get("m"):
            rdef["m"] = str(r.get("m"))  # model (grey)
        if r.get("z"):
            rdef["z"] = str(r.get("z"))  # z & t pos
        if r.get("t"):
            rdef["t"] = str(r.get("t"))
        imageId = request.GET.get("imageId", request.POST.get("imageId", None))
        if imageId:
            rdef["imageId"] = int(imageId)
        if request.method == "GET":
            # GET == 'copy': store the rdef in the session for a later paste
            request.session.modified = True
            request.session["rdef"] = rdef
            # remove any previous rdef we may have via 'fromId'
            if request.session.get("fromid") is not None:
                del request.session["fromid"]
            return True
    # Check session for 'fromid'
    if fromid is None:
        fromid = request.session.get("fromid", None)
    # maybe these pair of methods should be on ImageWrapper??
    def getRenderingSettings(image):
        # snapshot the image's current settings in the same dict format as
        # the request-encoded rdef, so they can be restored later
        rv = {}
        chs = []
        maps = []
        for i, ch in enumerate(image.getChannels()):
            act = "" if ch.isActive() else "-"
            start = ch.getWindowStart()
            end = ch.getWindowEnd()
            color = ch.getLut()
            maps.append({"inverted": {"enabled": ch.isInverted()}})
            if not color or len(color) == 0:
                color = ch.getColor().getHtml()
            chs.append("%s%s|%s:%s$%s" % (act, i + 1, start, end, color))
        rv["c"] = ",".join(chs)
        rv["maps"] = maps
        rv["m"] = "g" if image.isGreyscaleRenderingModel() else "c"
        rv["z"] = image.getDefaultZ() + 1
        rv["t"] = image.getDefaultT() + 1
        return rv
    def applyRenderingSettings(image, rdef):
        # apply an rdef dict to the image and persist as its defaults
        invert_flags = _get_maps_enabled(rdef, "inverted", image.getSizeC())
        channels, windows, colors = _split_channel_info(rdef["c"])
        # also prepares _re
        image.setActiveChannels(channels, windows, colors, invert_flags)
        if rdef["m"] == "g":
            image.setGreyscaleRenderingModel()
        else:
            image.setColorRenderingModel()
        if "z" in rdef:
            image._re.setDefaultZ(long(rdef["z"]) - 1)
        if "t" in rdef:
            image._re.setDefaultT(long(rdef["t"]) - 1)
        image.saveDefaults()
    # Use rdef from above or previously saved one...
    if rdef is None:
        rdef = request.session.get("rdef")
    if request.method == "POST":
        originalSettings = None
        fromImage = None
        if fromid is None:
            # if we have rdef, save to source image, then use that image as
            # 'fromId', then revert.
            if rdef is not None and len(toids) > 0:
                fromImage = conn.getObject("Image", rdef["imageId"])
                if fromImage is not None:
                    # copy orig settings
                    originalSettings = getRenderingSettings(fromImage)
                    applyRenderingSettings(fromImage, rdef)
                    fromid = fromImage.getId()
        # If we have both, apply settings...
        try:
            fromid = long(fromid)
            toids = [long(x) for x in toids]
        except TypeError:
            fromid = None
        except ValueError:
            fromid = None
        if fromid is not None and len(toids) > 0:
            fromimg = conn.getObject("Image", fromid)
            userid = fromimg.getOwner().getId()
            json_data = conn.applySettingsToSet(fromid, to_type, toids)
            if json_data and True in json_data:
                # invalidate cached renderings for successfully updated images
                for iid in json_data[True]:
                    img = conn.getObject("Image", iid)
                    img is not None and webgateway_cache.invalidateObject(
                        server_id, userid, img
                    )
        # finally - if we temporarily saved rdef to original image, revert
        # if we're sure that from-image is not in the target set (Dataset etc)
        if to_type == "Image" and fromid not in toids:
            if originalSettings is not None and fromImage is not None:
                applyRenderingSettings(fromImage, originalSettings)
        return json_data
    else:
        return HttpResponseNotAllowed(["POST"])
@login_required()
@jsonp
def get_image_rdef_json(request, conn=None, **kwargs):
    """
    Gets any 'rdef' dict from the request.session and
    returns it as json.
    If there is no session 'rdef' but a 'fromid' image is in the session,
    the rdef is built from that image's current rendering settings.
    """
    rdef = request.session.get("rdef")
    image = None
    if rdef is None:
        fromid = request.session.get("fromid", None)
        if fromid is not None:
            # We only have an Image to copy rdefs from
            image = conn.getObject("Image", fromid)
        if image is not None:
            # rebuild an rdef dict from the image's marshalled settings
            rv = imageMarshal(image, request=request)
            chs = []
            maps = []
            for i, ch in enumerate(rv["channels"]):
                # inactive channels are prefixed with '-'
                act = ch["active"] and str(i + 1) or "-%s" % (i + 1)
                color = ch.get("lut") or ch["color"]
                chs.append(
                    "%s|%s:%s$%s"
                    % (act, ch["window"]["start"], ch["window"]["end"], color)
                )
                maps.append(
                    {
                        "inverted": {"enabled": ch["inverted"]},
                        "quantization": {
                            "coefficient": ch["coefficient"],
                            "family": ch["family"],
                        },
                    }
                )
            rdef = {
                "c": (",".join(chs)),
                "m": rv["rdefs"]["model"],
                "pixel_range": "%s:%s" % (rv["pixel_range"][0], rv["pixel_range"][1]),
                "maps": maps,
            }
    return {"rdef": rdef}
@login_required()
def full_viewer(request, iid, conn=None, **kwargs):
    """
    This view is responsible for showing the omero_image template
    Image rendering options in request are used in the display page. See
    L{getImgDetailsFromReq}.
    @param request: http request.
    @param iid: Image ID
    @param conn: L{omero.gateway.BlitzGateway}
    @param **kwargs: Can be used to specify the html 'template' for
                     rendering
    @return: html page of image and metadata
    """
    server_id = request.session["connector"].server_id
    server_name = Server.get(server_id).server
    rid = getImgDetailsFromReq(request)
    # viewer-specific settings configured on the server
    server_settings = request.session.get("server_settings", {}).get("viewer", {})
    interpolate = server_settings.get("interpolate_pixels", True)
    roiLimit = server_settings.get("roi_limit", 2000)
    try:
        image = conn.getObject("Image", iid)
        if image is None:
            logger.debug("(a)Image %s not found..." % (str(iid)))
            raise Http404
        # social-sharing metadata (Open Graph / Twitter cards), if configured
        opengraph = None
        twitter = None
        image_preview = None
        page_url = None
        if hasattr(settings, "SHARING_OPENGRAPH"):
            opengraph = settings.SHARING_OPENGRAPH.get(server_name)
            logger.debug("Open Graph enabled: %s", opengraph)
        if hasattr(settings, "SHARING_TWITTER"):
            twitter = settings.SHARING_TWITTER.get(server_name)
            logger.debug("Twitter enabled: %s", twitter)
        if opengraph or twitter:
            urlargs = {"iid": iid}
            prefix = kwargs.get("thumbprefix", "webgateway_render_thumbnail")
            image_preview = request.build_absolute_uri(reverse(prefix, kwargs=urlargs))
            page_url = request.build_absolute_uri(
                reverse("webgateway_full_viewer", kwargs=urlargs)
            )
        # template context for the viewer page
        d = {
            "blitzcon": conn,
            "image": image,
            "opts": rid,
            "interpolate": interpolate,
            "build_year": build_year,
            "roiLimit": roiLimit,
            "roiCount": image.getROICount(),
            "viewport_server": kwargs.get(
                # remove any trailing slash
                "viewport_server",
                reverse("webgateway"),
            ).rstrip("/"),
            "opengraph": opengraph,
            "twitter": twitter,
            "image_preview": image_preview,
            "page_url": page_url,
            "object": "image:%i" % int(iid),
        }
        template = kwargs.get("template", "webgateway/viewport/omero_image.html")
        rsp = render(request, template, d)
    except omero.SecurityViolation:
        # image exists but the user may not view it
        logger.warn("SecurityViolation in Image:%s", iid)
        logger.warn(traceback.format_exc())
        raise Http404
    return HttpResponse(rsp)
@login_required()
def download_as(request, iid=None, conn=None, **kwargs):
    """
    Downloads the image as a single jpeg/png/tiff or as a zip (if more than
    one image).

    @param request: http request. Optional query params: 'format'
                    (jpeg|png|tif, default png), 'image'/'well' ID lists,
                    'index' (WellSample index within each well) and
                    'zipname' for the zip download.
    @param iid:     Image ID. If None, IDs are read from the request.
    @param conn:    L{omero.gateway.BlitzGateway}
    @return:        http response serving the image or zip as an attachment
    """
    format = request.GET.get("format", "png")
    if format not in ("jpeg", "png", "tif"):
        format = "png"
    imgIds = []
    wellIds = []
    if iid is None:
        imgIds = request.GET.getlist("image")
        if len(imgIds) == 0:
            wellIds = request.GET.getlist("well")
            if len(wellIds) == 0:
                return HttpResponseServerError(
                    "No images or wells specified in request."
                    " Use ?image=123 or ?well=123"
                )
    else:
        imgIds = [iid]
    images = []
    if imgIds:
        images = list(conn.getObjects("Image", imgIds))
    elif wellIds:
        try:
            index = int(request.GET.get("index", 0))
        except ValueError:
            index = 0
        for w in conn.getObjects("Well", wellIds):
            images.append(w.getWellSample(index).image())
    if len(images) == 0:
        msg = "Cannot download as %s. Images (ids: %s) not found." % (format, imgIds)
        logger.debug(msg)
        return HttpResponseServerError(msg)
    if len(images) == 1:
        jpeg_data = images[0].renderJpeg()
        if jpeg_data is None:
            raise Http404
        rsp = HttpResponse(jpeg_data, mimetype="image/jpeg")
        rsp["Content-Length"] = len(jpeg_data)
        # The image name is user data: strip any path components and
        # header-breaking characters before embedding it in the
        # Content-Disposition header (prevents path traversal /
        # header injection via a crafted image name).
        fname = os.path.basename(images[0].getName()).replace(" ", "_")
        fname = fname.replace("\r", "").replace("\n", "").replace('"', "")
        rsp["Content-Disposition"] = "attachment; filename=%s.jpg" % fname
    else:
        temp = tempfile.NamedTemporaryFile(suffix=".download_as")

        def makeImageName(originalName, extension, folder_name):
            # basename() guards against path components in the image name
            name = os.path.basename(originalName)
            imgName = "%s.%s" % (name, extension)
            imgName = os.path.join(folder_name, imgName)
            # check we don't overwrite existing file
            i = 1
            name = imgName[: -(len(extension) + 1)]
            while os.path.exists(imgName):
                imgName = "%s_(%d).%s" % (name, i, extension)
                i += 1
            return imgName

        try:
            temp_zip_dir = tempfile.mkdtemp()
            logger.debug("download_as dir: %s" % temp_zip_dir)
            try:
                for img in images:
                    z = t = None
                    try:
                        pilImg = img.renderImage(z, t)
                        imgPathName = makeImageName(img.getName(), format, temp_zip_dir)
                        pilImg.save(imgPathName)
                    finally:
                        # Close RenderingEngine
                        img._re.close()
                # create zip
                zip_file = zipfile.ZipFile(temp, "w", zipfile.ZIP_DEFLATED)
                try:
                    a_files = os.path.join(temp_zip_dir, "*")
                    for name in glob.glob(a_files):
                        zip_file.write(name, os.path.basename(name))
                finally:
                    zip_file.close()
            finally:
                # always remove the scratch directory, even on failure
                shutil.rmtree(temp_zip_dir, ignore_errors=True)
            zipName = request.GET.get("zipname", "Download_as_%s" % format)
            # 'zipname' is user-supplied: basename() strips directory
            # components so a crafted value (e.g. '../../x') cannot
            # traverse paths, and CR/LF/quote removal prevents header
            # injection in Content-Disposition.
            zipName = os.path.basename(zipName.replace(" ", "_"))
            zipName = zipName.replace("\r", "").replace("\n", "").replace('"', "")
            if not zipName.endswith(".zip"):
                zipName = "%s.zip" % zipName
            # return the zip or single file
            rsp = StreamingHttpResponse(FileWrapper(temp))
            rsp["Content-Length"] = temp.tell()
            rsp["Content-Disposition"] = "attachment; filename=%s" % zipName
            temp.seek(0)
        except Exception:
            temp.close()
            stack = traceback.format_exc()
            logger.error(stack)
            return HttpResponseServerError("Cannot download file (id:%s)" % iid)
    rsp["Content-Type"] = "application/force-download"
    return rsp
@login_required(doConnectionCleanup=False)
def archived_files(request, iid=None, conn=None, **kwargs):
    """
    Downloads the archived file(s) as a single file or as a zip (if more than
    one file).

    @param request: http request. Query params: 'image'/'well' ID lists,
                    'index' (WellSample index) and 'zipname'.
    @param iid:     Image ID. If None, IDs are read from the request.
    @param conn:    L{omero.gateway.BlitzGateway}
    @return:        http response streaming the file or zip as an attachment
    """
    imgIds = []
    wellIds = []
    imgIds = request.GET.getlist("image")
    wellIds = request.GET.getlist("well")
    if iid is None:
        if len(imgIds) == 0 and len(wellIds) == 0:
            return HttpResponseServerError(
                "No images or wells specified in request."
                " Use ?image=123 or ?well=123"
            )
    else:
        imgIds = [iid]
    images = list()
    wells = list()
    if imgIds:
        images = list(conn.getObjects("Image", imgIds))
    elif wellIds:
        try:
            index = int(request.GET.get("index", 0))
        except ValueError:
            index = 0
        wells = conn.getObjects("Well", wellIds)
        for w in wells:
            images.append(w.getWellSample(index).image())
    if len(images) == 0:
        message = (
            "Cannot download archived file because Images not "
            "found (ids: %s)" % (imgIds)
        )
        logger.debug(message)
        return HttpResponseServerError(message)
    # Test permissions on images and wells
    for ob in wells:
        if hasattr(ob, "canDownload"):
            if not ob.canDownload():
                return HttpResponseNotFound()
    for ob in images:
        well = None
        try:
            well = ob.getParent().getParent()
        except Exception:
            # image is not in a well: check its own permission
            if hasattr(ob, "canDownload"):
                if not ob.canDownload():
                    return HttpResponseNotFound()
        else:
            if well and isinstance(well, omero.gateway.WellWrapper):
                if hasattr(well, "canDownload"):
                    if not well.canDownload():
                        return HttpResponseNotFound()
    # make list of all files, removing duplicates
    fileMap = {}
    for image in images:
        for f in image.getImportedImageFiles():
            fileMap[f.getId()] = f
    files = list(fileMap.values())
    if len(files) == 0:
        message = (
            "Tried downloading archived files from image with no" " files archived."
        )
        logger.debug(message)
        return HttpResponseServerError(message)
    if len(files) == 1:
        orig_file = files[0]
        rsp = ConnCleaningHttpResponse(
            orig_file.getFileInChunks(buf=settings.CHUNK_SIZE)
        )
        rsp.conn = conn
        rsp["Content-Length"] = orig_file.getSize()
        # ',' in name causes duplicate headers; basename() guards against
        # path components in the stored file name (path traversal)
        fname = os.path.basename(orig_file.getName())
        fname = fname.replace(" ", "_").replace(",", ".")
        rsp["Content-Disposition"] = "attachment; filename=%s" % (fname)
    else:
        total_size = sum(f.size for f in files)
        if total_size > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
            message = (
                "Total size of files %d is larger than %d. "
                "Try requesting fewer files."
                % (total_size, settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE)
            )
            logger.warn(message)
            return HttpResponseForbidden(message)
        temp = tempfile.NamedTemporaryFile(suffix=".archive")
        zipName = request.GET.get("zipname", image.getName())
        # 'zipname' is user-supplied: basename() strips directory components
        # so a crafted value (e.g. '../../x') cannot traverse paths
        # (CWE-22), and CR/LF/quote removal prevents header injection.
        zipName = os.path.basename(zipName)
        zipName = zipName.replace("\r", "").replace("\n", "").replace('"', "")
        try:
            zipName = zip_archived_files(images, temp, zipName, buf=settings.CHUNK_SIZE)
            # return the zip or single file
            archivedFile_data = FileWrapper(temp)
            rsp = ConnCleaningHttpResponse(archivedFile_data)
            rsp.conn = conn
            rsp["Content-Length"] = temp.tell()
            rsp["Content-Disposition"] = "attachment; filename=%s" % zipName
            temp.seek(0)
        except Exception:
            temp.close()
            message = "Cannot download file (id:%s)" % (iid)
            logger.error(message, exc_info=True)
            return HttpResponseServerError(message)
    rsp["Content-Type"] = "application/force-download"
    return rsp
@login_required()
@jsonp
def original_file_paths(request, iid, conn=None, **kwargs):
    """
    Get a list of path/name strings for original files associated with the
    image.
    """
    image = conn.getObject("Image", iid)
    if image is None:
        raise Http404
    file_paths = image.getImportedImageFilePaths()
    return {
        "repo": file_paths["server_paths"],
        "client": file_paths["client_paths"],
    }
@login_required()
@jsonp
def get_shape_json(request, roiId, shapeId, conn=None, **kwargs):
    """Return the marshalled shape identified by ROI id and shape id."""
    roiId = int(roiId)
    shapeId = int(shapeId)
    hql = (
        "select shape from Roi as roi "
        "join roi.shapes as shape "
        "where roi.id = %d and shape.id = %d" % (roiId, shapeId)
    )
    shape = conn.getQueryService().findByQuery(hql, None)
    logger.debug("Shape: %r" % shape)
    if shape is None:
        logger.debug("No such shape: %r" % shapeId)
        raise Http404
    return JsonResponse(shapeMarshal(shape))
@login_required()
@jsonp
def get_rois_json(request, imageId, conn=None, **kwargs):
    """
    Returns json data of the ROIs in the specified image.
    Each ROI dict has 'id' and a 'shapes' list sorted by Z then T.
    """
    rois = []
    roiService = conn.getRoiService()
    # rois = webfigure_utils.getRoiShapes(roiService, long(imageId)) # gets a
    # whole json list of ROIs
    # NOTE(review): 'long' is a Py2 builtin; presumably supplied by a
    # compat import at module top — confirm
    result = roiService.findByImage(long(imageId), None, conn.SERVICE_OPTS)
    for r in result.rois:
        roi = {}
        roi["id"] = r.getId().getValue()
        # go through all the shapes of the ROI
        shapes = []
        for s in r.copyShapes():
            if s is None:  # seems possible in some situations
                continue
            shapes.append(shapeMarshal(s))
        # sort shapes by Z, then T.
        shapes.sort(key=lambda x: "%03d%03d" % (x.get("theZ", -1), x.get("theT", -1)))
        roi["shapes"] = shapes
        rois.append(roi)
    # sort by ID - same as in measurement tool.
    rois.sort(key=lambda x: x["id"])
    return rois
@login_required()
def histogram_json(request, iid, theC, conn=None, **kwargs):
    """
    Returns a histogram for a single channel as a list of
    256 values as json.
    """
    image = conn.getObject("Image", iid)
    if image is None:
        raise Http404
    max_w, max_h = conn.getMaxPlaneSize()
    if image.getSizeX() * image.getSizeY() > max_w * max_h:
        msg = "Histogram not supported for 'big' images (over %s * %s pixels)" % (
            max_w,
            max_h,
        )
        return JsonResponse({"error": msg})
    the_z = int(request.GET.get("theZ", 0))
    the_t = int(request.GET.get("theT", 0))
    channel = int(theC)
    bin_count = int(request.GET.get("bins", 256))
    # TODO: handle projection when supported by OMERO
    data = image.getHistogram([channel], bin_count, theZ=the_z, theT=the_t)
    return JsonResponse({"data": data[channel]})
@login_required(isAdmin=True)
@jsonp
def su(request, user, conn=None, **kwargs):
    """
    If current user is admin, switch the session to a new connection owned by
    'user' (puts the new session ID in the request.session)
    Return False if not possible
    @param request: http request.
    @param user: Username of new connection owner
    @param conn: L{omero.gateway.BlitzGateway}
    @param **kwargs: Can be used to specify the html 'template' for
                     rendering
    @return: Boolean
    """
    if request.method == "POST":
        # need 'system' group privileges to create a session for another user
        conn.setGroupNameForSession("system")
        connector = request.session["connector"]
        connector = Connector(connector.server_id, connector.is_secure)
        session = conn.getSessionService().getSession(conn._sessionUuid)
        # carry over the current session's idle timeout
        ttl = session.getTimeToIdle().val
        connector.omero_session_key = conn.suConn(user, ttl=ttl)._sessionUuid
        request.session["connector"] = connector
        # restore our own group and drop the admin connection
        conn.revertGroupForSession()
        conn.close()
        return True
    else:
        # GET: render a confirmation form that POSTs back to this view
        context = {
            "url": reverse("webgateway_su", args=[user]),
            "submit": "Do you want to su to %s" % user,
        }
        template = "webgateway/base/includes/post_form.html"
        return render(request, template, context)
def _annotations(request, objtype, objid, conn=None, **kwargs):
    """Deprecated: delegates to L{_bulk_file_annotations}."""
    warnings.warn("Deprecated. Use _bulk_file_annotations()", DeprecationWarning)
    return _bulk_file_annotations(request, objtype, objid, conn, **kwargs)
def _bulk_file_annotations(request, objtype, objid, conn=None, **kwargs):
    """
    Retrieve Bulk FileAnnotations for object specified by object type and
    identifier optionally traversing object model graph.
    Returns dictionary containing annotations in NSBULKANNOTATIONS namespace
    if successful, otherwise returns error information.
    If the graph has multiple parents, we return annotations from all parents.
    Example: /annotations/Plate/1/
             retrieves annotations for plate with identifier 1
    Example: /annotations/Plate.wells/1/
             retrieves annotations for plate that contains well with
             identifier 1
    Example: /annotations/Screen.plateLinks.child.wells/22/
             retrieves annotations for screen that contains plate with
             well with identifier 22
    @param request: http request.
    @param objtype: Type of target object, or type of target object
                    followed by a slash-separated list of properties to
                    resolve
    @param objid: Identifier of target object, or identifier of object
                  reached by resolving given properties
    @param conn: L{omero.gateway.BlitzGateway}
    @param **kwargs: unused
    @return: A dictionary with key 'error' with an error message or
             with key 'data' containing an array of dictionaries
             with keys 'id' and 'file' of the retrieved annotations
    """
    q = conn.getQueryService()
    # If more than one objtype is specified, use all in query to
    # traverse object model graph
    # Example: /annotations/Plate/wells/1/
    #          retrieves annotations from Plate that contains Well 1
    objtype = objtype.split(".")
    params = omero.sys.ParametersI()
    params.addId(objid)
    params.addString("ns", NSBULKANNOTATIONS)
    params.addString("mt", "OMERO.tables")
    # build an HQL join chain from the dotted objtype path
    query = "select obj0 from %s obj0\n" % objtype[0]
    for i, t in enumerate(objtype[1:]):
        query += "join fetch obj%d.%s obj%d\n" % (i, t, i + 1)
    # match annotations either by namespace or by OMERO.tables mimetype
    query += """
        left outer join fetch obj0.annotationLinks links
        left outer join fetch links.child as f
        left outer join fetch links.parent
        left outer join fetch f.file
        join fetch links.details.owner
        join fetch links.details.creationEvent
        where obj%d.id=:id and
        (f.ns=:ns or f.file.mimetype=:mt)""" % (
        len(objtype) - 1
    )
    # query across all groups the user can see
    ctx = conn.createServiceOptsDict()
    ctx.setOmeroGroup("-1")
    try:
        objs = q.findAllByQuery(query, params, ctx)
    except omero.QueryException:
        return dict(error="%s cannot be queried" % objtype, query=query)
    data = []
    # Process all annotations from all objects...
    links = [link for obj in objs for link in obj.copyAnnotationLinks()]
    for link in links:
        annotation = link.child
        # only FileAnnotations are returned
        if not isinstance(annotation, omero.model.FileAnnotation):
            continue
        owner = annotation.details.owner
        ownerName = "%s %s" % (unwrap(owner.firstName), unwrap(owner.lastName))
        addedBy = link.details.owner
        addedByName = "%s %s" % (unwrap(addedBy.firstName), unwrap(addedBy.lastName))
        data.append(
            dict(
                id=annotation.id.val,
                file=annotation.file.id.val,
                parentType=objtype[0],
                parentId=link.parent.id.val,
                owner=ownerName,
                addedBy=addedByName,
                addedOn=unwrap(link.details.creationEvent._time),
            )
        )
    return dict(data=data)
# Public logged-in, jsonp-wrapped view for _bulk_file_annotations
annotations = login_required()(jsonp(_bulk_file_annotations))
def _table_query(request, fileid, conn=None, query=None, lazy=False, **kwargs):
    """
    Query a table specified by fileid
    Returns a dictionary with query result if successful, error information
    otherwise

    @param request:     http request; querystring must contain key 'query'
                        with query to be executed, or '*' to retrieve all rows.
                        If query is in the format word-number, e.g. "Well-7",
                        if will be run as (word==number), e.g. "(Well==7)".
                        This is supported to allow more readable query strings.
    @param fileid:      Numeric identifier of file containing the table
    @param query:       The table query. If None, use request.GET.get('query')
                        E.g. '*' to return all rows.
                        If in the form 'colname-1', query will be (colname==1)
    @param lazy:        If True, instead of returning a 'rows' list,
                        'lazy_rows' will be a generator.
                        Each gen.next() will return a list of row data
                        AND 'table' returned MUST be closed.
    @param conn:        L{omero.gateway.BlitzGateway}
    @param **kwargs:    offset, limit
    @return:            A dictionary with key 'error' with an error message
                        or with key 'data' containing a dictionary with keys
                        'columns' (an array of column names) and 'rows'
                        (an array of rows, each an array of values)
    """
    if query is None:
        query = request.GET.get("query")
    if not query:
        return dict(error="Must specify query parameter, use * to retrieve all")
    col_names = request.GET.getlist("col_names")

    ctx = conn.createServiceOptsDict()
    # Search across all groups so the table is found regardless of context.
    ctx.setOmeroGroup("-1")

    r = conn.getSharedResources()
    t = r.openTable(omero.model.OriginalFileI(fileid), ctx)
    if not t:
        return dict(error="Table %s not found" % fileid)

    try:
        cols = t.getHeaders()
        col_indices = range(len(cols))
        if col_names:
            # Restrict and reorder the returned columns to the requested
            # names.  (The previous version recomputed this list with a
            # redundant "if col_names" conditional inside this branch;
            # col_names is always truthy here, so that else-arm was dead.)
            enumerated_columns = [
                (i, j) for (i, j) in enumerate(cols) if j.name in col_names
            ]
            cols = []
            col_indices = []
            for col_name in col_names:
                for (i, j) in enumerated_columns:
                    if col_name == j.name:
                        col_indices.append(i)
                        cols.append(j)
                        break

        rows = t.getNumberOfRows()

        # Pagination: explicit kwargs take precedence over the querystring;
        # limit defaults to the full row count.
        offset = kwargs.get("offset", 0)
        limit = kwargs.get("limit", None)
        if not offset:
            offset = int(request.GET.get("offset", 0))
        if not limit:
            limit = (
                int(request.GET.get("limit"))
                if request.GET.get("limit") is not None
                else rows
            )
        range_start = offset
        range_size = limit
        range_end = min(rows, range_start + range_size)

        if query == "*":
            hits = range(range_start, range_end)
            totalCount = rows
        else:
            # Support the readable "Word-123" shorthand for "(Word==123)".
            match = re.match(r"^(\w+)-(\d+)", query)
            if match:
                query = "(%s==%s)" % (match.group(1), match.group(2))
            try:
                logger.info(query)
                hits = t.getWhereList(query, None, 0, rows, 1)
                totalCount = len(hits)
                # paginate the hits
                hits = hits[range_start:range_end]
            except Exception:
                return dict(error="Error executing query: %s" % query)

        def row_generator(table, h):
            # hits are all consecutive rows - can load them in batches
            idx = 0
            batch = 1000
            while idx < len(h):
                batch = min(batch, len(h) - idx)
                res = table.slice(col_indices, h[idx : idx + batch])
                idx += batch
                # yield a list of rows
                yield [
                    [col.values[row] for col in res.columns]
                    for row in range(0, len(res.rowNumbers))
                ]

        row_gen = row_generator(t, hits)
        rsp_data = {
            "data": {
                "column_types": [col.__class__.__name__ for col in cols],
                "columns": [col.name for col in cols],
            },
            "meta": {
                "rowCount": rows,
                "totalCount": totalCount,
                "limit": limit,
                "offset": offset,
            },
        }
        if not lazy:
            row_data = []
            # Drain the generator to collect all rows in batches.  Iterate
            # it directly (no throwaway list()), and use a dedicated loop
            # variable so the 'rows' row-count above is not shadowed.
            for batch_rows in row_gen:
                row_data.extend(batch_rows)
            rsp_data["data"]["rows"] = row_data
        else:
            rsp_data["data"]["lazy_rows"] = row_gen
            rsp_data["table"] = t
        return rsp_data
    finally:
        # When lazy, the caller takes ownership of the open table and
        # MUST close it; otherwise close it here.
        if not lazy:
            t.close()
# Public view: login-protected, JSONP-wrapped alias of _table_query.
table_query = login_required()(jsonp(_table_query))
def _table_metadata(request, fileid, conn=None, query=None, lazy=False, **kwargs):
    """
    Return metadata for the OMERO.table stored in OriginalFile *fileid*.

    @param request:     http request (unused beyond the view signature)
    @param fileid:      Numeric identifier of the file containing the table
    @param conn:        L{omero.gateway.BlitzGateway}
    @param query:       Unused; kept for signature parity with _table_query
    @param lazy:        If True the table is left open for the caller to close
    @return:            Dict with 'columns' (name/description/type per column)
                        and 'totalCount' (row count), or a dict with 'error'
    """
    service_opts = conn.createServiceOptsDict()
    # Search across all groups so the table is found regardless of context.
    service_opts.setOmeroGroup("-1")
    resources = conn.getSharedResources()
    table = resources.openTable(omero.model.OriginalFileI(fileid), service_opts)
    if not table:
        return dict(error="Table %s not found" % fileid)
    try:
        headers = table.getHeaders()
        row_count = table.getNumberOfRows()
        column_meta = []
        for column in headers:
            column_meta.append(
                {
                    "name": column.name,
                    "description": column.description,
                    "type": column.__class__.__name__,
                }
            )
        return {
            "columns": column_meta,
            "totalCount": row_count,
        }
    finally:
        if not lazy:
            table.close()
# Public view: login-protected, JSONP-wrapped alias of _table_metadata.
table_metadata = login_required()(jsonp(_table_metadata))
@login_required()
@jsonp
def object_table_query(request, objtype, objid, conn=None, **kwargs):
    """
    Query bulk annotations table attached to an object specified by
    object type and identifier, optionally traversing object model graph.
    Returns a dictionary with query result if successful, error information
    otherwise
    Example: /table/Plate/1/query/?query=*
    queries bulk annotations table for plate with identifier 1
    Example: /table/Plate.wells/1/query/?query=*
    queries bulk annotations table for plate that contains well with
    identifier 1
    Example: /table/Screen.plateLinks.child.wells/22/query/?query=Well-22
    queries bulk annotations table for screen that contains plate
    with well with identifier 22

    @param request:     http request.
    @param objtype:     Type of target object, or type of target object
                        followed by a slash-separated list of properties to
                        resolve
    @param objid:       Identifier of target object, or identifier of object
                        reached by resolving given properties
    @param conn:        L{omero.gateway.BlitzGateway}
    @param **kwargs:    unused
    @return:            A dictionary with key 'error' with an error message
                        or with key 'data' containing a dictionary with keys
                        'columns' (an array of column names) and 'rows'
                        (an array of rows, each an array of values)
    """
    # Find all bulk-annotation file attachments on the target object.
    a = _bulk_file_annotations(request, objtype, objid, conn, **kwargs)
    if "error" in a:
        return a

    if len(a["data"]) < 1:
        return dict(error="Could not retrieve bulk annotations table")

    # multiple bulk annotations files could be attached, use the most recent
    # one (= the one with the highest identifier)
    fileId = 0
    ann = None
    annList = sorted(a["data"], key=lambda x: x["file"], reverse=True)
    tableData = None
    for annotation in annList:
        # Try each attached table, newest first, until one can be queried.
        tableData = _table_query(request, annotation["file"], conn, **kwargs)
        if "error" not in tableData:
            ann = annotation
            fileId = annotation["file"]
            break

    if ann is None:
        # No attachment yielded a successful query; surface the last error.
        return dict(
            error=tableData.get(
                "error", "Could not retrieve matching bulk annotation table"
            )
        )
    # Augment the query result with provenance of the chosen annotation.
    tableData["id"] = fileId
    tableData["annId"] = ann["id"]
    tableData["owner"] = ann["owner"]
    tableData["addedBy"] = ann["addedBy"]
    tableData["parentType"] = ann["parentType"]
    tableData["parentId"] = ann["parentId"]
    tableData["addedOn"] = ann["addedOn"]
    return tableData
class LoginView(View):
    """Webgateway Login - Subclassed by WebclientLoginView.

    Accepts POSTed credentials, creates an OMERO connection via Connector,
    stores the connector on the session, and replies with JSON.
    """

    # Django form used to validate username/password/server fields.
    form_class = LoginForm
    # User-agent string identifying this client to the OMERO server.
    useragent = "OMERO.webapi"

    @method_decorator(sensitive_post_parameters("password", "csrfmiddlewaretoken"))
    def dispatch(self, *args, **kwargs):
        """Wrap other methods to add decorators."""
        # sensitive_post_parameters keeps credentials out of error reports.
        return super(LoginView, self).dispatch(*args, **kwargs)

    def get(self, request, api_version=None):
        """Simply return a message to say GET not supported."""
        return JsonResponse(
            {"message": ("POST only with username, password, " "server and csrftoken")},
            status=405,
        )

    def handle_logged_in(self, request, conn, connector):
        """Return a response for successful login."""
        # Copy selected event-context attributes into a plain dict so the
        # response is JSON-serializable.
        c = conn.getEventContext()
        ctx = {}
        for a in [
            "sessionId",
            "sessionUuid",
            "userId",
            "userName",
            "groupId",
            "groupName",
            "isAdmin",
            "eventId",
            "eventType",
            "memberOfGroups",
            "leaderOfGroups",
        ]:
            if hasattr(c, a):
                ctx[a] = getattr(c, a)
        return JsonResponse({"success": True, "eventContext": ctx})

    def handle_not_logged_in(self, request, error=None, form=None):
        """
        Return a response for failed login.
        Reason for failure may be due to server 'error' or because
        of form validation errors.

        @param request:     http request
        @param error:       Error message
        @param form:        Instance of Login Form, populated with data
        """
        if error is None and form is not None:
            # If no error from server, maybe form wasn't valid
            formErrors = []
            for field in form:
                for e in field.errors:
                    formErrors.append("%s: %s" % (field.label, e))
            error = " ".join(formErrors)
        elif error is None:
            # Just in case no error or invalid form is given
            error = "Login failed. Reason unknown."
        return JsonResponse({"message": error}, status=403)

    def post(self, request, api_version=None):
        """
        Here we handle the main login logic, creating a connection to OMERO.
        and store that on the request.session OR handling login failures
        """
        error = None
        form = self.form_class(request.POST.copy())
        if form.is_valid():
            username = form.cleaned_data["username"]
            password = form.cleaned_data["password"]
            server_id = form.cleaned_data["server"]
            is_secure = settings.SECURE
            connector = Connector(server_id, is_secure)

            # TODO: version check should be done on the low level, see #5983
            compatible = True
            if settings.CHECK_VERSION:
                compatible = connector.check_version(self.useragent)
            if (
                server_id is not None
                and username is not None
                and password is not None
                and compatible
            ):
                conn = connector.create_connection(
                    self.useragent, username, password, userip=get_client_ip(request)
                )
                if conn is not None:
                    try:
                        # Persist the connector so later requests can rejoin
                        # this OMERO session.
                        request.session["connector"] = connector
                        # UpgradeCheck URL should be loaded from the server or
                        # loaded omero.web.upgrades.url allows to customize web
                        # only
                        try:
                            upgrades_url = settings.UPGRADES_URL
                        except Exception:
                            upgrades_url = conn.getUpgradesUrl()
                        upgradeCheck(url=upgrades_url)
                        return self.handle_logged_in(request, conn, connector)
                    finally:
                        # Always release the connection; the session keeps
                        # only the connector, not the live connection.
                        conn.close(hard=False)
            # Once here, we are not logged in...
            # Need correct error message
            if not connector.is_server_up(self.useragent):
                error = "Server is not responding," " please contact administrator."
            elif not settings.CHECK_VERSION:
                error = (
                    "Connection not available, please check your"
                    " credentials and version compatibility."
                )
            else:
                if not compatible:
                    error = (
                        "Client version does not match server,"
                        " please contact administrator."
                    )
                else:
                    error = settings.LOGIN_INCORRECT_CREDENTIALS_TEXT
        return self.handle_not_logged_in(request, error, form)
@login_required()
@jsonp
def get_image_rdefs_json(request, img_id=None, conn=None, **kwargs):
    """
    Retrieves all rendering definitions for a given image (id).
    Example: /get_image_rdefs_json/1
    Returns all rdefs for image with id 1

    @param request:     http request.
    @param img_id:      the id of the image in question
    @param conn:        L{omero.gateway.BlitzGateway}
    @param **kwargs:    unused
    @return:            A dictionary with key 'rdefs' in the success case,
                        one with key 'error' if something went wrong
    """
    try:
        image = conn.getObject("Image", img_id)
        if image is not None:
            return {"rdefs": image.getAllRenderingDefs()}
        return {"error": "No image with id " + str(img_id)}
    except Exception:
        # Log the stack trace for debugging but hide details from the client.
        logger.debug(traceback.format_exc())
        return {"error": "Failed to retrieve rdefs"}
| open_redirect | {
"code": [
" else None",
" range_size = kwargs.get(\"limit\", rows)"
],
"line_no": [
2963,
2966
]
} | {
"code": [
" else rows",
" range_size = limit"
],
"line_no": [
2963,
2966
]
} |
import .re
import json
import base64
import .warnings
from functools import .wraps
import .omero
import .omero.clients
from past.builtins import unicode
from django.http import (
HttpResponse,
HttpResponseBadRequest,
HttpResponseServerError,
JsonResponse,
HttpResponseForbidden,
)
from django.http import (
HttpResponseRedirect,
HttpResponseNotAllowed,
Http404,
StreamingHttpResponse,
HttpResponseNotFound,
)
from django.views.decorators.http import .require_POST
from django.views.decorators.debug import .sensitive_post_parameters
from django.utils.decorators import .method_decorator
from django.core.urlresolvers import .reverse, NoReverseMatch
from django.conf import .settings
from wsgiref.util import FileWrapper
from omero.rtypes import .rlong, unwrap
from omero.constants.namespaces import NSBULKANNOTATIONS
from .util import .points_string_to_XY_list, xy_list_to_bbox
from .plategrid import PlateGrid
from omeroweb.version import .omeroweb_buildyear as build_year
from .marshal import .imageMarshal, shapeMarshal, rgb_int2rgba
from django.contrib.staticfiles.templatetags.staticfiles import .static
from django.views.generic import View
from django.shortcuts import .render
from omeroweb.webadmin.forms import LoginForm
from omeroweb.decorators import .get_client_ip, is_public_user
from omeroweb.webadmin.webadmin_utils import upgradeCheck
try:
from hashlib import .md5
except Exception:
from md5 import .md5
try:
import .long
except ImportError:
VAR_178 = int
from io import BytesIO
import .tempfile
from omero import ApiUsageException
from omero.util.decorators import .timeit, TimeIt
from omeroweb.httprsp import HttpJavascriptResponse, HttpJavascriptResponseServerError
from omeroweb.connector import Server
import glob
from omeroweb.webgateway.webgateway_cache import (
webgateway_cache,
CacheBase,
webgateway_tempfile,
)
import logging
import .os
import .traceback
import .time
import .zipfile
import .shutil
from omeroweb.decorators import login_required, ConnCleaningHttpResponse
from omeroweb.connector import Connector
from omeroweb.webgateway.util import .zip_archived_files, LUTS_IN_PNG
from omeroweb.webgateway.util import .get_longs, getIntOrDefault
VAR_0 = CacheBase()
VAR_1 = logging.getLogger(__name__)
try:
from PIL import Image
from PIL import ImageDraw
except Exception: # pragma: nocover
try:
import Image
import ImageDraw
except Exception:
VAR_1.error("No Pillow installed")
try:
import numpy
VAR_48 = True
except ImportError:
VAR_1.error("No numpy installed")
VAR_48 = False
def VAR_319(VAR_2):
return HttpResponse("Welcome to webgateway")
def FUNC_1(VAR_3):
return unicode(VAR_3).encode("utf-8")
class CLASS_0(object):
def __init__(self, VAR_49):
self._blitzcon = VAR_49
self.loggedIn = False
def FUNC_57(self):
self.loggedIn = True
def FUNC_58(self):
return self._blitzcon.isAdmin()
def FUNC_59(self):
return self._blitzcon.canBeAdmin()
def FUNC_60(self):
return self._blitzcon.getUserId()
def FUNC_61(self):
return self._blitzcon.getUser().omeName
def FUNC_62(self):
return self._blitzcon.getUser().firstName or self.getName()
def FUNC_2(VAR_4):
VAR_50 = []
VAR_51 = []
VAR_52 = []
for VAR_179 in VAR_4.split(","):
VAR_179 = chan.split("|", 1)
VAR_23 = VAR_179[0].strip()
VAR_68 = None
if VAR_23.find("$") >= 0:
VAR_23, VAR_68 = VAR_23.split("$")
try:
VAR_50.append(int(VAR_23))
VAR_262 = (None, None)
if len(VAR_179) > 1:
VAR_23 = VAR_179[1].strip()
if VAR_23.find("$") >= 0:
VAR_23, VAR_68 = VAR_23.split("$", 1)
VAR_23 = VAR_23.split(":")
if len(VAR_23) == 2:
try:
VAR_262 = [float(VAR_30) for VAR_30 in VAR_23]
except ValueError:
pass
VAR_51.append(VAR_262)
VAR_52.append(VAR_68)
except ValueError:
pass
VAR_1.debug(str(VAR_50) + "," + str(VAR_51) + "," + str(VAR_52))
return VAR_50, VAR_51, VAR_52
def FUNC_3(VAR_2, VAR_5=False):
VAR_53 = VAR_2.GET
VAR_54 = {}
for VAR_263 in ("z", "t", "q", "m", "zm", "x", "y", "p"):
if VAR_263 in VAR_53:
VAR_54[VAR_263] = VAR_53[VAR_263]
if "c" in VAR_53:
VAR_54["c"] = []
VAR_180 = FUNC_2(VAR_53["c"])
VAR_1.debug(VAR_180)
for VAR_212 in range(len(VAR_180[0])):
VAR_54["c"].append(
{
"a": abs(VAR_180[0][VAR_212]),
"i": VAR_180[0][VAR_212],
"s": VAR_180[1][VAR_212][0],
"e": VAR_180[1][VAR_212][1],
"c": VAR_180[2][VAR_212],
}
)
if VAR_5:
return "&".join(["%VAR_3=%s" % (VAR_30[0], VAR_30[1]) for VAR_30 in VAR_54.items()])
return VAR_54
@login_required()
def FUNC_4(VAR_2, VAR_6, VAR_7=None, VAR_8=None, **VAR_9):
return FUNC_6(VAR_2, VAR_6, VAR_10=VAR_7, **VAR_9)
def FUNC_5(VAR_2, VAR_6, VAR_10=None, VAR_11=None, VAR_8=None, VAR_12=None, **VAR_9):
VAR_19 = VAR_2.session["connector"].server_id
VAR_55 = VAR_2.session.get("server_settings", {}).get("browser", {})
VAR_56 = VAR_55.get("thumb_default_size", 96)
VAR_57 = True
if VAR_10 is None:
VAR_7 = (VAR_56,)
else:
if VAR_11 is None:
VAR_7 = (int(VAR_10),)
else:
VAR_7 = (int(VAR_10), int(VAR_11))
if VAR_7 == (VAR_56,):
VAR_57 = False
VAR_58 = VAR_8.getUserId()
VAR_22 = getIntOrDefault(VAR_2, "z", None)
VAR_23 = getIntOrDefault(VAR_2, "t", None)
VAR_59 = getIntOrDefault(VAR_2, "rdefId", None)
VAR_60 = webgateway_cache.getThumb(VAR_2, VAR_19, VAR_58, VAR_6, VAR_7)
if VAR_60 is None:
VAR_181 = False
VAR_92 = VAR_8.getObject("Image", VAR_6)
if VAR_92 is None:
VAR_1.debug("(b)Image %VAR_3 not found..." % (str(VAR_6)))
if VAR_12:
VAR_60 = VAR_12(VAR_7=size)
VAR_181 = True
else:
raise Http404("Failed to render thumbnail")
else:
VAR_60 = VAR_92.getThumbnail(
VAR_7=size, VAR_57=direct, VAR_59=rdefId, VAR_22=z, VAR_23=t
)
if VAR_60 is None:
VAR_1.debug("(VAR_203)Image %VAR_3 not found..." % (str(VAR_6)))
if VAR_12:
VAR_60 = VAR_12(VAR_7=size)
VAR_181 = True
else:
raise Http404("Failed to render thumbnail")
else:
VAR_181 = VAR_92._thumbInProgress
if not VAR_181:
webgateway_cache.setThumb(VAR_2, VAR_19, VAR_58, VAR_6, VAR_60, VAR_7)
else:
pass
return VAR_60
@login_required()
def FUNC_6(VAR_2, VAR_6, VAR_10=None, VAR_11=None, VAR_8=None, VAR_12=None, **VAR_9):
VAR_60 = FUNC_5(
VAR_2=request, VAR_6=iid, VAR_10=w, VAR_11=h, VAR_8=conn, VAR_12=_defcb, **VAR_9
)
VAR_61 = HttpResponse(VAR_60, content_type="image/jpeg")
return VAR_61
@login_required()
def FUNC_7(VAR_2, VAR_13, VAR_10=None, VAR_11=None, VAR_8=None, **VAR_9):
VAR_19 = VAR_2.session["connector"].server_id
VAR_62 = VAR_8.getRoiService().findByRoi(VAR_178(VAR_13), None, VAR_8.SERVICE_OPTS)
if VAR_62 is None or VAR_62.rois is None or len(VAR_62.rois) == 0:
raise Http404
for VAR_241 in VAR_62.rois:
VAR_36 = VAR_241.image.id.val
VAR_63 = VAR_241.copyShapes()
VAR_63 = [VAR_3 for VAR_3 in VAR_63 if VAR_3 is not None]
if len(VAR_63) == 0:
raise Http404("No Shapes found for ROI %s" % VAR_13)
VAR_64 = FUNC_13(VAR_2, VAR_36, VAR_19=server_id, VAR_8=conn)
if VAR_64 is None:
raise Http404
VAR_15, VAR_16 = VAR_64
VAR_65 = None
if len(VAR_63) == 1:
VAR_65 = VAR_63[0]
else:
VAR_182 = VAR_15.getDefaultT()
VAR_183 = VAR_15.getDefaultZ()
VAR_184 = [
VAR_3
for VAR_3 in VAR_63
if unwrap(VAR_3.getTheT()) is None or unwrap(VAR_3.getTheT()) == VAR_182
]
if len(VAR_184) == 1:
VAR_65 = VAR_184[0]
else:
VAR_184 = [
VAR_3
for VAR_3 in VAR_184
if unwrap(VAR_3.getTheZ()) is None or unwrap(VAR_3.getTheZ()) == VAR_183
]
if len(VAR_184) > 0:
VAR_65 = VAR_184[0]
if VAR_65 is None and len(VAR_63) > 0:
VAR_65 = VAR_63[0]
return FUNC_9(VAR_2, VAR_8, VAR_15, VAR_65, VAR_16)
@login_required()
def FUNC_8(VAR_2, VAR_14, VAR_10=None, VAR_11=None, VAR_8=None, **VAR_9):
VAR_19 = VAR_2.session["connector"].server_id
VAR_66 = omero.sys.Parameters()
VAR_66.map = {"id": rlong(VAR_14)}
VAR_65 = VAR_8.getQueryService().findByQuery(
"select VAR_3 from Shape VAR_3 join fetch VAR_3.roi where VAR_3.id = :id",
VAR_66,
VAR_8.SERVICE_OPTS,
)
if VAR_65 is None:
raise Http404
VAR_36 = VAR_65.roi.image.id.val
VAR_64 = FUNC_13(VAR_2, VAR_36, VAR_19=server_id, VAR_8=conn)
if VAR_64 is None:
raise Http404
VAR_15, VAR_16 = VAR_64
return FUNC_9(VAR_2, VAR_8, VAR_15, VAR_65, VAR_16)
def FUNC_9(VAR_2, VAR_8, VAR_15, VAR_3, VAR_16):
VAR_67 = 250
VAR_68 = VAR_2.GET.get("color", "fff")
VAR_69 = {
"f00": (255, 0, 0),
"0f0": (0, 255, 0),
"00f": (0, 0, 255),
"ff0": (255, 255, 0),
"fff": (255, 255, 255),
"000": (0, 0, 0),
}
VAR_70 = VAR_69["f00"]
if VAR_68 in VAR_69:
VAR_70 = VAR_69[VAR_68]
VAR_71 = (221, 221, 221)
VAR_72 = None # bounding box: (VAR_30, VAR_29, VAR_10, VAR_11)
VAR_65 = {}
VAR_73 = unwrap(VAR_3.getTheT())
VAR_73 = VAR_73 if VAR_73 is not None else VAR_15.getDefaultT()
VAR_74 = unwrap(VAR_3.getTheZ())
VAR_74 = VAR_74 if VAR_74 is not None else VAR_15.getDefaultZ()
if type(VAR_3) == omero.model.RectangleI:
VAR_65["type"] = "Rectangle"
VAR_65["x"] = VAR_3.getX().getValue()
VAR_65["y"] = VAR_3.getY().getValue()
VAR_65["width"] = VAR_3.getWidth().getValue()
VAR_65["height"] = VAR_3.getHeight().getValue()
VAR_72 = (VAR_65["x"], VAR_65["y"], VAR_65["width"], VAR_65["height"])
elif type(VAR_3) == omero.model.MaskI:
VAR_65["type"] = "Mask"
VAR_65["x"] = VAR_3.getX().getValue()
VAR_65["y"] = VAR_3.getY().getValue()
VAR_65["width"] = VAR_3.getWidth().getValue()
VAR_65["height"] = VAR_3.getHeight().getValue()
VAR_72 = (VAR_65["x"], VAR_65["y"], VAR_65["width"], VAR_65["height"])
elif type(VAR_3) == omero.model.EllipseI:
VAR_65["type"] = "Ellipse"
VAR_65["x"] = int(VAR_3.getX().getValue())
VAR_65["y"] = int(VAR_3.getY().getValue())
VAR_65["radiusX"] = int(VAR_3.getRadiusX().getValue())
VAR_65["radiusY"] = int(VAR_3.getRadiusY().getValue())
VAR_72 = (
VAR_65["x"] - VAR_65["radiusX"],
VAR_65["y"] - VAR_65["radiusY"],
2 * VAR_65["radiusX"],
2 * VAR_65["radiusY"],
)
elif type(VAR_3) == omero.model.PolylineI:
VAR_65["type"] = "PolyLine"
VAR_65["xyList"] = points_string_to_XY_list(VAR_3.getPoints().getValue())
VAR_72 = xy_list_to_bbox(VAR_65["xyList"])
elif type(VAR_3) == omero.model.LineI:
VAR_65["type"] = "Line"
VAR_65["x1"] = int(VAR_3.getX1().getValue())
VAR_65["x2"] = int(VAR_3.getX2().getValue())
VAR_65["y1"] = int(VAR_3.getY1().getValue())
VAR_65["y2"] = int(VAR_3.getY2().getValue())
VAR_30 = min(VAR_65["x1"], VAR_65["x2"])
VAR_29 = min(VAR_65["y1"], VAR_65["y2"])
VAR_72 = (
VAR_30,
VAR_29,
max(VAR_65["x1"], VAR_65["x2"]) - VAR_30,
max(VAR_65["y1"], VAR_65["y2"]) - VAR_29,
)
elif type(VAR_3) == omero.model.PointI:
VAR_65["type"] = "Point"
VAR_65["x"] = VAR_3.getX().getValue()
VAR_65["y"] = VAR_3.getY().getValue()
VAR_72 = (VAR_65["x"] - 50, VAR_65["y"] - 50, 100, 100)
elif type(VAR_3) == omero.model.PolygonI:
VAR_65["type"] = "Polygon"
VAR_65["xyList"] = points_string_to_XY_list(VAR_3.getPoints().getValue())
VAR_72 = xy_list_to_bbox(VAR_65["xyList"])
elif type(VAR_3) == omero.model.LabelI:
VAR_65["type"] = "Label"
VAR_65["x"] = VAR_3.getX().getValue()
VAR_65["y"] = VAR_3.getY().getValue()
VAR_72 = (VAR_65["x"] - 50, VAR_65["y"] - 50, 100, 100)
else:
VAR_1.debug("Shape type not supported: %s" % str(type(VAR_3)))
VAR_30, VAR_29, VAR_10, VAR_11 = VAR_72
VAR_75 = max(VAR_10, VAR_11 * 3 // 2)
VAR_76 = VAR_75 * 2 // 3
VAR_77 = int(VAR_75 * 1.5)
VAR_78 = int(VAR_76 * 1.5)
if VAR_77 < VAR_67:
VAR_77 = VAR_67
VAR_78 = VAR_77 * 2 // 3
def FUNC_63(VAR_79):
try:
return VAR_8.getConfigService().getConfigValue(VAR_79)
except Exception:
VAR_1.warn(
"webgateway: FUNC_9() could not get"
" Config-Value for %s" % VAR_79
)
pass
VAR_80 = FUNC_63("omero.pixeldata.max_plane_width")
VAR_81 = FUNC_63("omero.pixeldata.max_plane_height")
if (
VAR_80 is None
or VAR_81 is None
or (VAR_77 > int(VAR_80))
or (VAR_78 > int(VAR_81))
):
VAR_185 = Image.new("RGB", (VAR_67, MAX_WIDTH * 2 // 3), VAR_71)
VAR_97 = ImageDraw.Draw(VAR_185)
VAR_97.text((10, 30), "Shape too large to \ngenerate thumbnail", VAR_101=(255, 0, 0))
VAR_54 = BytesIO()
VAR_185.save(VAR_54, "jpeg", quality=90)
return HttpResponse(VAR_54.getvalue(), content_type="image/jpeg")
VAR_82 = (VAR_77 - VAR_10) // 2
VAR_83 = (VAR_78 - VAR_11) // 2
VAR_84 = int(VAR_30 - VAR_82)
VAR_85 = int(VAR_29 - VAR_83)
VAR_86 = VAR_15.getSizeX()
VAR_87 = VAR_15.getSizeY()
VAR_88, VAR_89, VAR_90, VAR_91 = 0, 0, 0, 0
if VAR_84 < 0:
VAR_77 = VAR_77 + VAR_84
VAR_88 = abs(VAR_84)
newX = 0
if VAR_85 < 0:
VAR_78 = VAR_78 + VAR_85
VAR_90 = abs(VAR_85)
newY = 0
if VAR_77 + VAR_84 > VAR_86:
VAR_89 = (VAR_77 + VAR_84) - VAR_86
VAR_77 = newW - VAR_89
if VAR_78 + VAR_85 > VAR_87:
VAR_91 = (VAR_78 + VAR_85) - VAR_87
VAR_78 = newH - VAR_91
VAR_60 = VAR_15.renderJpegRegion(
VAR_74, VAR_73, VAR_84, VAR_85, VAR_77, VAR_78, VAR_113=None, VAR_98=VAR_16
)
VAR_92 = Image.open(BytesIO(VAR_60))
if VAR_88 != 0 or VAR_89 != 0 or VAR_90 != 0 or VAR_91 != 0:
VAR_186, VAR_187 = VAR_92.size
VAR_188 = VAR_186 + VAR_89 + VAR_88
VAR_189 = VAR_187 + VAR_91 + VAR_90
VAR_190 = Image.new("RGB", (VAR_188, VAR_189), VAR_71)
VAR_190.paste(VAR_92, (VAR_88, VAR_90))
VAR_92 = VAR_190
VAR_93, VAR_94 = VAR_92.size
VAR_95 = float(VAR_67) / VAR_93
VAR_96 = int(VAR_94 * VAR_95)
VAR_92 = VAR_92.resize((VAR_67, VAR_96))
VAR_97 = ImageDraw.Draw(VAR_92)
if VAR_65["type"] == "Rectangle":
VAR_191 = int(VAR_82 * VAR_95)
VAR_192 = int(VAR_83 * VAR_95)
VAR_193 = int((VAR_10 + VAR_82) * VAR_95)
VAR_194 = int((VAR_11 + VAR_83) * VAR_95)
VAR_97.rectangle((VAR_191, VAR_192, VAR_193, VAR_194), outline=VAR_70)
VAR_97.rectangle((VAR_191 - 1, VAR_192 - 1, VAR_193 + 1, VAR_194 + 1), outline=VAR_70)
elif VAR_65["type"] == "Line":
VAR_264 = (VAR_65["x1"] - VAR_84 + VAR_88) * VAR_95
VAR_265 = (VAR_65["x2"] - VAR_84 + VAR_88) * VAR_95
VAR_266 = (VAR_65["y1"] - VAR_85 + VAR_90) * VAR_95
VAR_267 = (VAR_65["y2"] - VAR_85 + VAR_90) * VAR_95
VAR_97.line((VAR_264, VAR_266, VAR_265, VAR_267), VAR_101=VAR_70, VAR_99=2)
elif VAR_65["type"] == "Ellipse":
VAR_191 = int(VAR_82 * VAR_95)
VAR_192 = int(VAR_83 * VAR_95)
VAR_193 = int((VAR_10 + VAR_82) * VAR_95)
VAR_194 = int((VAR_11 + VAR_83) * VAR_95)
VAR_97.ellipse((VAR_191, VAR_192, VAR_193, VAR_194), outline=VAR_70)
VAR_97.ellipse((VAR_191 - 1, VAR_192 - 1, VAR_193 + 1, VAR_194 + 1), outline=VAR_70)
elif VAR_65["type"] == "Point":
VAR_323 = 2
VAR_191 = (VAR_67 // 2) - VAR_323
VAR_192 = int(VAR_96 // 2) - VAR_323
VAR_193 = VAR_191 + (VAR_323 * 2)
VAR_194 = VAR_192 + (VAR_323 * 2)
VAR_97.ellipse((VAR_191, VAR_192, VAR_193, VAR_194), outline=VAR_70)
VAR_97.ellipse((VAR_191 - 1, VAR_192 - 1, VAR_193 + 1, VAR_194 + 1), outline=VAR_70)
elif "xyList" in VAR_65:
def FUNC_79(VAR_325):
VAR_30, VAR_29 = VAR_325
return (
int((VAR_30 - VAR_84 + VAR_88) * VAR_95),
int((VAR_29 - VAR_85 + VAR_90) * VAR_95),
)
VAR_326 = [FUNC_79(VAR_325) for VAR_325 in VAR_65["xyList"]]
VAR_327 = VAR_328 = None
for line in range(1, len(VAR_326)):
VAR_333, VAR_334 = VAR_326[line - 1]
VAR_327, VAR_328 = VAR_326[line]
VAR_97.line((VAR_333, VAR_334, VAR_327, VAR_328), VAR_101=VAR_70, VAR_99=2)
VAR_329, VAR_330 = VAR_326[0]
if VAR_65["type"] != "PolyLine":
if VAR_327 is None:
VAR_327 = VAR_329 + 1 # This will create VAR_167 visible dot
if VAR_328 is None:
VAR_328 = VAR_330 + 1
VAR_97.line((VAR_327, VAR_328, VAR_329, VAR_330), VAR_101=VAR_70, VAR_99=2)
VAR_54 = BytesIO()
VAR_98 = 0.9
try:
VAR_92.save(VAR_54, "jpeg", quality=int(VAR_98 * 100))
VAR_195 = VAR_54.getvalue()
finally:
VAR_54.close()
return HttpResponse(VAR_195, content_type="image/jpeg")
@login_required()
def FUNC_10(VAR_2, VAR_14, VAR_8=None, **VAR_9):
if not VAR_48:
raise NotImplementedError("numpy not installed")
VAR_66 = omero.sys.Parameters()
VAR_66.map = {"id": rlong(VAR_14)}
VAR_65 = VAR_8.getQueryService().findByQuery(
"select VAR_3 from Shape VAR_3 where VAR_3.id = :id", VAR_66, VAR_8.SERVICE_OPTS
)
if VAR_65 is None:
raise Http404("Shape ID: %VAR_3 not found" % VAR_14)
VAR_99 = int(VAR_65.getWidth().getValue())
VAR_100 = int(VAR_65.getHeight().getValue())
VAR_68 = unwrap(VAR_65.getFillColor())
VAR_101 = (255, 255, 0, 255)
if VAR_68 is not None:
VAR_68 = rgb_int2rgba(VAR_68)
VAR_101 = (VAR_68[0], VAR_68[1], VAR_68[2], int(VAR_68[3] * 255))
VAR_102 = VAR_65.getBytes()
VAR_103 = numpy.fromstring(VAR_102, dtype=numpy.uint8)
VAR_104 = numpy.unpackbits(VAR_103)
VAR_92 = Image.new("RGBA", VAR_7=(VAR_99, VAR_100), VAR_68=(0, 0, 0, 0))
VAR_30 = 0
VAR_29 = 0
for pix in VAR_104:
if pix == 1:
VAR_92.putpixel((VAR_30, VAR_29), VAR_101)
VAR_30 += 1
if VAR_30 > VAR_99 - 1:
VAR_30 = 0
VAR_29 += 1
VAR_54 = BytesIO()
VAR_92.save(VAR_54, "png", quality=int(100))
VAR_105 = VAR_54.getvalue()
return HttpResponse(VAR_105, content_type="image/png")
def FUNC_11(VAR_2):
VAR_53 = VAR_2.GET
VAR_54 = VAR_53.get("m", "_") + VAR_53.get("p", "_") + VAR_53.get("c", "_") + VAR_53.get("q", "_")
return VAR_54
def FUNC_12(VAR_2, VAR_17, VAR_18=0):
VAR_106 = None
if "maps" in VAR_2:
VAR_196 = VAR_2["maps"]
VAR_106 = []
try:
if isinstance(VAR_196, (unicode, str)):
VAR_196 = json.loads(VAR_196)
VAR_18 = max(len(VAR_196), VAR_18)
for VAR_203 in range(VAR_18):
VAR_308 = None
if len(VAR_196) > VAR_203:
VAR_282 = VAR_196[VAR_203].get(VAR_17)
if VAR_282 is not None:
VAR_308 = VAR_282.get("enabled") in (True, "true")
VAR_106.append(VAR_308)
except Exception:
VAR_1.debug("Invalid json for VAR_43 ?VAR_223=%s" % VAR_196)
VAR_106 = None
return VAR_106
def FUNC_13(
VAR_2, VAR_6, VAR_19=None, VAR_8=None, VAR_20=False, VAR_21=True
):
VAR_53 = VAR_2.GET
VAR_1.debug(
"Preparing Image:%VAR_53 VAR_20=%VAR_53 "
"retry=%VAR_53 VAR_2=%VAR_53 VAR_8=%s" % (VAR_6, VAR_20, VAR_21, VAR_53, str(VAR_8))
)
VAR_92 = VAR_8.getObject("Image", VAR_6)
if VAR_92 is None:
return
VAR_107 = None
if "maps" in VAR_53:
VAR_197 = FUNC_12(VAR_53, "reverse", VAR_92.getSizeC())
VAR_107 = FUNC_12(VAR_53, "inverted", VAR_92.getSizeC())
if VAR_197 is not None and VAR_107 is not None:
VAR_107 = [
VAR_22[0] if VAR_22[0] is not None else VAR_22[1] for VAR_22 in zip(VAR_107, VAR_197)
]
try:
VAR_268 = [VAR_282.get("quantization") for VAR_282 in json.loads(VAR_53["maps"])]
VAR_92.setQuantizationMaps(VAR_268)
except Exception:
VAR_1.debug("Failed to set quantization maps")
if "c" in VAR_53:
VAR_1.debug("c=" + VAR_53["c"])
VAR_198, VAR_51, VAR_52 = FUNC_2(VAR_53["c"])
VAR_199 = range(1, VAR_92.getSizeC() + 1)
if VAR_20 and not VAR_92.setActiveChannels(
VAR_199, VAR_51, VAR_52, VAR_107
):
VAR_1.debug("Something bad happened while setting the active VAR_50...")
if not VAR_92.setActiveChannels(VAR_198, VAR_51, VAR_52, VAR_107):
VAR_1.debug("Something bad happened while setting the active VAR_50...")
if VAR_53.get("m", None) == "g":
VAR_92.setGreyscaleRenderingModel()
elif VAR_53.get("m", None) == "c":
VAR_92.setColorRenderingModel()
VAR_108 = VAR_53.get("p", None)
VAR_109, VAR_110 = None, None
if VAR_108 is not None and len(VAR_108.split("|")) > 1:
VAR_108, VAR_200 = VAR_108.split("|", 1)
try:
VAR_109, VAR_110 = [int(VAR_3) for VAR_3 in VAR_200.split(":")]
except ValueError:
pass
VAR_92.setProjection(VAR_108)
VAR_92.setProjectionRange(VAR_109, VAR_110)
VAR_92.setInvertedAxis(bool(VAR_53.get("ia", "0") == "1"))
VAR_16 = VAR_53.get("q", None)
if VAR_20:
"z" in VAR_53 and VAR_92.setDefaultZ(VAR_178(VAR_53["z"]) - 1)
"t" in VAR_53 and VAR_92.setDefaultT(VAR_178(VAR_53["t"]) - 1)
VAR_92.saveDefaults()
return (VAR_92, VAR_16)
@login_required()
def FUNC_14(VAR_2, VAR_6, VAR_22, VAR_23, VAR_8=None, **VAR_9):
VAR_19 = VAR_2.session["connector"].server_id
VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_19=server_id, VAR_8=conn)
if VAR_64 is None:
raise Http404
VAR_92, VAR_16 = VAR_64
VAR_111 = VAR_2.GET.get("tile", None)
VAR_112 = VAR_2.GET.get("region", None)
VAR_113 = None
if VAR_111:
try:
VAR_92._prepareRenderingEngine()
VAR_10, VAR_11 = VAR_92._re.getTileSize()
VAR_269 = VAR_92._re.getResolutionLevels() - 1
VAR_270 = VAR_111.split(",")
if len(VAR_270) > 4:
VAR_309 = [int(VAR_270[3]), int(VAR_270[4])]
VAR_310 = [VAR_10, VAR_11]
VAR_311 = 1024
try:
VAR_311 = int(
VAR_8.getConfigService().getConfigValue(
"omero.pixeldata.max_tile_length"
)
)
except Exception:
pass
for VAR_212, tile_length in enumerate(VAR_309):
if tile_length <= 0:
VAR_309[VAR_212] = VAR_310[VAR_212]
if tile_length > VAR_311:
VAR_309[VAR_212] = VAR_311
VAR_10, VAR_11 = VAR_309
VAR_271 = int(VAR_270[0])
if VAR_271 < 0:
VAR_231 = "Invalid resolution VAR_113 %VAR_3 < 0" % VAR_271
VAR_1.debug(VAR_231, exc_info=True)
return HttpResponseBadRequest(VAR_231)
if VAR_269 == 0: # non pyramid file
if VAR_271 > 0:
VAR_231 = "Invalid resolution VAR_113 %VAR_3, non pyramid file" % VAR_271
VAR_1.debug(VAR_231, exc_info=True)
return HttpResponseBadRequest(VAR_231)
else:
VAR_113 = None
else:
VAR_113 = VAR_269 - VAR_271
if VAR_113 < 0:
VAR_231 = (
"Invalid resolution VAR_113, \
%VAR_3 > number of available VAR_269 %VAR_3 "
% (VAR_271, VAR_269)
)
VAR_1.debug(VAR_231, exc_info=True)
return HttpResponseBadRequest(VAR_231)
VAR_30 = int(VAR_270[1]) * VAR_10
VAR_29 = int(VAR_270[2]) * VAR_11
except Exception:
VAR_231 = "malformed VAR_111 argument, VAR_111=%s" % VAR_111
VAR_1.debug(VAR_231, exc_info=True)
return HttpResponseBadRequest(VAR_231)
elif VAR_112:
try:
VAR_312 = VAR_112.split(",")
VAR_30 = int(VAR_312[0])
VAR_29 = int(VAR_312[1])
VAR_10 = int(VAR_312[2])
VAR_11 = int(VAR_312[3])
except Exception:
VAR_231 = "malformed VAR_112 argument, VAR_112=%s" % VAR_112
VAR_1.debug(VAR_231, exc_info=True)
return HttpResponseBadRequest(VAR_231)
else:
return HttpResponseBadRequest("tile or VAR_112 argument required")
VAR_60 = webgateway_cache.getImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23)
if VAR_60 is None:
VAR_60 = VAR_92.renderJpegRegion(
VAR_22, VAR_23, VAR_30, VAR_29, VAR_10, VAR_11, VAR_113=level, VAR_98=VAR_16
)
if VAR_60 is None:
raise Http404
webgateway_cache.setImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23, VAR_60)
VAR_61 = HttpResponse(VAR_60, content_type="image/jpeg")
return VAR_61
@login_required()
def FUNC_15(VAR_2, VAR_6, VAR_22=None, VAR_23=None, VAR_8=None, **VAR_9):
VAR_19 = VAR_2.session["connector"].server_id
VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_19=server_id, VAR_8=conn)
if VAR_64 is None:
raise Http404
VAR_92, VAR_16 = VAR_64
VAR_60 = webgateway_cache.getImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23)
if VAR_60 is None:
VAR_60 = VAR_92.renderJpeg(VAR_22, VAR_23, VAR_98=VAR_16)
if VAR_60 is None:
raise Http404
webgateway_cache.setImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23, VAR_60)
VAR_114 = VAR_2.GET.get("format", "jpeg")
VAR_61 = HttpResponse(VAR_60, content_type="image/jpeg")
if "download" in VAR_9 and VAR_9["download"]:
if VAR_114 == "png":
VAR_212 = Image.open(BytesIO(VAR_60))
VAR_272 = BytesIO()
VAR_212.save(VAR_272, "png")
VAR_60 = VAR_272.getvalue()
VAR_272.close()
VAR_61 = HttpResponse(VAR_60, content_type="image/png")
elif VAR_114 == "tif":
VAR_212 = Image.open(BytesIO(VAR_60))
VAR_272 = BytesIO()
VAR_212.save(VAR_272, "tiff")
VAR_60 = VAR_272.getvalue()
VAR_272.close()
VAR_61 = HttpResponse(VAR_60, content_type="image/tiff")
VAR_201 = VAR_92.getName()
try:
VAR_201 = fileName.decode("utf8")
except AttributeError:
pass # python 3
VAR_201 = fileName.replace(",", ".").replace(" ", "_")
VAR_61["Content-Type"] = "application/force-download"
VAR_61["Content-Length"] = len(VAR_60)
VAR_61["Content-Disposition"] = "attachment; filename=%VAR_3.%s" % (VAR_201, VAR_114)
return VAR_61
@login_required()
def FUNC_16(VAR_2, VAR_24, VAR_25, VAR_8=None, **VAR_9):
    """Export one or more images as OME-TIFF.

    VAR_24 selects the container type ("p" project, "d" dataset, "w" well,
    otherwise single image id VAR_25). A single image streams one .ome.tiff;
    multiple images are bundled into a zip. Supports ?dryrun (returns the
    image count) and async serving via webgateway_tempfile redirects.

    NOTE(review): `key` in the `VAR_79=key` keyword calls is an unresolved
    obfuscation artifact — confirm against original source.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_115 = []
    if VAR_24 == "p":
        VAR_202 = VAR_8.getObject("Project", VAR_25)
        if VAR_202 is None:
            raise Http404
        for VAR_213 in VAR_202.listChildren():
            VAR_115.extend(list(VAR_213.listChildren()))
        VAR_17 = VAR_202.getName()
    elif VAR_24 == "d":
        VAR_202 = VAR_8.getObject("Dataset", VAR_25)
        if VAR_202 is None:
            raise Http404
        VAR_115.extend(list(VAR_202.listChildren()))
        # Optional comma-separated ?selection of image ids restricts the set.
        VAR_273 = list(filter(None, VAR_2.GET.get("selection", "").split(",")))
        if len(VAR_273) > 0:
            VAR_1.debug(VAR_273)
            VAR_1.debug(VAR_115)
            VAR_115 = [VAR_30 for VAR_30 in VAR_115 if str(VAR_30.getId()) in VAR_273]
            VAR_1.debug(VAR_115)
            if len(VAR_115) == 0:
                raise Http404
        VAR_17 = "%VAR_3-%s" % (VAR_202.getParent().getName(), VAR_202.getName())
    elif VAR_24 == "w":
        VAR_202 = VAR_8.getObject("Well", VAR_25)
        if VAR_202 is None:
            raise Http404
        VAR_115.extend([VAR_30.getImage() for VAR_30 in VAR_202.listChildren()])
        VAR_124 = VAR_202.getParent()
        # Well label like "A1" built from plate row/column labels.
        VAR_313 = "%VAR_3%s" % (
            VAR_124.getRowLabels()[VAR_202.row],
            VAR_124.getColumnLabels()[VAR_202.column],
        )
        VAR_17 = "%VAR_3-%VAR_3-%s" % (VAR_124.getParent().getName(), VAR_124.getName(), VAR_313)
    else:
        VAR_202 = VAR_8.getObject("Image", VAR_25)
        if VAR_202 is None:
            raise Http404
        VAR_115.append(VAR_202)
    # Pyramidal (big) images cannot be exported as OME-TIFF here.
    VAR_115 = [VAR_30 for VAR_30 in VAR_115 if not VAR_30.requiresPixelsPyramid()]
    if VAR_2.GET.get("dryrun", False):
        # Dry run: report how many images would be exported (JSON/JSONP).
        VAR_54 = json.dumps(len(VAR_115))
        VAR_203 = VAR_2.GET.get("callback", None)
        if VAR_203 is not None and not VAR_9.get("_internal", False):
            VAR_54 = "%VAR_3(%VAR_3)" % (VAR_203, VAR_54)
        return HttpJavascriptResponse(VAR_54)
    if len(VAR_115) == 0:
        raise Http404
    if len(VAR_115) == 1:
        # Single image: stream one .ome.tiff (cached when possible).
        VAR_202 = VAR_115[0]
        VAR_79 = (
            "_".join((str(VAR_30.getId()) for VAR_30 in VAR_202.getAncestry()))
            + "_"
            + str(VAR_202.getId())
            + "_ome_tiff"
        )
        # Truncate the name so id + suffix fits a 255-char filename.
        VAR_204 = 255 - len(str(VAR_202.getId())) - 10
        VAR_205 = VAR_202.getName()[:VAR_204]
        VAR_206, VAR_207, VAR_208 = webgateway_tempfile.new(
            str(VAR_202.getId()) + "-" + VAR_205 + ".ome.tiff", VAR_79=key
        )
        if VAR_208 is True:
            # File already exists; redirect to the static tempfile URL.
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + VAR_207
            )
        VAR_209 = webgateway_cache.getOmeTiffImage(VAR_2, VAR_19, VAR_115[0])
        if VAR_209 is None:
            try:
                VAR_209 = VAR_115[0].exportOmeTiff()
            except Exception:
                VAR_1.debug("Failed to export VAR_15 (2)", exc_info=True)
                VAR_209 = None
            if VAR_209 is None:
                webgateway_tempfile.abort(VAR_206)
                raise Http404
            webgateway_cache.setOmeTiffImage(VAR_2, VAR_19, VAR_115[0], VAR_209)
        if VAR_208 is None:
            # No tempfile support: stream bytes directly.
            VAR_61 = HttpResponse(VAR_209, content_type="image/tiff")
            VAR_61["Content-Disposition"] = 'attachment; filename="%VAR_3.ome.tiff"' % (
                str(VAR_202.getId()) + "-" + VAR_205
            )
            VAR_61["Content-Length"] = len(VAR_209)
            return VAR_61
        else:
            VAR_208.write(VAR_209)
            VAR_208.close()
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + VAR_207
            )
    else:
        # Multiple images: zip the exported OME-TIFFs.
        try:
            VAR_274 = "+".join((str(VAR_30.getId()) for VAR_30 in VAR_115)).encode("utf-8")
            # Cache key derived from ancestry + md5 of the id list.
            VAR_79 = (
                "_".join((str(VAR_30.getId()) for VAR_30 in VAR_115[0].getAncestry()))
                + "_"
                + md5(VAR_274).hexdigest()
                + "_ome_tiff_zip"
            )
            VAR_206, VAR_207, VAR_208 = webgateway_tempfile.new(VAR_17 + ".zip", VAR_79=key)
            if VAR_208 is True:
                return HttpResponseRedirect(
                    settings.STATIC_URL + "webgateway/tfiles/" + VAR_207
                )
            VAR_1.debug(VAR_206)
            if VAR_208 is None:
                VAR_208 = BytesIO()
            VAR_275 = zipfile.ZipFile(VAR_208, "w", zipfile.ZIP_STORED)
            for VAR_202 in VAR_115:
                VAR_209 = webgateway_cache.getOmeTiffImage(VAR_2, VAR_19, VAR_202)
                if VAR_209 is None:
                    VAR_209 = VAR_202.exportOmeTiff()
                    if VAR_209 is None:
                        continue
                    webgateway_cache.setOmeTiffImage(VAR_2, VAR_19, VAR_202, VAR_209)
                VAR_204 = 255 - len(str(VAR_202.getId())) - 10
                VAR_205 = VAR_202.getName()[:VAR_204]
                VAR_275.writestr(str(VAR_202.getId()) + "-" + VAR_205 + ".ome.tiff", VAR_209)
            VAR_275.close()
            if VAR_206 is None:
                # In-memory zip: return it directly as an attachment.
                VAR_314 = VAR_208.getvalue()
                VAR_61 = HttpResponse(VAR_314, content_type="application/zip")
                VAR_61["Content-Disposition"] = 'attachment; filename="%VAR_3.zip"' % VAR_17
                VAR_61["Content-Length"] = len(VAR_314)
                return VAR_61
        except Exception:
            VAR_1.debug(traceback.format_exc())
            raise
        return HttpResponseRedirect(settings.STATIC_URL + "webgateway/tfiles/" + VAR_207)
@login_required()
def FUNC_17(VAR_2, VAR_6, VAR_26, VAR_27, VAR_8=None, **VAR_9):
    """Render a movie for image VAR_6 along axis VAR_26 ("z" or "t") at
    fixed position VAR_27, honoring ?format and ?fps; the result is either
    streamed as an attachment or redirected to a tempfile URL.

    NOTE(review): `server_id`, `conn` and `key` in keyword calls are
    unresolved obfuscation artifacts — confirm against original source.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    try:
        VAR_136 = {}
        VAR_136["format"] = "video/" + VAR_2.GET.get("format", "quicktime")
        VAR_136["fps"] = int(VAR_2.GET.get("fps", 4))
        VAR_136["minsize"] = (512, 512, "Black")
        VAR_210 = ".avi"
        # Tempfile key encodes image, axis, position, fps, user and format.
        VAR_79 = "%VAR_3-%VAR_3-%VAR_3-%VAR_213-%VAR_3-%s" % (
            VAR_6,
            VAR_26,
            VAR_27,
            VAR_136["fps"],
            FUNC_11(VAR_2),
            VAR_2.GET.get("format", "quicktime"),
        )
        VAR_27 = int(VAR_27)
        VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_19=server_id, VAR_8=conn)
        if VAR_64 is None:
            raise Http404
        VAR_92, VAR_16 = VAR_64
        VAR_206, VAR_207, VAR_208 = webgateway_tempfile.new(VAR_92.getName() + VAR_210, VAR_79=key)
        VAR_1.debug(VAR_206, VAR_207, VAR_208)
        if VAR_208 is True:
            # Movie already rendered; redirect to the static tempfile URL.
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + VAR_207
            )
        if "optsCB" in VAR_9:
            VAR_136.update(VAR_9["optsCB"](VAR_92))
        VAR_136.update(VAR_9.get("opts", {}))
        VAR_1.debug(
            "rendering VAR_280 for VAR_92 %VAR_3 with VAR_26 %VAR_3, VAR_27 %VAR_212 and VAR_136 %s"
            % (VAR_6, VAR_26, VAR_27, VAR_136)
        )
        if VAR_206 is None:
            VAR_276, VAR_277 = tempfile.mkstemp()
        else:
            VAR_277 = VAR_206  # os.path.join(VAR_206, VAR_92.getName())
        if VAR_26.lower() == "z":
            # Sweep Z at fixed T.
            VAR_278, VAR_279 = VAR_92.createMovie(
                VAR_277, 0, VAR_92.getSizeZ() - 1, VAR_27 - 1, VAR_27 - 1, VAR_136
            )
        else:
            # Sweep T at fixed Z.
            VAR_278, VAR_279 = VAR_92.createMovie(
                VAR_277, VAR_27 - 1, VAR_27 - 1, 0, VAR_92.getSizeT() - 1, VAR_136
            )
        if VAR_278 is None and VAR_279 is None:
            raise Http404
        if VAR_206 is None:
            # No tempfile support: read the rendered file and stream it.
            VAR_280 = open(VAR_277).read()
            os.close(VAR_276)
            VAR_61 = HttpResponse(VAR_280, content_type=VAR_279)
            VAR_61["Content-Disposition"] = 'attachment; filename="%s"' % (
                VAR_92.getName() + VAR_210
            )
            VAR_61["Content-Length"] = len(VAR_280)
            return VAR_61
        else:
            VAR_208.close()
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + VAR_207
            )
    except Exception:
        VAR_1.debug(traceback.format_exc())
        raise
@login_required()
def FUNC_18(VAR_2, VAR_6, VAR_22, VAR_23, VAR_8=None, **VAR_9):
    """Render a split-channel JPEG for image VAR_6 at plane (VAR_22, VAR_23),
    using the webgateway cache.

    NOTE(review): `server_id`, `conn`, `compress_quality` are unresolved
    obfuscation artifacts — confirm against original source.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_19=server_id, VAR_8=conn)
    if VAR_64 is None:
        raise Http404
    VAR_92, VAR_16 = VAR_64
    # Fall back to 0.9 compression when no quality was supplied.
    VAR_16 = compress_quality and float(VAR_16) or 0.9
    VAR_60 = webgateway_cache.getSplitChannelImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23)
    if VAR_60 is None:
        VAR_60 = VAR_92.renderSplitChannel(VAR_22, VAR_23, VAR_98=VAR_16)
        if VAR_60 is None:
            raise Http404
        webgateway_cache.setSplitChannelImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23, VAR_60)
    VAR_61 = HttpResponse(VAR_60, content_type="image/jpeg")
    return VAR_61
def VAR_211(VAR_28):
    """View decorator for testing: ?debug=slow sleeps 5s, ?debug=fail
    raises Http404, ?debug=error raises AttributeError."""
    @wraps(VAR_28)
    def FUNC_64(VAR_2, *VAR_116, **VAR_9):
        VAR_211 = VAR_2.GET.getlist("debug")
        if "slow" in VAR_211:
            time.sleep(5)
        if "fail" in VAR_211:
            raise Http404
        if "error" in VAR_211:
            raise AttributeError("Debug requested error")
        return VAR_28(VAR_2, *VAR_116, **VAR_9)

    return FUNC_64
def FUNC_20(VAR_28):
    """Decorator marshalling a view's return value to JSON/JSONP.

    Passes `server_id` through kwargs, honors `_raw`/`_internal` flags and
    a ?callback JSONP parameter, and converts exceptions to JSON error
    responses (403 for SecurityViolation, 400 for ApiUsageException).

    NOTE(review): `safe` and `status` in the JsonResponse calls are
    unresolved obfuscation artifacts (were keyword values) — confirm
    against original source.
    """
    @wraps(VAR_28)
    def FUNC_64(VAR_2, *VAR_116, **VAR_9):
        VAR_1.debug("jsonp")
        try:
            VAR_19 = VAR_9.get("server_id", None)
            if VAR_19 is None and VAR_2.session.get("connector"):
                VAR_19 = VAR_2.session["connector"].server_id
            VAR_9["server_id"] = VAR_19
            VAR_54 = VAR_28(VAR_2, *VAR_116, **VAR_9)
            if VAR_9.get("_raw", False):
                return VAR_54
            if isinstance(VAR_54, HttpResponse):
                return VAR_54
            VAR_203 = VAR_2.GET.get("callback", None)
            if VAR_203 is not None and not VAR_9.get("_internal", False):
                # JSONP: wrap the JSON payload in the callback function.
                VAR_54 = json.dumps(VAR_54)
                VAR_54 = "%VAR_3(%VAR_3)" % (VAR_203, VAR_54)
                return HttpJavascriptResponse(VAR_54)
            if VAR_9.get("_internal", False):
                return VAR_54
            # Non-dict payloads need safe=False for JsonResponse.
            VAR_281 = type(VAR_54) is dict
            return JsonResponse(VAR_54, VAR_281=safe)
        except Exception as ex:
            # Map known OMERO exceptions to HTTP status codes.
            VAR_315 = 500
            if isinstance(ex, omero.SecurityViolation):
                VAR_315 = 403
            elif isinstance(ex, omero.ApiUsageException):
                VAR_315 = 400
            VAR_316 = traceback.format_exc()
            VAR_1.debug(VAR_316)
            if VAR_9.get("_raw", False) or VAR_9.get("_internal", False):
                raise
            return JsonResponse(
                {"message": str(ex), "stacktrace": VAR_316}, VAR_315=status
            )

    return FUNC_64
@VAR_211
@login_required()
def FUNC_21(VAR_2, VAR_6, VAR_22, VAR_23, VAR_29, VAR_8=None, VAR_10=1, **VAR_9):
    """Render a row line-plot GIF for image VAR_6 at plane (VAR_22, VAR_23),
    row VAR_29, line width VAR_10 (defaults to 1)."""
    if not VAR_10:
        VAR_10 = 1
    VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_8=conn)
    if VAR_64 is None:
        raise Http404
    VAR_92, VAR_16 = VAR_64
    try:
        VAR_117 = VAR_92.renderRowLinePlotGif(int(VAR_22), int(VAR_23), int(VAR_29), int(VAR_10))
    except Exception:
        VAR_1.debug("a", exc_info=True)
        raise
    if VAR_117 is None:
        raise Http404
    VAR_61 = HttpResponse(VAR_117, content_type="image/gif")
    return VAR_61
@VAR_211
@login_required()
def FUNC_22(VAR_2, VAR_6, VAR_22, VAR_23, VAR_30, VAR_10=1, VAR_8=None, **VAR_9):
    """Render a column line-plot GIF for image VAR_6 at plane (VAR_22, VAR_23),
    column VAR_30, line width VAR_10 (defaults to 1)."""
    if not VAR_10:
        VAR_10 = 1
    VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_8=conn)
    if VAR_64 is None:
        raise Http404
    VAR_92, VAR_16 = VAR_64
    VAR_117 = VAR_92.renderColLinePlotGif(int(VAR_22), int(VAR_23), int(VAR_30), int(VAR_10))
    if VAR_117 is None:
        raise Http404
    VAR_61 = HttpResponse(VAR_117, content_type="image/gif")
    return VAR_61
@login_required()
@FUNC_20
def FUNC_23(VAR_2, VAR_8=None, VAR_31=False, **VAR_9):
    """Return the marshalled metadata dict for image kwargs["iid"];
    ?getDefaults=true resets rendering defaults (without saving) first."""
    VAR_6 = VAR_9["iid"]
    VAR_79 = VAR_9.get("key", None)
    VAR_15 = VAR_8.getObject("Image", VAR_6)
    if VAR_15 is None:
        # Hide existence from public users; 404 otherwise.
        if is_public_user(VAR_2):
            return HttpResponseForbidden()
        else:
            return HttpResponseNotFound("Image:%VAR_3 not found" % VAR_6)
    if VAR_2.GET.get("getDefaults") == "true":
        VAR_15.resetDefaults(save=False)
    VAR_54 = imageMarshal(VAR_15, VAR_79=key, VAR_2=request)
    return VAR_54
@login_required()
@FUNC_20
def FUNC_24(VAR_2, VAR_8=None, VAR_31=False, **VAR_9):
    """Return the simple-marshalled dict for well kwargs["wid"], with a
    thumbnail URL prefix injected via `xtra`."""
    VAR_118 = VAR_9["wid"]
    VAR_119 = VAR_8.getObject("Well", VAR_118)
    if VAR_119 is None:
        return HttpJavascriptResponseServerError('""')
    VAR_120 = VAR_9.get("thumbprefix", "webgateway_render_thumbnail")

    def FUNC_65(VAR_6):
        # Build the thumbnail URL for one image id.
        return reverse(VAR_120, VAR_116=(VAR_6,))

    VAR_121 = {"thumbUrlPrefix": VAR_9.get("urlprefix", FUNC_65)}
    VAR_54 = VAR_119.simpleMarshal(VAR_121=xtra)
    return VAR_54
@login_required()
@FUNC_20
def FUNC_25(VAR_2, VAR_32, VAR_33=0, VAR_8=None, **VAR_9):
    """Return plate-grid JSON for plate VAR_32, acquisition field VAR_33,
    cached per (field, thumbnail size)."""
    try:
        VAR_33 = VAR_178(VAR_33 or 0)
    except ValueError:
        VAR_33 = 0
    VAR_120 = VAR_9.get("thumbprefix", "webgateway_render_thumbnail")
    VAR_122 = getIntOrDefault(VAR_2, "size", None)
    VAR_1.debug(VAR_122)
    VAR_19 = VAR_9["server_id"]

    def FUNC_66(VAR_6):
        # Thumbnail URL, with explicit size when requested.
        if VAR_122 is not None:
            return reverse(VAR_120, VAR_116=(VAR_6, VAR_122))
        return reverse(VAR_120, VAR_116=(VAR_6,))

    VAR_123 = PlateGrid(VAR_8, VAR_32, VAR_33, VAR_9.get("urlprefix", FUNC_66))
    VAR_124 = VAR_123.plate
    if VAR_124 is None:
        return Http404
    VAR_125 = "plategrid-%VAR_213-%s" % (VAR_33, VAR_122)
    VAR_54 = webgateway_cache.getJson(VAR_2, VAR_19, VAR_124, VAR_125)
    if VAR_54 is None:
        VAR_54 = VAR_123.metadata
        webgateway_cache.setJson(VAR_2, VAR_19, VAR_124, json.dumps(VAR_54), VAR_125)
    else:
        VAR_54 = json.loads(VAR_54)
    return VAR_54
@login_required()
@FUNC_20
def FUNC_26(VAR_2, VAR_10=None, VAR_8=None, **VAR_9):
    """Return {image_id: base64 data-URI or None} thumbnails for the ids in
    ?id=..., capped at settings.THUMBNAILS_BATCH.

    NOTE(review): `w` and `conn` in the single-id branch are unresolved
    obfuscation artifacts — confirm against original source.
    """
    VAR_55 = VAR_2.session.get("server_settings", {}).get("browser", {})
    VAR_56 = VAR_55.get("thumb_default_size", 96)
    if VAR_10 is None:
        VAR_10 = VAR_56
    VAR_126 = get_longs(VAR_2, "id")
    VAR_126 = list(set(VAR_126))  # remove any duplicates
    if len(VAR_126) == 1:
        # Single id: render via the per-image helper, best-effort.
        VAR_6 = VAR_126[0]
        try:
            VAR_162 = FUNC_5(VAR_2, VAR_6, VAR_10=w, VAR_8=conn)
            return {
                VAR_6: "data:VAR_15/VAR_195;base64,%s"
                % base64.b64encode(VAR_162).decode("utf-8")
            }
        except Exception:
            return {VAR_6: None}
    VAR_1.debug("Image ids: %r" % VAR_126)
    if len(VAR_126) > settings.THUMBNAILS_BATCH:
        return HttpJavascriptResponseServerError(
            "Max %VAR_3 VAR_127 at VAR_167 time." % settings.THUMBNAILS_BATCH
        )
    # Batch fetch all thumbnails in one server call.
    VAR_127 = VAR_8.getThumbnailSet([rlong(VAR_212) for VAR_212 in VAR_126], VAR_10)
    VAR_54 = dict()
    for VAR_212 in VAR_126:
        VAR_54[VAR_212] = None
        try:
            VAR_23 = VAR_127[VAR_212]
            if len(VAR_23) > 0:
                VAR_54[VAR_212] = "data:VAR_15/VAR_195;base64,%s" % base64.b64encode(VAR_23).decode(
                    "utf-8"
                )
        except KeyError:
            VAR_1.error("Thumbnail not available. (VAR_92 id: %VAR_213)" % VAR_212)
        except Exception:
            VAR_1.error(traceback.format_exc())
    return VAR_54
@login_required()
@FUNC_20
def FUNC_27(VAR_2, VAR_6, VAR_10=None, VAR_11=None, VAR_8=None, VAR_12=None, **VAR_9):
    """Return a single thumbnail for image VAR_6 as a base64 data-URI.

    NOTE(review): the keyword values `request`, `iid`, `w`, `h`, `conn`,
    `_defcb` are unresolved obfuscation artifacts — confirm against
    original source.
    """
    VAR_60 = FUNC_5(
        VAR_2=request, VAR_6=iid, VAR_10=w, VAR_11=h, VAR_8=conn, VAR_12=_defcb, **VAR_9
    )
    VAR_54 = "data:VAR_15/VAR_195;base64,%s" % base64.b64encode(VAR_60).decode("utf-8")
    return VAR_54
@login_required()
@FUNC_20
def FUNC_28(VAR_2, VAR_34, VAR_8=None, **VAR_9):
    """List simple-marshalled child images of dataset VAR_34, including a
    thumbnail URL prefix and the ?tiled flag."""
    VAR_128 = VAR_8.getObject("Dataset", VAR_34)
    if VAR_128 is None:
        return HttpJavascriptResponseServerError('""')
    VAR_120 = VAR_9.get("thumbprefix", "webgateway_render_thumbnail")

    def FUNC_65(VAR_6):
        # Thumbnail URL for one image id.
        return reverse(VAR_120, VAR_116=(VAR_6,))

    VAR_121 = {
        "thumbUrlPrefix": VAR_9.get("urlprefix", FUNC_65),
        "tiled": VAR_2.GET.get("tiled", False),
    }
    return [VAR_30.simpleMarshal(VAR_121=xtra) for VAR_30 in VAR_128.listChildren()]
@login_required()
@FUNC_20
def FUNC_29(VAR_2, VAR_34, VAR_8=None, **VAR_9):
    """List simple-marshalled images of well VAR_34, optionally filtered to
    one plate acquisition via ?run, each annotated with its stage position
    when available."""
    VAR_119 = VAR_8.getObject("Well", VAR_34)
    VAR_129 = getIntOrDefault(VAR_2, "run", None)
    if VAR_119 is None:
        return HttpJavascriptResponseServerError('""')
    VAR_120 = VAR_9.get("thumbprefix", "webgateway_render_thumbnail")

    def FUNC_65(VAR_6):
        # Thumbnail URL for one image id.
        return reverse(VAR_120, VAR_116=(VAR_6,))

    VAR_121 = {"thumbUrlPrefix": VAR_9.get("urlprefix", FUNC_65)}

    def FUNC_67(VAR_10):
        # Extract the well-sample {x, y} position with value + unit.
        VAR_213 = {}
        for VAR_30, VAR_108 in (["x", VAR_10.getPosX()], ["y", VAR_10.getPosY()]):
            if VAR_108 is not None:
                VAR_213[VAR_30] = {"value": VAR_108.getValue(), "unit": str(VAR_108.getUnit())}
        return VAR_213

    VAR_130 = []
    for ws in VAR_119.listChildren():
        # Skip well samples from other acquisition runs when ?run given.
        if (
            VAR_129 is not None
            and ws.plateAcquisition is not None
            and ws.plateAcquisition.id.val != VAR_129
        ):
            continue
        VAR_92 = ws.getImage()
        if VAR_92 is not None:
            VAR_282 = VAR_92.simpleMarshal(VAR_121=xtra)
            VAR_27 = FUNC_67(ws)
            if len(VAR_27.keys()) > 0:
                VAR_282["position"] = VAR_27
            VAR_130.append(VAR_282)
    return VAR_130
@login_required()
@FUNC_20
def FUNC_30(VAR_2, VAR_32, VAR_8=None, **VAR_9):
    """List simple-marshalled child datasets of project VAR_32 (childCount
    is not computed and reported as 0)."""
    VAR_131 = VAR_8.getObject("Project", VAR_32)
    if VAR_131 is None:
        return HttpJavascriptResponse("[]")
    return [VAR_30.simpleMarshal(VAR_121={"childCount": 0}) for VAR_30 in VAR_131.listChildren()]
@login_required()
@FUNC_20
def FUNC_31(VAR_2, VAR_34, VAR_8=None, **VAR_9):
    """Return the simple-marshalled detail of dataset VAR_34.

    NOTE(review): no None-check — a missing dataset raises AttributeError
    (turned into a JSON 500 by the FUNC_20 decorator).
    """
    VAR_132 = VAR_8.getObject("Dataset", VAR_34)
    return VAR_132.simpleMarshal()
@login_required()
@FUNC_20
def FUNC_32(VAR_2, VAR_8=None, **VAR_9):
    """List all projects visible on this connection as
    {id, name, description} dicts."""
    VAR_54 = []
    for VAR_133 in VAR_8.listProjects():
        VAR_54.append({"id": VAR_133.id, "name": VAR_133.name, "description": VAR_133.description or ""})
    return VAR_54
@login_required()
@FUNC_20
def FUNC_33(VAR_2, VAR_32, VAR_8=None, **VAR_9):
    """Return the simple-marshalled detail of project VAR_32.

    NOTE(review): no None-check — a missing project raises AttributeError
    (turned into a JSON 500 by the FUNC_20 decorator).
    """
    VAR_133 = VAR_8.getObject("Project", VAR_32)
    VAR_54 = VAR_133.simpleMarshal()
    return VAR_54
@FUNC_20
def FUNC_34(VAR_2, **VAR_9):
    """Build the list of configured "open with" viewer options from
    settings.OPEN_WITH, resolving URL names and script URLs.

    NOTE(review): `viewer["id"] = ow[0]` references the unresolved name
    `viewer` (obfuscation artifact for the dict VAR_214) — confirm against
    original source.
    """
    VAR_134 = settings.OPEN_WITH
    VAR_135 = []
    for ow in VAR_134:
        if len(ow) < 2:
            continue
        VAR_214 = {}
        viewer["id"] = ow[0]
        try:
            # Second element may be a URL name or a literal URL.
            VAR_214["url"] = reverse(ow[1])
        except NoReverseMatch:
            VAR_214["url"] = ow[1]
        try:
            # Optional third element carries extra viewer configuration.
            if len(ow) > 2:
                if "supported_objects" in ow[2]:
                    VAR_214["supported_objects"] = ow[2]["supported_objects"]
                if "target" in ow[2]:
                    VAR_214["target"] = ow[2]["target"]
                if "script_url" in ow[2]:
                    # Absolute URLs pass through; others resolve via static().
                    if ow[2]["script_url"].startswith("http"):
                        VAR_214["script_url"] = ow[2]["script_url"]
                    else:
                        VAR_214["script_url"] = static(ow[2]["script_url"])
                if "label" in ow[2]:
                    VAR_214["label"] = ow[2]["label"]
        except Exception:
            pass
        VAR_135.append(VAR_214)
    return {"open_with_options": VAR_135}
def FUNC_35(VAR_2):
    """Parse search options (text, ctx, grabData, parents, start, limit,
    key, author) from request GET params; returns {} on any error.

    NOTE(review): uses Python-2 `unicode` — presumably this file targets
    py2 or defines a shim elsewhere; confirm.
    """
    try:
        VAR_53 = VAR_2.GET
        VAR_136 = {
            "search": unicode(VAR_53.get("text", "")).encode("utf8"),
            "ctx": VAR_53.get("ctx", ""),
            "grabData": not not VAR_53.get("grabData", False),
            "parents": not not bool(VAR_53.get("parents", False)),
            "start": int(VAR_53.get("start", 0)),
            "limit": int(VAR_53.get("limit", 0)),
            "key": VAR_53.get("key", None),
        }
        VAR_215 = VAR_53.get("author", "")
        if VAR_215:
            # Append an author clause to the search text.
            VAR_136["search"] += " VAR_215:" + VAR_215
        return VAR_136
    except Exception:
        VAR_1.error(traceback.format_exc())
        return {}
@TimeIt(logging.INFO)
@login_required()
@FUNC_20
def FUNC_36(VAR_2, VAR_8=None, **VAR_9):
    """Search server objects by text; with grabData+ctx=imgs returns full
    per-image metadata for the requested page, otherwise simple marshals.

    NOTE(review): `conn` and `xtra` inside the nested helper are unresolved
    obfuscation artifacts — confirm against original source.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_136 = FUNC_35(VAR_2)
    VAR_54 = []
    VAR_1.debug("searchObjects(%VAR_3)" % (VAR_136["search"]))

    def FUNC_65(VAR_6):
        # Thumbnail URL for one image id.
        return reverse("webgateway_render_thumbnail", VAR_116=(VAR_6,))

    VAR_121 = {"thumbUrlPrefix": VAR_9.get("urlprefix", FUNC_65)}
    try:
        if VAR_136["ctx"] == "imgs":
            VAR_283 = VAR_8.searchObjects(["image"], VAR_136["search"], VAR_8.SERVICE_OPTS)
        else:
            VAR_283 = VAR_8.searchObjects(None, VAR_136["search"], VAR_8.SERVICE_OPTS)
    except ApiUsageException:
        return HttpJavascriptResponseServerError('"parse exception"')

    def FUNC_68():
        # Marshal the search results; timed via timeit() below.
        VAR_54 = []
        if VAR_136["grabData"] and VAR_136["ctx"] == "imgs":
            # Page through [start, end) and fetch full image metadata.
            VAR_284 = min(VAR_136["start"], len(VAR_283) - 1)
            if VAR_136["limit"] == 0:
                VAR_317 = len(VAR_283)
            else:
                VAR_317 = min(len(VAR_283), VAR_284 + VAR_136["limit"])
            for VAR_212 in range(VAR_284, VAR_317):
                VAR_318 = VAR_283[VAR_212]
                try:
                    VAR_54.append(
                        FUNC_23(
                            VAR_2,
                            VAR_19,
                            VAR_6=VAR_318.id,
                            VAR_79=VAR_136["key"],
                            VAR_8=conn,
                            VAR_31=True,
                        )
                    )
                except AttributeError as VAR_30:
                    VAR_1.debug(
                        "(VAR_6 %VAR_212) ignoring Attribute Error: %s" % (VAR_318.id, str(VAR_30))
                    )
                    pass
                except omero.ServerError as VAR_30:
                    VAR_1.debug("(VAR_6 %VAR_212) ignoring Server Error: %s" % (VAR_318.id, str(VAR_30)))
            return VAR_54
        else:
            return [VAR_30.simpleMarshal(VAR_121=xtra, parents=VAR_136["parents"]) for VAR_30 in VAR_283]

    VAR_54 = timeit(FUNC_68)()
    VAR_1.debug(VAR_54)
    return VAR_54
@require_POST
@login_required()
def FUNC_37(VAR_2, VAR_6, VAR_8=None, **VAR_9):
    """Invalidate the cached data for image VAR_6 and re-prime its
    thumbnail; returns "true"/"false" (optionally JSONP-wrapped).

    NOTE(review): `server_id` and `conn` in the FUNC_13 call are unresolved
    obfuscation artifacts — confirm against original source.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_64 = FUNC_13(
        VAR_2, VAR_6, VAR_19=server_id, VAR_8=conn, VAR_20=True
    )
    if VAR_64 is None:
        VAR_139 = "false"
    else:
        VAR_58 = VAR_64[0]._conn.getEventContext().userId
        webgateway_cache.invalidateObject(VAR_19, VAR_58, VAR_64[0])
        # Regenerate the thumbnail so the cache is warm again.
        VAR_64[0].getThumbnail()
        VAR_139 = "true"
    if VAR_2.GET.get("callback", None):
        VAR_139 = "%VAR_3(%VAR_3)" % (VAR_2.GET["callback"], VAR_139)
    return HttpJavascriptResponse(VAR_139)
@login_required()
@FUNC_20
def FUNC_38(VAR_2, VAR_8=None, **VAR_9):
    """List lookup-table scripts available on the server, each with its
    index into the pre-rendered LUTS_IN_PNG strip (-1 if absent).

    NOTE(review): the sort key `lambda VAR_30: x["name"].lower()` references
    unresolved `x` (obfuscation artifact) — confirm against original source.
    """
    VAR_137 = VAR_8.getScriptService()
    VAR_138 = VAR_137.getScriptsByMimetype("text/VAR_30-lut")
    VAR_54 = []
    for lut in VAR_138:
        VAR_216 = lut.path.val + lut.name.val
        VAR_217 = LUTS_IN_PNG.index(VAR_216) if VAR_216 in LUTS_IN_PNG else -1
        VAR_54.append(
            {
                "id": lut.id.val,
                "path": lut.path.val,
                "name": lut.name.val,
                "size": unwrap(lut.size),
                "png_index": VAR_217,
            }
        )
    VAR_54.sort(VAR_79=lambda VAR_30: x["name"].lower())
    return {"luts": VAR_54, "png_luts": LUTS_IN_PNG}
@login_required()
def FUNC_39(VAR_2, VAR_6, VAR_8=None, **VAR_9):
    """Return (as JSON/JSONP) the ids of images in the same project that are
    rendering-compatible with image VAR_6: same pixels type, same channel
    count, same sorted channel labels."""
    VAR_139 = "false"
    VAR_53 = VAR_2.GET
    if VAR_8 is None:
        VAR_92 = None
    else:
        VAR_92 = VAR_8.getObject("Image", VAR_6)
    if VAR_92 is not None:
        VAR_115 = []
        # Gather all images across the project's datasets.
        for VAR_132 in VAR_92.getProject().listChildren():
            VAR_115.extend(VAR_132.listChildren())
        VAR_218 = VAR_92.getPrimaryPixels().getPixelsType().getValue()
        VAR_219 = VAR_92.getSizeC()
        VAR_220 = [VAR_30.getLabel() for VAR_30 in VAR_92.getChannels()]
        VAR_220.sort()

        def FUNC_76(VAR_212):
            # Compatibility predicate: exclude self, mismatched pixels
            # type / channel count / channel labels.
            if VAR_178(VAR_212.getId()) == VAR_178(VAR_6):
                return False
            VAR_285 = VAR_212.getPrimaryPixels()
            if (
                VAR_285 is None
                or VAR_212.getPrimaryPixels().getPixelsType().getValue() != VAR_218
                or VAR_212.getSizeC() != VAR_219
            ):
                return False
            VAR_286 = [VAR_30.getLabel() for VAR_30 in VAR_212.getChannels()]
            VAR_286.sort()
            if VAR_286 != VAR_220:
                return False
            return True

        VAR_115 = filter(FUNC_76, VAR_115)
        VAR_139 = json.dumps([VAR_30.getId() for VAR_30 in VAR_115])
    if VAR_53.get("callback", None):
        VAR_139 = "%VAR_3(%VAR_3)" % (VAR_53["callback"], VAR_139)
    return HttpJavascriptResponse(VAR_139)
@require_POST
@login_required()
@FUNC_20
def FUNC_40(VAR_2, VAR_35=False, VAR_8=None, **VAR_9):
    """Reset rendering settings for the posted `toids` within container type
    `to_type`; VAR_35 selects reset-by-owner vs plain reset.

    NOTE(review): `VAR_141 = to_type.title()` references the unresolved name
    `to_type` (obfuscation artifact); the list comprehension loop variable
    `id` shadows the builtin. Confirm against original source.
    """
    VAR_53 = VAR_2.POST
    VAR_140 = VAR_53.getlist("toids")
    VAR_141 = str(VAR_53.get("to_type", "image"))
    VAR_141 = to_type.title()
    if VAR_141 == "Acquisition":
        VAR_141 = "PlateAcquisition"
    if len(VAR_140) == 0:
        raise Http404(
            "Need to specify objects in VAR_2, E.g."
            " ?totype=VAR_128&VAR_140=1&VAR_140=2"
        )
    VAR_140 = [int(id) for id in VAR_140]
    VAR_142 = VAR_8.getRenderingSettingsService()
    # Query across all groups, then pin to the target object's group.
    VAR_8.SERVICE_OPTS.setOmeroGroup("-1")
    VAR_143 = VAR_8.getObject(VAR_141, VAR_140[0])
    if VAR_143 is not None:
        VAR_221 = VAR_143.getDetails().group.id.val
        VAR_8.SERVICE_OPTS.setOmeroGroup(VAR_221)
    if VAR_35:
        VAR_54 = VAR_142.resetDefaultsByOwnerInSet(VAR_141, VAR_140, VAR_8.SERVICE_OPTS)
    else:
        VAR_54 = VAR_142.resetDefaultsInSet(VAR_141, VAR_140, VAR_8.SERVICE_OPTS)
    return VAR_54
@login_required()
@FUNC_20
def FUNC_41(VAR_2, VAR_8=None, **VAR_9):
    """Copy rendering settings from a source image (?fromid or session) to
    the posted `toids` of type `to_type`.

    GET stashes the rdef/fromid in the session; POST applies the settings
    (optionally applying an explicit rdef to the source image first, then
    restoring it) and invalidates caches for updated images.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_139 = False
    VAR_144 = VAR_2.GET.get("fromid", None)
    VAR_140 = VAR_2.POST.getlist("toids")
    VAR_141 = str(VAR_2.POST.get("to_type", "image"))
    VAR_145 = None
    if VAR_141 not in ("dataset", "plate", "acquisition"):
        VAR_141 = "Image"  # default is VAR_15
    if VAR_144 is not None and len(VAR_140) == 0:
        # GET with only fromid: remember the source image in the session.
        VAR_2.session.modified = True
        VAR_2.session["fromid"] = VAR_144
        if VAR_2.session.get("rdef") is not None:
            del VAR_2.session["rdef"]
        return True
    VAR_53 = VAR_2.GET or VAR_2.POST
    if VAR_53.get("c") is not None:
        # Build an explicit rdef dict from request parameters.
        VAR_145 = {"c": str(VAR_53.get("c"))}  # VAR_50
        if VAR_53.get("maps"):
            try:
                VAR_145["maps"] = json.loads(VAR_53.get("maps"))
            except Exception:
                pass
        if VAR_53.get("pixel_range"):
            VAR_145["pixel_range"] = str(VAR_53.get("pixel_range"))
        if VAR_53.get("m"):
            VAR_145["m"] = str(VAR_53.get("m"))  # model (grey)
        if VAR_53.get("z"):
            VAR_145["z"] = str(VAR_53.get("z"))  # VAR_22 & VAR_23 VAR_27
        if VAR_53.get("t"):
            VAR_145["t"] = str(VAR_53.get("t"))
        VAR_36 = VAR_2.GET.get("imageId", VAR_2.POST.get("imageId", None))
        if VAR_36:
            VAR_145["imageId"] = int(VAR_36)
        if VAR_2.method == "GET":
            # GET: stash the rdef in the session for a later POST.
            VAR_2.session.modified = True
            VAR_2.session["rdef"] = VAR_145
            if VAR_2.session.get("fromid") is not None:
                del VAR_2.session["fromid"]
            return True
    if VAR_144 is None:
        VAR_144 = VAR_2.session.get("fromid", None)

    def FUNC_69(VAR_15):
        # Snapshot the image's current rendering settings as an rdef dict
        # so they can be restored after a temporary apply.
        VAR_54 = {}
        VAR_222 = []
        VAR_223 = []
        for VAR_212, ch in enumerate(VAR_15.getChannels()):
            VAR_287 = "" if ch.isActive() else "-"
            VAR_288 = ch.getWindowStart()
            VAR_289 = ch.getWindowEnd()
            VAR_68 = ch.getLut()
            VAR_223.append({"inverted": {"enabled": ch.isInverted()}})
            if not VAR_68 or len(VAR_68) == 0:
                VAR_68 = ch.getColor().getHtml()
            VAR_222.append("%VAR_3%VAR_3|%VAR_3:%VAR_3$%s" % (VAR_287, VAR_212 + 1, VAR_288, VAR_289, VAR_68))
        VAR_54["c"] = ",".join(VAR_222)
        VAR_54["maps"] = VAR_223
        VAR_54["m"] = "g" if VAR_15.isGreyscaleRenderingModel() else "c"
        VAR_54["z"] = VAR_15.getDefaultZ() + 1
        VAR_54["t"] = VAR_15.getDefaultT() + 1
        return VAR_54

    def FUNC_70(VAR_15, VAR_145):
        # Apply an rdef dict to an image and save it as defaults.
        VAR_107 = FUNC_12(VAR_145, "inverted", VAR_15.getSizeC())
        VAR_50, VAR_51, VAR_52 = FUNC_2(VAR_145["c"])
        VAR_15.setActiveChannels(VAR_50, VAR_51, VAR_52, VAR_107)
        if VAR_145["m"] == "g":
            VAR_15.setGreyscaleRenderingModel()
        else:
            VAR_15.setColorRenderingModel()
        if "z" in VAR_145:
            VAR_15._re.setDefaultZ(VAR_178(VAR_145["z"]) - 1)
        if "t" in VAR_145:
            VAR_15._re.setDefaultT(VAR_178(VAR_145["t"]) - 1)
        VAR_15.saveDefaults()

    if VAR_145 is None:
        VAR_145 = VAR_2.session.get("rdef")
    if VAR_2.method == "POST":
        VAR_224 = None
        VAR_225 = None
        if VAR_144 is None:
            # No source image: temporarily apply the rdef to the rdef's
            # imageId and use it as the copy source.
            if VAR_145 is not None and len(VAR_140) > 0:
                VAR_225 = VAR_8.getObject("Image", VAR_145["imageId"])
                if VAR_225 is not None:
                    VAR_224 = FUNC_69(VAR_225)
                    FUNC_70(VAR_225, VAR_145)
                    VAR_144 = VAR_225.getId()
        try:
            VAR_144 = VAR_178(VAR_144)
            VAR_140 = [VAR_178(VAR_30) for VAR_30 in VAR_140]
        except TypeError:
            VAR_144 = None
        except ValueError:
            VAR_144 = None
        if VAR_144 is not None and len(VAR_140) > 0:
            VAR_290 = VAR_8.getObject("Image", VAR_144)
            VAR_291 = VAR_290.getOwner().getId()
            VAR_139 = VAR_8.applySettingsToSet(VAR_144, VAR_141, VAR_140)
            if VAR_139 and True in VAR_139:
                # Invalidate caches for every image that was updated.
                for VAR_6 in VAR_139[True]:
                    VAR_92 = VAR_8.getObject("Image", VAR_6)
                    VAR_92 is not None and webgateway_cache.invalidateObject(
                        VAR_19, VAR_291, VAR_92
                    )
            if VAR_141 == "Image" and VAR_144 not in VAR_140:
                # Restore the source image's original settings.
                if VAR_224 is not None and VAR_225 is not None:
                    FUNC_70(VAR_225, VAR_224)
        return VAR_139
    else:
        return HttpResponseNotAllowed(["POST"])
@login_required()
@FUNC_20
def FUNC_42(VAR_2, VAR_8=None, **VAR_9):
    """Return {"rdef": ...} — the rendering definition stashed in the
    session, or one reconstructed from the session's `fromid` image.

    NOTE(review): `VAR_2=request` keyword value is an unresolved
    obfuscation artifact — confirm against original source.
    """
    VAR_145 = VAR_2.session.get("rdef")
    VAR_15 = None
    if VAR_145 is None:
        VAR_144 = VAR_2.session.get("fromid", None)
        if VAR_144 is not None:
            VAR_15 = VAR_8.getObject("Image", VAR_144)
        if VAR_15 is not None:
            # Rebuild the rdef string from the marshalled channel data.
            VAR_54 = imageMarshal(VAR_15, VAR_2=request)
            VAR_222 = []
            VAR_223 = []
            for VAR_212, ch in enumerate(VAR_54["channels"]):
                VAR_287 = ch["active"] and str(VAR_212 + 1) or "-%s" % (VAR_212 + 1)
                VAR_68 = ch.get("lut") or ch["color"]
                VAR_222.append(
                    "%VAR_3|%VAR_3:%VAR_3$%s"
                    % (VAR_287, ch["window"]["start"], ch["window"]["end"], VAR_68)
                )
                VAR_223.append(
                    {
                        "inverted": {"enabled": ch["inverted"]},
                        "quantization": {
                            "coefficient": ch["coefficient"],
                            "family": ch["family"],
                        },
                    }
                )
            VAR_145 = {
                "c": (",".join(VAR_222)),
                "m": VAR_54["rdefs"]["model"],
                "pixel_range": "%VAR_3:%s" % (VAR_54["pixel_range"][0], VAR_54["pixel_range"][1]),
                "maps": VAR_223,
            }
    return {"rdef": VAR_145}
@login_required()
def FUNC_43(VAR_2, VAR_6, VAR_8=None, **VAR_9):
    """Render the full image-viewer HTML page for image VAR_6, including
    optional OpenGraph/Twitter sharing metadata per server.

    NOTE(review): `build_year` in the template context is an unresolved
    obfuscation artifact — confirm against original source.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_146 = Server.get(VAR_19).server
    VAR_147 = FUNC_3(VAR_2)
    VAR_55 = VAR_2.session.get("server_settings", {}).get("viewer", {})
    VAR_148 = VAR_55.get("interpolate_pixels", True)
    VAR_149 = VAR_55.get("roi_limit", 2000)
    try:
        VAR_15 = VAR_8.getObject("Image", VAR_6)
        if VAR_15 is None:
            VAR_1.debug("(VAR_167)Image %VAR_3 not found..." % (str(VAR_6)))
            raise Http404
        VAR_226 = None
        VAR_227 = None
        VAR_228 = None
        VAR_229 = None
        # Sharing metadata is looked up per server name when configured.
        if hasattr(settings, "SHARING_OPENGRAPH"):
            VAR_226 = settings.SHARING_OPENGRAPH.get(VAR_146)
            VAR_1.debug("Open Graph VAR_308: %s", VAR_226)
        if hasattr(settings, "SHARING_TWITTER"):
            VAR_227 = settings.SHARING_TWITTER.get(VAR_146)
            VAR_1.debug("Twitter VAR_308: %s", VAR_227)
        if VAR_226 or VAR_227:
            # Absolute URLs for the preview thumbnail and this viewer page.
            VAR_292 = {"iid": VAR_6}
            VAR_120 = VAR_9.get("thumbprefix", "webgateway_render_thumbnail")
            VAR_228 = VAR_2.build_absolute_uri(reverse(VAR_120, VAR_9=VAR_292))
            VAR_229 = VAR_2.build_absolute_uri(
                reverse("webgateway_full_viewer", VAR_9=VAR_292)
            )
        VAR_213 = {
            "blitzcon": VAR_8,
            "image": VAR_15,
            "opts": VAR_147,
            "interpolate": VAR_148,
            "build_year": build_year,
            "roiLimit": VAR_149,
            "roiCount": VAR_15.getROICount(),
            "viewport_server": VAR_9.get(
                "viewport_server",
                reverse("webgateway"),
            ).rstrip("/"),
            "opengraph": VAR_226,
            "twitter": VAR_227,
            "image_preview": VAR_228,
            "page_url": VAR_229,
            "object": "image:%i" % int(VAR_6),
        }
        VAR_230 = VAR_9.get("template", "webgateway/viewport/omero_image.html")
        VAR_61 = render(VAR_2, VAR_230, VAR_213)
    except omero.SecurityViolation:
        VAR_1.warn("SecurityViolation in Image:%s", VAR_6)
        VAR_1.warn(traceback.format_exc())
        raise Http404
    return HttpResponse(VAR_61)
@login_required()
def FUNC_44(VAR_2, VAR_6=None, VAR_8=None, **VAR_9):
    """Download rendered image(s) as jpeg/png/tif. Images come from the url
    id, ?image=... ids, or ?well=... ids; a single image streams directly,
    multiple images are rendered into a temp dir and zipped.

    NOTE(review): in the nested helper the while-loop assigns unresolved
    `imgName` instead of the candidate path, so duplicate names would loop
    without renaming — obfuscation artifact; confirm against original.
    """
    VAR_114 = VAR_2.GET.get("format", "png")
    if VAR_114 not in ("jpeg", "png", "tif"):
        VAR_114 = "png"
    VAR_150 = []
    VAR_151 = []
    if VAR_6 is None:
        VAR_150 = VAR_2.GET.getlist("image")
        if len(VAR_150) == 0:
            VAR_151 = VAR_2.GET.getlist("well")
            if len(VAR_151) == 0:
                return HttpResponseServerError(
                    "No VAR_152 or VAR_153 specified in VAR_2."
                    " Use ?VAR_15=123 or ?VAR_119=123"
                )
    else:
        VAR_150 = [VAR_6]
    VAR_152 = []
    if VAR_150:
        VAR_152 = list(VAR_8.getObjects("Image", VAR_150))
    elif VAR_151:
        # Resolve wells to images via the ?index-th well sample.
        try:
            VAR_319 = int(VAR_2.GET.get("index", 0))
        except ValueError:
            VAR_319 = 0
        for VAR_10 in VAR_8.getObjects("Well", VAR_151):
            VAR_152.append(VAR_10.getWellSample(VAR_319).image())
    if len(VAR_152) == 0:
        VAR_231 = "Cannot download as %VAR_3. Images (ids: %VAR_3) not found." % (VAR_114, VAR_150)
        VAR_1.debug(VAR_231)
        return HttpResponseServerError(VAR_231)
    if len(VAR_152) == 1:
        # Single image: stream the rendered JPEG as an attachment.
        VAR_60 = VAR_152[0].renderJpeg()
        if VAR_60 is None:
            raise Http404
        VAR_61 = HttpResponse(VAR_60, VAR_279="image/jpeg")
        VAR_61["Content-Length"] = len(VAR_60)
        VAR_61["Content-Disposition"] = "attachment; filename=%VAR_3.jpg" % (
            VAR_152[0].getName().replace(" ", "_")
        )
    else:
        VAR_232 = tempfile.NamedTemporaryFile(suffix=".download_as")

        def FUNC_77(VAR_233, VAR_234, VAR_235):
            # Build a target path "<folder>/<name>.<ext>" for one image.
            VAR_17 = os.path.basename(VAR_233)
            VAR_293 = "%VAR_3.%s" % (VAR_17, VAR_234)
            VAR_293 = os.path.join(VAR_235, VAR_293)
            VAR_212 = 1
            VAR_17 = VAR_293[: -(len(VAR_234) + 1)]
            while os.path.exists(VAR_293):
                imgName = "%s_(%VAR_213).%s" % (VAR_17, VAR_212, VAR_234)
                VAR_212 += 1
            return VAR_293

        try:
            VAR_294 = tempfile.mkdtemp()
            VAR_1.debug("download_as dir: %s" % VAR_294)
            try:
                # Render each image to a file in the temp dir.
                for VAR_92 in VAR_152:
                    VAR_22 = VAR_23 = None
                    try:
                        VAR_331 = VAR_92.renderImage(VAR_22, VAR_23)
                        VAR_332 = FUNC_77(VAR_92.getName(), VAR_114, VAR_294)
                        VAR_331.save(VAR_332)
                    finally:
                        VAR_92._re.close()
                # Zip everything rendered into the response tempfile.
                VAR_320 = zipfile.ZipFile(VAR_232, "w", zipfile.ZIP_DEFLATED)
                try:
                    VAR_324 = os.path.join(VAR_294, "*")
                    for VAR_17 in glob.glob(VAR_324):
                        VAR_320.write(VAR_17, os.path.basename(VAR_17))
                finally:
                    VAR_320.close()
            finally:
                shutil.rmtree(VAR_294, ignore_errors=True)
            VAR_240 = VAR_2.GET.get("zipname", "Download_as_%s" % VAR_114)
            VAR_240 = VAR_240.replace(" ", "_")
            if not VAR_240.endswith(".zip"):
                VAR_240 = "%VAR_3.zip" % VAR_240
            VAR_61 = StreamingHttpResponse(FileWrapper(VAR_232))
            VAR_61["Content-Length"] = VAR_232.tell()
            VAR_61["Content-Disposition"] = "attachment; filename=%s" % VAR_240
            VAR_232.seek(0)
        except Exception:
            VAR_232.close()
            VAR_321 = traceback.format_exc()
            VAR_1.error(VAR_321)
            return HttpResponseServerError("Cannot download file (id:%VAR_3)" % VAR_6)
    VAR_61["Content-Type"] = "application/force-download"
    return VAR_61
@login_required(doConnectionCleanup=False)
def FUNC_45(VAR_2, VAR_6=None, VAR_8=None, **VAR_9):
    """Download the original archived file(s) behind image(s)/well(s).

    One file streams directly; several files are zipped (subject to the
    MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE limit). Download permissions are
    checked on wells/images first. Uses ConnCleaningHttpResponse because
    connection cleanup is deferred while streaming.
    """
    VAR_150 = []
    VAR_151 = []
    VAR_150 = VAR_2.GET.getlist("image")
    VAR_151 = VAR_2.GET.getlist("well")
    if VAR_6 is None:
        if len(VAR_150) == 0 and len(VAR_151) == 0:
            return HttpResponseServerError(
                "No VAR_152 or VAR_153 specified in VAR_2."
                " Use ?VAR_15=123 or ?VAR_119=123"
            )
    else:
        VAR_150 = [VAR_6]
    VAR_152 = list()
    VAR_153 = list()
    if VAR_150:
        VAR_152 = list(VAR_8.getObjects("Image", VAR_150))
    elif VAR_151:
        # Resolve wells to images via the ?index-th well sample.
        try:
            VAR_319 = int(VAR_2.GET.get("index", 0))
        except ValueError:
            VAR_319 = 0
        VAR_153 = VAR_8.getObjects("Well", VAR_151)
        for VAR_10 in VAR_153:
            VAR_152.append(VAR_10.getWellSample(VAR_319).image())
    if len(VAR_152) == 0:
        VAR_236 = (
            "Cannot download archived file because Images not "
            "found (ids: %VAR_3)" % (VAR_150)
        )
        VAR_1.debug(VAR_236)
        return HttpResponseServerError(VAR_236)
    # Permission checks: wells directly, images via their parent well.
    for ob in VAR_153:
        if hasattr(ob, "canDownload"):
            if not ob.canDownload():
                return HttpResponseNotFound()
    for ob in VAR_152:
        VAR_119 = None
        try:
            VAR_119 = ob.getParent().getParent()
        except Exception:
            if hasattr(ob, "canDownload"):
                if not ob.canDownload():
                    return HttpResponseNotFound()
        else:
            if VAR_119 and isinstance(VAR_119, omero.gateway.WellWrapper):
                if hasattr(VAR_119, "canDownload"):
                    if not VAR_119.canDownload():
                        return HttpResponseNotFound()
    # Collect the distinct original files across all images.
    VAR_154 = {}
    for VAR_15 in VAR_152:
        for VAR_28 in VAR_15.getImportedImageFiles():
            VAR_154[VAR_28.getId()] = VAR_28
    VAR_155 = list(VAR_154.values())
    if len(VAR_155) == 0:
        VAR_236 = (
            "Tried downloading archived VAR_155 from VAR_15 with no" " VAR_155 archived."
        )
        VAR_1.debug(VAR_236)
        return HttpResponseServerError(VAR_236)
    if len(VAR_155) == 1:
        # Single file: stream it in chunks.
        VAR_237 = VAR_155[0]
        VAR_61 = ConnCleaningHttpResponse(
            VAR_237.getFileInChunks(buf=settings.CHUNK_SIZE)
        )
        VAR_61.conn = VAR_8
        VAR_61["Content-Length"] = VAR_237.getSize()
        VAR_238 = VAR_237.getName().replace(" ", "_").replace(",", ".")
        VAR_61["Content-Disposition"] = "attachment; filename=%s" % (VAR_238)
    else:
        # Multiple files: enforce the zip size cap, then zip to a tempfile.
        VAR_239 = sum(VAR_28.size for VAR_28 in VAR_155)
        if VAR_239 > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
            VAR_236 = (
                "Total VAR_7 of VAR_155 %VAR_213 is larger than %VAR_213. "
                "Try requesting fewer VAR_155."
                % (VAR_239, settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE)
            )
            VAR_1.warn(VAR_236)
            return HttpResponseForbidden(VAR_236)
        VAR_232 = tempfile.NamedTemporaryFile(suffix=".archive")
        VAR_240 = VAR_2.GET.get("zipname", VAR_15.getName())
        try:
            VAR_240 = zip_archived_files(VAR_152, VAR_232, VAR_240, buf=settings.CHUNK_SIZE)
            VAR_295 = FileWrapper(VAR_232)
            VAR_61 = ConnCleaningHttpResponse(VAR_295)
            VAR_61.conn = VAR_8
            VAR_61["Content-Length"] = VAR_232.tell()
            VAR_61["Content-Disposition"] = "attachment; filename=%s" % VAR_240
            VAR_232.seek(0)
        except Exception:
            VAR_232.close()
            VAR_236 = "Cannot download file (id:%VAR_3)" % (VAR_6)
            VAR_1.error(VAR_236, exc_info=True)
            return HttpResponseServerError(VAR_236)
    VAR_61["Content-Type"] = "application/force-download"
    return VAR_61
@login_required()
@FUNC_20
def FUNC_46(VAR_2, VAR_6, VAR_8=None, **VAR_9):
    # Return the import paths of an image's original files: both the
    # server-side (managed repository) paths and the client-side paths
    # recorded at import time. Serialized to JSON by the FUNC_20 decorator.
    VAR_15 = VAR_8.getObject("Image", VAR_6)
    if VAR_15 is None:
        raise Http404
    VAR_156 = VAR_15.getImportedImageFilePaths()
    return {"repo": VAR_156["server_paths"], "client": VAR_156["client_paths"]}
@login_required()
@FUNC_20
def FUNC_47(VAR_2, VAR_13, VAR_14, VAR_8=None, **VAR_9):
    """Marshal one shape (id VAR_14) of ROI VAR_13 as JSON; 404 if absent."""
    VAR_13 = int(VAR_13)
    VAR_14 = int(VAR_14)
    # BUG FIX: "%VAR_213" is an invalid %-format spec ("%V...") and would
    # raise ValueError; the query interpolates two ints, so restore "%d".
    VAR_65 = VAR_8.getQueryService().findByQuery(
        "select VAR_65 from Roi as VAR_241 "
        "join VAR_241.shapes as VAR_65 "
        "where VAR_241.id = %d and VAR_65.id = %d" % (VAR_13, VAR_14),
        None,
    )
    VAR_1.debug("Shape: %r" % VAR_65)
    if VAR_65 is None:
        VAR_1.debug("No such VAR_65: %r" % VAR_14)
        raise Http404
    return JsonResponse(shapeMarshal(VAR_65))
@login_required()
@FUNC_20
def FUNC_48(VAR_2, VAR_36, VAR_8=None, **VAR_9):
    """Marshal all ROIs of an image (with their shapes) as a JSON list."""
    VAR_157 = []
    VAR_158 = VAR_8.getRoiService()
    VAR_62 = VAR_158.findByImage(VAR_178(VAR_36), None, VAR_8.SERVICE_OPTS)
    for VAR_53 in VAR_62.rois:
        VAR_241 = {}
        # BUG FIX: was `roi["id"] = ...` — `roi` is undefined; the dict is VAR_241.
        VAR_241["id"] = VAR_53.getId().getValue()
        VAR_63 = []
        for VAR_3 in VAR_53.copyShapes():
            if VAR_3 is None:  # seems possible in some situations
                continue
            VAR_63.append(shapeMarshal(VAR_3))
        # Sort shapes by (theZ, theT); absent planes default to -1 and sort first.
        # BUG FIX: list.sort's keyword is `key`, not `VAR_79`.
        VAR_63.sort(key=lambda VAR_30: "%03d%03d" % (VAR_30.get("theZ", -1), VAR_30.get("theT", -1)))
        VAR_241["shapes"] = VAR_63
        VAR_157.append(VAR_241)
    # BUG FIX: key lambda referenced undefined `x`; the lambda arg is VAR_30.
    VAR_157.sort(key=lambda VAR_30: VAR_30["id"])
    return VAR_157
@login_required()
def FUNC_49(VAR_2, VAR_6, VAR_37, VAR_8=None, **VAR_9):
    """Return histogram data (JSON) for one channel of one plane of an image.

    Query params: theZ, theT (plane, default 0) and bins (default 256).
    Refuses images larger than the configured max plane size.
    """
    VAR_15 = VAR_8.getObject("Image", VAR_6)
    if VAR_15 is None:
        raise Http404
    VAR_159, VAR_160 = VAR_8.getMaxPlaneSize()
    VAR_86 = VAR_15.getSizeX()
    VAR_87 = VAR_15.getSizeY()
    if (VAR_86 * VAR_87) > (VAR_159 * VAR_160):
        # BUG FIX: "%VAR_3" is an invalid %-format spec; restore "%s".
        VAR_231 = "Histogram not supported for 'big' VAR_152 (over %s * %s pixels)" % (
            VAR_159,
            VAR_160,
        )
        return JsonResponse({"error": VAR_231})
    VAR_74 = int(VAR_2.GET.get("theZ", 0))
    VAR_73 = int(VAR_2.GET.get("theT", 0))
    VAR_37 = int(VAR_37)
    VAR_161 = int(VAR_2.GET.get("bins", 256))
    # BUG FIX: keyword values `theZ`/`theT` were undefined names; the gateway's
    # getHistogram keywords are theZ/theT and the locals are VAR_74/VAR_73.
    VAR_162 = VAR_15.getHistogram([VAR_37], VAR_161, theZ=VAR_74, theT=VAR_73)
    VAR_163 = VAR_162[VAR_37]
    return JsonResponse({"data": VAR_163})
@login_required(FUNC_58=True)
@FUNC_20
def FUNC_50(VAR_2, VAR_38, VAR_8=None, **VAR_9):
    """Switch the current session to act as user VAR_38 (admin-only "su").

    POST performs the switch and stores the new connector in the session;
    GET renders a confirmation form.
    """
    # NOTE(review): `FUNC_58=True` looks like a mangled `isAdmin=True`
    # decorator kwarg — confirm against omeroweb.decorators.login_required.
    if VAR_2.method == "POST":
        VAR_8.setGroupNameForSession("system")
        VAR_175 = VAR_2.session["connector"]
        VAR_175 = Connector(VAR_175.server_id, VAR_175.is_secure)
        VAR_242 = VAR_8.getSessionService().getSession(VAR_8._sessionUuid)
        VAR_243 = VAR_242.getTimeToIdle().val
        # BUG FIX: was `VAR_243=ttl` — `ttl` is undefined; pass the session's
        # time-to-idle value as suConn's `ttl` keyword.
        VAR_175.omero_session_key = VAR_8.suConn(VAR_38, ttl=VAR_243)._sessionUuid
        VAR_2.session["connector"] = VAR_175
        VAR_8.revertGroupForSession()
        VAR_8.close()
        return True
    else:
        VAR_244 = {
            # BUG FIX: was `VAR_116=[VAR_38]` — django's reverse() keyword is `args`.
            "url": reverse("webgateway_su", args=[VAR_38]),
            "submit": "Do you want to FUNC_50 to %s" % VAR_38,
        }
        VAR_230 = "webgateway/base/includes/post_form.html"
        return render(VAR_2, VAR_230, VAR_244)
def FUNC_51(VAR_2, VAR_39, VAR_40, VAR_8=None, **VAR_9):
    # Deprecated alias kept for backwards compatibility; delegates to FUNC_52.
    warnings.warn("Deprecated. Use FUNC_52()", DeprecationWarning)
    return FUNC_52(VAR_2, VAR_39, VAR_40, VAR_8, **VAR_9)
def FUNC_52(VAR_2, VAR_39, VAR_40, VAR_8=None, **VAR_9):
    """List bulk-annotation (OMERO.tables) file annotations linked to an object.

    VAR_39 is a dot-separated object path (e.g. "Plate" or "Well.plate") and
    VAR_40 the id of the last object in that path. Returns dict(VAR_162=[rows]),
    one row per linked FileAnnotation, newest-file lookups done by FUNC_55.
    """
    VAR_164 = VAR_8.getQueryService()
    # BUG FIX: was `objtype.split(...)` — `objtype` is undefined; the parameter is VAR_39.
    VAR_39 = VAR_39.split(".")
    VAR_66 = omero.sys.ParametersI()
    VAR_66.addId(VAR_40)
    VAR_66.addString("ns", NSBULKANNOTATIONS)
    VAR_66.addString("mt", "OMERO.tables")
    # BUG FIX: format specs below were mangled ("%VAR_213"/"%VAR_3" are invalid)
    # and the join alias must be "obj" to match the literal "obj0" select alias.
    VAR_43 = "select obj0 from %s obj0\n" % VAR_39[0]
    for VAR_212, VAR_23 in enumerate(VAR_39[1:]):
        VAR_43 += "join fetch obj%d.%s obj%d\n" % (VAR_212, VAR_23, VAR_212 + 1)
    VAR_43 += """
        left outer join fetch obj0.annotationLinks VAR_165
        left outer join fetch VAR_165.child as VAR_28
        left outer join fetch VAR_165.parent
        left outer join fetch VAR_28.file
        join fetch VAR_165.details.owner
        join fetch VAR_165.details.creationEvent
        where obj%d.id=:id and
        (VAR_28.ns=:ns or VAR_28.file.mimetype=:mt)""" % (
        len(VAR_39) - 1
    )
    VAR_24 = VAR_8.createServiceOptsDict()
    VAR_24.setOmeroGroup("-1")
    try:
        VAR_245 = VAR_164.findAllByQuery(VAR_43, VAR_66, VAR_24)
    except omero.QueryException:
        # BUG FIX: was `VAR_43=query` — `query` is undefined.
        return dict(VAR_176="%s cannot be queried" % VAR_39, VAR_43=VAR_43)
    VAR_162 = []
    VAR_165 = [link for VAR_202 in VAR_245 for link in VAR_202.copyAnnotationLinks()]
    for link in VAR_165:
        VAR_246 = link.child
        if not isinstance(VAR_246, omero.model.FileAnnotation):
            continue
        VAR_247 = VAR_246.details.owner
        # BUG FIX: "%VAR_3 %s" is an invalid format string; restore "%s %s".
        VAR_248 = "%s %s" % (unwrap(VAR_247.firstName), unwrap(VAR_247.lastName))
        VAR_249 = link.details.owner
        VAR_250 = "%s %s" % (unwrap(VAR_249.firstName), unwrap(VAR_249.lastName))
        VAR_162.append(
            dict(
                id=VAR_246.id.val,
                file=VAR_246.file.id.val,
                parentType=VAR_39[0],
                parentId=link.parent.id.val,
                # BUG FIX: keys were mangled to VAR_247/VAR_249; FUNC_55 reads
                # row["owner"] and row["addedBy"], so restore those key names.
                owner=VAR_248,
                addedBy=VAR_250,
                addedOn=unwrap(link.details.creationEvent._time),
            )
        )
    # BUG FIX: was `dict(VAR_162=data)` — `data` is undefined.
    return dict(VAR_162=VAR_162)
VAR_41 = login_required()(FUNC_20(FUNC_52))
def FUNC_53(VAR_2, VAR_42, VAR_8=None, VAR_43=None, VAR_44=False, **VAR_9):
    """Run a query against an OMERO.table (original file id VAR_42).

    VAR_43 is the table condition ("*" selects every row, "name-123" is
    shorthand for "(name==123)"); offset/limit page the hits. When VAR_44
    ("lazy") is truthy the rows are returned as a generator and the table is
    left open for the caller to close.
    """
    if VAR_43 is None:
        VAR_43 = VAR_2.GET.get("query")
    if not VAR_43:
        return dict(VAR_176="Must specify VAR_43 parameter, use * to retrieve all")
    VAR_166 = VAR_2.GET.getlist("col_names")
    VAR_24 = VAR_8.createServiceOptsDict()
    VAR_24.setOmeroGroup("-1")
    VAR_53 = VAR_8.getSharedResources()
    VAR_23 = VAR_53.openTable(omero.model.OriginalFileI(VAR_42), VAR_24)
    if not VAR_23:
        # BUG FIX: "%VAR_3" is an invalid %-format spec; restore "%s".
        return dict(VAR_176="Table %s not found" % VAR_42)
    try:
        VAR_251 = VAR_23.getHeaders()
        VAR_252 = range(len(VAR_251))
        if VAR_166:
            # Restrict to the requested columns, preserving request order.
            VAR_296 = (
                [(VAR_212, j) for (VAR_212, j) in enumerate(VAR_251) if j.name in VAR_166]
                if VAR_166
                else [(VAR_212, j) for (VAR_212, j) in enumerate(VAR_251)]
            )
            # BUG FIX: was `cols = []` — a stale name; the selected header
            # list is VAR_251 (appended to below) and the indices VAR_252.
            VAR_251 = []
            VAR_252 = []
            for col_name in VAR_166:
                for (VAR_212, j) in VAR_296:
                    if col_name == j.name:
                        VAR_252.append(VAR_212)
                        VAR_251.append(j)
                        break
        VAR_253 = VAR_23.getNumberOfRows()
        VAR_254 = VAR_9.get("offset", 0)
        VAR_255 = VAR_9.get("limit", None)
        if not VAR_254:
            VAR_254 = int(VAR_2.GET.get("offset", 0))
        if not VAR_255:
            VAR_255 = (
                int(VAR_2.GET.get("limit"))
                if VAR_2.GET.get("limit") is not None
                else None
            )
        VAR_256 = VAR_254
        VAR_257 = VAR_9.get("limit", VAR_253)
        VAR_258 = min(VAR_253, VAR_256 + VAR_257)
        if VAR_43 == "*":
            VAR_297 = range(VAR_256, VAR_258)
            VAR_298 = VAR_253
        else:
            # BUG FIX: the regex and format string were mangled
            # (r"^(\VAR_10+)-(\VAR_213+)" / "(%VAR_3==%s)"); restore \w, \d, %s.
            VAR_299 = re.match(r"^(\w+)-(\d+)", VAR_43)
            if VAR_299:
                VAR_43 = "(%s==%s)" % (VAR_299.group(1), VAR_299.group(2))
            try:
                VAR_1.info(VAR_43)
                VAR_297 = VAR_23.getWhereList(VAR_43, None, 0, VAR_253, 1)
                VAR_298 = len(VAR_297)
                # BUG FIX: was `hits = VAR_297[...]` — a stale name; page the
                # hit list in place so the generator below sees only this page.
                VAR_297 = VAR_297[VAR_256:VAR_258]
            except Exception:
                return dict(VAR_176="Error executing VAR_43: %s" % VAR_43)

        def FUNC_78(VAR_259, VAR_11):
            # Yield the selected columns' values in batches of <=1000 rows.
            VAR_300 = 0
            VAR_301 = 1000
            while VAR_300 < len(VAR_11):
                VAR_301 = min(VAR_301, len(VAR_11) - VAR_300)
                # BUG FIX: was `VAR_11[VAR_300 : idx + VAR_301]` — `idx` is
                # undefined; the batch cursor is VAR_300.
                VAR_322 = VAR_259.slice(VAR_252, VAR_11[VAR_300 : VAR_300 + VAR_301])
                VAR_300 += VAR_301
                yield [
                    [col.values[row] for col in VAR_322.columns]
                    for row in range(0, len(VAR_322.rowNumbers))
                ]

        VAR_260 = FUNC_78(VAR_23, VAR_297)
        VAR_261 = {
            "data": {
                "column_types": [col.__class__.__name__ for col in VAR_251],
                "columns": [col.name for col in VAR_251],
            },
            "meta": {
                "rowCount": VAR_253,
                "totalCount": VAR_298,
                "limit": VAR_255,
                "offset": VAR_254,
            },
        }
        if not VAR_44:
            VAR_302 = []
            for VAR_253 in list(VAR_260):
                VAR_302.extend(VAR_253)
            VAR_261["data"]["rows"] = VAR_302
        else:
            VAR_261["data"]["lazy_rows"] = VAR_260
            VAR_261["table"] = VAR_23
        return VAR_261
    finally:
        if not VAR_44:
            VAR_23.close()
VAR_45 = login_required()(FUNC_20(FUNC_53))
def FUNC_54(VAR_2, VAR_42, VAR_8=None, VAR_43=None, VAR_44=False, **VAR_9):
    """Return metadata for an OMERO.table: per-column name/description/type
    plus the total row count. Leaves the table open when VAR_44 is truthy."""
    VAR_24 = VAR_8.createServiceOptsDict()
    VAR_24.setOmeroGroup("-1")
    VAR_53 = VAR_8.getSharedResources()
    VAR_23 = VAR_53.openTable(omero.model.OriginalFileI(VAR_42), VAR_24)
    if not VAR_23:
        # BUG FIX: "%VAR_3" is an invalid %-format spec; restore "%s".
        return dict(VAR_176="Table %s not found" % VAR_42)
    try:
        VAR_251 = VAR_23.getHeaders()
        VAR_253 = VAR_23.getNumberOfRows()
        VAR_261 = {
            "columns": [
                {
                    "name": col.name,
                    "description": col.description,
                    "type": col.__class__.__name__,
                }
                for col in VAR_251
            ],
            "totalCount": VAR_253,
        }
        return VAR_261
    finally:
        if not VAR_44:
            VAR_23.close()
VAR_46 = login_required()(FUNC_20(FUNC_54))
@login_required()
@FUNC_20
def FUNC_55(VAR_2, VAR_39, VAR_40, VAR_8=None, **VAR_9):
    """Find the newest readable bulk-annotation table on an object and run the
    table query against it, annotating the result with the table's metadata."""
    VAR_167 = FUNC_52(VAR_2, VAR_39, VAR_40, VAR_8, **VAR_9)
    if "error" in VAR_167:
        return VAR_167
    if len(VAR_167["data"]) < 1:
        return dict(VAR_176="Could not retrieve bulk VAR_41 table")
    VAR_168 = 0
    VAR_169 = None
    # Use the most recent file annotation that produces a successful query.
    # BUG FIX: sorted()'s keyword is `key` (not VAR_79) and the lambda
    # referenced an undefined `x`; its argument is VAR_30.
    VAR_170 = sorted(VAR_167["data"], key=lambda VAR_30: VAR_30["file"], reverse=True)
    VAR_171 = None
    for VAR_246 in VAR_170:
        VAR_171 = FUNC_53(VAR_2, VAR_246["file"], VAR_8, **VAR_9)
        if "error" not in VAR_171:
            VAR_169 = VAR_246
            VAR_168 = VAR_246["file"]
            break
    if VAR_169 is None:
        return dict(
            VAR_176=VAR_171.get(
                "error", "Could not retrieve matching bulk VAR_246 table"
            )
        )
    VAR_171["id"] = VAR_168
    VAR_171["annId"] = VAR_169["id"]
    VAR_171["owner"] = VAR_169["owner"]
    VAR_171["addedBy"] = VAR_169["addedBy"]
    VAR_171["parentType"] = VAR_169["parentType"]
    VAR_171["parentId"] = VAR_169["parentId"]
    VAR_171["addedOn"] = VAR_169["addedOn"]
    return VAR_171
class CLASS_1(View):
    """JSON login endpoint: POST username/password/server to open an OMERO
    session; GET returns 405 with usage instructions."""

    VAR_172 = LoginForm
    VAR_173 = "OMERO.webapi"

    @method_decorator(sensitive_post_parameters("password", "csrfmiddlewaretoken"))
    def FUNC_71(self, *VAR_116, **VAR_9):
        # Overridden only to hide sensitive POST params from debug reports.
        # NOTE(review): Django calls `dispatch`; confirm this method name.
        return super(CLASS_1, self).dispatch(*VAR_116, **VAR_9)

    def FUNC_72(self, VAR_2, VAR_174=None):
        # GET is not allowed; login data must be POSTed.
        # BUG FIX: JsonResponse's keyword is `status`, not VAR_315.
        return JsonResponse(
            {"message": ("POST only with VAR_304, VAR_305, " "server and csrftoken")},
            status=405,
        )

    def FUNC_73(self, VAR_2, VAR_8, VAR_175):
        # Success path: echo selected event-context fields as JSON.
        VAR_203 = VAR_8.getEventContext()
        VAR_24 = {}
        for VAR_167 in [
            "sessionId",
            "sessionUuid",
            "userId",
            "userName",
            "groupId",
            "groupName",
            "isAdmin",
            "eventId",
            "eventType",
            "memberOfGroups",
            "leaderOfGroups",
        ]:
            if hasattr(VAR_203, VAR_167):
                VAR_24[VAR_167] = getattr(VAR_203, VAR_167)
        return JsonResponse({"success": True, "eventContext": VAR_24})

    def FUNC_74(self, VAR_2, VAR_176=None, VAR_177=None):
        # Failure path: flatten form errors (or a default message) to JSON 403.
        if VAR_176 is None and VAR_177 is not None:
            VAR_303 = []
            for VAR_33 in VAR_177:
                for VAR_318 in VAR_33.errors:
                    # BUG FIX: "%VAR_3: %s" is an invalid format string; restore "%s: %s".
                    VAR_303.append("%s: %s" % (VAR_33.label, VAR_318))
            VAR_176 = " ".join(VAR_303)
        elif VAR_176 is None:
            VAR_176 = "Login failed. Reason unknown."
        # BUG FIX: JsonResponse's keyword is `status`, not VAR_315.
        return JsonResponse({"message": VAR_176}, status=403)

    def FUNC_75(self, VAR_2, VAR_174=None):
        # POST handler: validate the form, open a connection, stash the
        # connector in the session, otherwise build a diagnostic message.
        # NOTE(review): self.form_class / self.useragent / self.handle_logged_in /
        # self.handle_not_logged_in do not match the names defined on this
        # class (VAR_172 / VAR_173 / FUNC_73 / FUNC_74) — confirm the renames.
        VAR_176 = None
        VAR_177 = self.form_class(VAR_2.POST.copy())
        if VAR_177.is_valid():
            VAR_304 = VAR_177.cleaned_data["username"]
            VAR_305 = VAR_177.cleaned_data["password"]
            VAR_19 = VAR_177.cleaned_data["server"]
            VAR_306 = settings.SECURE
            VAR_175 = Connector(VAR_19, VAR_306)
            VAR_307 = True
            if settings.CHECK_VERSION:
                VAR_307 = VAR_175.check_version(self.useragent)
            if (
                VAR_19 is not None
                and VAR_304 is not None
                and VAR_305 is not None
                and VAR_307
            ):
                VAR_8 = VAR_175.create_connection(
                    self.useragent, VAR_304, VAR_305, userip=get_client_ip(VAR_2)
                )
                if VAR_8 is not None:
                    try:
                        VAR_2.session["connector"] = VAR_175
                        try:
                            VAR_335 = settings.UPGRADES_URL
                        except Exception:
                            VAR_335 = VAR_8.getUpgradesUrl()
                        upgradeCheck(url=VAR_335)
                        return self.handle_logged_in(VAR_2, VAR_8, VAR_175)
                    finally:
                        VAR_8.close(hard=False)
            # Not logged in: work out the most useful error message.
            if not VAR_175.is_server_up(self.useragent):
                VAR_176 = "Server is not responding," " please contact administrator."
            elif not settings.CHECK_VERSION:
                VAR_176 = (
                    "Connection not available, please check your"
                    " credentials and version compatibility."
                )
            else:
                if not VAR_307:
                    VAR_176 = (
                        "Client version does not VAR_299 server,"
                        " please contact administrator."
                    )
                else:
                    VAR_176 = settings.LOGIN_INCORRECT_CREDENTIALS_TEXT
        return self.handle_not_logged_in(VAR_2, VAR_176, VAR_177)
@login_required()
@FUNC_20
def FUNC_56(VAR_2, VAR_47=None, VAR_8=None, **VAR_9):
    """Return every rendering definition stored on an image, as JSON."""
    try:
        target = VAR_8.getObject("Image", VAR_47)
        if target is None:
            return {"error": "No VAR_15 with id " + str(VAR_47)}
        return {"rdefs": target.getAllRenderingDefs()}
    except Exception:
        # Any gateway failure is reported rather than raised.
        VAR_1.debug(traceback.format_exc())
        return {"error": "Failed to retrieve rdefs"}
|
import re
import json
import base64
import warnings
from functools import wraps

import omero
import omero.clients
from past.builtins import unicode
from django.http import (
    HttpResponse,
    HttpResponseBadRequest,
    HttpResponseServerError,
    JsonResponse,
    HttpResponseForbidden,
)
from django.http import (
    HttpResponseRedirect,
    HttpResponseNotAllowed,
    Http404,
    StreamingHttpResponse,
    HttpResponseNotFound,
)
from django.views.decorators.http import require_POST
from django.views.decorators.debug import sensitive_post_parameters
from django.utils.decorators import method_decorator
from django.core.urlresolvers import reverse, NoReverseMatch
from django.conf import settings
from wsgiref.util import FileWrapper
from omero.rtypes import rlong, unwrap
from omero.constants.namespaces import NSBULKANNOTATIONS
from .util import points_string_to_XY_list, xy_list_to_bbox
from .plategrid import PlateGrid
from omeroweb.version import omeroweb_buildyear as build_year
from .marshal import imageMarshal, shapeMarshal, rgb_int2rgba
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.views.generic import View
from django.shortcuts import render
from omeroweb.webadmin.forms import LoginForm
from omeroweb.decorators import get_client_ip, is_public_user
from omeroweb.webadmin.webadmin_utils import upgradeCheck

try:
    from hashlib import md5
except Exception:
    from md5 import md5

try:
    import long
except ImportError:
    VAR_178 = int

from io import BytesIO
import tempfile

from omero import ApiUsageException
from omero.util.decorators import timeit, TimeIt
from omeroweb.httprsp import HttpJavascriptResponse, HttpJavascriptResponseServerError
from omeroweb.connector import Server

import glob


from omeroweb.webgateway.webgateway_cache import (
    webgateway_cache,
    CacheBase,
    webgateway_tempfile,
)

import logging
import os
import traceback
import time
import zipfile
import shutil

from omeroweb.decorators import login_required, ConnCleaningHttpResponse
from omeroweb.connector import Connector
from omeroweb.webgateway.util import zip_archived_files, LUTS_IN_PNG
from omeroweb.webgateway.util import get_longs, getIntOrDefault
VAR_0 = CacheBase()  # module-level cache instance
VAR_1 = logging.getLogger(__name__)  # module logger used throughout this file

# Prefer Pillow's namespaced modules; fall back to the legacy top-level
# PIL modules, and log (but don't fail) when neither is installed.
try:
    from PIL import Image
    from PIL import ImageDraw
except Exception:  # pragma: nocover
    try:
        import Image
        import ImageDraw
    except Exception:
        VAR_1.error("No Pillow installed")
# numpy is optional; FUNC_10 (mask rendering) checks VAR_48 before using it.
try:
    import numpy

    VAR_48 = True
except ImportError:
    VAR_1.error("No numpy installed")
    VAR_48 = False
def VAR_319(VAR_2):
    # Trivial index view confirming the webgateway app is reachable.
    return HttpResponse("Welcome to webgateway")
def FUNC_1(VAR_3):
    # Coerce a value to text and encode it as UTF-8 bytes (py2/py3 compat).
    return unicode(VAR_3).encode("utf-8")
class CLASS_0(object):
    """Lightweight user proxy over a Blitz connection (VAR_49)."""

    def __init__(self, VAR_49):
        self._blitzcon = VAR_49
        self.loggedIn = False

    def FUNC_57(self):
        # Mark this user as logged in.
        self.loggedIn = True

    def FUNC_58(self):
        return self._blitzcon.isAdmin()

    def FUNC_59(self):
        return self._blitzcon.canBeAdmin()

    def FUNC_60(self):
        return self._blitzcon.getUserId()

    def FUNC_61(self):
        return self._blitzcon.getUser().omeName

    def FUNC_62(self):
        # BUG FIX: fell back to `self.getName()`, which does not exist on this
        # class — the login-name accessor here is FUNC_61.
        return self._blitzcon.getUser().firstName or self.FUNC_61()
def FUNC_2(VAR_4):
    """Parse a channels query string, e.g. "1|100:505$0000FF,-2,3|$FF0000".

    Returns three parallel lists: channel indices (negative = inactive),
    (start, end) window pairs ((None, None) when absent) and colour strings
    (None when absent). Malformed entries are skipped.
    """
    VAR_50 = []
    VAR_51 = []
    VAR_52 = []
    for VAR_179 in VAR_4.split(","):
        # BUG FIX: was `chan.split(...)` — `chan` is undefined; the loop
        # variable is VAR_179.
        VAR_179 = VAR_179.split("|", 1)
        VAR_23 = VAR_179[0].strip()
        VAR_68 = None
        if VAR_23.find("$") >= 0:
            VAR_23, VAR_68 = VAR_23.split("$")
        try:
            VAR_50.append(int(VAR_23))
            VAR_262 = (None, None)
            if len(VAR_179) > 1:
                VAR_23 = VAR_179[1].strip()
                if VAR_23.find("$") >= 0:
                    VAR_23, VAR_68 = VAR_23.split("$", 1)
                VAR_23 = VAR_23.split(":")
                if len(VAR_23) == 2:
                    try:
                        VAR_262 = [float(VAR_30) for VAR_30 in VAR_23]
                    except ValueError:
                        pass
            VAR_51.append(VAR_262)
            VAR_52.append(VAR_68)
        except ValueError:
            pass
    VAR_1.debug(str(VAR_50) + "," + str(VAR_51) + "," + str(VAR_52))
    return VAR_50, VAR_51, VAR_52
def FUNC_3(VAR_2, VAR_5=False):
    """Collect rendering-related query params (z/t/q/m/zm/x/y/p plus "c").

    Returns a dict; when VAR_5 is truthy, returns a "k=v&k=v" string instead.
    The "c" param is expanded via FUNC_2 into per-channel dicts.
    """
    VAR_53 = VAR_2.GET
    VAR_54 = {}
    for VAR_263 in ("z", "t", "q", "m", "zm", "x", "y", "p"):
        if VAR_263 in VAR_53:
            VAR_54[VAR_263] = VAR_53[VAR_263]
    if "c" in VAR_53:
        VAR_54["c"] = []
        VAR_180 = FUNC_2(VAR_53["c"])
        VAR_1.debug(VAR_180)
        for VAR_212 in range(len(VAR_180[0])):
            VAR_54["c"].append(
                {
                    "a": abs(VAR_180[0][VAR_212]),
                    "i": VAR_180[0][VAR_212],
                    "s": VAR_180[1][VAR_212][0],
                    "e": VAR_180[1][VAR_212][1],
                    "c": VAR_180[2][VAR_212],
                }
            )
    if VAR_5:
        # BUG FIX: "%VAR_3=%s" is an invalid %-format string ("%V...");
        # restore "%s=%s".
        return "&".join(["%s=%s" % (VAR_30[0], VAR_30[1]) for VAR_30 in VAR_54.items()])
    return VAR_54
@login_required()
def FUNC_4(VAR_2, VAR_6, VAR_7=None, VAR_8=None, **VAR_9):
    # Legacy single-size thumbnail entry point; delegates to FUNC_6 with
    # the size passed as the width.
    return FUNC_6(VAR_2, VAR_6, VAR_10=VAR_7, **VAR_9)
def FUNC_5(VAR_2, VAR_6, VAR_10=None, VAR_11=None, VAR_8=None, VAR_12=None, **VAR_9):
    """Return (and cache) jpeg thumbnail data for image VAR_6.

    VAR_10/VAR_11 are width/height (defaults to the browser setting);
    VAR_12 is an optional fallback callback used when the image or its
    thumbnail is unavailable. Raises Http404 when there is no fallback.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_55 = VAR_2.session.get("server_settings", {}).get("browser", {})
    VAR_56 = VAR_55.get("thumb_default_size", 96)
    VAR_57 = True
    if VAR_10 is None:
        VAR_7 = (VAR_56,)
    else:
        if VAR_11 is None:
            VAR_7 = (int(VAR_10),)
        else:
            VAR_7 = (int(VAR_10), int(VAR_11))
    if VAR_7 == (VAR_56,):
        VAR_57 = False
    VAR_58 = VAR_8.getUserId()
    VAR_22 = getIntOrDefault(VAR_2, "z", None)
    VAR_23 = getIntOrDefault(VAR_2, "t", None)
    VAR_59 = getIntOrDefault(VAR_2, "rdefId", None)
    VAR_60 = webgateway_cache.getThumb(VAR_2, VAR_19, VAR_58, VAR_6, VAR_7)
    if VAR_60 is None:
        VAR_181 = False
        VAR_92 = VAR_8.getObject("Image", VAR_6)
        if VAR_92 is None:
            # BUG FIX: "%VAR_3" is an invalid %-format spec; restore "%s".
            VAR_1.debug("(b)Image %s not found..." % (str(VAR_6)))
            if VAR_12:
                # BUG FIX: kwarg value `size` was undefined; the local is VAR_7.
                VAR_60 = VAR_12(size=VAR_7)
                VAR_181 = True
            else:
                raise Http404("Failed to render thumbnail")
        else:
            # BUG FIX: kwarg values size/direct/rdefId/z/t were undefined
            # names; pass the corresponding locals.
            VAR_60 = VAR_92.getThumbnail(
                size=VAR_7, direct=VAR_57, rdefId=VAR_59, z=VAR_22, t=VAR_23
            )
            if VAR_60 is None:
                VAR_1.debug("(VAR_203)Image %s not found..." % (str(VAR_6)))
                if VAR_12:
                    VAR_60 = VAR_12(size=VAR_7)
                    VAR_181 = True
                else:
                    raise Http404("Failed to render thumbnail")
            else:
                VAR_181 = VAR_92._thumbInProgress
        # Only cache finished thumbnails; in-progress ones would go stale.
        if not VAR_181:
            webgateway_cache.setThumb(VAR_2, VAR_19, VAR_58, VAR_6, VAR_60, VAR_7)
    else:
        pass
    return VAR_60
@login_required()
def FUNC_6(VAR_2, VAR_6, VAR_10=None, VAR_11=None, VAR_8=None, VAR_12=None, **VAR_9):
    """HTTP wrapper around FUNC_5: return the thumbnail as image/jpeg."""
    # BUG FIX: the keyword values were the undefined names
    # request/iid/w/h/conn/_defcb; pass this view's own parameters through.
    VAR_60 = FUNC_5(
        VAR_2=VAR_2, VAR_6=VAR_6, VAR_10=VAR_10, VAR_11=VAR_11, VAR_8=VAR_8, VAR_12=VAR_12, **VAR_9
    )
    VAR_61 = HttpResponse(VAR_60, content_type="image/jpeg")
    return VAR_61
@login_required()
def FUNC_7(VAR_2, VAR_13, VAR_10=None, VAR_11=None, VAR_8=None, **VAR_9):
    """Render a thumbnail centred on the most relevant shape of ROI VAR_13.

    Picks the shape on the image's default T (then default Z) plane, falling
    back to the first shape, and delegates the drawing to FUNC_9.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_62 = VAR_8.getRoiService().findByRoi(VAR_178(VAR_13), None, VAR_8.SERVICE_OPTS)
    if VAR_62 is None or VAR_62.rois is None or len(VAR_62.rois) == 0:
        raise Http404
    # NOTE(review): the loop keeps only the last ROI's image/shapes — confirm
    # findByRoi can return more than one ROI here.
    for VAR_241 in VAR_62.rois:
        VAR_36 = VAR_241.image.id.val
        VAR_63 = VAR_241.copyShapes()
    VAR_63 = [VAR_3 for VAR_3 in VAR_63 if VAR_3 is not None]
    if len(VAR_63) == 0:
        raise Http404("No Shapes found for ROI %s" % VAR_13)
    # BUG FIX: keyword values were the undefined names `server_id` / `conn`.
    VAR_64 = FUNC_13(VAR_2, VAR_36, VAR_19=VAR_19, VAR_8=VAR_8)
    if VAR_64 is None:
        raise Http404
    VAR_15, VAR_16 = VAR_64
    VAR_65 = None
    if len(VAR_63) == 1:
        VAR_65 = VAR_63[0]
    else:
        VAR_182 = VAR_15.getDefaultT()
        VAR_183 = VAR_15.getDefaultZ()
        # Prefer shapes on the default T plane (or with no T set)...
        VAR_184 = [
            VAR_3
            for VAR_3 in VAR_63
            if unwrap(VAR_3.getTheT()) is None or unwrap(VAR_3.getTheT()) == VAR_182
        ]
        if len(VAR_184) == 1:
            VAR_65 = VAR_184[0]
        else:
            # ...then narrow to the default Z plane.
            VAR_184 = [
                VAR_3
                for VAR_3 in VAR_184
                if unwrap(VAR_3.getTheZ()) is None or unwrap(VAR_3.getTheZ()) == VAR_183
            ]
            if len(VAR_184) > 0:
                VAR_65 = VAR_184[0]
    if VAR_65 is None and len(VAR_63) > 0:
        VAR_65 = VAR_63[0]
    return FUNC_9(VAR_2, VAR_8, VAR_15, VAR_65, VAR_16)
@login_required()
def FUNC_8(VAR_2, VAR_14, VAR_10=None, VAR_11=None, VAR_8=None, **VAR_9):
    """Render a thumbnail centred on shape VAR_14 (looked up by id)."""
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_66 = omero.sys.Parameters()
    VAR_66.map = {"id": rlong(VAR_14)}
    VAR_65 = VAR_8.getQueryService().findByQuery(
        "select VAR_3 from Shape VAR_3 join fetch VAR_3.roi where VAR_3.id = :id",
        VAR_66,
        VAR_8.SERVICE_OPTS,
    )
    if VAR_65 is None:
        raise Http404
    VAR_36 = VAR_65.roi.image.id.val
    # BUG FIX: keyword values were the undefined names `server_id` / `conn`.
    VAR_64 = FUNC_13(VAR_2, VAR_36, VAR_19=VAR_19, VAR_8=VAR_8)
    if VAR_64 is None:
        raise Http404
    VAR_15, VAR_16 = VAR_64
    return FUNC_9(VAR_2, VAR_8, VAR_15, VAR_65, VAR_16)
def FUNC_9(VAR_2, VAR_8, VAR_15, VAR_3, VAR_16):
    """Render a jpeg thumbnail of image VAR_15 centred on shape VAR_3,
    drawing the shape's outline (colour from the ?color= query param)."""
    VAR_67 = 250  # maximum thumbnail width
    VAR_68 = VAR_2.GET.get("color", "fff")
    VAR_69 = {
        "f00": (255, 0, 0),
        "0f0": (0, 255, 0),
        "00f": (0, 0, 255),
        "ff0": (255, 255, 0),
        "fff": (255, 255, 255),
        "000": (0, 0, 0),
    }
    VAR_70 = VAR_69["f00"]
    if VAR_68 in VAR_69:
        VAR_70 = VAR_69[VAR_68]
    VAR_71 = (221, 221, 221)  # background used to pad beyond image edges
    VAR_72 = None  # bounding box: (x, y, w, h)
    VAR_65 = {}
    VAR_73 = unwrap(VAR_3.getTheT())
    VAR_73 = VAR_73 if VAR_73 is not None else VAR_15.getDefaultT()
    VAR_74 = unwrap(VAR_3.getTheZ())
    VAR_74 = VAR_74 if VAR_74 is not None else VAR_15.getDefaultZ()
    # Work out a bounding box for each supported shape type.
    if type(VAR_3) == omero.model.RectangleI:
        VAR_65["type"] = "Rectangle"
        VAR_65["x"] = VAR_3.getX().getValue()
        VAR_65["y"] = VAR_3.getY().getValue()
        VAR_65["width"] = VAR_3.getWidth().getValue()
        VAR_65["height"] = VAR_3.getHeight().getValue()
        VAR_72 = (VAR_65["x"], VAR_65["y"], VAR_65["width"], VAR_65["height"])
    elif type(VAR_3) == omero.model.MaskI:
        VAR_65["type"] = "Mask"
        VAR_65["x"] = VAR_3.getX().getValue()
        VAR_65["y"] = VAR_3.getY().getValue()
        VAR_65["width"] = VAR_3.getWidth().getValue()
        VAR_65["height"] = VAR_3.getHeight().getValue()
        VAR_72 = (VAR_65["x"], VAR_65["y"], VAR_65["width"], VAR_65["height"])
    elif type(VAR_3) == omero.model.EllipseI:
        VAR_65["type"] = "Ellipse"
        VAR_65["x"] = int(VAR_3.getX().getValue())
        VAR_65["y"] = int(VAR_3.getY().getValue())
        VAR_65["radiusX"] = int(VAR_3.getRadiusX().getValue())
        VAR_65["radiusY"] = int(VAR_3.getRadiusY().getValue())
        VAR_72 = (
            VAR_65["x"] - VAR_65["radiusX"],
            VAR_65["y"] - VAR_65["radiusY"],
            2 * VAR_65["radiusX"],
            2 * VAR_65["radiusY"],
        )
    elif type(VAR_3) == omero.model.PolylineI:
        VAR_65["type"] = "PolyLine"
        VAR_65["xyList"] = points_string_to_XY_list(VAR_3.getPoints().getValue())
        VAR_72 = xy_list_to_bbox(VAR_65["xyList"])
    elif type(VAR_3) == omero.model.LineI:
        VAR_65["type"] = "Line"
        VAR_65["x1"] = int(VAR_3.getX1().getValue())
        VAR_65["x2"] = int(VAR_3.getX2().getValue())
        VAR_65["y1"] = int(VAR_3.getY1().getValue())
        VAR_65["y2"] = int(VAR_3.getY2().getValue())
        VAR_30 = min(VAR_65["x1"], VAR_65["x2"])
        VAR_29 = min(VAR_65["y1"], VAR_65["y2"])
        VAR_72 = (
            VAR_30,
            VAR_29,
            max(VAR_65["x1"], VAR_65["x2"]) - VAR_30,
            max(VAR_65["y1"], VAR_65["y2"]) - VAR_29,
        )
    elif type(VAR_3) == omero.model.PointI:
        VAR_65["type"] = "Point"
        VAR_65["x"] = VAR_3.getX().getValue()
        VAR_65["y"] = VAR_3.getY().getValue()
        VAR_72 = (VAR_65["x"] - 50, VAR_65["y"] - 50, 100, 100)
    elif type(VAR_3) == omero.model.PolygonI:
        VAR_65["type"] = "Polygon"
        VAR_65["xyList"] = points_string_to_XY_list(VAR_3.getPoints().getValue())
        VAR_72 = xy_list_to_bbox(VAR_65["xyList"])
    elif type(VAR_3) == omero.model.LabelI:
        VAR_65["type"] = "Label"
        VAR_65["x"] = VAR_3.getX().getValue()
        VAR_65["y"] = VAR_3.getY().getValue()
        VAR_72 = (VAR_65["x"] - 50, VAR_65["y"] - 50, 100, 100)
    else:
        VAR_1.debug("Shape type not supported: %s" % str(type(VAR_3)))
    # Grow the bounding box into a 3:2 region with some margin.
    VAR_30, VAR_29, VAR_10, VAR_11 = VAR_72
    VAR_75 = max(VAR_10, VAR_11 * 3 // 2)
    VAR_76 = VAR_75 * 2 // 3
    VAR_77 = int(VAR_75 * 1.5)
    VAR_78 = int(VAR_76 * 1.5)
    if VAR_77 < VAR_67:
        VAR_77 = VAR_67
        VAR_78 = VAR_77 * 2 // 3

    def FUNC_63(VAR_79):
        # Best-effort server config lookup; returns None on failure.
        try:
            return VAR_8.getConfigService().getConfigValue(VAR_79)
        except Exception:
            VAR_1.warn(
                "webgateway: FUNC_9() could not get"
                " Config-Value for %s" % VAR_79
            )
            pass

    VAR_80 = FUNC_63("omero.pixeldata.max_plane_width")
    VAR_81 = FUNC_63("omero.pixeldata.max_plane_height")
    if (
        VAR_80 is None
        or VAR_81 is None
        or (VAR_77 > int(VAR_80))
        or (VAR_78 > int(VAR_81))
    ):
        # Region too large to render: return a placeholder image.
        # BUG FIX: `MAX_WIDTH` was undefined — the width constant is VAR_67;
        # ImageDraw.text's colour keyword is `fill`.
        VAR_185 = Image.new("RGB", (VAR_67, VAR_67 * 2 // 3), VAR_71)
        VAR_97 = ImageDraw.Draw(VAR_185)
        VAR_97.text((10, 30), "Shape too large to \ngenerate thumbnail", fill=(255, 0, 0))
        VAR_54 = BytesIO()
        VAR_185.save(VAR_54, "jpeg", quality=90)
        return HttpResponse(VAR_54.getvalue(), content_type="image/jpeg")
    VAR_82 = (VAR_77 - VAR_10) // 2
    VAR_83 = (VAR_78 - VAR_11) // 2
    VAR_84 = int(VAR_30 - VAR_82)
    VAR_85 = int(VAR_29 - VAR_83)
    VAR_86 = VAR_15.getSizeX()
    VAR_87 = VAR_15.getSizeY()
    # Clip the region to the image, remembering the clipped margins so the
    # result can be padded back to the intended size.
    VAR_88, VAR_89, VAR_90, VAR_91 = 0, 0, 0, 0
    if VAR_84 < 0:
        VAR_77 = VAR_77 + VAR_84
        VAR_88 = abs(VAR_84)
        VAR_84 = 0  # BUG FIX: was `newX = 0` — `newX` is undefined.
    if VAR_85 < 0:
        VAR_78 = VAR_78 + VAR_85
        VAR_90 = abs(VAR_85)
        VAR_85 = 0  # BUG FIX: was `newY = 0` — `newY` is undefined.
    if VAR_77 + VAR_84 > VAR_86:
        VAR_89 = (VAR_77 + VAR_84) - VAR_86
        VAR_77 = VAR_77 - VAR_89  # BUG FIX: was `newW - VAR_89` — `newW` is undefined.
    if VAR_78 + VAR_85 > VAR_87:
        VAR_91 = (VAR_78 + VAR_85) - VAR_87
        VAR_78 = VAR_78 - VAR_91  # BUG FIX: was `newH - VAR_91` — `newH` is undefined.
    # BUG FIX: renderJpegRegion's keywords are `level` and `compression`.
    VAR_60 = VAR_15.renderJpegRegion(
        VAR_74, VAR_73, VAR_84, VAR_85, VAR_77, VAR_78, level=None, compression=VAR_16
    )
    VAR_92 = Image.open(BytesIO(VAR_60))
    if VAR_88 != 0 or VAR_89 != 0 or VAR_90 != 0 or VAR_91 != 0:
        VAR_186, VAR_187 = VAR_92.size
        VAR_188 = VAR_186 + VAR_89 + VAR_88
        VAR_189 = VAR_187 + VAR_91 + VAR_90
        VAR_190 = Image.new("RGB", (VAR_188, VAR_189), VAR_71)
        VAR_190.paste(VAR_92, (VAR_88, VAR_90))
        VAR_92 = VAR_190
    # Scale down to the thumbnail width.
    VAR_93, VAR_94 = VAR_92.size
    VAR_95 = float(VAR_67) / VAR_93
    VAR_96 = int(VAR_94 * VAR_95)
    VAR_92 = VAR_92.resize((VAR_67, VAR_96))
    VAR_97 = ImageDraw.Draw(VAR_92)
    # Draw the shape outline (doubled for a 2px-ish border).
    # BUG FIX throughout: ImageDraw.line's keywords are `fill` and `width`.
    if VAR_65["type"] == "Rectangle":
        VAR_191 = int(VAR_82 * VAR_95)
        VAR_192 = int(VAR_83 * VAR_95)
        VAR_193 = int((VAR_10 + VAR_82) * VAR_95)
        VAR_194 = int((VAR_11 + VAR_83) * VAR_95)
        VAR_97.rectangle((VAR_191, VAR_192, VAR_193, VAR_194), outline=VAR_70)
        VAR_97.rectangle((VAR_191 - 1, VAR_192 - 1, VAR_193 + 1, VAR_194 + 1), outline=VAR_70)
    elif VAR_65["type"] == "Line":
        VAR_264 = (VAR_65["x1"] - VAR_84 + VAR_88) * VAR_95
        VAR_265 = (VAR_65["x2"] - VAR_84 + VAR_88) * VAR_95
        VAR_266 = (VAR_65["y1"] - VAR_85 + VAR_90) * VAR_95
        VAR_267 = (VAR_65["y2"] - VAR_85 + VAR_90) * VAR_95
        VAR_97.line((VAR_264, VAR_266, VAR_265, VAR_267), fill=VAR_70, width=2)
    elif VAR_65["type"] == "Ellipse":
        VAR_191 = int(VAR_82 * VAR_95)
        VAR_192 = int(VAR_83 * VAR_95)
        VAR_193 = int((VAR_10 + VAR_82) * VAR_95)
        VAR_194 = int((VAR_11 + VAR_83) * VAR_95)
        VAR_97.ellipse((VAR_191, VAR_192, VAR_193, VAR_194), outline=VAR_70)
        VAR_97.ellipse((VAR_191 - 1, VAR_192 - 1, VAR_193 + 1, VAR_194 + 1), outline=VAR_70)
    elif VAR_65["type"] == "Point":
        VAR_323 = 2
        VAR_191 = (VAR_67 // 2) - VAR_323
        VAR_192 = int(VAR_96 // 2) - VAR_323
        VAR_193 = VAR_191 + (VAR_323 * 2)
        VAR_194 = VAR_192 + (VAR_323 * 2)
        VAR_97.ellipse((VAR_191, VAR_192, VAR_193, VAR_194), outline=VAR_70)
        VAR_97.ellipse((VAR_191 - 1, VAR_192 - 1, VAR_193 + 1, VAR_194 + 1), outline=VAR_70)
    elif "xyList" in VAR_65:

        def FUNC_79(VAR_325):
            # Map an image-space (x, y) point into thumbnail pixel space.
            VAR_30, VAR_29 = VAR_325
            return (
                int((VAR_30 - VAR_84 + VAR_88) * VAR_95),
                int((VAR_29 - VAR_85 + VAR_90) * VAR_95),
            )

        VAR_326 = [FUNC_79(VAR_325) for VAR_325 in VAR_65["xyList"]]
        VAR_327 = VAR_328 = None
        for line in range(1, len(VAR_326)):
            VAR_333, VAR_334 = VAR_326[line - 1]
            VAR_327, VAR_328 = VAR_326[line]
            VAR_97.line((VAR_333, VAR_334, VAR_327, VAR_328), fill=VAR_70, width=2)
        VAR_329, VAR_330 = VAR_326[0]
        if VAR_65["type"] != "PolyLine":
            # Close the polygon; offset by 1px so a single point stays visible.
            if VAR_327 is None:
                VAR_327 = VAR_329 + 1
            if VAR_328 is None:
                VAR_328 = VAR_330 + 1
            VAR_97.line((VAR_327, VAR_328, VAR_329, VAR_330), fill=VAR_70, width=2)
    VAR_54 = BytesIO()
    VAR_98 = 0.9
    try:
        VAR_92.save(VAR_54, "jpeg", quality=int(VAR_98 * 100))
        VAR_195 = VAR_54.getvalue()
    finally:
        VAR_54.close()
    return HttpResponse(VAR_195, content_type="image/jpeg")
@login_required()
def FUNC_10(VAR_2, VAR_14, VAR_8=None, **VAR_9):
    """Render a Mask shape's bit-packed byte data as an RGBA png (transparent
    background, mask pixels in the shape's fill colour)."""
    if not VAR_48:
        raise NotImplementedError("numpy not installed")
    VAR_66 = omero.sys.Parameters()
    VAR_66.map = {"id": rlong(VAR_14)}
    VAR_65 = VAR_8.getQueryService().findByQuery(
        "select VAR_3 from Shape VAR_3 where VAR_3.id = :id", VAR_66, VAR_8.SERVICE_OPTS
    )
    if VAR_65 is None:
        # BUG FIX: "%VAR_3" is an invalid %-format spec; restore "%s".
        raise Http404("Shape ID: %s not found" % VAR_14)
    VAR_99 = int(VAR_65.getWidth().getValue())
    VAR_100 = int(VAR_65.getHeight().getValue())
    VAR_68 = unwrap(VAR_65.getFillColor())
    VAR_101 = (255, 255, 0, 255)  # default fill: opaque yellow
    if VAR_68 is not None:
        VAR_68 = rgb_int2rgba(VAR_68)
        VAR_101 = (VAR_68[0], VAR_68[1], VAR_68[2], int(VAR_68[3] * 255))
    VAR_102 = VAR_65.getBytes()
    # numpy.fromstring is deprecated/removed for binary input;
    # frombuffer is the documented equivalent.
    VAR_103 = numpy.frombuffer(VAR_102, dtype=numpy.uint8)
    VAR_104 = numpy.unpackbits(VAR_103)
    # BUG FIX: Image.new's keywords are `size` and `color` (not VAR_7/VAR_68).
    VAR_92 = Image.new("RGBA", size=(VAR_99, VAR_100), color=(0, 0, 0, 0))
    VAR_30 = 0
    VAR_29 = 0
    # Walk the unpacked bits row by row, painting set bits.
    for pix in VAR_104:
        if pix == 1:
            VAR_92.putpixel((VAR_30, VAR_29), VAR_101)
        VAR_30 += 1
        if VAR_30 > VAR_99 - 1:
            VAR_30 = 0
            VAR_29 += 1
    VAR_54 = BytesIO()
    VAR_92.save(VAR_54, "png", quality=int(100))
    VAR_105 = VAR_54.getvalue()
    return HttpResponse(VAR_105, content_type="image/png")
def FUNC_11(VAR_2):
    """Build a compact cache-key fragment from the rendering query params
    m/p/c/q, using "_" for any that are absent."""
    params = VAR_2.GET
    return "".join(params.get(name, "_") for name in ("m", "p", "c", "q"))
def FUNC_12(VAR_2, VAR_17, VAR_18=0):
    # Parse the "maps" entry of a query dict (JSON list of per-channel map
    # settings) and return, for key VAR_17 (e.g. "inverted"), a list of
    # booleans — one per channel, padded to at least VAR_18 entries with
    # None. Returns None when "maps" is absent or unparseable.
    VAR_106 = None
    if "maps" in VAR_2:
        VAR_196 = VAR_2["maps"]
        VAR_106 = []
        try:
            # "maps" may arrive as a JSON string or an already-parsed list.
            if isinstance(VAR_196, (unicode, str)):
                VAR_196 = json.loads(VAR_196)
            VAR_18 = max(len(VAR_196), VAR_18)
            for VAR_203 in range(VAR_18):
                VAR_308 = None
                if len(VAR_196) > VAR_203:
                    VAR_282 = VAR_196[VAR_203].get(VAR_17)
                    if VAR_282 is not None:
                        VAR_308 = VAR_282.get("enabled") in (True, "true")
                VAR_106.append(VAR_308)
        except Exception:
            # Best effort: malformed JSON yields None rather than an error.
            VAR_1.debug("Invalid json for VAR_43 ?VAR_223=%s" % VAR_196)
            VAR_106 = None
    return VAR_106
def FUNC_13(
    VAR_2, VAR_6, VAR_19=None, VAR_8=None, VAR_20=False, VAR_21=True
):
    """Load image VAR_6 and apply rendering settings from the query string.

    Applies channel windows/colours ("c"), codomain maps ("maps"), rendering
    model ("m"), projection with optional z-range ("p"), inverted axis ("ia")
    and, when VAR_20 is truthy, saves the settings (plus default z/t) as the
    image defaults. Returns (image, compression quality) or None if not found.
    """
    VAR_53 = VAR_2.GET
    # BUG FIX: "%VAR_53" is an invalid %-format spec ("%V...") that raised
    # ValueError on every call; restore "%s".
    VAR_1.debug(
        "Preparing Image:%s VAR_20=%s "
        "retry=%s VAR_2=%s VAR_8=%s" % (VAR_6, VAR_20, VAR_21, VAR_53, str(VAR_8))
    )
    VAR_92 = VAR_8.getObject("Image", VAR_6)
    if VAR_92 is None:
        return
    VAR_107 = None
    if "maps" in VAR_53:
        # "reverse" is the legacy name for "inverted"; merge both, letting
        # an explicit "inverted" value win per channel.
        VAR_197 = FUNC_12(VAR_53, "reverse", VAR_92.getSizeC())
        VAR_107 = FUNC_12(VAR_53, "inverted", VAR_92.getSizeC())
        if VAR_197 is not None and VAR_107 is not None:
            VAR_107 = [
                VAR_22[0] if VAR_22[0] is not None else VAR_22[1] for VAR_22 in zip(VAR_107, VAR_197)
            ]
        try:
            VAR_268 = [VAR_282.get("quantization") for VAR_282 in json.loads(VAR_53["maps"])]
            VAR_92.setQuantizationMaps(VAR_268)
        except Exception:
            VAR_1.debug("Failed to set quantization maps")
    if "c" in VAR_53:
        VAR_1.debug("c=" + VAR_53["c"])
        VAR_198, VAR_51, VAR_52 = FUNC_2(VAR_53["c"])
        VAR_199 = range(1, VAR_92.getSizeC() + 1)
        # When saving, first activate all channels so settings apply everywhere.
        if VAR_20 and not VAR_92.setActiveChannels(
            VAR_199, VAR_51, VAR_52, VAR_107
        ):
            VAR_1.debug("Something bad happened while setting the active VAR_50...")
        if not VAR_92.setActiveChannels(VAR_198, VAR_51, VAR_52, VAR_107):
            VAR_1.debug("Something bad happened while setting the active VAR_50...")
    if VAR_53.get("m", None) == "g":
        VAR_92.setGreyscaleRenderingModel()
    elif VAR_53.get("m", None) == "c":
        VAR_92.setColorRenderingModel()
    # The projection param may carry a plane range, e.g. "p=<proj>|start:end".
    VAR_108 = VAR_53.get("p", None)
    VAR_109, VAR_110 = None, None
    if VAR_108 is not None and len(VAR_108.split("|")) > 1:
        VAR_108, VAR_200 = VAR_108.split("|", 1)
        try:
            VAR_109, VAR_110 = [int(VAR_3) for VAR_3 in VAR_200.split(":")]
        except ValueError:
            pass
    VAR_92.setProjection(VAR_108)
    VAR_92.setProjectionRange(VAR_109, VAR_110)
    VAR_92.setInvertedAxis(bool(VAR_53.get("ia", "0") == "1"))
    VAR_16 = VAR_53.get("q", None)
    if VAR_20:
        # z/t params are 1-based in the URL; the gateway is 0-based.
        "z" in VAR_53 and VAR_92.setDefaultZ(VAR_178(VAR_53["z"]) - 1)
        "t" in VAR_53 and VAR_92.setDefaultT(VAR_178(VAR_53["t"]) - 1)
        VAR_92.saveDefaults()
    return (VAR_92, VAR_16)
@login_required()
def FUNC_14(VAR_2, VAR_6, VAR_22, VAR_23, VAR_8=None, **VAR_9):
    """Render one jpeg tile (?tile=level,col,row[,w,h]) or arbitrary region
    (?region=x,y,w,h) of image VAR_6 at plane (VAR_22, VAR_23), with caching."""
    VAR_19 = VAR_2.session["connector"].server_id
    # BUG FIX: keyword values were the undefined names `server_id` / `conn`.
    VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_19=VAR_19, VAR_8=VAR_8)
    if VAR_64 is None:
        raise Http404
    VAR_92, VAR_16 = VAR_64
    VAR_111 = VAR_2.GET.get("tile", None)
    VAR_112 = VAR_2.GET.get("region", None)
    VAR_113 = None
    if VAR_111:
        try:
            VAR_92._prepareRenderingEngine()
            VAR_10, VAR_11 = VAR_92._re.getTileSize()
            VAR_269 = VAR_92._re.getResolutionLevels() - 1
            VAR_270 = VAR_111.split(",")
            if len(VAR_270) > 4:
                # Optional explicit tile w/h; clamp to the server-side maximum.
                VAR_309 = [int(VAR_270[3]), int(VAR_270[4])]
                VAR_310 = [VAR_10, VAR_11]
                VAR_311 = 1024
                try:
                    VAR_311 = int(
                        VAR_8.getConfigService().getConfigValue(
                            "omero.pixeldata.max_tile_length"
                        )
                    )
                except Exception:
                    pass
                for VAR_212, tile_length in enumerate(VAR_309):
                    if tile_length <= 0:
                        VAR_309[VAR_212] = VAR_310[VAR_212]
                    if tile_length > VAR_311:
                        VAR_309[VAR_212] = VAR_311
                VAR_10, VAR_11 = VAR_309
            VAR_271 = int(VAR_270[0])
            if VAR_271 < 0:
                # BUG FIX (here and below): "%VAR_3" is an invalid %-format
                # spec; restore "%s".
                VAR_231 = "Invalid resolution VAR_113 %s < 0" % VAR_271
                VAR_1.debug(VAR_231, exc_info=True)
                return HttpResponseBadRequest(VAR_231)
            if VAR_269 == 0:  # non pyramid file
                if VAR_271 > 0:
                    VAR_231 = "Invalid resolution VAR_113 %s, non pyramid file" % VAR_271
                    VAR_1.debug(VAR_231, exc_info=True)
                    return HttpResponseBadRequest(VAR_231)
                else:
                    VAR_113 = None
            else:
                # The rendering engine counts levels down from full resolution.
                VAR_113 = VAR_269 - VAR_271
                if VAR_113 < 0:
                    VAR_231 = (
                        "Invalid resolution VAR_113, "
                        "%s > number of available VAR_269 %s " % (VAR_271, VAR_269)
                    )
                    VAR_1.debug(VAR_231, exc_info=True)
                    return HttpResponseBadRequest(VAR_231)
            VAR_30 = int(VAR_270[1]) * VAR_10
            VAR_29 = int(VAR_270[2]) * VAR_11
        except Exception:
            VAR_231 = "malformed VAR_111 argument, VAR_111=%s" % VAR_111
            VAR_1.debug(VAR_231, exc_info=True)
            return HttpResponseBadRequest(VAR_231)
    elif VAR_112:
        try:
            VAR_312 = VAR_112.split(",")
            VAR_30 = int(VAR_312[0])
            VAR_29 = int(VAR_312[1])
            VAR_10 = int(VAR_312[2])
            VAR_11 = int(VAR_312[3])
        except Exception:
            VAR_231 = "malformed VAR_112 argument, VAR_112=%s" % VAR_112
            VAR_1.debug(VAR_231, exc_info=True)
            return HttpResponseBadRequest(VAR_231)
    else:
        return HttpResponseBadRequest("tile or VAR_112 argument required")
    VAR_60 = webgateway_cache.getImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23)
    if VAR_60 is None:
        # BUG FIX: keyword value `level` was undefined; renderJpegRegion's
        # keywords are `level` and `compression` and the local is VAR_113.
        VAR_60 = VAR_92.renderJpegRegion(
            VAR_22, VAR_23, VAR_30, VAR_29, VAR_10, VAR_11, level=VAR_113, compression=VAR_16
        )
        if VAR_60 is None:
            raise Http404
        webgateway_cache.setImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23, VAR_60)
    VAR_61 = HttpResponse(VAR_60, content_type="image/jpeg")
    return VAR_61
@login_required()
def FUNC_15(VAR_2, VAR_6, VAR_22=None, VAR_23=None, VAR_8=None, **VAR_9):
    """Render the full image plane (z=VAR_22, t=VAR_23) as JPEG.

    With ?download=true in VAR_9 the payload may be re-encoded as PNG or
    TIFF (``?format=``) and served as an attachment.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    # fix: keyword values were the undefined names `server_id` / `conn`
    VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_19=VAR_19, VAR_8=VAR_8)
    if VAR_64 is None:
        raise Http404
    VAR_92, VAR_16 = VAR_64
    VAR_60 = webgateway_cache.getImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23)
    if VAR_60 is None:
        VAR_60 = VAR_92.renderJpeg(VAR_22, VAR_23, VAR_98=VAR_16)
        if VAR_60 is None:
            raise Http404
        webgateway_cache.setImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23, VAR_60)
    VAR_114 = VAR_2.GET.get("format", "jpeg")
    VAR_61 = HttpResponse(VAR_60, content_type="image/jpeg")
    if "download" in VAR_9 and VAR_9["download"]:
        if VAR_114 == "png":
            # re-encode the rendered JPEG bytes as PNG
            VAR_212 = Image.open(BytesIO(VAR_60))
            VAR_272 = BytesIO()
            VAR_212.save(VAR_272, "png")
            VAR_60 = VAR_272.getvalue()
            VAR_272.close()
            VAR_61 = HttpResponse(VAR_60, content_type="image/png")
        elif VAR_114 == "tif":
            # re-encode the rendered JPEG bytes as TIFF
            VAR_212 = Image.open(BytesIO(VAR_60))
            VAR_272 = BytesIO()
            VAR_212.save(VAR_272, "tiff")
            VAR_60 = VAR_272.getvalue()
            VAR_272.close()
            VAR_61 = HttpResponse(VAR_60, content_type="image/tiff")
        VAR_201 = VAR_92.getName()
        try:
            # fix: was `fileName.decode` -- undefined name; decode the name we hold
            VAR_201 = VAR_201.decode("utf8")
        except AttributeError:
            pass  # python 3
        # fix: was `fileName.replace` -- undefined name
        VAR_201 = VAR_201.replace(",", ".").replace(" ", "_")
        VAR_61["Content-Type"] = "application/force-download"
        VAR_61["Content-Length"] = len(VAR_60)
        # fix: "%VAR_3" was an invalid conversion spec; restored "%s"
        VAR_61["Content-Disposition"] = "attachment; filename=%s.%s" % (VAR_201, VAR_114)
    return VAR_61
@login_required()
def FUNC_16(VAR_2, VAR_24, VAR_25, VAR_8=None, **VAR_9):
    """Export images as OME-TIFF.

    VAR_24 selects the container kind ("p" project, "d" dataset, "w" well,
    otherwise a single image id VAR_25).  A single image is returned as one
    .ome.tiff; multiple images are zipped.  ?dryrun=true only reports the
    number of exportable images.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_115 = []
    if VAR_24 == "p":
        VAR_202 = VAR_8.getObject("Project", VAR_25)
        if VAR_202 is None:
            raise Http404
        for VAR_213 in VAR_202.listChildren():
            VAR_115.extend(list(VAR_213.listChildren()))
        VAR_17 = VAR_202.getName()
    elif VAR_24 == "d":
        VAR_202 = VAR_8.getObject("Dataset", VAR_25)
        if VAR_202 is None:
            raise Http404
        VAR_115.extend(list(VAR_202.listChildren()))
        # optional ?selection=1,2,3 restricts to those image ids
        VAR_273 = list(filter(None, VAR_2.GET.get("selection", "").split(",")))
        if len(VAR_273) > 0:
            VAR_1.debug(VAR_273)
            VAR_1.debug(VAR_115)
            VAR_115 = [VAR_30 for VAR_30 in VAR_115 if str(VAR_30.getId()) in VAR_273]
            VAR_1.debug(VAR_115)
            if len(VAR_115) == 0:
                raise Http404
        # fix: "%VAR_3" was an invalid conversion spec; restored "%s"
        VAR_17 = "%s-%s" % (VAR_202.getParent().getName(), VAR_202.getName())
    elif VAR_24 == "w":
        VAR_202 = VAR_8.getObject("Well", VAR_25)
        if VAR_202 is None:
            raise Http404
        VAR_115.extend([VAR_30.getImage() for VAR_30 in VAR_202.listChildren()])
        VAR_124 = VAR_202.getParent()
        VAR_313 = "%s%s" % (
            VAR_124.getRowLabels()[VAR_202.row],
            VAR_124.getColumnLabels()[VAR_202.column],
        )
        VAR_17 = "%s-%s-%s" % (VAR_124.getParent().getName(), VAR_124.getName(), VAR_313)
    else:
        VAR_202 = VAR_8.getObject("Image", VAR_25)
        if VAR_202 is None:
            raise Http404
        VAR_115.append(VAR_202)
    # big (pyramid) images cannot be exported as OME-TIFF
    VAR_115 = [VAR_30 for VAR_30 in VAR_115 if not VAR_30.requiresPixelsPyramid()]
    if VAR_2.GET.get("dryrun", False):
        VAR_54 = json.dumps(len(VAR_115))
        VAR_203 = VAR_2.GET.get("callback", None)
        if VAR_203 is not None and not VAR_9.get("_internal", False):
            VAR_54 = "%s(%s)" % (VAR_203, VAR_54)
        return HttpJavascriptResponse(VAR_54)
    if len(VAR_115) == 0:
        raise Http404
    if len(VAR_115) == 1:
        VAR_202 = VAR_115[0]
        VAR_79 = (
            "_".join((str(VAR_30.getId()) for VAR_30 in VAR_202.getAncestry()))
            + "_"
            + str(VAR_202.getId())
            + "_ome_tiff"
        )
        # total name len <= 255, including id and ".ome.tiff"
        VAR_204 = 255 - len(str(VAR_202.getId())) - 10
        VAR_205 = VAR_202.getName()[:VAR_204]
        # fix: keyword value was the undefined name `key`
        VAR_206, VAR_207, VAR_208 = webgateway_tempfile.new(
            str(VAR_202.getId()) + "-" + VAR_205 + ".ome.tiff", key=VAR_79
        )
        if VAR_208 is True:
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + VAR_207
            )
        VAR_209 = webgateway_cache.getOmeTiffImage(VAR_2, VAR_19, VAR_115[0])
        if VAR_209 is None:
            try:
                VAR_209 = VAR_115[0].exportOmeTiff()
            except Exception:
                VAR_1.debug("Failed to export VAR_15 (2)", exc_info=True)
                VAR_209 = None
            if VAR_209 is None:
                webgateway_tempfile.abort(VAR_206)
                raise Http404
            webgateway_cache.setOmeTiffImage(VAR_2, VAR_19, VAR_115[0], VAR_209)
        if VAR_208 is None:
            # no tempfile support: stream the bytes directly
            VAR_61 = HttpResponse(VAR_209, content_type="image/tiff")
            VAR_61["Content-Disposition"] = 'attachment; filename="%s.ome.tiff"' % (
                str(VAR_202.getId()) + "-" + VAR_205
            )
            VAR_61["Content-Length"] = len(VAR_209)
            return VAR_61
        else:
            VAR_208.write(VAR_209)
            VAR_208.close()
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + VAR_207
            )
    else:
        try:
            VAR_274 = "+".join((str(VAR_30.getId()) for VAR_30 in VAR_115)).encode("utf-8")
            VAR_79 = (
                "_".join((str(VAR_30.getId()) for VAR_30 in VAR_115[0].getAncestry()))
                + "_"
                + md5(VAR_274).hexdigest()
                + "_ome_tiff_zip"
            )
            # fix: keyword value was the undefined name `key`
            VAR_206, VAR_207, VAR_208 = webgateway_tempfile.new(VAR_17 + ".zip", key=VAR_79)
            if VAR_208 is True:
                return HttpResponseRedirect(
                    settings.STATIC_URL + "webgateway/tfiles/" + VAR_207
                )
            VAR_1.debug(VAR_206)
            if VAR_208 is None:
                VAR_208 = BytesIO()
            VAR_275 = zipfile.ZipFile(VAR_208, "w", zipfile.ZIP_STORED)
            for VAR_202 in VAR_115:
                VAR_209 = webgateway_cache.getOmeTiffImage(VAR_2, VAR_19, VAR_202)
                if VAR_209 is None:
                    VAR_209 = VAR_202.exportOmeTiff()
                    if VAR_209 is None:
                        continue
                    webgateway_cache.setOmeTiffImage(VAR_2, VAR_19, VAR_202, VAR_209)
                VAR_204 = 255 - len(str(VAR_202.getId())) - 10
                VAR_205 = VAR_202.getName()[:VAR_204]
                VAR_275.writestr(str(VAR_202.getId()) + "-" + VAR_205 + ".ome.tiff", VAR_209)
            VAR_275.close()
            if VAR_206 is None:
                # in-memory zip: return it directly
                VAR_314 = VAR_208.getvalue()
                VAR_61 = HttpResponse(VAR_314, content_type="application/zip")
                VAR_61["Content-Disposition"] = 'attachment; filename="%s.zip"' % VAR_17
                VAR_61["Content-Length"] = len(VAR_314)
                return VAR_61
        except Exception:
            VAR_1.debug(traceback.format_exc())
            raise
        return HttpResponseRedirect(settings.STATIC_URL + "webgateway/tfiles/" + VAR_207)
@login_required()
def FUNC_17(VAR_2, VAR_6, VAR_26, VAR_27, VAR_8=None, **VAR_9):
    """Render a movie along axis VAR_26 ("z" or "t") at position VAR_27.

    The movie is built via createMovie() into a temp file and either
    streamed back or served via a redirect to the tempfile area.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    try:
        VAR_136 = {}
        VAR_136["format"] = "video/" + VAR_2.GET.get("format", "quicktime")
        VAR_136["fps"] = int(VAR_2.GET.get("fps", 4))
        VAR_136["minsize"] = (512, 512, "Black")
        VAR_210 = ".avi"
        # fix: "%VAR_3"/"%VAR_213" were invalid conversion specs
        VAR_79 = "%s-%s-%s-%d-%s-%s" % (
            VAR_6,
            VAR_26,
            VAR_27,
            VAR_136["fps"],
            FUNC_11(VAR_2),
            VAR_2.GET.get("format", "quicktime"),
        )
        VAR_27 = int(VAR_27)
        # fix: keyword values were the undefined names `server_id` / `conn`
        VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_19=VAR_19, VAR_8=VAR_8)
        if VAR_64 is None:
            raise Http404
        VAR_92, VAR_16 = VAR_64
        # fix: keyword value was the undefined name `key`
        VAR_206, VAR_207, VAR_208 = webgateway_tempfile.new(VAR_92.getName() + VAR_210, key=VAR_79)
        VAR_1.debug(VAR_206, VAR_207, VAR_208)
        if VAR_208 is True:
            # already cached on disk -- redirect
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + VAR_207
            )
        if "optsCB" in VAR_9:
            VAR_136.update(VAR_9["optsCB"](VAR_92))
        VAR_136.update(VAR_9.get("opts", {}))
        VAR_1.debug(
            "rendering VAR_280 for VAR_92 %s with VAR_26 %s, VAR_27 %i and VAR_136 %s"
            % (VAR_6, VAR_26, VAR_27, VAR_136)
        )
        if VAR_206 is None:
            VAR_276, VAR_277 = tempfile.mkstemp()
        else:
            VAR_277 = VAR_206  # os.path.join(VAR_206, VAR_92.getName())
        if VAR_26.lower() == "z":
            VAR_278, VAR_279 = VAR_92.createMovie(
                VAR_277, 0, VAR_92.getSizeZ() - 1, VAR_27 - 1, VAR_27 - 1, VAR_136
            )
        else:
            VAR_278, VAR_279 = VAR_92.createMovie(
                VAR_277, VAR_27 - 1, VAR_27 - 1, 0, VAR_92.getSizeT() - 1, VAR_136
            )
        if VAR_278 is None and VAR_279 is None:
            raise Http404
        if VAR_206 is None:
            # fix: original left the file object unclosed (fd leak)
            with open(VAR_277) as VAR_333:
                VAR_280 = VAR_333.read()
            os.close(VAR_276)
            VAR_61 = HttpResponse(VAR_280, content_type=VAR_279)
            VAR_61["Content-Disposition"] = 'attachment; filename="%s"' % (
                VAR_92.getName() + VAR_210
            )
            VAR_61["Content-Length"] = len(VAR_280)
            return VAR_61
        else:
            VAR_208.close()
            return HttpResponseRedirect(
                settings.STATIC_URL + "webgateway/tfiles/" + VAR_207
            )
    except Exception:
        VAR_1.debug(traceback.format_exc())
        raise
@login_required()
def FUNC_18(VAR_2, VAR_6, VAR_22, VAR_23, VAR_8=None, **VAR_9):
    """Render the split-channel view of plane (z=VAR_22, t=VAR_23) as JPEG."""
    VAR_19 = VAR_2.session["connector"].server_id
    # fix: keyword values were the undefined names `server_id` / `conn`
    VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_19=VAR_19, VAR_8=VAR_8)
    if VAR_64 is None:
        raise Http404
    VAR_92, VAR_16 = VAR_64
    # fix: was `compress_quality and ...` -- undefined name; VAR_16 holds it
    VAR_16 = VAR_16 and float(VAR_16) or 0.9
    VAR_60 = webgateway_cache.getSplitChannelImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23)
    if VAR_60 is None:
        VAR_60 = VAR_92.renderSplitChannel(VAR_22, VAR_23, VAR_98=VAR_16)
        if VAR_60 is None:
            raise Http404
        webgateway_cache.setSplitChannelImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23, VAR_60)
    VAR_61 = HttpResponse(VAR_60, content_type="image/jpeg")
    return VAR_61
def VAR_211(VAR_28):
    """Decorator honouring ?debug=slow|fail|error flags before calling the view.

    "slow" sleeps 5s, "fail" raises Http404, "error" raises AttributeError;
    otherwise the wrapped view runs unchanged.
    """
    @wraps(VAR_28)
    def _debugged(request, *view_args, **view_kwargs):
        flags = request.GET.getlist("debug")
        if "slow" in flags:
            time.sleep(5)  # deliberately delay the response
        if "fail" in flags:
            raise Http404
        if "error" in flags:
            raise AttributeError("Debug requested error")
        return VAR_28(request, *view_args, **view_kwargs)

    return _debugged
def FUNC_20(VAR_28):
    """Decorator: marshal a view's return value as JSON / JSONP.

    Ensures kwargs["server_id"] is populated from the session, returns raw
    values for _raw/_internal calls, wraps in a JSONP callback when
    ?callback= is given, otherwise returns a JsonResponse.  Exceptions are
    mapped to JSON error responses (403/400/500).
    """
    @wraps(VAR_28)
    def FUNC_64(VAR_2, *VAR_116, **VAR_9):
        VAR_1.debug("jsonp")
        try:
            VAR_19 = VAR_9.get("server_id", None)
            if VAR_19 is None and VAR_2.session.get("connector"):
                VAR_19 = VAR_2.session["connector"].server_id
            VAR_9["server_id"] = VAR_19
            VAR_54 = VAR_28(VAR_2, *VAR_116, **VAR_9)
            if VAR_9.get("_raw", False):
                return VAR_54
            if isinstance(VAR_54, HttpResponse):
                return VAR_54
            VAR_203 = VAR_2.GET.get("callback", None)
            if VAR_203 is not None and not VAR_9.get("_internal", False):
                VAR_54 = json.dumps(VAR_54)
                # fix: "%VAR_3" was an invalid conversion spec; restored "%s"
                VAR_54 = "%s(%s)" % (VAR_203, VAR_54)
                return HttpJavascriptResponse(VAR_54)
            if VAR_9.get("_internal", False):
                return VAR_54
            VAR_281 = type(VAR_54) is dict
            # fix: was `VAR_281=safe` -- Django JsonResponse kwarg is `safe`
            return JsonResponse(VAR_54, safe=VAR_281)
        except Exception as ex:
            VAR_315 = 500
            if isinstance(ex, omero.SecurityViolation):
                VAR_315 = 403
            elif isinstance(ex, omero.ApiUsageException):
                VAR_315 = 400
            VAR_316 = traceback.format_exc()
            VAR_1.debug(VAR_316)
            if VAR_9.get("_raw", False) or VAR_9.get("_internal", False):
                raise
            # fix: was `VAR_315=status` -- Django JsonResponse kwarg is `status`
            return JsonResponse(
                {"message": str(ex), "stacktrace": VAR_316}, status=VAR_315
            )

    return FUNC_64
@VAR_211
@login_required()
def FUNC_21(VAR_2, VAR_6, VAR_22, VAR_23, VAR_29, VAR_8=None, VAR_10=1, **VAR_9):
    """Render a row line-plot GIF for plane (z=VAR_22, t=VAR_23) at row VAR_29."""
    if not VAR_10:
        VAR_10 = 1
    # fix: keyword value was the undefined name `conn`
    VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_8=VAR_8)
    if VAR_64 is None:
        raise Http404
    VAR_92, VAR_16 = VAR_64
    try:
        VAR_117 = VAR_92.renderRowLinePlotGif(int(VAR_22), int(VAR_23), int(VAR_29), int(VAR_10))
    except Exception:
        VAR_1.debug("a", exc_info=True)
        raise
    if VAR_117 is None:
        raise Http404
    VAR_61 = HttpResponse(VAR_117, content_type="image/gif")
    return VAR_61
@VAR_211
@login_required()
def FUNC_22(VAR_2, VAR_6, VAR_22, VAR_23, VAR_30, VAR_10=1, VAR_8=None, **VAR_9):
    """Render a column line-plot GIF for plane (z=VAR_22, t=VAR_23) at column VAR_30."""
    if not VAR_10:
        VAR_10 = 1
    # fix: keyword value was the undefined name `conn`
    VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_8=VAR_8)
    if VAR_64 is None:
        raise Http404
    VAR_92, VAR_16 = VAR_64
    VAR_117 = VAR_92.renderColLinePlotGif(int(VAR_22), int(VAR_23), int(VAR_30), int(VAR_10))
    if VAR_117 is None:
        raise Http404
    VAR_61 = HttpResponse(VAR_117, content_type="image/gif")
    return VAR_61
@login_required()
@FUNC_20
def FUNC_23(VAR_2, VAR_8=None, VAR_31=False, **VAR_9):
    """Return the full image metadata marshal for image id VAR_9["iid"]."""
    VAR_6 = VAR_9["iid"]
    VAR_79 = VAR_9.get("key", None)
    VAR_15 = VAR_8.getObject("Image", VAR_6)
    if VAR_15 is None:
        if is_public_user(VAR_2):
            # 403: don't leak existence to anonymous/public users
            return HttpResponseForbidden()
        else:
            # fix: "%VAR_3" was an invalid conversion spec; restored "%s"
            return HttpResponseNotFound("Image:%s not found" % VAR_6)
    if VAR_2.GET.get("getDefaults") == "true":
        VAR_15.resetDefaults(save=False)
    # fix: keyword values were the undefined names `key` / `request`
    VAR_54 = imageMarshal(VAR_15, key=VAR_79, request=VAR_2)
    return VAR_54
@login_required()
@FUNC_20
def FUNC_24(VAR_2, VAR_8=None, VAR_31=False, **VAR_9):
    """Return a simple marshal of well id VAR_9["wid"], with thumbnail URLs."""
    VAR_118 = VAR_9["wid"]
    VAR_119 = VAR_8.getObject("Well", VAR_118)
    if VAR_119 is None:
        return HttpJavascriptResponseServerError('""')
    VAR_120 = VAR_9.get("thumbprefix", "webgateway_render_thumbnail")

    def FUNC_65(VAR_6):
        # fix: was `VAR_116=(VAR_6,)` -- django.urls.reverse kwarg is `args`
        return reverse(VAR_120, args=(VAR_6,))

    VAR_121 = {"thumbUrlPrefix": VAR_9.get("urlprefix", FUNC_65)}
    # fix: was `VAR_121=xtra` -- undefined name; simpleMarshal kwarg is `xtra`
    VAR_54 = VAR_119.simpleMarshal(xtra=VAR_121)
    return VAR_54
@login_required()
@FUNC_20
def FUNC_25(VAR_2, VAR_32, VAR_33=0, VAR_8=None, **VAR_9):
    """Return the plate-grid metadata for plate VAR_32 / field VAR_33, cached as JSON."""
    try:
        VAR_33 = VAR_178(VAR_33 or 0)
    except ValueError:
        VAR_33 = 0
    VAR_120 = VAR_9.get("thumbprefix", "webgateway_render_thumbnail")
    VAR_122 = getIntOrDefault(VAR_2, "size", None)
    VAR_1.debug(VAR_122)
    VAR_19 = VAR_9["server_id"]

    def FUNC_66(VAR_6):
        # fix: was `VAR_116=(...)` -- django.urls.reverse kwarg is `args`
        if VAR_122 is not None:
            return reverse(VAR_120, args=(VAR_6, VAR_122))
        return reverse(VAR_120, args=(VAR_6,))

    VAR_123 = PlateGrid(VAR_8, VAR_32, VAR_33, VAR_9.get("urlprefix", FUNC_66))
    VAR_124 = VAR_123.plate
    if VAR_124 is None:
        # NOTE(review): returns (not raises) Http404 -- looks suspicious but
        # preserved as-is; confirm against callers before changing.
        return Http404
    # fix: "%VAR_213" was an invalid conversion spec; restored "%d"
    VAR_125 = "plategrid-%d-%s" % (VAR_33, VAR_122)
    VAR_54 = webgateway_cache.getJson(VAR_2, VAR_19, VAR_124, VAR_125)
    if VAR_54 is None:
        VAR_54 = VAR_123.metadata
        webgateway_cache.setJson(VAR_2, VAR_19, VAR_124, json.dumps(VAR_54), VAR_125)
    else:
        VAR_54 = json.loads(VAR_54)
    return VAR_54
@login_required()
@FUNC_20
def FUNC_26(VAR_2, VAR_10=None, VAR_8=None, **VAR_9):
    """Return {image_id: base64 data-URI} thumbnails for ?id=... ids (size VAR_10)."""
    VAR_55 = VAR_2.session.get("server_settings", {}).get("browser", {})
    VAR_56 = VAR_55.get("thumb_default_size", 96)
    if VAR_10 is None:
        VAR_10 = VAR_56
    VAR_126 = get_longs(VAR_2, "id")
    VAR_126 = list(set(VAR_126))  # remove any duplicates
    if len(VAR_126) == 1:
        VAR_6 = VAR_126[0]
        try:
            # fix: keyword values were the undefined names `w` / `conn`
            VAR_162 = FUNC_5(VAR_2, VAR_6, VAR_10=VAR_10, VAR_8=VAR_8)
            # fix: data-URI prefix was mangled ("data:VAR_15/VAR_195")
            return {
                VAR_6: "data:image/jpeg;base64,%s"
                % base64.b64encode(VAR_162).decode("utf-8")
            }
        except Exception:
            return {VAR_6: None}
    VAR_1.debug("Image ids: %r" % VAR_126)
    if len(VAR_126) > settings.THUMBNAILS_BATCH:
        # fix: "%VAR_3" was an invalid conversion spec; restored "%s"
        return HttpJavascriptResponseServerError(
            "Max %s VAR_127 at VAR_167 time." % settings.THUMBNAILS_BATCH
        )
    VAR_127 = VAR_8.getThumbnailSet([rlong(VAR_212) for VAR_212 in VAR_126], VAR_10)
    VAR_54 = dict()
    for VAR_212 in VAR_126:
        VAR_54[VAR_212] = None
        try:
            VAR_23 = VAR_127[VAR_212]
            if len(VAR_23) > 0:
                VAR_54[VAR_212] = "data:image/jpeg;base64,%s" % base64.b64encode(VAR_23).decode(
                    "utf-8"
                )
        except KeyError:
            # fix: "%VAR_213" was an invalid conversion spec; restored "%d"
            VAR_1.error("Thumbnail not available. (VAR_92 id: %d)" % VAR_212)
        except Exception:
            VAR_1.error(traceback.format_exc())
    return VAR_54
@login_required()
@FUNC_20
def FUNC_27(VAR_2, VAR_6, VAR_10=None, VAR_11=None, VAR_8=None, VAR_12=None, **VAR_9):
    """Return a single thumbnail for image VAR_6 as a base64 data-URI string."""
    # fix: keyword values were the undefined names request/iid/w/h/conn/_defcb
    VAR_60 = FUNC_5(
        VAR_2=VAR_2, VAR_6=VAR_6, VAR_10=VAR_10, VAR_11=VAR_11, VAR_8=VAR_8, VAR_12=VAR_12, **VAR_9
    )
    # fix: data-URI prefix was mangled ("data:VAR_15/VAR_195")
    VAR_54 = "data:image/jpeg;base64,%s" % base64.b64encode(VAR_60).decode("utf-8")
    return VAR_54
@login_required()
@FUNC_20
def FUNC_28(VAR_2, VAR_34, VAR_8=None, **VAR_9):
    """List the images of dataset VAR_34 as simple marshals with thumb URLs."""
    VAR_128 = VAR_8.getObject("Dataset", VAR_34)
    if VAR_128 is None:
        return HttpJavascriptResponseServerError('""')
    VAR_120 = VAR_9.get("thumbprefix", "webgateway_render_thumbnail")

    def FUNC_65(VAR_6):
        # fix: was `VAR_116=(VAR_6,)` -- django.urls.reverse kwarg is `args`
        return reverse(VAR_120, args=(VAR_6,))

    VAR_121 = {
        "thumbUrlPrefix": VAR_9.get("urlprefix", FUNC_65),
        "tiled": VAR_2.GET.get("tiled", False),
    }
    # fix: was `VAR_121=xtra` -- undefined name; simpleMarshal kwarg is `xtra`
    return [VAR_30.simpleMarshal(xtra=VAR_121) for VAR_30 in VAR_128.listChildren()]
@login_required()
@FUNC_20
def FUNC_29(VAR_2, VAR_34, VAR_8=None, **VAR_9):
    """List images of well VAR_34 (optionally filtered by ?run=) with positions."""
    VAR_119 = VAR_8.getObject("Well", VAR_34)
    VAR_129 = getIntOrDefault(VAR_2, "run", None)
    if VAR_119 is None:
        return HttpJavascriptResponseServerError('""')
    VAR_120 = VAR_9.get("thumbprefix", "webgateway_render_thumbnail")

    def FUNC_65(VAR_6):
        # fix: was `VAR_116=(VAR_6,)` -- django.urls.reverse kwarg is `args`
        return reverse(VAR_120, args=(VAR_6,))

    VAR_121 = {"thumbUrlPrefix": VAR_9.get("urlprefix", FUNC_65)}

    def FUNC_67(VAR_10):
        # marshal the well-sample X/Y stage position (if set)
        VAR_213 = {}
        for VAR_30, VAR_108 in (["x", VAR_10.getPosX()], ["y", VAR_10.getPosY()]):
            if VAR_108 is not None:
                VAR_213[VAR_30] = {"value": VAR_108.getValue(), "unit": str(VAR_108.getUnit())}
        return VAR_213

    VAR_130 = []
    for ws in VAR_119.listChildren():
        if (
            VAR_129 is not None
            and ws.plateAcquisition is not None
            and ws.plateAcquisition.id.val != VAR_129
        ):
            continue  # skip samples from other acquisition runs
        VAR_92 = ws.getImage()
        if VAR_92 is not None:
            # fix: was `VAR_121=xtra` -- undefined name; kwarg is `xtra`
            VAR_282 = VAR_92.simpleMarshal(xtra=VAR_121)
            VAR_27 = FUNC_67(ws)
            if len(VAR_27.keys()) > 0:
                VAR_282["position"] = VAR_27
            VAR_130.append(VAR_282)
    return VAR_130
@login_required()
@FUNC_20
def FUNC_30(VAR_2, VAR_32, VAR_8=None, **VAR_9):
    """List the datasets of project VAR_32 as simple marshals."""
    VAR_131 = VAR_8.getObject("Project", VAR_32)
    if VAR_131 is None:
        return HttpJavascriptResponse("[]")
    # fix: kwarg name was `VAR_121` -- simpleMarshal's parameter is `xtra`
    return [VAR_30.simpleMarshal(xtra={"childCount": 0}) for VAR_30 in VAR_131.listChildren()]
@login_required()
@FUNC_20
def FUNC_31(VAR_2, VAR_34, VAR_8=None, **VAR_9):
    """Return a simple marshal of dataset VAR_34."""
    dataset = VAR_8.getObject("Dataset", VAR_34)
    return dataset.simpleMarshal()
@login_required()
@FUNC_20
def FUNC_32(VAR_2, VAR_8=None, **VAR_9):
    """List all projects visible to the connection as {id, name, description}."""
    return [
        {"id": VAR_133.id, "name": VAR_133.name, "description": VAR_133.description or ""}
        for VAR_133 in VAR_8.listProjects()
    ]
@login_required()
@FUNC_20
def FUNC_33(VAR_2, VAR_32, VAR_8=None, **VAR_9):
    """Return a simple marshal of project VAR_32."""
    project = VAR_8.getObject("Project", VAR_32)
    return project.simpleMarshal()
@FUNC_20
def FUNC_34(VAR_2, **VAR_9):
    """Return the configured OPEN_WITH viewer options as a JSON-ready dict.

    Each entry is (id, url-or-name[, options]); malformed entries are
    skipped, option parsing is best-effort.
    """
    VAR_134 = settings.OPEN_WITH
    VAR_135 = []
    for ow in VAR_134:
        if len(ow) < 2:
            continue
        VAR_214 = {}
        # fix: was `viewer["id"]` -- undefined name; dict is VAR_214
        VAR_214["id"] = ow[0]
        try:
            VAR_214["url"] = reverse(ow[1])
        except NoReverseMatch:
            # not a URL name -- treat as a literal URL
            VAR_214["url"] = ow[1]
        try:
            if len(ow) > 2:
                if "supported_objects" in ow[2]:
                    VAR_214["supported_objects"] = ow[2]["supported_objects"]
                if "target" in ow[2]:
                    VAR_214["target"] = ow[2]["target"]
                if "script_url" in ow[2]:
                    if ow[2]["script_url"].startswith("http"):
                        VAR_214["script_url"] = ow[2]["script_url"]
                    else:
                        VAR_214["script_url"] = static(ow[2]["script_url"])
                if "label" in ow[2]:
                    VAR_214["label"] = ow[2]["label"]
        except Exception:
            pass  # best-effort: ignore malformed option dicts
        VAR_135.append(VAR_214)
    return {"open_with_options": VAR_135}
def FUNC_35(VAR_2):
    """Build a search-options dict from the request's GET parameters.

    Returns keys: search, ctx, grabData, parents, start, limit, key.
    Any parsing error is logged and yields an empty dict.
    """
    try:
        VAR_53 = VAR_2.GET
        VAR_136 = {
            # NOTE(review): `unicode` is a Python-2 builtin; presumably aliased
            # to `str` at module level for Python 3 -- confirm at file top.
            "search": unicode(VAR_53.get("text", "")).encode("utf8"),
            "ctx": VAR_53.get("ctx", ""),
            # double-negation coerces the raw GET value to a bool
            "grabData": not not VAR_53.get("grabData", False),
            "parents": not not bool(VAR_53.get("parents", False)),
            "start": int(VAR_53.get("start", 0)),
            "limit": int(VAR_53.get("limit", 0)),
            "key": VAR_53.get("key", None),
        }
        VAR_215 = VAR_53.get("author", "")
        if VAR_215:
            # append an author filter term to the free-text query
            VAR_136["search"] += " VAR_215:" + VAR_215
        return VAR_136
    except Exception:
        VAR_1.error(traceback.format_exc())
        return {}
@TimeIt(logging.INFO)
@login_required()
@FUNC_20
def FUNC_36(VAR_2, VAR_8=None, **VAR_9):
    """Free-text search; returns simple marshals or (for ctx=imgs with
    grabData) full per-image metadata via FUNC_23."""
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_136 = FUNC_35(VAR_2)
    VAR_54 = []
    # fix: "%VAR_3" was an invalid conversion spec; restored "%s"
    VAR_1.debug("searchObjects(%s)" % (VAR_136["search"]))

    def FUNC_65(VAR_6):
        # fix: was `VAR_116=(VAR_6,)` -- django.urls.reverse kwarg is `args`
        return reverse("webgateway_render_thumbnail", args=(VAR_6,))

    VAR_121 = {"thumbUrlPrefix": VAR_9.get("urlprefix", FUNC_65)}
    try:
        if VAR_136["ctx"] == "imgs":
            VAR_283 = VAR_8.searchObjects(["image"], VAR_136["search"], VAR_8.SERVICE_OPTS)
        else:
            VAR_283 = VAR_8.searchObjects(None, VAR_136["search"], VAR_8.SERVICE_OPTS)
    except ApiUsageException:
        return HttpJavascriptResponseServerError('"parse exception"')

    def FUNC_68():
        VAR_54 = []
        if VAR_136["grabData"] and VAR_136["ctx"] == "imgs":
            VAR_284 = min(VAR_136["start"], len(VAR_283) - 1)
            if VAR_136["limit"] == 0:
                VAR_317 = len(VAR_283)
            else:
                VAR_317 = min(len(VAR_283), VAR_284 + VAR_136["limit"])
            for VAR_212 in range(VAR_284, VAR_317):
                VAR_318 = VAR_283[VAR_212]
                try:
                    # fix: FUNC_23 reads VAR_9["iid"]/VAR_9.get("key"), and the
                    # FUNC_20 wrapper reads "_internal"; kwargs must use those
                    # names. `conn` was an undefined name -- pass VAR_8.
                    VAR_54.append(
                        FUNC_23(
                            VAR_2,
                            VAR_19,
                            iid=VAR_318.id,
                            key=VAR_136["key"],
                            VAR_8=VAR_8,
                            _internal=True,
                        )
                    )
                except AttributeError as VAR_30:
                    # fix: "%VAR_212" was an invalid conversion spec
                    VAR_1.debug(
                        "(VAR_6 %i) ignoring Attribute Error: %s" % (VAR_318.id, str(VAR_30))
                    )
                    pass
                except omero.ServerError as VAR_30:
                    VAR_1.debug("(VAR_6 %i) ignoring Server Error: %s" % (VAR_318.id, str(VAR_30)))
            return VAR_54
        else:
            # fix: was `VAR_121=xtra` -- undefined name; kwarg is `xtra`
            return [VAR_30.simpleMarshal(xtra=VAR_121, parents=VAR_136["parents"]) for VAR_30 in VAR_283]

    VAR_54 = timeit(FUNC_68)()
    VAR_1.debug(VAR_54)
    return VAR_54
@require_POST
@login_required()
def FUNC_37(VAR_2, VAR_6, VAR_8=None, **VAR_9):
    """Save current rendering settings for image VAR_6 and refresh its cache.

    Returns "true"/"false" (optionally JSONP-wrapped via ?callback=).
    """
    VAR_19 = VAR_2.session["connector"].server_id
    # fix: keyword values were the undefined names `server_id` / `conn`
    VAR_64 = FUNC_13(
        VAR_2, VAR_6, VAR_19=VAR_19, VAR_8=VAR_8, VAR_20=True
    )
    if VAR_64 is None:
        VAR_139 = "false"
    else:
        VAR_58 = VAR_64[0]._conn.getEventContext().userId
        # drop stale cached renderings, then re-prime the thumbnail
        webgateway_cache.invalidateObject(VAR_19, VAR_58, VAR_64[0])
        VAR_64[0].getThumbnail()
        VAR_139 = "true"
    if VAR_2.GET.get("callback", None):
        # fix: "%VAR_3" was an invalid conversion spec; restored "%s"
        VAR_139 = "%s(%s)" % (VAR_2.GET["callback"], VAR_139)
    return HttpJavascriptResponse(VAR_139)
@login_required()
@FUNC_20
def FUNC_38(VAR_2, VAR_8=None, **VAR_9):
    """List available LUT scripts, sorted by name, with their PNG indices."""
    VAR_137 = VAR_8.getScriptService()
    # fix: mimetype string was mangled; LUT scripts use "text/x-lut"
    VAR_138 = VAR_137.getScriptsByMimetype("text/x-lut")
    VAR_54 = []
    for lut in VAR_138:
        VAR_216 = lut.path.val + lut.name.val
        # -1 marks LUTs without an entry in the pre-rendered PNG strip
        VAR_217 = LUTS_IN_PNG.index(VAR_216) if VAR_216 in LUTS_IN_PNG else -1
        VAR_54.append(
            {
                "id": lut.id.val,
                "path": lut.path.val,
                "name": lut.name.val,
                "size": unwrap(lut.size),
                "png_index": VAR_217,
            }
        )
    # fix: was `sort(VAR_79=lambda VAR_30: x[...])` -- list.sort kwarg is
    # `key` and `x` was an undefined name
    VAR_54.sort(key=lambda VAR_30: VAR_30["name"].lower())
    return {"luts": VAR_54, "png_luts": LUTS_IN_PNG}
@login_required()
def FUNC_39(VAR_2, VAR_6, VAR_8=None, **VAR_9):
    """Return (as JSON/JSONP) ids of images in the same project that are
    render-compatible with image VAR_6 (same pixel type, channel count and
    sorted channel labels)."""
    VAR_139 = "false"
    VAR_53 = VAR_2.GET
    if VAR_8 is None:
        VAR_92 = None
    else:
        VAR_92 = VAR_8.getObject("Image", VAR_6)
    if VAR_92 is not None:
        VAR_115 = []
        for VAR_132 in VAR_92.getProject().listChildren():
            VAR_115.extend(VAR_132.listChildren())
        VAR_218 = VAR_92.getPrimaryPixels().getPixelsType().getValue()
        VAR_219 = VAR_92.getSizeC()
        VAR_220 = [VAR_30.getLabel() for VAR_30 in VAR_92.getChannels()]
        VAR_220.sort()

        def FUNC_76(VAR_212):
            # predicate: True when VAR_212 is compatible with the source image
            if VAR_178(VAR_212.getId()) == VAR_178(VAR_6):
                return False
            VAR_285 = VAR_212.getPrimaryPixels()
            if (
                VAR_285 is None
                or VAR_212.getPrimaryPixels().getPixelsType().getValue() != VAR_218
                or VAR_212.getSizeC() != VAR_219
            ):
                return False
            VAR_286 = [VAR_30.getLabel() for VAR_30 in VAR_212.getChannels()]
            VAR_286.sort()
            if VAR_286 != VAR_220:
                return False
            return True

        VAR_115 = filter(FUNC_76, VAR_115)
        VAR_139 = json.dumps([VAR_30.getId() for VAR_30 in VAR_115])
    if VAR_53.get("callback", None):
        # fix: "%VAR_3" was an invalid conversion spec; restored "%s"
        VAR_139 = "%s(%s)" % (VAR_53["callback"], VAR_139)
    return HttpJavascriptResponse(VAR_139)
@require_POST
@login_required()
@FUNC_20
def FUNC_40(VAR_2, VAR_35=False, VAR_8=None, **VAR_9):
    """Reset rendering settings on the POSTed target objects.

    VAR_35 selects resetDefaultsByOwnerInSet over resetDefaultsInSet.
    """
    VAR_53 = VAR_2.POST
    VAR_140 = VAR_53.getlist("toids")
    VAR_141 = str(VAR_53.get("to_type", "image"))
    # fix: was `to_type.title()` -- undefined name; title-case the held value
    VAR_141 = VAR_141.title()
    if VAR_141 == "Acquisition":
        VAR_141 = "PlateAcquisition"
    if len(VAR_140) == 0:
        raise Http404(
            "Need to specify objects in VAR_2, E.g."
            " ?totype=VAR_128&VAR_140=1&VAR_140=2"
        )
    # renamed loop var: `id` shadowed the builtin
    VAR_140 = [int(VAR_30) for VAR_30 in VAR_140]
    VAR_142 = VAR_8.getRenderingSettingsService()
    VAR_8.SERVICE_OPTS.setOmeroGroup("-1")
    VAR_143 = VAR_8.getObject(VAR_141, VAR_140[0])
    if VAR_143 is not None:
        # switch to the group of the first target so the service call succeeds
        VAR_221 = VAR_143.getDetails().group.id.val
        VAR_8.SERVICE_OPTS.setOmeroGroup(VAR_221)
    if VAR_35:
        VAR_54 = VAR_142.resetDefaultsByOwnerInSet(VAR_141, VAR_140, VAR_8.SERVICE_OPTS)
    else:
        VAR_54 = VAR_142.resetDefaultsInSet(VAR_141, VAR_140, VAR_8.SERVICE_OPTS)
    return VAR_54
@login_required()
@FUNC_20
def FUNC_41(VAR_2, VAR_8=None, **VAR_9):
    """Copy/apply rendering settings from a source image to target objects.

    GET stashes either the source image id ("fromid") or an explicit rdef
    dict in the session and returns True; POST applies the stashed (or
    posted) settings to the "toids" targets and invalidates their caches.
    """
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_139 = False
    VAR_144 = VAR_2.GET.get("fromid", None)
    VAR_140 = VAR_2.POST.getlist("toids")
    VAR_141 = str(VAR_2.POST.get("to_type", "image"))
    VAR_145 = None
    if VAR_141 not in ("dataset", "plate", "acquisition"):
        VAR_141 = "Image"  # default is image
    if VAR_144 is not None and len(VAR_140) == 0:
        # stash the source image only; clear any stashed rdef
        VAR_2.session.modified = True
        VAR_2.session["fromid"] = VAR_144
        if VAR_2.session.get("rdef") is not None:
            del VAR_2.session["rdef"]
        return True
    VAR_53 = VAR_2.GET or VAR_2.POST
    if VAR_53.get("c") is not None:
        VAR_145 = {"c": str(VAR_53.get("c"))}  # channels
        if VAR_53.get("maps"):
            try:
                VAR_145["maps"] = json.loads(VAR_53.get("maps"))
            except Exception:
                pass
        if VAR_53.get("pixel_range"):
            VAR_145["pixel_range"] = str(VAR_53.get("pixel_range"))
        if VAR_53.get("m"):
            VAR_145["m"] = str(VAR_53.get("m"))  # model (grey)
        if VAR_53.get("z"):
            VAR_145["z"] = str(VAR_53.get("z"))  # z & t position
        if VAR_53.get("t"):
            VAR_145["t"] = str(VAR_53.get("t"))
    VAR_36 = VAR_2.GET.get("imageId", VAR_2.POST.get("imageId", None))
    if VAR_36:
        VAR_145["imageId"] = int(VAR_36)
    if VAR_2.method == "GET":
        # stash the explicit rdef; clear any stashed source image id
        VAR_2.session.modified = True
        VAR_2.session["rdef"] = VAR_145
        if VAR_2.session.get("fromid") is not None:
            del VAR_2.session["fromid"]
        return True
    if VAR_144 is None:
        VAR_144 = VAR_2.session.get("fromid", None)

    def FUNC_69(VAR_15):
        # capture the image's current settings so they can be restored later
        VAR_54 = {}
        VAR_222 = []
        VAR_223 = []
        for VAR_212, ch in enumerate(VAR_15.getChannels()):
            VAR_287 = "" if ch.isActive() else "-"
            VAR_288 = ch.getWindowStart()
            VAR_289 = ch.getWindowEnd()
            VAR_68 = ch.getLut()
            VAR_223.append({"inverted": {"enabled": ch.isInverted()}})
            if not VAR_68 or len(VAR_68) == 0:
                VAR_68 = ch.getColor().getHtml()
            # fix: "%VAR_3" were invalid conversion specs; restored "%s"
            VAR_222.append("%s%s|%s:%s$%s" % (VAR_287, VAR_212 + 1, VAR_288, VAR_289, VAR_68))
        VAR_54["c"] = ",".join(VAR_222)
        VAR_54["maps"] = VAR_223
        VAR_54["m"] = "g" if VAR_15.isGreyscaleRenderingModel() else "c"
        VAR_54["z"] = VAR_15.getDefaultZ() + 1
        VAR_54["t"] = VAR_15.getDefaultT() + 1
        return VAR_54

    def FUNC_70(VAR_15, VAR_145):
        # apply an rdef dict to the image and save as its defaults
        VAR_107 = FUNC_12(VAR_145, "inverted", VAR_15.getSizeC())
        VAR_50, VAR_51, VAR_52 = FUNC_2(VAR_145["c"])
        VAR_15.setActiveChannels(VAR_50, VAR_51, VAR_52, VAR_107)
        if VAR_145["m"] == "g":
            VAR_15.setGreyscaleRenderingModel()
        else:
            VAR_15.setColorRenderingModel()
        if "z" in VAR_145:
            VAR_15._re.setDefaultZ(VAR_178(VAR_145["z"]) - 1)
        if "t" in VAR_145:
            VAR_15._re.setDefaultT(VAR_178(VAR_145["t"]) - 1)
        VAR_15.saveDefaults()

    if VAR_145 is None:
        VAR_145 = VAR_2.session.get("rdef")
    if VAR_2.method == "POST":
        VAR_224 = None
        VAR_225 = None
        if VAR_144 is None:
            # apply the stashed rdef to its own image first, remembering the
            # previous settings so they can be rolled back afterwards
            if VAR_145 is not None and len(VAR_140) > 0:
                VAR_225 = VAR_8.getObject("Image", VAR_145["imageId"])
                if VAR_225 is not None:
                    VAR_224 = FUNC_69(VAR_225)
                    FUNC_70(VAR_225, VAR_145)
                    VAR_144 = VAR_225.getId()
        try:
            VAR_144 = VAR_178(VAR_144)
            VAR_140 = [VAR_178(VAR_30) for VAR_30 in VAR_140]
        except TypeError:
            VAR_144 = None
        except ValueError:
            VAR_144 = None
        if VAR_144 is not None and len(VAR_140) > 0:
            VAR_290 = VAR_8.getObject("Image", VAR_144)
            VAR_291 = VAR_290.getOwner().getId()
            VAR_139 = VAR_8.applySettingsToSet(VAR_144, VAR_141, VAR_140)
            if VAR_139 and True in VAR_139:
                for VAR_6 in VAR_139[True]:
                    VAR_92 = VAR_8.getObject("Image", VAR_6)
                    VAR_92 is not None and webgateway_cache.invalidateObject(
                        VAR_19, VAR_291, VAR_92
                    )
            if VAR_141 == "Image" and VAR_144 not in VAR_140:
                # source was not a target: restore its original settings
                if VAR_224 is not None and VAR_225 is not None:
                    FUNC_70(VAR_225, VAR_224)
        return VAR_139
    else:
        return HttpResponseNotAllowed(["POST"])
@login_required()
@FUNC_20
def FUNC_42(VAR_2, VAR_8=None, **VAR_9):
    """Return the rendering-def stashed in the session (or derived from the
    stashed source image) as {"rdef": ...}."""
    VAR_145 = VAR_2.session.get("rdef")
    VAR_15 = None
    if VAR_145 is None:
        VAR_144 = VAR_2.session.get("fromid", None)
        if VAR_144 is not None:
            VAR_15 = VAR_8.getObject("Image", VAR_144)
        if VAR_15 is not None:
            # fix: keyword value was the undefined name `request`
            VAR_54 = imageMarshal(VAR_15, request=VAR_2)
            VAR_222 = []
            VAR_223 = []
            for VAR_212, ch in enumerate(VAR_54["channels"]):
                # inactive channels are prefixed with "-"
                VAR_287 = ch["active"] and str(VAR_212 + 1) or "-%s" % (VAR_212 + 1)
                VAR_68 = ch.get("lut") or ch["color"]
                # fix: "%VAR_3" were invalid conversion specs; restored "%s"
                VAR_222.append(
                    "%s|%s:%s$%s"
                    % (VAR_287, ch["window"]["start"], ch["window"]["end"], VAR_68)
                )
                VAR_223.append(
                    {
                        "inverted": {"enabled": ch["inverted"]},
                        "quantization": {
                            "coefficient": ch["coefficient"],
                            "family": ch["family"],
                        },
                    }
                )
            VAR_145 = {
                "c": (",".join(VAR_222)),
                "m": VAR_54["rdefs"]["model"],
                "pixel_range": "%s:%s" % (VAR_54["pixel_range"][0], VAR_54["pixel_range"][1]),
                "maps": VAR_223,
            }
    return {"rdef": VAR_145}
@login_required()
def FUNC_43(VAR_2, VAR_6, VAR_8=None, **VAR_9):
    """Render the full image-viewer page for image VAR_6, including optional
    OpenGraph/Twitter share metadata."""
    VAR_19 = VAR_2.session["connector"].server_id
    VAR_146 = Server.get(VAR_19).server
    VAR_147 = FUNC_3(VAR_2)
    VAR_55 = VAR_2.session.get("server_settings", {}).get("viewer", {})
    VAR_148 = VAR_55.get("interpolate_pixels", True)
    VAR_149 = VAR_55.get("roi_limit", 2000)
    try:
        VAR_15 = VAR_8.getObject("Image", VAR_6)
        if VAR_15 is None:
            # fix: "%VAR_3" was an invalid conversion spec; restored "%s"
            VAR_1.debug("(VAR_167)Image %s not found..." % (str(VAR_6)))
            raise Http404
        VAR_226 = None
        VAR_227 = None
        VAR_228 = None
        VAR_229 = None
        if hasattr(settings, "SHARING_OPENGRAPH"):
            VAR_226 = settings.SHARING_OPENGRAPH.get(VAR_146)
            VAR_1.debug("Open Graph VAR_308: %s", VAR_226)
        if hasattr(settings, "SHARING_TWITTER"):
            VAR_227 = settings.SHARING_TWITTER.get(VAR_146)
            VAR_1.debug("Twitter VAR_308: %s", VAR_227)
        if VAR_226 or VAR_227:
            VAR_292 = {"iid": VAR_6}
            VAR_120 = VAR_9.get("thumbprefix", "webgateway_render_thumbnail")
            # fix: was `VAR_9=VAR_292` -- django.urls.reverse kwarg is `kwargs`
            VAR_228 = VAR_2.build_absolute_uri(reverse(VAR_120, kwargs=VAR_292))
            VAR_229 = VAR_2.build_absolute_uri(
                reverse("webgateway_full_viewer", kwargs=VAR_292)
            )
        VAR_213 = {
            "blitzcon": VAR_8,
            "image": VAR_15,
            "opts": VAR_147,
            "interpolate": VAR_148,
            "build_year": build_year,
            "roiLimit": VAR_149,
            "roiCount": VAR_15.getROICount(),
            "viewport_server": VAR_9.get(
                "viewport_server",
                reverse("webgateway"),
            ).rstrip("/"),
            "opengraph": VAR_226,
            "twitter": VAR_227,
            "image_preview": VAR_228,
            "page_url": VAR_229,
            "object": "image:%i" % int(VAR_6),
        }
        VAR_230 = VAR_9.get("template", "webgateway/viewport/omero_image.html")
        VAR_61 = render(VAR_2, VAR_230, VAR_213)
    except omero.SecurityViolation:
        VAR_1.warn("SecurityViolation in Image:%s", VAR_6)
        VAR_1.warn(traceback.format_exc())
        raise Http404
    return HttpResponse(VAR_61)
@login_required()
def FUNC_44(VAR_2, VAR_6=None, VAR_8=None, **VAR_9):
    """Download rendered image(s) as jpeg/png/tif; multiple images are zipped.

    Images come from VAR_6, ?image=... ids or ?well=... ids (using ?index=
    to pick the well sample).
    """
    VAR_114 = VAR_2.GET.get("format", "png")
    if VAR_114 not in ("jpeg", "png", "tif"):
        VAR_114 = "png"
    VAR_150 = []
    VAR_151 = []
    if VAR_6 is None:
        VAR_150 = VAR_2.GET.getlist("image")
        if len(VAR_150) == 0:
            VAR_151 = VAR_2.GET.getlist("well")
            if len(VAR_151) == 0:
                return HttpResponseServerError(
                    "No VAR_152 or VAR_153 specified in VAR_2."
                    " Use ?VAR_15=123 or ?VAR_119=123"
                )
    else:
        VAR_150 = [VAR_6]
    VAR_152 = []
    if VAR_150:
        VAR_152 = list(VAR_8.getObjects("Image", VAR_150))
    elif VAR_151:
        try:
            VAR_319 = int(VAR_2.GET.get("index", 0))
        except ValueError:
            VAR_319 = 0
        for VAR_10 in VAR_8.getObjects("Well", VAR_151):
            VAR_152.append(VAR_10.getWellSample(VAR_319).image())
    if len(VAR_152) == 0:
        # fix: "%VAR_3" were invalid conversion specs; restored "%s"
        VAR_231 = "Cannot download as %s. Images (ids: %s) not found." % (VAR_114, VAR_150)
        VAR_1.debug(VAR_231)
        return HttpResponseServerError(VAR_231)
    if len(VAR_152) == 1:
        VAR_60 = VAR_152[0].renderJpeg()
        if VAR_60 is None:
            raise Http404
        # fix: was `VAR_279="image/jpeg"` -- HttpResponse kwarg is content_type
        VAR_61 = HttpResponse(VAR_60, content_type="image/jpeg")
        VAR_61["Content-Length"] = len(VAR_60)
        VAR_61["Content-Disposition"] = "attachment; filename=%s.jpg" % (
            VAR_152[0].getName().replace(" ", "_")
        )
    else:
        VAR_232 = tempfile.NamedTemporaryFile(suffix=".download_as")

        def FUNC_77(VAR_233, VAR_234, VAR_235):
            # build a unique "<name>.<ext>" path inside VAR_235
            VAR_17 = os.path.basename(VAR_233)
            VAR_293 = "%s.%s" % (VAR_17, VAR_234)
            VAR_293 = os.path.join(VAR_235, VAR_293)
            VAR_212 = 1
            VAR_17 = VAR_293[: -(len(VAR_234) + 1)]
            while os.path.exists(VAR_293):
                # fix: loop assigned undefined `imgName`, never advancing
                # VAR_293 -- an existing file caused an infinite loop
                VAR_293 = "%s_(%d).%s" % (VAR_17, VAR_212, VAR_234)
                VAR_212 += 1
            return VAR_293

        try:
            VAR_294 = tempfile.mkdtemp()
            VAR_1.debug("download_as dir: %s" % VAR_294)
            try:
                for VAR_92 in VAR_152:
                    VAR_22 = VAR_23 = None
                    try:
                        VAR_331 = VAR_92.renderImage(VAR_22, VAR_23)
                        VAR_332 = FUNC_77(VAR_92.getName(), VAR_114, VAR_294)
                        VAR_331.save(VAR_332)
                    finally:
                        VAR_92._re.close()
                VAR_320 = zipfile.ZipFile(VAR_232, "w", zipfile.ZIP_DEFLATED)
                try:
                    VAR_324 = os.path.join(VAR_294, "*")
                    for VAR_17 in glob.glob(VAR_324):
                        VAR_320.write(VAR_17, os.path.basename(VAR_17))
                finally:
                    VAR_320.close()
            finally:
                shutil.rmtree(VAR_294, ignore_errors=True)
            VAR_240 = VAR_2.GET.get("zipname", "Download_as_%s" % VAR_114)
            VAR_240 = VAR_240.replace(" ", "_")
            if not VAR_240.endswith(".zip"):
                VAR_240 = "%s.zip" % VAR_240
            VAR_61 = StreamingHttpResponse(FileWrapper(VAR_232))
            VAR_61["Content-Length"] = VAR_232.tell()
            VAR_61["Content-Disposition"] = "attachment; filename=%s" % VAR_240
            VAR_232.seek(0)
        except Exception:
            VAR_232.close()
            VAR_321 = traceback.format_exc()
            VAR_1.error(VAR_321)
            return HttpResponseServerError("Cannot download file (id:%s)" % VAR_6)
    VAR_61["Content-Type"] = "application/force-download"
    return VAR_61
@login_required(doConnectionCleanup=False)
def FUNC_45(VAR_2, VAR_6=None, VAR_8=None, **VAR_9):
VAR_150 = []
VAR_151 = []
VAR_150 = VAR_2.GET.getlist("image")
VAR_151 = VAR_2.GET.getlist("well")
if VAR_6 is None:
if len(VAR_150) == 0 and len(VAR_151) == 0:
return HttpResponseServerError(
"No VAR_152 or VAR_153 specified in VAR_2."
" Use ?VAR_15=123 or ?VAR_119=123"
)
else:
VAR_150 = [VAR_6]
VAR_152 = list()
VAR_153 = list()
if VAR_150:
VAR_152 = list(VAR_8.getObjects("Image", VAR_150))
elif VAR_151:
try:
VAR_319 = int(VAR_2.GET.get("index", 0))
except ValueError:
VAR_319 = 0
VAR_153 = VAR_8.getObjects("Well", VAR_151)
for VAR_10 in VAR_153:
VAR_152.append(VAR_10.getWellSample(VAR_319).image())
if len(VAR_152) == 0:
VAR_236 = (
"Cannot download archived file because Images not "
"found (ids: %VAR_3)" % (VAR_150)
)
VAR_1.debug(VAR_236)
return HttpResponseServerError(VAR_236)
for ob in VAR_153:
if hasattr(ob, "canDownload"):
if not ob.canDownload():
return HttpResponseNotFound()
for ob in VAR_152:
VAR_119 = None
try:
VAR_119 = ob.getParent().getParent()
except Exception:
if hasattr(ob, "canDownload"):
if not ob.canDownload():
return HttpResponseNotFound()
else:
if VAR_119 and isinstance(VAR_119, omero.gateway.WellWrapper):
if hasattr(VAR_119, "canDownload"):
if not VAR_119.canDownload():
return HttpResponseNotFound()
VAR_154 = {}
for VAR_15 in VAR_152:
for VAR_28 in VAR_15.getImportedImageFiles():
VAR_154[VAR_28.getId()] = VAR_28
VAR_155 = list(VAR_154.values())
if len(VAR_155) == 0:
VAR_236 = (
"Tried downloading archived VAR_155 from VAR_15 with no" " VAR_155 archived."
)
VAR_1.debug(VAR_236)
return HttpResponseServerError(VAR_236)
if len(VAR_155) == 1:
VAR_237 = VAR_155[0]
VAR_61 = ConnCleaningHttpResponse(
VAR_237.getFileInChunks(buf=settings.CHUNK_SIZE)
)
VAR_61.conn = VAR_8
VAR_61["Content-Length"] = VAR_237.getSize()
VAR_238 = VAR_237.getName().replace(" ", "_").replace(",", ".")
VAR_61["Content-Disposition"] = "attachment; filename=%s" % (VAR_238)
else:
VAR_239 = sum(VAR_28.size for VAR_28 in VAR_155)
if VAR_239 > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
VAR_236 = (
"Total VAR_7 of VAR_155 %VAR_213 is larger than %VAR_213. "
"Try requesting fewer VAR_155."
% (VAR_239, settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE)
)
VAR_1.warn(VAR_236)
return HttpResponseForbidden(VAR_236)
VAR_232 = tempfile.NamedTemporaryFile(suffix=".archive")
VAR_240 = VAR_2.GET.get("zipname", VAR_15.getName())
try:
VAR_240 = zip_archived_files(VAR_152, VAR_232, VAR_240, buf=settings.CHUNK_SIZE)
VAR_295 = FileWrapper(VAR_232)
VAR_61 = ConnCleaningHttpResponse(VAR_295)
VAR_61.conn = VAR_8
VAR_61["Content-Length"] = VAR_232.tell()
VAR_61["Content-Disposition"] = "attachment; filename=%s" % VAR_240
VAR_232.seek(0)
except Exception:
VAR_232.close()
VAR_236 = "Cannot download file (id:%VAR_3)" % (VAR_6)
VAR_1.error(VAR_236, exc_info=True)
return HttpResponseServerError(VAR_236)
VAR_61["Content-Type"] = "application/force-download"
return VAR_61
@login_required()
@FUNC_20
def FUNC_46(VAR_2, VAR_6, VAR_8=None, **VAR_9):
VAR_15 = VAR_8.getObject("Image", VAR_6)
if VAR_15 is None:
raise Http404
VAR_156 = VAR_15.getImportedImageFilePaths()
return {"repo": VAR_156["server_paths"], "client": VAR_156["client_paths"]}
@login_required()
@FUNC_20
def FUNC_47(VAR_2, VAR_13, VAR_14, VAR_8=None, **VAR_9):
VAR_13 = int(VAR_13)
VAR_14 = int(VAR_14)
VAR_65 = VAR_8.getQueryService().findByQuery(
"select VAR_65 from Roi as VAR_241 "
"join VAR_241.shapes as VAR_65 "
"where VAR_241.id = %VAR_213 and VAR_65.id = %d" % (VAR_13, VAR_14),
None,
)
VAR_1.debug("Shape: %r" % VAR_65)
if VAR_65 is None:
VAR_1.debug("No such VAR_65: %r" % VAR_14)
raise Http404
return JsonResponse(shapeMarshal(VAR_65))
@login_required()
@FUNC_20
def FUNC_48(VAR_2, VAR_36, VAR_8=None, **VAR_9):
VAR_157 = []
VAR_158 = VAR_8.getRoiService()
VAR_62 = VAR_158.findByImage(VAR_178(VAR_36), None, VAR_8.SERVICE_OPTS)
for VAR_53 in VAR_62.rois:
VAR_241 = {}
roi["id"] = VAR_53.getId().getValue()
VAR_63 = []
for VAR_3 in VAR_53.copyShapes():
if VAR_3 is None: # seems possible in some situations
continue
VAR_63.append(shapeMarshal(VAR_3))
VAR_63.sort(VAR_79=lambda VAR_30: "%03d%03d" % (VAR_30.get("theZ", -1), VAR_30.get("theT", -1)))
VAR_241["shapes"] = VAR_63
VAR_157.append(VAR_241)
VAR_157.sort(VAR_79=lambda VAR_30: x["id"])
return VAR_157
@login_required()
def FUNC_49(VAR_2, VAR_6, VAR_37, VAR_8=None, **VAR_9):
VAR_15 = VAR_8.getObject("Image", VAR_6)
if VAR_15 is None:
raise Http404
VAR_159, VAR_160 = VAR_8.getMaxPlaneSize()
VAR_86 = VAR_15.getSizeX()
VAR_87 = VAR_15.getSizeY()
if (VAR_86 * VAR_87) > (VAR_159 * VAR_160):
VAR_231 = "Histogram not supported for 'big' VAR_152 (over %VAR_3 * %VAR_3 pixels)" % (
VAR_159,
VAR_160,
)
return JsonResponse({"error": VAR_231})
VAR_74 = int(VAR_2.GET.get("theZ", 0))
VAR_73 = int(VAR_2.GET.get("theT", 0))
VAR_37 = int(VAR_37)
VAR_161 = int(VAR_2.GET.get("bins", 256))
VAR_162 = VAR_15.getHistogram([VAR_37], VAR_161, VAR_74=theZ, VAR_73=theT)
VAR_163 = VAR_162[VAR_37]
return JsonResponse({"data": VAR_163})
@login_required(FUNC_58=True)
@FUNC_20
def FUNC_50(VAR_2, VAR_38, VAR_8=None, **VAR_9):
if VAR_2.method == "POST":
VAR_8.setGroupNameForSession("system")
VAR_175 = VAR_2.session["connector"]
VAR_175 = Connector(VAR_175.server_id, VAR_175.is_secure)
VAR_242 = VAR_8.getSessionService().getSession(VAR_8._sessionUuid)
VAR_243 = VAR_242.getTimeToIdle().val
VAR_175.omero_session_key = VAR_8.suConn(VAR_38, VAR_243=ttl)._sessionUuid
VAR_2.session["connector"] = VAR_175
VAR_8.revertGroupForSession()
VAR_8.close()
return True
else:
VAR_244 = {
"url": reverse("webgateway_su", VAR_116=[VAR_38]),
"submit": "Do you want to FUNC_50 to %s" % VAR_38,
}
VAR_230 = "webgateway/base/includes/post_form.html"
return render(VAR_2, VAR_230, VAR_244)
def FUNC_51(VAR_2, VAR_39, VAR_40, VAR_8=None, **VAR_9):
warnings.warn("Deprecated. Use FUNC_52()", DeprecationWarning)
return FUNC_52(VAR_2, VAR_39, VAR_40, VAR_8, **VAR_9)
def FUNC_52(VAR_2, VAR_39, VAR_40, VAR_8=None, **VAR_9):
VAR_164 = VAR_8.getQueryService()
VAR_39 = objtype.split(".")
VAR_66 = omero.sys.ParametersI()
VAR_66.addId(VAR_40)
VAR_66.addString("ns", NSBULKANNOTATIONS)
VAR_66.addString("mt", "OMERO.tables")
VAR_43 = "select obj0 from %VAR_3 obj0\n" % VAR_39[0]
for VAR_212, VAR_23 in enumerate(VAR_39[1:]):
VAR_43 += "join fetch VAR_202%VAR_213.%VAR_3 VAR_202%VAR_213\n" % (VAR_212, VAR_23, VAR_212 + 1)
VAR_43 += """
left outer join fetch obj0.annotationLinks VAR_165
left outer join fetch VAR_165.child as VAR_28
left outer join fetch VAR_165.parent
left outer join fetch VAR_28.file
join fetch VAR_165.details.owner
join fetch VAR_165.details.creationEvent
where VAR_202%VAR_213.id=:id and
(VAR_28.ns=:ns or VAR_28.file.mimetype=:mt)""" % (
len(VAR_39) - 1
)
VAR_24 = VAR_8.createServiceOptsDict()
VAR_24.setOmeroGroup("-1")
try:
VAR_245 = VAR_164.findAllByQuery(VAR_43, VAR_66, VAR_24)
except omero.QueryException:
return dict(VAR_176="%VAR_3 cannot be queried" % VAR_39, VAR_43=query)
VAR_162 = []
VAR_165 = [link for VAR_202 in VAR_245 for link in VAR_202.copyAnnotationLinks()]
for link in VAR_165:
VAR_246 = link.child
if not isinstance(VAR_246, omero.model.FileAnnotation):
continue
VAR_247 = VAR_246.details.owner
VAR_248 = "%VAR_3 %s" % (unwrap(VAR_247.firstName), unwrap(VAR_247.lastName))
VAR_249 = link.details.owner
VAR_250 = "%VAR_3 %s" % (unwrap(VAR_249.firstName), unwrap(VAR_249.lastName))
VAR_162.append(
dict(
id=VAR_246.id.val,
file=VAR_246.file.id.val,
parentType=VAR_39[0],
parentId=link.parent.id.val,
VAR_247=VAR_248,
VAR_249=VAR_250,
addedOn=unwrap(link.details.creationEvent._time),
)
)
return dict(VAR_162=data)
VAR_41 = login_required()(FUNC_20(FUNC_52))
def FUNC_53(VAR_2, VAR_42, VAR_8=None, VAR_43=None, VAR_44=False, **VAR_9):
if VAR_43 is None:
VAR_43 = VAR_2.GET.get("query")
if not VAR_43:
return dict(VAR_176="Must specify VAR_43 parameter, use * to retrieve all")
VAR_166 = VAR_2.GET.getlist("col_names")
VAR_24 = VAR_8.createServiceOptsDict()
VAR_24.setOmeroGroup("-1")
VAR_53 = VAR_8.getSharedResources()
VAR_23 = VAR_53.openTable(omero.model.OriginalFileI(VAR_42), VAR_24)
if not VAR_23:
return dict(VAR_176="Table %VAR_3 not found" % VAR_42)
try:
VAR_251 = VAR_23.getHeaders()
VAR_252 = range(len(VAR_251))
if VAR_166:
VAR_296 = (
[(VAR_212, j) for (VAR_212, j) in enumerate(VAR_251) if j.name in VAR_166]
if VAR_166
else [(VAR_212, j) for (VAR_212, j) in enumerate(VAR_251)]
)
cols = []
VAR_252 = []
for col_name in VAR_166:
for (VAR_212, j) in VAR_296:
if col_name == j.name:
VAR_252.append(VAR_212)
VAR_251.append(j)
break
VAR_253 = VAR_23.getNumberOfRows()
VAR_254 = VAR_9.get("offset", 0)
VAR_255 = VAR_9.get("limit", None)
if not VAR_254:
VAR_254 = int(VAR_2.GET.get("offset", 0))
if not VAR_255:
VAR_255 = (
int(VAR_2.GET.get("limit"))
if VAR_2.GET.get("limit") is not None
else VAR_253
)
VAR_256 = VAR_254
VAR_257 = VAR_255
VAR_258 = min(VAR_253, VAR_256 + VAR_257)
if VAR_43 == "*":
VAR_297 = range(VAR_256, VAR_258)
VAR_298 = VAR_253
else:
VAR_299 = re.match(r"^(\VAR_10+)-(\VAR_213+)", VAR_43)
if VAR_299:
VAR_43 = "(%VAR_3==%s)" % (VAR_299.group(1), VAR_299.group(2))
try:
VAR_1.info(VAR_43)
VAR_297 = VAR_23.getWhereList(VAR_43, None, 0, VAR_253, 1)
VAR_298 = len(VAR_297)
hits = VAR_297[VAR_256:VAR_258]
except Exception:
return dict(VAR_176="Error executing VAR_43: %s" % VAR_43)
def FUNC_78(VAR_259, VAR_11):
VAR_300 = 0
VAR_301 = 1000
while VAR_300 < len(VAR_11):
VAR_301 = min(VAR_301, len(VAR_11) - VAR_300)
VAR_322 = VAR_259.slice(VAR_252, VAR_11[VAR_300 : idx + VAR_301])
VAR_300 += VAR_301
yield [
[col.values[row] for col in VAR_322.columns]
for row in range(0, len(VAR_322.rowNumbers))
]
VAR_260 = FUNC_78(VAR_23, VAR_297)
VAR_261 = {
"data": {
"column_types": [col.__class__.__name__ for col in VAR_251],
"columns": [col.name for col in VAR_251],
},
"meta": {
"rowCount": VAR_253,
"totalCount": VAR_298,
"limit": VAR_255,
"offset": VAR_254,
},
}
if not VAR_44:
VAR_302 = []
for VAR_253 in list(VAR_260):
VAR_302.extend(VAR_253)
VAR_261["data"]["rows"] = VAR_302
else:
VAR_261["data"]["lazy_rows"] = VAR_260
VAR_261["table"] = VAR_23
return VAR_261
finally:
if not VAR_44:
VAR_23.close()
VAR_45 = login_required()(FUNC_20(FUNC_53))
def FUNC_54(VAR_2, VAR_42, VAR_8=None, VAR_43=None, VAR_44=False, **VAR_9):
VAR_24 = VAR_8.createServiceOptsDict()
VAR_24.setOmeroGroup("-1")
VAR_53 = VAR_8.getSharedResources()
VAR_23 = VAR_53.openTable(omero.model.OriginalFileI(VAR_42), VAR_24)
if not VAR_23:
return dict(VAR_176="Table %VAR_3 not found" % VAR_42)
try:
VAR_251 = VAR_23.getHeaders()
VAR_253 = VAR_23.getNumberOfRows()
VAR_261 = {
"columns": [
{
"name": col.name,
"description": col.description,
"type": col.__class__.__name__,
}
for col in VAR_251
],
"totalCount": VAR_253,
}
return VAR_261
finally:
if not VAR_44:
VAR_23.close()
VAR_46 = login_required()(FUNC_20(FUNC_54))
@login_required()
@FUNC_20
def FUNC_55(VAR_2, VAR_39, VAR_40, VAR_8=None, **VAR_9):
VAR_167 = FUNC_52(VAR_2, VAR_39, VAR_40, VAR_8, **VAR_9)
if "error" in VAR_167:
return VAR_167
if len(VAR_167["data"]) < 1:
return dict(VAR_176="Could not retrieve bulk VAR_41 table")
VAR_168 = 0
VAR_169 = None
VAR_170 = sorted(VAR_167["data"], VAR_79=lambda VAR_30: x["file"], reverse=True)
VAR_171 = None
for VAR_246 in VAR_170:
VAR_171 = FUNC_53(VAR_2, VAR_246["file"], VAR_8, **VAR_9)
if "error" not in VAR_171:
VAR_169 = VAR_246
VAR_168 = VAR_246["file"]
break
if VAR_169 is None:
return dict(
VAR_176=VAR_171.get(
"error", "Could not retrieve matching bulk VAR_246 table"
)
)
VAR_171["id"] = VAR_168
VAR_171["annId"] = VAR_169["id"]
VAR_171["owner"] = VAR_169["owner"]
VAR_171["addedBy"] = VAR_169["addedBy"]
VAR_171["parentType"] = VAR_169["parentType"]
VAR_171["parentId"] = VAR_169["parentId"]
VAR_171["addedOn"] = VAR_169["addedOn"]
return VAR_171
class CLASS_1(View):
VAR_172 = LoginForm
VAR_173 = "OMERO.webapi"
@method_decorator(sensitive_post_parameters("password", "csrfmiddlewaretoken"))
def FUNC_71(self, *VAR_116, **VAR_9):
return super(CLASS_1, self).dispatch(*VAR_116, **VAR_9)
def FUNC_72(self, VAR_2, VAR_174=None):
return JsonResponse(
{"message": ("POST only with VAR_304, VAR_305, " "server and csrftoken")},
VAR_315=405,
)
def FUNC_73(self, VAR_2, VAR_8, VAR_175):
VAR_203 = VAR_8.getEventContext()
VAR_24 = {}
for VAR_167 in [
"sessionId",
"sessionUuid",
"userId",
"userName",
"groupId",
"groupName",
"isAdmin",
"eventId",
"eventType",
"memberOfGroups",
"leaderOfGroups",
]:
if hasattr(VAR_203, VAR_167):
VAR_24[VAR_167] = getattr(VAR_203, VAR_167)
return JsonResponse({"success": True, "eventContext": VAR_24})
def FUNC_74(self, VAR_2, VAR_176=None, VAR_177=None):
if VAR_176 is None and VAR_177 is not None:
VAR_303 = []
for VAR_33 in VAR_177:
for VAR_318 in VAR_33.errors:
VAR_303.append("%VAR_3: %s" % (VAR_33.label, VAR_318))
VAR_176 = " ".join(VAR_303)
elif VAR_176 is None:
VAR_176 = "Login failed. Reason unknown."
return JsonResponse({"message": VAR_176}, VAR_315=403)
def FUNC_75(self, VAR_2, VAR_174=None):
VAR_176 = None
VAR_177 = self.form_class(VAR_2.POST.copy())
if VAR_177.is_valid():
VAR_304 = VAR_177.cleaned_data["username"]
VAR_305 = VAR_177.cleaned_data["password"]
VAR_19 = VAR_177.cleaned_data["server"]
VAR_306 = settings.SECURE
VAR_175 = Connector(VAR_19, VAR_306)
VAR_307 = True
if settings.CHECK_VERSION:
VAR_307 = VAR_175.check_version(self.useragent)
if (
VAR_19 is not None
and VAR_304 is not None
and VAR_305 is not None
and VAR_307
):
VAR_8 = VAR_175.create_connection(
self.useragent, VAR_304, VAR_305, userip=get_client_ip(VAR_2)
)
if VAR_8 is not None:
try:
VAR_2.session["connector"] = VAR_175
try:
VAR_335 = settings.UPGRADES_URL
except Exception:
VAR_335 = VAR_8.getUpgradesUrl()
upgradeCheck(url=VAR_335)
return self.handle_logged_in(VAR_2, VAR_8, VAR_175)
finally:
VAR_8.close(hard=False)
if not VAR_175.is_server_up(self.useragent):
VAR_176 = "Server is not responding," " please contact administrator."
elif not settings.CHECK_VERSION:
VAR_176 = (
"Connection not available, please check your"
" credentials and version compatibility."
)
else:
if not VAR_307:
VAR_176 = (
"Client version does not VAR_299 server,"
" please contact administrator."
)
else:
VAR_176 = settings.LOGIN_INCORRECT_CREDENTIALS_TEXT
return self.handle_not_logged_in(VAR_2, VAR_176, VAR_177)
@login_required()
@FUNC_20
def FUNC_56(VAR_2, VAR_47=None, VAR_8=None, **VAR_9):
try:
VAR_92 = VAR_8.getObject("Image", VAR_47)
if VAR_92 is None:
return {"error": "No VAR_15 with id " + str(VAR_47)}
return {"rdefs": VAR_92.getAllRenderingDefs()}
except Exception:
VAR_1.debug(traceback.format_exc())
return {"error": "Failed to retrieve rdefs"}
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
24,
39,
58,
63,
68,
71,
76,
78,
79,
80,
81,
87,
94,
99,
102,
112,
115,
120,
121,
125,
126,
129,
130,
136,
141,
145,
148,
151,
153,
157,
161,
163,
167,
171,
173,
177,
181,
183,
187,
191,
193,
197,
201,
203,
204,
205,
206,
207,
208,
209,
210,
211,
212,
213,
214,
215,
216,
217,
218,
219,
220,
221,
222,
223,
224,
225,
226,
231,
239,
244,
246,
248,
250,
258,
261,
262,
275,
276,
292,
300,
311,
312,
325,
326,
332,
341,
342,
346,
354,
357,
372,
402,
403,
409,
421,
422,
431,
432,
436,
441,
444,
449,
451,
457,
473,
476,
478,
479,
487,
488,
496,
499,
501,
506,
508,
509,
514,
518,
532,
534,
555,
602,
603,
605,
608,
611,
615,
616,
626,
635,
642,
647,
648,
649,
667,
668,
673,
674,
682,
683,
688,
696,
710,
719,
722,
723,
730,
732,
733,
741,
747,
756,
757,
761,
779,
782,
783,
784,
785,
786,
787,
788,
789,
790,
791,
792,
793,
794,
795,
796,
797,
798,
810,
814,
815,
821,
825,
829,
830,
840,
848,
856,
857,
866,
885,
887,
893,
898,
903,
908,
911,
916,
934,
935,
942,
951,
952,
953,
954,
955,
956,
958,
962,
966,
972,
974,
988,
991,
1000,
1027,
1038,
1039,
1048,
1051,
1052,
1061,
1080,
1085,
1093,
1110,
1111,
1123,
1172,
1174,
1191,
1198,
1251,
1252,
1253,
1254,
1269,
1270,
1275,
1286,
1287,
1301,
1307,
1314,
1315,
1323,
1337,
1338,
1351,
1355,
1359,
1360,
1368,
1390,
1391,
1395,
1399,
1410,
1412,
1413,
1418,
1422,
1440,
1444,
1445,
1446,
1450,
1451,
1452,
1465,
1467,
1468,
1479,
1489,
1505,
1506,
1516,
1526,
1538,
1539,
1546,
1552,
1558,
1566,
1567,
1574,
1580,
1586,
1589,
1593,
1594,
1607,
1612,
1614,
1618,
1621,
1628,
1629,
1636,
1647,
1670,
1679,
1680,
1687,
1699,
1700,
1707,
1713,
1718,
1721,
1727,
1728,
1735,
1741,
1747,
1750,
1752,
1759,
1762,
1777,
1778,
1785,
1791,
1796,
1797,
1807,
1808,
1815,
1820,
1825,
1826,
1833,
1839,
1843,
1844,
1861,
1862,
1870,
1874,
1879,
1883,
1884,
1897,
1901,
1920,
1921,
1938,
1948,
1949,
1952,
1958,
1962,
1973,
1995,
1999,
2000,
2009,
2029,
2030,
2036,
2058,
2059,
2068,
2074,
2081,
2083,
2087,
2092,
2108,
2111,
2115,
2116,
2125,
2128,
2135,
2141,
2143,
2145,
2146,
2152,
2157,
2159,
2160,
2174,
2180,
2183,
2188,
2191,
2192,
2199,
2200,
2203,
2221,
2225,
2229,
2230,
2233,
2234,
2254,
2258,
2269,
2270,
2277,
2278,
2282,
2286,
2287,
2305,
2306,
2307,
2312,
2315,
2316,
2329,
2357,
2359,
2360,
2367,
2375,
2378,
2383,
2389,
2394,
2398,
2402,
2410,
2420,
2430,
2438,
2439,
2449,
2463,
2474,
2479,
2491,
2496,
2503,
2515,
2517,
2527,
2532,
2533,
2538,
2544,
2547,
2548,
2555,
2568,
2588,
2589,
2594,
2608,
2609,
2615,
2622,
2630,
2643,
2646,
2649,
2650,
2662,
2665,
2666,
2674,
2680,
2681,
2698,
2699,
2708,
2709,
2711,
2715,
2721,
2725,
2726,
2728,
2730,
2731,
2750,
2755,
2756,
2759,
2761,
2762,
2770,
2796,
2797,
2801,
2802,
2810,
2819,
2833,
2834,
2835,
2836,
2838,
2843,
2858,
2861,
2866,
2868,
2890,
2891,
2893,
2894,
2900,
2926,
2929,
2934,
2952,
2954,
2968,
2980,
2984,
2986,
2993,
2998,
3000,
3013,
3016,
3023,
3028,
3029,
3031,
3032,
3036,
3041,
3045,
3061,
3062,
3064,
3065,
3074,
3083,
3100,
3103,
3104,
3105,
3130,
3131,
3134,
3137,
3142,
3149,
3170,
3174,
3177,
3183,
3190,
3193,
3197,
3207,
3209,
3210,
3226,
3227,
3228,
3237,
3238,
3255,
3256,
3262,
3265,
3275,
3278,
3283,
123,
132,
133,
134,
135,
228,
229,
230,
231,
232,
233,
234,
235,
236,
237,
238,
278,
279,
280,
281,
282,
283,
284,
285,
286,
287,
288,
289,
290,
291,
292,
293,
294,
295,
296,
297,
298,
299,
329,
330,
331,
332,
333,
334,
335,
336,
337,
338,
339,
344,
345,
346,
347,
348,
349,
350,
351,
352,
406,
407,
408,
409,
410,
411,
412,
413,
414,
415,
425,
426,
427,
428,
429,
482,
483,
484,
485,
511,
512,
513,
514,
515,
516,
517,
760,
817,
818,
819,
820,
821,
822,
823,
824,
832,
833,
834,
861,
862,
863,
864,
865,
866,
867,
868,
869,
870,
871,
872,
873,
938,
939,
940,
941,
942,
943,
944,
945,
946,
947,
948,
949,
1055,
1056,
1057,
1058,
1059,
1060,
1061,
1062,
1063,
1064,
1065,
1066,
1067,
1068,
1114,
1115,
1116,
1117,
1118,
1119,
1120,
1121,
1122,
1123,
1124,
1125,
1126,
1127,
1128,
1129,
1130,
1131,
1132,
1273,
1274,
1275,
1276,
1277,
1278,
1279,
1280,
1281,
1282,
1283,
1363,
1364,
1365,
1366,
1367,
1368,
1369,
1370,
1371,
1372,
1373,
1374,
1375,
1393,
1394,
1395,
1396,
1397,
1398,
1415,
1416,
1417,
1418,
1419,
1420,
1421,
1472,
1473,
1474,
1475,
1476,
1477,
1478,
1479,
1480,
1481,
1482,
1483,
1484,
1485,
1486,
1487,
1488,
1510,
1511,
1512,
1513,
1514,
1515,
1516,
1517,
1518,
1519,
1520,
1521,
1522,
1523,
1524,
1525,
1543,
1544,
1545,
1546,
1547,
1548,
1549,
1550,
1551,
1571,
1572,
1573,
1574,
1575,
1576,
1577,
1578,
1579,
1598,
1633,
1634,
1635,
1636,
1637,
1638,
1639,
1640,
1684,
1685,
1686,
1687,
1688,
1689,
1690,
1691,
1692,
1693,
1704,
1705,
1706,
1707,
1708,
1709,
1710,
1711,
1712,
1732,
1733,
1734,
1735,
1736,
1737,
1738,
1739,
1740,
1782,
1783,
1784,
1785,
1786,
1787,
1788,
1789,
1790,
1801,
1802,
1803,
1804,
1812,
1813,
1814,
1815,
1816,
1817,
1818,
1819,
1830,
1831,
1832,
1833,
1834,
1835,
1836,
1837,
1838,
1847,
1848,
1849,
1886,
1887,
1888,
1889,
1890,
1891,
1892,
1893,
1894,
1895,
1896,
1897,
1898,
1899,
1900,
1926,
1927,
1928,
1929,
1930,
1931,
1932,
1933,
1934,
1935,
1936,
1937,
1938,
1939,
1940,
1941,
1942,
1943,
2004,
2005,
2006,
2007,
2008,
2009,
2010,
2011,
2012,
2013,
2014,
2034,
2035,
2036,
2037,
2038,
2039,
2040,
2062,
2063,
2064,
2065,
2066,
2067,
2068,
2069,
2070,
2071,
2072,
2073,
2121,
2122,
2123,
2124,
2125,
2126,
2127,
2164,
2165,
2166,
2167,
2168,
2169,
2170,
2171,
2172,
2173,
2174,
2175,
2176,
2177,
2178,
2179,
2320,
2321,
2322,
2323,
2363,
2364,
2365,
2366,
2367,
2368,
2369,
2370,
2371,
2372,
2373,
2374,
2442,
2443,
2444,
2445,
2551,
2552,
2553,
2554,
2670,
2671,
2672,
2673,
2703,
2704,
2705,
2734,
2735,
2736,
2737,
2766,
2767,
2768,
2769,
2770,
2771,
2772,
2773,
2774,
2775,
2776,
2777,
2804,
2805,
2806,
2807,
2808,
2809,
2810,
2811,
2812,
2813,
2814,
2815,
2816,
2817,
2818,
2819,
2820,
2821,
2822,
2823,
2824,
2825,
2826,
2827,
2828,
2829,
2830,
2831,
2896,
2897,
2898,
2899,
2900,
2901,
2902,
2903,
2904,
2905,
2906,
2907,
2908,
2909,
2910,
2911,
2912,
2913,
2914,
2915,
2916,
2917,
2918,
2919,
2920,
3069,
3070,
3071,
3072,
3073,
3074,
3075,
3076,
3077,
3078,
3079,
3080,
3081,
3082,
3083,
3084,
3085,
3086,
3087,
3088,
3089,
3090,
3091,
3092,
3093,
3094,
3095,
3096,
3133,
3260,
3261,
3262,
3263,
3264,
3265,
3266,
3267,
3268,
3269,
3270,
3271,
3272,
138,
139,
140,
141,
142,
143,
144,
150,
155,
156,
157,
158,
159,
160,
165,
166,
167,
168,
169,
170,
175,
176,
177,
178,
179,
180,
185,
186,
187,
188,
189,
190,
195,
196,
197,
198,
199,
200,
3140,
3144,
3151,
3172,
3173,
3174,
3175,
3176,
3177,
3178,
3179,
3180,
3181,
3195,
3196,
3197,
3198,
3199
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
24,
39,
58,
63,
68,
71,
76,
78,
79,
80,
81,
87,
94,
99,
102,
112,
115,
120,
121,
125,
126,
129,
130,
136,
141,
145,
148,
151,
153,
157,
161,
163,
167,
171,
173,
177,
181,
183,
187,
191,
193,
197,
201,
203,
204,
205,
206,
207,
208,
209,
210,
211,
212,
213,
214,
215,
216,
217,
218,
219,
220,
221,
222,
223,
224,
225,
226,
231,
239,
244,
246,
248,
250,
258,
261,
262,
275,
276,
292,
300,
311,
312,
325,
326,
332,
341,
342,
346,
354,
357,
372,
402,
403,
409,
421,
422,
431,
432,
436,
441,
444,
449,
451,
457,
473,
476,
478,
479,
487,
488,
496,
499,
501,
506,
508,
509,
514,
518,
532,
534,
555,
602,
603,
605,
608,
611,
615,
616,
626,
635,
642,
647,
648,
649,
667,
668,
673,
674,
682,
683,
688,
696,
710,
719,
722,
723,
730,
732,
733,
741,
747,
756,
757,
761,
779,
782,
783,
784,
785,
786,
787,
788,
789,
790,
791,
792,
793,
794,
795,
796,
797,
798,
810,
814,
815,
821,
825,
829,
830,
840,
848,
856,
857,
866,
885,
887,
893,
898,
903,
908,
911,
916,
934,
935,
942,
951,
952,
953,
954,
955,
956,
958,
962,
966,
972,
974,
988,
991,
1000,
1027,
1038,
1039,
1048,
1051,
1052,
1061,
1080,
1085,
1093,
1110,
1111,
1123,
1172,
1174,
1191,
1198,
1251,
1252,
1253,
1254,
1269,
1270,
1275,
1286,
1287,
1301,
1307,
1314,
1315,
1323,
1337,
1338,
1351,
1355,
1359,
1360,
1368,
1390,
1391,
1395,
1399,
1410,
1412,
1413,
1418,
1422,
1440,
1444,
1445,
1446,
1450,
1451,
1452,
1465,
1467,
1468,
1479,
1489,
1505,
1506,
1516,
1526,
1538,
1539,
1546,
1552,
1558,
1566,
1567,
1574,
1580,
1586,
1589,
1593,
1594,
1607,
1612,
1614,
1618,
1621,
1628,
1629,
1636,
1647,
1670,
1679,
1680,
1687,
1699,
1700,
1707,
1713,
1718,
1721,
1727,
1728,
1735,
1741,
1747,
1750,
1752,
1759,
1762,
1777,
1778,
1785,
1791,
1796,
1797,
1807,
1808,
1815,
1820,
1825,
1826,
1833,
1839,
1843,
1844,
1861,
1862,
1870,
1874,
1879,
1883,
1884,
1897,
1901,
1920,
1921,
1938,
1948,
1949,
1952,
1958,
1962,
1973,
1995,
1999,
2000,
2009,
2029,
2030,
2036,
2058,
2059,
2068,
2074,
2081,
2083,
2087,
2092,
2108,
2111,
2115,
2116,
2125,
2128,
2135,
2141,
2143,
2145,
2146,
2152,
2157,
2159,
2160,
2174,
2180,
2183,
2188,
2191,
2192,
2199,
2200,
2203,
2221,
2225,
2229,
2230,
2233,
2234,
2254,
2258,
2269,
2270,
2277,
2278,
2282,
2286,
2287,
2305,
2306,
2307,
2312,
2315,
2316,
2329,
2357,
2359,
2360,
2367,
2375,
2378,
2383,
2389,
2394,
2398,
2402,
2410,
2420,
2430,
2438,
2439,
2449,
2463,
2474,
2479,
2491,
2496,
2503,
2515,
2517,
2527,
2532,
2533,
2538,
2544,
2547,
2548,
2555,
2568,
2588,
2589,
2594,
2608,
2609,
2615,
2622,
2630,
2643,
2646,
2649,
2650,
2662,
2665,
2666,
2674,
2680,
2681,
2698,
2699,
2708,
2709,
2711,
2715,
2721,
2725,
2726,
2728,
2730,
2731,
2750,
2755,
2756,
2759,
2761,
2762,
2770,
2796,
2797,
2801,
2802,
2810,
2819,
2833,
2834,
2835,
2836,
2838,
2843,
2858,
2861,
2866,
2868,
2890,
2891,
2893,
2894,
2900,
2926,
2929,
2934,
2952,
2954,
2968,
2980,
2984,
2986,
2993,
2998,
3000,
3013,
3016,
3023,
3028,
3029,
3031,
3032,
3036,
3041,
3045,
3061,
3062,
3064,
3065,
3074,
3083,
3100,
3103,
3104,
3105,
3130,
3131,
3134,
3137,
3142,
3149,
3170,
3174,
3177,
3183,
3190,
3193,
3197,
3207,
3209,
3210,
3226,
3227,
3228,
3237,
3238,
3255,
3256,
3262,
3265,
3275,
3278,
3283,
123,
132,
133,
134,
135,
228,
229,
230,
231,
232,
233,
234,
235,
236,
237,
238,
278,
279,
280,
281,
282,
283,
284,
285,
286,
287,
288,
289,
290,
291,
292,
293,
294,
295,
296,
297,
298,
299,
329,
330,
331,
332,
333,
334,
335,
336,
337,
338,
339,
344,
345,
346,
347,
348,
349,
350,
351,
352,
406,
407,
408,
409,
410,
411,
412,
413,
414,
415,
425,
426,
427,
428,
429,
482,
483,
484,
485,
511,
512,
513,
514,
515,
516,
517,
760,
817,
818,
819,
820,
821,
822,
823,
824,
832,
833,
834,
861,
862,
863,
864,
865,
866,
867,
868,
869,
870,
871,
872,
873,
938,
939,
940,
941,
942,
943,
944,
945,
946,
947,
948,
949,
1055,
1056,
1057,
1058,
1059,
1060,
1061,
1062,
1063,
1064,
1065,
1066,
1067,
1068,
1114,
1115,
1116,
1117,
1118,
1119,
1120,
1121,
1122,
1123,
1124,
1125,
1126,
1127,
1128,
1129,
1130,
1131,
1132,
1273,
1274,
1275,
1276,
1277,
1278,
1279,
1280,
1281,
1282,
1283,
1363,
1364,
1365,
1366,
1367,
1368,
1369,
1370,
1371,
1372,
1373,
1374,
1375,
1393,
1394,
1395,
1396,
1397,
1398,
1415,
1416,
1417,
1418,
1419,
1420,
1421,
1472,
1473,
1474,
1475,
1476,
1477,
1478,
1479,
1480,
1481,
1482,
1483,
1484,
1485,
1486,
1487,
1488,
1510,
1511,
1512,
1513,
1514,
1515,
1516,
1517,
1518,
1519,
1520,
1521,
1522,
1523,
1524,
1525,
1543,
1544,
1545,
1546,
1547,
1548,
1549,
1550,
1551,
1571,
1572,
1573,
1574,
1575,
1576,
1577,
1578,
1579,
1598,
1633,
1634,
1635,
1636,
1637,
1638,
1639,
1640,
1684,
1685,
1686,
1687,
1688,
1689,
1690,
1691,
1692,
1693,
1704,
1705,
1706,
1707,
1708,
1709,
1710,
1711,
1712,
1732,
1733,
1734,
1735,
1736,
1737,
1738,
1739,
1740,
1782,
1783,
1784,
1785,
1786,
1787,
1788,
1789,
1790,
1801,
1802,
1803,
1804,
1812,
1813,
1814,
1815,
1816,
1817,
1818,
1819,
1830,
1831,
1832,
1833,
1834,
1835,
1836,
1837,
1838,
1847,
1848,
1849,
1886,
1887,
1888,
1889,
1890,
1891,
1892,
1893,
1894,
1895,
1896,
1897,
1898,
1899,
1900,
1926,
1927,
1928,
1929,
1930,
1931,
1932,
1933,
1934,
1935,
1936,
1937,
1938,
1939,
1940,
1941,
1942,
1943,
2004,
2005,
2006,
2007,
2008,
2009,
2010,
2011,
2012,
2013,
2014,
2034,
2035,
2036,
2037,
2038,
2039,
2040,
2062,
2063,
2064,
2065,
2066,
2067,
2068,
2069,
2070,
2071,
2072,
2073,
2121,
2122,
2123,
2124,
2125,
2126,
2127,
2164,
2165,
2166,
2167,
2168,
2169,
2170,
2171,
2172,
2173,
2174,
2175,
2176,
2177,
2178,
2179,
2320,
2321,
2322,
2323,
2363,
2364,
2365,
2366,
2367,
2368,
2369,
2370,
2371,
2372,
2373,
2374,
2442,
2443,
2444,
2445,
2551,
2552,
2553,
2554,
2670,
2671,
2672,
2673,
2703,
2704,
2705,
2734,
2735,
2736,
2737,
2766,
2767,
2768,
2769,
2770,
2771,
2772,
2773,
2774,
2775,
2776,
2777,
2804,
2805,
2806,
2807,
2808,
2809,
2810,
2811,
2812,
2813,
2814,
2815,
2816,
2817,
2818,
2819,
2820,
2821,
2822,
2823,
2824,
2825,
2826,
2827,
2828,
2829,
2830,
2831,
2896,
2897,
2898,
2899,
2900,
2901,
2902,
2903,
2904,
2905,
2906,
2907,
2908,
2909,
2910,
2911,
2912,
2913,
2914,
2915,
2916,
2917,
2918,
2919,
2920,
3069,
3070,
3071,
3072,
3073,
3074,
3075,
3076,
3077,
3078,
3079,
3080,
3081,
3082,
3083,
3084,
3085,
3086,
3087,
3088,
3089,
3090,
3091,
3092,
3093,
3094,
3095,
3096,
3133,
3260,
3261,
3262,
3263,
3264,
3265,
3266,
3267,
3268,
3269,
3270,
3271,
3272,
138,
139,
140,
141,
142,
143,
144,
150,
155,
156,
157,
158,
159,
160,
165,
166,
167,
168,
169,
170,
175,
176,
177,
178,
179,
180,
185,
186,
187,
188,
189,
190,
195,
196,
197,
198,
199,
200,
3140,
3144,
3151,
3172,
3173,
3174,
3175,
3176,
3177,
3178,
3179,
3180,
3181,
3195,
3196,
3197,
3198,
3199
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
import synapse
import synapse.api.errors
from synapse.api.constants import EventTypes
from synapse.config.room_directory import RoomDirectoryConfig
from synapse.rest.client.v1 import directory, login, room
from synapse.types import RoomAlias, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
class DirectoryTestCase(unittest.HomeserverTestCase):
""" Tests the directory service. """
def make_homeserver(self, reactor, clock):
self.mock_federation = Mock()
self.mock_registry = Mock()
self.query_handlers = {}
def register_query_handler(query_type, handler):
self.query_handlers[query_type] = handler
self.mock_registry.register_query_handler = register_query_handler
hs = self.setup_test_homeserver(
http_client=None,
resource_for_federation=Mock(),
federation_client=self.mock_federation,
federation_registry=self.mock_registry,
)
self.handler = hs.get_directory_handler()
self.store = hs.get_datastore()
self.my_room = RoomAlias.from_string("#my-room:test")
self.your_room = RoomAlias.from_string("#your-room:test")
self.remote_room = RoomAlias.from_string("#another:remote")
return hs
def test_get_local_association(self):
self.get_success(
self.store.create_room_alias_association(
self.my_room, "!8765qwer:test", ["test"]
)
)
result = self.get_success(self.handler.get_association(self.my_room))
self.assertEquals({"room_id": "!8765qwer:test", "servers": ["test"]}, result)
def test_get_remote_association(self):
self.mock_federation.make_query.return_value = make_awaitable(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}
)
result = self.get_success(self.handler.get_association(self.remote_room))
self.assertEquals(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}, result
)
self.mock_federation.make_query.assert_called_with(
destination="remote",
query_type="directory",
args={"room_alias": "#another:remote"},
retry_on_dns_fail=False,
ignore_backoff=True,
)
def test_incoming_fed_query(self):
self.get_success(
self.store.create_room_alias_association(
self.your_room, "!8765asdf:test", ["test"]
)
)
response = self.get_success(
self.handler.on_directory_query({"room_alias": "#your-room:test"})
)
self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, response)
class TestCreateAlias(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.handler = hs.get_directory_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
# Create a test user.
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok)
def test_create_alias_joined_room(self):
"""A user can create an alias for a room they're in."""
self.get_success(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, self.room_id,
)
)
def test_create_alias_other_room(self):
"""A user cannot create an alias for a room they're NOT in."""
other_room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.get_failure(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, other_room_id,
),
synapse.api.errors.SynapseError,
)
def test_create_alias_admin(self):
"""An admin can create an alias for a room they're NOT in."""
other_room_id = self.helper.create_room_as(
self.test_user, tok=self.test_user_tok
)
self.get_success(
self.handler.create_association(
create_requester(self.admin_user), self.room_alias, other_room_id,
)
)
class TestDeleteAlias(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.handler = hs.get_directory_handler()
self.state_handler = hs.get_state_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
# Create a test user.
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok)
def _create_alias(self, user):
# Create a new alias to this room.
self.get_success(
self.store.create_room_alias_association(
self.room_alias, self.room_id, ["test"], user
)
)
def test_delete_alias_not_allowed(self):
"""A user that doesn't meet the expected guidelines cannot delete an alias."""
self._create_alias(self.admin_user)
self.get_failure(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
),
synapse.api.errors.AuthError,
)
def test_delete_alias_creator(self):
"""An alias creator can delete their own alias."""
# Create an alias from a different user.
self._create_alias(self.test_user)
# Delete the user's alias.
result = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def test_delete_alias_admin(self):
"""A server admin can delete an alias created by another user."""
# Create an alias from a different user.
self._create_alias(self.test_user)
# Delete the user's alias as the admin.
result = self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def test_delete_alias_sufficient_power(self):
"""A user with a sufficient power level should be able to delete an alias."""
self._create_alias(self.admin_user)
# Increase the user's power level.
self.helper.send_state(
self.room_id,
"m.room.power_levels",
{"users": {self.test_user: 100}},
tok=self.admin_user_tok,
)
# They can now delete the alias.
result = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
class CanonicalAliasTestCase(unittest.HomeserverTestCase):
"""Test modifications of the canonical alias when delete aliases.
"""
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.handler = hs.get_directory_handler()
self.state_handler = hs.get_state_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = self._add_alias(self.test_alias)
def _add_alias(self, alias: str) -> RoomAlias:
"""Add an alias to the test room."""
room_alias = RoomAlias.from_string(alias)
# Create a new alias to this room.
self.get_success(
self.store.create_room_alias_association(
room_alias, self.room_id, ["test"], self.admin_user
)
)
return room_alias
def _set_canonical_alias(self, content):
"""Configure the canonical alias state on the room."""
self.helper.send_state(
self.room_id, "m.room.canonical_alias", content, tok=self.admin_user_tok,
)
def _get_canonical_alias(self):
"""Get the canonical alias state of the room."""
return self.get_success(
self.state_handler.get_current_state(
self.room_id, EventTypes.CanonicalAlias, ""
)
)
def test_remove_alias(self):
"""Removing an alias that is the canonical alias should remove it there too."""
# Set this new alias as the canonical alias for this room
self._set_canonical_alias(
{"alias": self.test_alias, "alt_aliases": [self.test_alias]}
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(data["content"]["alt_aliases"], [self.test_alias])
# Finally, delete the alias.
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
data = self._get_canonical_alias()
self.assertNotIn("alias", data["content"])
self.assertNotIn("alt_aliases", data["content"])
def test_remove_other_alias(self):
"""Removing an alias listed as in alt_aliases should remove it there too."""
# Create a second alias.
other_test_alias = "#test2:test"
other_room_alias = self._add_alias(other_test_alias)
# Set the alias as the canonical alias for this room.
self._set_canonical_alias(
{
"alias": self.test_alias,
"alt_aliases": [self.test_alias, other_test_alias],
}
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(
data["content"]["alt_aliases"], [self.test_alias, other_test_alias]
)
# Delete the second alias.
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), other_room_alias
)
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(data["content"]["alt_aliases"], [self.test_alias])
class TestCreateAliasACL(unittest.HomeserverTestCase):
user_id = "@test:test"
servlets = [directory.register_servlets, room.register_servlets]
def prepare(self, reactor, clock, hs):
# We cheekily override the config to add custom alias creation rules
config = {}
config["alias_creation_rules"] = [
{"user_id": "*", "alias": "#unofficial_*", "action": "allow"}
]
config["room_list_publication_rules"] = []
rd_config = RoomDirectoryConfig()
rd_config.read_config(config)
self.hs.config.is_alias_creation_allowed = rd_config.is_alias_creation_allowed
return hs
def test_denied(self):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT",
b"directory/room/%23test%3Atest",
('{"room_id":"%s"}' % (room_id,)).encode("ascii"),
)
self.assertEquals(403, channel.code, channel.result)
def test_allowed(self):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT",
b"directory/room/%23unofficial_test%3Atest",
('{"room_id":"%s"}' % (room_id,)).encode("ascii"),
)
self.assertEquals(200, channel.code, channel.result)
class TestRoomListSearchDisabled(unittest.HomeserverTestCase):
user_id = "@test:test"
servlets = [directory.register_servlets, room.register_servlets]
def prepare(self, reactor, clock, hs):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT", b"directory/list/room/%s" % (room_id.encode("ascii"),), b"{}"
)
self.assertEquals(200, channel.code, channel.result)
self.room_list_handler = hs.get_room_list_handler()
self.directory_handler = hs.get_directory_handler()
return hs
def test_disabling_room_list(self):
self.room_list_handler.enable_room_list_search = True
self.directory_handler.enable_room_list_search = True
# Room list is enabled so we should get some results
request, channel = self.make_request("GET", b"publicRooms")
self.assertEquals(200, channel.code, channel.result)
self.assertTrue(len(channel.json_body["chunk"]) > 0)
self.room_list_handler.enable_room_list_search = False
self.directory_handler.enable_room_list_search = False
# Room list disabled so we should get no results
request, channel = self.make_request("GET", b"publicRooms")
self.assertEquals(200, channel.code, channel.result)
self.assertTrue(len(channel.json_body["chunk"]) == 0)
# Room list disabled so we shouldn't be allowed to publish rooms
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT", b"directory/list/room/%s" % (room_id.encode("ascii"),), b"{}"
)
self.assertEquals(403, channel.code, channel.result)
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
import synapse
import synapse.api.errors
from synapse.api.constants import EventTypes
from synapse.config.room_directory import RoomDirectoryConfig
from synapse.rest.client.v1 import directory, login, room
from synapse.types import RoomAlias, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
class DirectoryTestCase(unittest.HomeserverTestCase):
""" Tests the directory service. """
def make_homeserver(self, reactor, clock):
self.mock_federation = Mock()
self.mock_registry = Mock()
self.query_handlers = {}
def register_query_handler(query_type, handler):
self.query_handlers[query_type] = handler
self.mock_registry.register_query_handler = register_query_handler
hs = self.setup_test_homeserver(
federation_http_client=None,
resource_for_federation=Mock(),
federation_client=self.mock_federation,
federation_registry=self.mock_registry,
)
self.handler = hs.get_directory_handler()
self.store = hs.get_datastore()
self.my_room = RoomAlias.from_string("#my-room:test")
self.your_room = RoomAlias.from_string("#your-room:test")
self.remote_room = RoomAlias.from_string("#another:remote")
return hs
def test_get_local_association(self):
self.get_success(
self.store.create_room_alias_association(
self.my_room, "!8765qwer:test", ["test"]
)
)
result = self.get_success(self.handler.get_association(self.my_room))
self.assertEquals({"room_id": "!8765qwer:test", "servers": ["test"]}, result)
def test_get_remote_association(self):
self.mock_federation.make_query.return_value = make_awaitable(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}
)
result = self.get_success(self.handler.get_association(self.remote_room))
self.assertEquals(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}, result
)
self.mock_federation.make_query.assert_called_with(
destination="remote",
query_type="directory",
args={"room_alias": "#another:remote"},
retry_on_dns_fail=False,
ignore_backoff=True,
)
def test_incoming_fed_query(self):
self.get_success(
self.store.create_room_alias_association(
self.your_room, "!8765asdf:test", ["test"]
)
)
response = self.get_success(
self.handler.on_directory_query({"room_alias": "#your-room:test"})
)
self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, response)
class TestCreateAlias(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.handler = hs.get_directory_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
# Create a test user.
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok)
def test_create_alias_joined_room(self):
"""A user can create an alias for a room they're in."""
self.get_success(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, self.room_id,
)
)
def test_create_alias_other_room(self):
"""A user cannot create an alias for a room they're NOT in."""
other_room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.get_failure(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, other_room_id,
),
synapse.api.errors.SynapseError,
)
def test_create_alias_admin(self):
"""An admin can create an alias for a room they're NOT in."""
other_room_id = self.helper.create_room_as(
self.test_user, tok=self.test_user_tok
)
self.get_success(
self.handler.create_association(
create_requester(self.admin_user), self.room_alias, other_room_id,
)
)
class TestDeleteAlias(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.handler = hs.get_directory_handler()
self.state_handler = hs.get_state_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
# Create a test user.
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok)
def _create_alias(self, user):
# Create a new alias to this room.
self.get_success(
self.store.create_room_alias_association(
self.room_alias, self.room_id, ["test"], user
)
)
def test_delete_alias_not_allowed(self):
"""A user that doesn't meet the expected guidelines cannot delete an alias."""
self._create_alias(self.admin_user)
self.get_failure(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
),
synapse.api.errors.AuthError,
)
def test_delete_alias_creator(self):
"""An alias creator can delete their own alias."""
# Create an alias from a different user.
self._create_alias(self.test_user)
# Delete the user's alias.
result = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def test_delete_alias_admin(self):
"""A server admin can delete an alias created by another user."""
# Create an alias from a different user.
self._create_alias(self.test_user)
# Delete the user's alias as the admin.
result = self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def test_delete_alias_sufficient_power(self):
"""A user with a sufficient power level should be able to delete an alias."""
self._create_alias(self.admin_user)
# Increase the user's power level.
self.helper.send_state(
self.room_id,
"m.room.power_levels",
{"users": {self.test_user: 100}},
tok=self.admin_user_tok,
)
# They can now delete the alias.
result = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
class CanonicalAliasTestCase(unittest.HomeserverTestCase):
"""Test modifications of the canonical alias when delete aliases.
"""
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.handler = hs.get_directory_handler()
self.state_handler = hs.get_state_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = self._add_alias(self.test_alias)
def _add_alias(self, alias: str) -> RoomAlias:
"""Add an alias to the test room."""
room_alias = RoomAlias.from_string(alias)
# Create a new alias to this room.
self.get_success(
self.store.create_room_alias_association(
room_alias, self.room_id, ["test"], self.admin_user
)
)
return room_alias
def _set_canonical_alias(self, content):
"""Configure the canonical alias state on the room."""
self.helper.send_state(
self.room_id, "m.room.canonical_alias", content, tok=self.admin_user_tok,
)
def _get_canonical_alias(self):
"""Get the canonical alias state of the room."""
return self.get_success(
self.state_handler.get_current_state(
self.room_id, EventTypes.CanonicalAlias, ""
)
)
def test_remove_alias(self):
"""Removing an alias that is the canonical alias should remove it there too."""
# Set this new alias as the canonical alias for this room
self._set_canonical_alias(
{"alias": self.test_alias, "alt_aliases": [self.test_alias]}
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(data["content"]["alt_aliases"], [self.test_alias])
# Finally, delete the alias.
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
data = self._get_canonical_alias()
self.assertNotIn("alias", data["content"])
self.assertNotIn("alt_aliases", data["content"])
def test_remove_other_alias(self):
"""Removing an alias listed as in alt_aliases should remove it there too."""
# Create a second alias.
other_test_alias = "#test2:test"
other_room_alias = self._add_alias(other_test_alias)
# Set the alias as the canonical alias for this room.
self._set_canonical_alias(
{
"alias": self.test_alias,
"alt_aliases": [self.test_alias, other_test_alias],
}
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(
data["content"]["alt_aliases"], [self.test_alias, other_test_alias]
)
# Delete the second alias.
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), other_room_alias
)
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(data["content"]["alt_aliases"], [self.test_alias])
class TestCreateAliasACL(unittest.HomeserverTestCase):
user_id = "@test:test"
servlets = [directory.register_servlets, room.register_servlets]
def prepare(self, reactor, clock, hs):
# We cheekily override the config to add custom alias creation rules
config = {}
config["alias_creation_rules"] = [
{"user_id": "*", "alias": "#unofficial_*", "action": "allow"}
]
config["room_list_publication_rules"] = []
rd_config = RoomDirectoryConfig()
rd_config.read_config(config)
self.hs.config.is_alias_creation_allowed = rd_config.is_alias_creation_allowed
return hs
def test_denied(self):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT",
b"directory/room/%23test%3Atest",
('{"room_id":"%s"}' % (room_id,)).encode("ascii"),
)
self.assertEquals(403, channel.code, channel.result)
def test_allowed(self):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT",
b"directory/room/%23unofficial_test%3Atest",
('{"room_id":"%s"}' % (room_id,)).encode("ascii"),
)
self.assertEquals(200, channel.code, channel.result)
class TestRoomListSearchDisabled(unittest.HomeserverTestCase):
user_id = "@test:test"
servlets = [directory.register_servlets, room.register_servlets]
def prepare(self, reactor, clock, hs):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT", b"directory/list/room/%s" % (room_id.encode("ascii"),), b"{}"
)
self.assertEquals(200, channel.code, channel.result)
self.room_list_handler = hs.get_room_list_handler()
self.directory_handler = hs.get_directory_handler()
return hs
def test_disabling_room_list(self):
self.room_list_handler.enable_room_list_search = True
self.directory_handler.enable_room_list_search = True
# Room list is enabled so we should get some results
request, channel = self.make_request("GET", b"publicRooms")
self.assertEquals(200, channel.code, channel.result)
self.assertTrue(len(channel.json_body["chunk"]) > 0)
self.room_list_handler.enable_room_list_search = False
self.directory_handler.enable_room_list_search = False
# Room list disabled so we should get no results
request, channel = self.make_request("GET", b"publicRooms")
self.assertEquals(200, channel.code, channel.result)
self.assertTrue(len(channel.json_body["chunk"]) == 0)
# Room list disabled so we shouldn't be allowed to publish rooms
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT", b"directory/list/room/%s" % (room_id.encode("ascii"),), b"{}"
)
self.assertEquals(403, channel.code, channel.result)
| open_redirect | {
"code": [
" http_client=None,"
],
"line_no": [
45
]
} | {
"code": [
" federation_http_client=None,"
],
"line_no": [
45
]
} |
from mock import Mock
import synapse
import synapse.api.errors
from synapse.api.constants import EventTypes
from synapse.config.room_directory import RoomDirectoryConfig
from synapse.rest.client.v1 import directory, login, room
from synapse.types import RoomAlias, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
class CLASS_0(unittest.HomeserverTestCase):
def FUNC_0(self, VAR_0, VAR_1):
self.mock_federation = Mock()
self.mock_registry = Mock()
self.query_handlers = {}
def FUNC_21(VAR_8, VAR_9):
self.query_handlers[VAR_8] = VAR_9
self.mock_registry.register_query_handler = FUNC_21
VAR_3 = self.setup_test_homeserver(
http_client=None,
resource_for_federation=Mock(),
federation_client=self.mock_federation,
federation_registry=self.mock_registry,
)
self.handler = VAR_3.get_directory_handler()
self.store = VAR_3.get_datastore()
self.my_room = RoomAlias.from_string("#my-room:test")
self.your_room = RoomAlias.from_string("#your-room:test")
self.remote_room = RoomAlias.from_string("#another:remote")
return VAR_3
def FUNC_1(self):
self.get_success(
self.store.create_room_alias_association(
self.my_room, "!8765qwer:test", ["test"]
)
)
VAR_10 = self.get_success(self.handler.get_association(self.my_room))
self.assertEquals({"room_id": "!8765qwer:test", "servers": ["test"]}, VAR_10)
def FUNC_2(self):
self.mock_federation.make_query.return_value = make_awaitable(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}
)
VAR_10 = self.get_success(self.handler.get_association(self.remote_room))
self.assertEquals(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}, VAR_10
)
self.mock_federation.make_query.assert_called_with(
destination="remote",
VAR_8="directory",
args={"room_alias": "#another:remote"},
retry_on_dns_fail=False,
ignore_backoff=True,
)
def FUNC_3(self):
self.get_success(
self.store.create_room_alias_association(
self.your_room, "!8765asdf:test", ["test"]
)
)
VAR_11 = self.get_success(
self.handler.on_directory_query({"room_alias": "#your-room:test"})
)
self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, VAR_11)
class CLASS_1(unittest.HomeserverTestCase):
VAR_2 = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
self.handler = VAR_3.get_directory_handler()
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, VAR_4=self.test_user, tok=self.test_user_tok)
def FUNC_5(self):
self.get_success(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, self.room_id,
)
)
def FUNC_6(self):
VAR_12 = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.get_failure(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, VAR_12,
),
synapse.api.errors.SynapseError,
)
def FUNC_7(self):
VAR_12 = self.helper.create_room_as(
self.test_user, tok=self.test_user_tok
)
self.get_success(
self.handler.create_association(
create_requester(self.admin_user), self.room_alias, VAR_12,
)
)
class CLASS_2(unittest.HomeserverTestCase):
VAR_2 = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
self.store = VAR_3.get_datastore()
self.handler = VAR_3.get_directory_handler()
self.state_handler = VAR_3.get_state_handler()
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, VAR_4=self.test_user, tok=self.test_user_tok)
def FUNC_8(self, VAR_4):
self.get_success(
self.store.create_room_alias_association(
self.room_alias, self.room_id, ["test"], VAR_4
)
)
def FUNC_9(self):
self._create_alias(self.admin_user)
self.get_failure(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
),
synapse.api.errors.AuthError,
)
def FUNC_10(self):
self._create_alias(self.test_user)
VAR_10 = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, VAR_10)
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def FUNC_11(self):
self._create_alias(self.test_user)
VAR_10 = self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
self.assertEquals(self.room_id, VAR_10)
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def FUNC_12(self):
self._create_alias(self.admin_user)
self.helper.send_state(
self.room_id,
"m.room.power_levels",
{"users": {self.test_user: 100}},
tok=self.admin_user_tok,
)
VAR_10 = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, VAR_10)
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
class CLASS_3(unittest.HomeserverTestCase):
VAR_2 = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
self.store = VAR_3.get_datastore()
self.handler = VAR_3.get_directory_handler()
self.state_handler = VAR_3.get_state_handler()
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = self._add_alias(self.test_alias)
def FUNC_13(self, VAR_5: str) -> RoomAlias:
VAR_13 = RoomAlias.from_string(VAR_5)
self.get_success(
self.store.create_room_alias_association(
VAR_13, self.room_id, ["test"], self.admin_user
)
)
return VAR_13
def FUNC_14(self, VAR_6):
self.helper.send_state(
self.room_id, "m.room.canonical_alias", VAR_6, tok=self.admin_user_tok,
)
def FUNC_15(self):
return self.get_success(
self.state_handler.get_current_state(
self.room_id, EventTypes.CanonicalAlias, ""
)
)
def FUNC_16(self):
self._set_canonical_alias(
{"alias": self.test_alias, "alt_aliases": [self.test_alias]}
)
VAR_14 = self._get_canonical_alias()
self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
self.assertEqual(VAR_14["content"]["alt_aliases"], [self.test_alias])
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
VAR_14 = self._get_canonical_alias()
self.assertNotIn("alias", VAR_14["content"])
self.assertNotIn("alt_aliases", VAR_14["content"])
def FUNC_17(self):
VAR_15 = "#test2:test"
VAR_16 = self._add_alias(VAR_15)
self._set_canonical_alias(
{
"alias": self.test_alias,
"alt_aliases": [self.test_alias, VAR_15],
}
)
VAR_14 = self._get_canonical_alias()
self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
self.assertEqual(
VAR_14["content"]["alt_aliases"], [self.test_alias, VAR_15]
)
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), VAR_16
)
)
VAR_14 = self._get_canonical_alias()
self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
self.assertEqual(VAR_14["content"]["alt_aliases"], [self.test_alias])
class CLASS_4(unittest.HomeserverTestCase):
VAR_7 = "@test:test"
VAR_2 = [directory.register_servlets, room.register_servlets]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
VAR_17 = {}
config["alias_creation_rules"] = [
{"user_id": "*", "alias": "#unofficial_*", "action": "allow"}
]
VAR_17["room_list_publication_rules"] = []
VAR_18 = RoomDirectoryConfig()
VAR_18.read_config(VAR_17)
self.hs.config.is_alias_creation_allowed = VAR_18.is_alias_creation_allowed
return VAR_3
def FUNC_18(self):
VAR_19 = self.helper.create_room_as(self.user_id)
VAR_20, VAR_21 = self.make_request(
"PUT",
b"directory/room/%23test%3Atest",
('{"room_id":"%s"}' % (VAR_19,)).encode("ascii"),
)
self.assertEquals(403, VAR_21.code, VAR_21.result)
def FUNC_19(self):
VAR_19 = self.helper.create_room_as(self.user_id)
VAR_20, VAR_21 = self.make_request(
"PUT",
b"directory/room/%23unofficial_test%3Atest",
('{"room_id":"%s"}' % (VAR_19,)).encode("ascii"),
)
self.assertEquals(200, VAR_21.code, VAR_21.result)
class CLASS_5(unittest.HomeserverTestCase):
VAR_7 = "@test:test"
VAR_2 = [directory.register_servlets, room.register_servlets]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
VAR_19 = self.helper.create_room_as(self.user_id)
VAR_20, VAR_21 = self.make_request(
"PUT", b"directory/list/room/%s" % (VAR_19.encode("ascii"),), b"{}"
)
self.assertEquals(200, VAR_21.code, VAR_21.result)
self.room_list_handler = VAR_3.get_room_list_handler()
self.directory_handler = VAR_3.get_directory_handler()
return VAR_3
def FUNC_20(self):
self.room_list_handler.enable_room_list_search = True
self.directory_handler.enable_room_list_search = True
VAR_20, VAR_21 = self.make_request("GET", b"publicRooms")
self.assertEquals(200, VAR_21.code, VAR_21.result)
self.assertTrue(len(VAR_21.json_body["chunk"]) > 0)
self.room_list_handler.enable_room_list_search = False
self.directory_handler.enable_room_list_search = False
VAR_20, VAR_21 = self.make_request("GET", b"publicRooms")
self.assertEquals(200, VAR_21.code, VAR_21.result)
self.assertTrue(len(VAR_21.json_body["chunk"]) == 0)
VAR_19 = self.helper.create_room_as(self.user_id)
VAR_20, VAR_21 = self.make_request(
"PUT", b"directory/list/room/%s" % (VAR_19.encode("ascii"),), b"{}"
)
self.assertEquals(403, VAR_21.code, VAR_21.result)
|
from mock import Mock
import synapse
import synapse.api.errors
from synapse.api.constants import EventTypes
from synapse.config.room_directory import RoomDirectoryConfig
from synapse.rest.client.v1 import directory, login, room
from synapse.types import RoomAlias, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
class CLASS_0(unittest.HomeserverTestCase):
def FUNC_0(self, VAR_0, VAR_1):
self.mock_federation = Mock()
self.mock_registry = Mock()
self.query_handlers = {}
def FUNC_21(VAR_8, VAR_9):
self.query_handlers[VAR_8] = VAR_9
self.mock_registry.register_query_handler = FUNC_21
VAR_3 = self.setup_test_homeserver(
federation_http_client=None,
resource_for_federation=Mock(),
federation_client=self.mock_federation,
federation_registry=self.mock_registry,
)
self.handler = VAR_3.get_directory_handler()
self.store = VAR_3.get_datastore()
self.my_room = RoomAlias.from_string("#my-room:test")
self.your_room = RoomAlias.from_string("#your-room:test")
self.remote_room = RoomAlias.from_string("#another:remote")
return VAR_3
def FUNC_1(self):
self.get_success(
self.store.create_room_alias_association(
self.my_room, "!8765qwer:test", ["test"]
)
)
VAR_10 = self.get_success(self.handler.get_association(self.my_room))
self.assertEquals({"room_id": "!8765qwer:test", "servers": ["test"]}, VAR_10)
def FUNC_2(self):
self.mock_federation.make_query.return_value = make_awaitable(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}
)
VAR_10 = self.get_success(self.handler.get_association(self.remote_room))
self.assertEquals(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}, VAR_10
)
self.mock_federation.make_query.assert_called_with(
destination="remote",
VAR_8="directory",
args={"room_alias": "#another:remote"},
retry_on_dns_fail=False,
ignore_backoff=True,
)
def FUNC_3(self):
self.get_success(
self.store.create_room_alias_association(
self.your_room, "!8765asdf:test", ["test"]
)
)
VAR_11 = self.get_success(
self.handler.on_directory_query({"room_alias": "#your-room:test"})
)
self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, VAR_11)
class CLASS_1(unittest.HomeserverTestCase):
VAR_2 = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
self.handler = VAR_3.get_directory_handler()
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, VAR_4=self.test_user, tok=self.test_user_tok)
def FUNC_5(self):
self.get_success(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, self.room_id,
)
)
def FUNC_6(self):
VAR_12 = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.get_failure(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, VAR_12,
),
synapse.api.errors.SynapseError,
)
def FUNC_7(self):
VAR_12 = self.helper.create_room_as(
self.test_user, tok=self.test_user_tok
)
self.get_success(
self.handler.create_association(
create_requester(self.admin_user), self.room_alias, VAR_12,
)
)
class CLASS_2(unittest.HomeserverTestCase):
VAR_2 = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
self.store = VAR_3.get_datastore()
self.handler = VAR_3.get_directory_handler()
self.state_handler = VAR_3.get_state_handler()
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, VAR_4=self.test_user, tok=self.test_user_tok)
def FUNC_8(self, VAR_4):
self.get_success(
self.store.create_room_alias_association(
self.room_alias, self.room_id, ["test"], VAR_4
)
)
def FUNC_9(self):
self._create_alias(self.admin_user)
self.get_failure(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
),
synapse.api.errors.AuthError,
)
def FUNC_10(self):
self._create_alias(self.test_user)
VAR_10 = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, VAR_10)
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def FUNC_11(self):
self._create_alias(self.test_user)
VAR_10 = self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
self.assertEquals(self.room_id, VAR_10)
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def FUNC_12(self):
self._create_alias(self.admin_user)
self.helper.send_state(
self.room_id,
"m.room.power_levels",
{"users": {self.test_user: 100}},
tok=self.admin_user_tok,
)
VAR_10 = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, VAR_10)
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
class CLASS_3(unittest.HomeserverTestCase):
VAR_2 = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
self.store = VAR_3.get_datastore()
self.handler = VAR_3.get_directory_handler()
self.state_handler = VAR_3.get_state_handler()
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = self._add_alias(self.test_alias)
def FUNC_13(self, VAR_5: str) -> RoomAlias:
VAR_13 = RoomAlias.from_string(VAR_5)
self.get_success(
self.store.create_room_alias_association(
VAR_13, self.room_id, ["test"], self.admin_user
)
)
return VAR_13
def FUNC_14(self, VAR_6):
self.helper.send_state(
self.room_id, "m.room.canonical_alias", VAR_6, tok=self.admin_user_tok,
)
def FUNC_15(self):
return self.get_success(
self.state_handler.get_current_state(
self.room_id, EventTypes.CanonicalAlias, ""
)
)
def FUNC_16(self):
self._set_canonical_alias(
{"alias": self.test_alias, "alt_aliases": [self.test_alias]}
)
VAR_14 = self._get_canonical_alias()
self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
self.assertEqual(VAR_14["content"]["alt_aliases"], [self.test_alias])
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
VAR_14 = self._get_canonical_alias()
self.assertNotIn("alias", VAR_14["content"])
self.assertNotIn("alt_aliases", VAR_14["content"])
def FUNC_17(self):
VAR_15 = "#test2:test"
VAR_16 = self._add_alias(VAR_15)
self._set_canonical_alias(
{
"alias": self.test_alias,
"alt_aliases": [self.test_alias, VAR_15],
}
)
VAR_14 = self._get_canonical_alias()
self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
self.assertEqual(
VAR_14["content"]["alt_aliases"], [self.test_alias, VAR_15]
)
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), VAR_16
)
)
VAR_14 = self._get_canonical_alias()
self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
self.assertEqual(VAR_14["content"]["alt_aliases"], [self.test_alias])
class CLASS_4(unittest.HomeserverTestCase):
VAR_7 = "@test:test"
VAR_2 = [directory.register_servlets, room.register_servlets]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
VAR_17 = {}
config["alias_creation_rules"] = [
{"user_id": "*", "alias": "#unofficial_*", "action": "allow"}
]
VAR_17["room_list_publication_rules"] = []
VAR_18 = RoomDirectoryConfig()
VAR_18.read_config(VAR_17)
self.hs.config.is_alias_creation_allowed = VAR_18.is_alias_creation_allowed
return VAR_3
def FUNC_18(self):
VAR_19 = self.helper.create_room_as(self.user_id)
VAR_20, VAR_21 = self.make_request(
"PUT",
b"directory/room/%23test%3Atest",
('{"room_id":"%s"}' % (VAR_19,)).encode("ascii"),
)
self.assertEquals(403, VAR_21.code, VAR_21.result)
def FUNC_19(self):
VAR_19 = self.helper.create_room_as(self.user_id)
VAR_20, VAR_21 = self.make_request(
"PUT",
b"directory/room/%23unofficial_test%3Atest",
('{"room_id":"%s"}' % (VAR_19,)).encode("ascii"),
)
self.assertEquals(200, VAR_21.code, VAR_21.result)
class CLASS_5(unittest.HomeserverTestCase):
VAR_7 = "@test:test"
VAR_2 = [directory.register_servlets, room.register_servlets]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
VAR_19 = self.helper.create_room_as(self.user_id)
VAR_20, VAR_21 = self.make_request(
"PUT", b"directory/list/room/%s" % (VAR_19.encode("ascii"),), b"{}"
)
self.assertEquals(200, VAR_21.code, VAR_21.result)
self.room_list_handler = VAR_3.get_room_list_handler()
self.directory_handler = VAR_3.get_directory_handler()
return VAR_3
def FUNC_20(self):
self.room_list_handler.enable_room_list_search = True
self.directory_handler.enable_room_list_search = True
VAR_20, VAR_21 = self.make_request("GET", b"publicRooms")
self.assertEquals(200, VAR_21.code, VAR_21.result)
self.assertTrue(len(VAR_21.json_body["chunk"]) > 0)
self.room_list_handler.enable_room_list_search = False
self.directory_handler.enable_room_list_search = False
VAR_20, VAR_21 = self.make_request("GET", b"publicRooms")
self.assertEquals(200, VAR_21.code, VAR_21.result)
self.assertTrue(len(VAR_21.json_body["chunk"]) == 0)
VAR_19 = self.helper.create_room_as(self.user_id)
VAR_20, VAR_21 = self.make_request(
"PUT", b"directory/list/room/%s" % (VAR_19.encode("ascii"),), b"{}"
)
self.assertEquals(403, VAR_21.code, VAR_21.result)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
25,
28,
29,
32,
36,
38,
41,
43,
50,
52,
54,
58,
60,
67,
69,
71,
76,
78,
89,
96,
100,
102,
103,
111,
114,
115,
118,
119,
123,
126,
127,
131,
139,
145,
152,
158,
164,
165,
173,
178,
179,
182,
183,
187,
190,
191,
195,
197,
203,
213,
216,
218,
219,
226,
227,
232,
235,
237,
238,
245,
246,
251,
255,
256,
263,
264,
271,
272,
277,
278,
282,
289,
294,
295,
298,
299,
303,
306,
310,
311,
318,
324,
332,
335,
339,
343,
344,
350,
354,
357,
360,
361,
368,
374,
375,
381,
385,
386,
389,
391,
393,
399,
402,
404,
406,
409,
416,
419,
426,
427,
430,
432,
435,
440,
443,
445,
449,
450,
454,
457,
458,
462,
463,
469,
31,
280,
281,
133,
141,
154,
205,
215,
234,
253,
308,
320,
326,
334,
356
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
25,
28,
29,
32,
36,
38,
41,
43,
50,
52,
54,
58,
60,
67,
69,
71,
76,
78,
89,
96,
100,
102,
103,
111,
114,
115,
118,
119,
123,
126,
127,
131,
139,
145,
152,
158,
164,
165,
173,
178,
179,
182,
183,
187,
190,
191,
195,
197,
203,
213,
216,
218,
219,
226,
227,
232,
235,
237,
238,
245,
246,
251,
255,
256,
263,
264,
271,
272,
277,
278,
282,
289,
294,
295,
298,
299,
303,
306,
310,
311,
318,
324,
332,
335,
339,
343,
344,
350,
354,
357,
360,
361,
368,
374,
375,
381,
385,
386,
389,
391,
393,
399,
402,
404,
406,
409,
416,
419,
426,
427,
430,
432,
435,
440,
443,
445,
449,
450,
454,
457,
458,
462,
463,
469,
31,
280,
281,
133,
141,
154,
205,
215,
234,
253,
308,
320,
326,
334,
356
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
from twisted.internet.defer import Deferred
import synapse.rest.admin
from synapse.logging.context import make_deferred_yieldable
from synapse.rest.client.v1 import login, room
from synapse.rest.client.v2_alpha import receipts
from tests.unittest import HomeserverTestCase, override_config
class HTTPPusherTests(HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
room.register_servlets,
login.register_servlets,
receipts.register_servlets,
]
user_id = True
hijack_auth = False
def make_homeserver(self, reactor, clock):
self.push_attempts = []
m = Mock()
def post_json_get_json(url, body):
d = Deferred()
self.push_attempts.append((d, url, body))
return make_deferred_yieldable(d)
m.post_json_get_json = post_json_get_json
config = self.default_config()
config["start_pushers"] = True
hs = self.setup_test_homeserver(config=config, proxied_http_client=m)
return hs
def test_sends_http(self):
"""
The HTTP pusher will send pushes for each message to a HTTP endpoint
when configured to do so.
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("otheruser", "pass")
other_access_token = self.login("otheruser", "pass")
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Create a room
room = self.helper.create_room_as(user_id, tok=access_token)
# The other user joins
self.helper.join(room=room, user=other_user_id, tok=other_access_token)
# The other user sends some messages
self.helper.send(room, body="Hi!", tok=other_access_token)
self.helper.send(room, body="There!", tok=other_access_token)
# Get the stream ordering before it gets sent
pushers = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": user_id})
)
pushers = list(pushers)
self.assertEqual(len(pushers), 1)
last_stream_ordering = pushers[0]["last_stream_ordering"]
# Advance time a bit, so the pusher will register something has happened
self.pump()
# It hasn't succeeded yet, so the stream ordering shouldn't have moved
pushers = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": user_id})
)
pushers = list(pushers)
self.assertEqual(len(pushers), 1)
self.assertEqual(last_stream_ordering, pushers[0]["last_stream_ordering"])
# One push was attempted to be sent -- it'll be the first message
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(
self.push_attempts[0][2]["notification"]["content"]["body"], "Hi!"
)
# Make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# The stream ordering has increased
pushers = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": user_id})
)
pushers = list(pushers)
self.assertEqual(len(pushers), 1)
self.assertTrue(pushers[0]["last_stream_ordering"] > last_stream_ordering)
last_stream_ordering = pushers[0]["last_stream_ordering"]
# Now it'll try and send the second push message, which will be the second one
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(
self.push_attempts[1][2]["notification"]["content"]["body"], "There!"
)
# Make the second push succeed
self.push_attempts[1][0].callback({})
self.pump()
# The stream ordering has increased, again
pushers = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": user_id})
)
pushers = list(pushers)
self.assertEqual(len(pushers), 1)
self.assertTrue(pushers[0]["last_stream_ordering"] > last_stream_ordering)
def test_sends_high_priority_for_encrypted(self):
"""
The HTTP pusher will send pushes at high priority if they correspond
to an encrypted message.
This will happen both in 1:1 rooms and larger rooms.
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("otheruser", "pass")
other_access_token = self.login("otheruser", "pass")
# Register a third user
yet_another_user_id = self.register_user("yetanotheruser", "pass")
yet_another_access_token = self.login("yetanotheruser", "pass")
# Create a room
room = self.helper.create_room_as(user_id, tok=access_token)
# The other user joins
self.helper.join(room=room, user=other_user_id, tok=other_access_token)
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Send an encrypted event
# I know there'd normally be set-up of an encrypted room first
# but this will do for our purposes
self.helper.send_event(
room,
"m.room.encrypted",
content={
"algorithm": "m.megolm.v1.aes-sha2",
"sender_key": "6lImKbzK51MzWLwHh8tUM3UBBSBrLlgup/OOCGTvumM",
"ciphertext": "AwgAErABoRxwpMipdgiwXgu46rHiWQ0DmRj0qUlPrMraBUDk"
"leTnJRljpuc7IOhsYbLY3uo2WI0ab/ob41sV+3JEIhODJPqH"
"TK7cEZaIL+/up9e+dT9VGF5kRTWinzjkeqO8FU5kfdRjm+3w"
"0sy3o1OCpXXCfO+faPhbV/0HuK4ndx1G+myNfK1Nk/CxfMcT"
"BT+zDS/Df/QePAHVbrr9uuGB7fW8ogW/ulnydgZPRluusFGv"
"J3+cg9LoPpZPAmv5Me3ec7NtdlfN0oDZ0gk3TiNkkhsxDG9Y"
"YcNzl78USI0q8+kOV26Bu5dOBpU4WOuojXZHJlP5lMgdzLLl"
"EQ0",
"session_id": "IigqfNWLL+ez/Is+Duwp2s4HuCZhFG9b9CZKTYHtQ4A",
"device_id": "AHQDUSTAAA",
},
tok=other_access_token,
)
# Advance time a bit, so the pusher will register something has happened
self.pump()
# Make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# Check our push made it with high priority
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
# Add yet another person — we want to make this room not a 1:1
# (as encrypted messages in a 1:1 currently have tweaks applied
# so it doesn't properly exercise the condition of all encrypted
# messages need to be high).
self.helper.join(
room=room, user=yet_another_user_id, tok=yet_another_access_token
)
# Check no push notifications are sent regarding the membership changes
# (that would confuse the test)
self.pump()
self.assertEqual(len(self.push_attempts), 1)
# Send another encrypted event
self.helper.send_event(
room,
"m.room.encrypted",
content={
"ciphertext": "AwgAEoABtEuic/2DF6oIpNH+q/PonzlhXOVho8dTv0tzFr5m"
"9vTo50yabx3nxsRlP2WxSqa8I07YftP+EKWCWJvTkg6o7zXq"
"6CK+GVvLQOVgK50SfvjHqJXN+z1VEqj+5mkZVN/cAgJzoxcH"
"zFHkwDPJC8kQs47IHd8EO9KBUK4v6+NQ1uE/BIak4qAf9aS/"
"kI+f0gjn9IY9K6LXlah82A/iRyrIrxkCkE/n0VfvLhaWFecC"
"sAWTcMLoF6fh1Jpke95mljbmFSpsSd/eEQw",
"device_id": "SRCFTWTHXO",
"session_id": "eMA+bhGczuTz1C5cJR1YbmrnnC6Goni4lbvS5vJ1nG4",
"algorithm": "m.megolm.v1.aes-sha2",
"sender_key": "rC/XSIAiYrVGSuaHMop8/pTZbku4sQKBZwRwukgnN1c",
},
tok=other_access_token,
)
# Advance time a bit, so the pusher will register something has happened
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "high")
def test_sends_high_priority_for_one_to_one_only(self):
"""
The HTTP pusher will send pushes at high priority if they correspond
to a message in a one-to-one room.
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("otheruser", "pass")
other_access_token = self.login("otheruser", "pass")
# Register a third user
yet_another_user_id = self.register_user("yetanotheruser", "pass")
yet_another_access_token = self.login("yetanotheruser", "pass")
# Create a room
room = self.helper.create_room_as(user_id, tok=access_token)
# The other user joins
self.helper.join(room=room, user=other_user_id, tok=other_access_token)
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Send a message
self.helper.send(room, body="Hi!", tok=other_access_token)
# Advance time a bit, so the pusher will register something has happened
self.pump()
# Make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# Check our push made it with high priority — this is a one-to-one room
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
# Yet another user joins
self.helper.join(
room=room, user=yet_another_user_id, tok=yet_another_access_token
)
# Check no push notifications are sent regarding the membership changes
# (that would confuse the test)
self.pump()
self.assertEqual(len(self.push_attempts), 1)
# Send another event
self.helper.send(room, body="Welcome!", tok=other_access_token)
# Advance time a bit, so the pusher will register something has happened
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
# check that this is low-priority
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def test_sends_high_priority_for_mention(self):
"""
The HTTP pusher will send pushes at high priority if they correspond
to a message containing the user's display name.
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("otheruser", "pass")
other_access_token = self.login("otheruser", "pass")
# Register a third user
yet_another_user_id = self.register_user("yetanotheruser", "pass")
yet_another_access_token = self.login("yetanotheruser", "pass")
# Create a room
room = self.helper.create_room_as(user_id, tok=access_token)
# The other users join
self.helper.join(room=room, user=other_user_id, tok=other_access_token)
self.helper.join(
room=room, user=yet_another_user_id, tok=yet_another_access_token
)
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Send a message
self.helper.send(room, body="Oh, user, hello!", tok=other_access_token)
# Advance time a bit, so the pusher will register something has happened
self.pump()
# Make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# Check our push made it with high priority
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
# Send another event, this time with no mention
self.helper.send(room, body="Are you there?", tok=other_access_token)
# Advance time a bit, so the pusher will register something has happened
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
# check that this is low-priority
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def test_sends_high_priority_for_atroom(self):
"""
The HTTP pusher will send pushes at high priority if they correspond
to a message that contains @room.
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("otheruser", "pass")
other_access_token = self.login("otheruser", "pass")
# Register a third user
yet_another_user_id = self.register_user("yetanotheruser", "pass")
yet_another_access_token = self.login("yetanotheruser", "pass")
# Create a room (as other_user so the power levels are compatible with
# other_user sending @room).
room = self.helper.create_room_as(other_user_id, tok=other_access_token)
# The other users join
self.helper.join(room=room, user=user_id, tok=access_token)
self.helper.join(
room=room, user=yet_another_user_id, tok=yet_another_access_token
)
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Send a message
self.helper.send(
room,
body="@room eeek! There's a spider on the table!",
tok=other_access_token,
)
# Advance time a bit, so the pusher will register something has happened
self.pump()
# Make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# Check our push made it with high priority
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
# Send another event, this time as someone without the power of @room
self.helper.send(
room, body="@room the spider is gone", tok=yet_another_access_token
)
# Advance time a bit, so the pusher will register something has happened
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
# check that this is low-priority
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def test_push_unread_count_group_by_room(self):
"""
The HTTP pusher will group unread count by number of unread rooms.
"""
# Carry out common push count tests and setup
self._test_push_unread_count()
# Carry out our option-value specific test
#
# This push should still only contain an unread count of 1 (for 1 unread room)
self.assertEqual(
self.push_attempts[5][2]["notification"]["counts"]["unread"], 1
)
@override_config({"push": {"group_unread_count_by_room": False}})
def test_push_unread_count_message_count(self):
"""
The HTTP pusher will send the total unread message count.
"""
# Carry out common push count tests and setup
self._test_push_unread_count()
# Carry out our option-value specific test
#
# We're counting every unread message, so there should now be 4 since the
# last read receipt
self.assertEqual(
self.push_attempts[5][2]["notification"]["counts"]["unread"], 4
)
def _test_push_unread_count(self):
"""
Tests that the correct unread count appears in sent push notifications
Note that:
* Sending messages will cause push notifications to go out to relevant users
* Sending a read receipt will cause a "badge update" notification to go out to
the user that sent the receipt
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("other_user", "pass")
other_access_token = self.login("other_user", "pass")
# Create a room (as other_user)
room_id = self.helper.create_room_as(other_user_id, tok=other_access_token)
# The user to get notified joins
self.helper.join(room=room_id, user=user_id, tok=access_token)
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Send a message
response = self.helper.send(
room_id, body="Hello there!", tok=other_access_token
)
# To get an unread count, the user who is getting notified has to have a read
# position in the room. We'll set the read position to this event in a moment
first_message_event_id = response["event_id"]
# Advance time a bit (so the pusher will register something has happened) and
# make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# Check our push made it
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
# Check that the unread count for the room is 0
#
# The unread count is zero as the user has no read receipt in the room yet
self.assertEqual(
self.push_attempts[0][2]["notification"]["counts"]["unread"], 0
)
# Now set the user's read receipt position to the first event
#
# This will actually trigger a new notification to be sent out so that
# even if the user does not receive another message, their unread
# count goes down
request, channel = self.make_request(
"POST",
"/rooms/%s/receipt/m.read/%s" % (room_id, first_message_event_id),
{},
access_token=access_token,
)
self.assertEqual(channel.code, 200, channel.json_body)
# Advance time and make the push succeed
self.push_attempts[1][0].callback({})
self.pump()
# Unread count is still zero as we've read the only message in the room
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(
self.push_attempts[1][2]["notification"]["counts"]["unread"], 0
)
# Send another message
self.helper.send(
room_id, body="How's the weather today?", tok=other_access_token
)
# Advance time and make the push succeed
self.push_attempts[2][0].callback({})
self.pump()
# This push should contain an unread count of 1 as there's now been one
# message since our last read receipt
self.assertEqual(len(self.push_attempts), 3)
self.assertEqual(
self.push_attempts[2][2]["notification"]["counts"]["unread"], 1
)
# Since we're grouping by room, sending more messages shouldn't increase the
# unread count, as they're all being sent in the same room
self.helper.send(room_id, body="Hello?", tok=other_access_token)
# Advance time and make the push succeed
self.pump()
self.push_attempts[3][0].callback({})
self.helper.send(room_id, body="Hello??", tok=other_access_token)
# Advance time and make the push succeed
self.pump()
self.push_attempts[4][0].callback({})
self.helper.send(room_id, body="HELLO???", tok=other_access_token)
# Advance time and make the push succeed
self.pump()
self.push_attempts[5][0].callback({})
self.assertEqual(len(self.push_attempts), 6)
| # -*- coding: utf-8 -*-
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
from twisted.internet.defer import Deferred
import synapse.rest.admin
from synapse.logging.context import make_deferred_yieldable
from synapse.rest.client.v1 import login, room
from synapse.rest.client.v2_alpha import receipts
from tests.unittest import HomeserverTestCase, override_config
class HTTPPusherTests(HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
room.register_servlets,
login.register_servlets,
receipts.register_servlets,
]
user_id = True
hijack_auth = False
def make_homeserver(self, reactor, clock):
self.push_attempts = []
m = Mock()
def post_json_get_json(url, body):
d = Deferred()
self.push_attempts.append((d, url, body))
return make_deferred_yieldable(d)
m.post_json_get_json = post_json_get_json
config = self.default_config()
config["start_pushers"] = True
hs = self.setup_test_homeserver(
config=config, proxied_blacklisted_http_client=m
)
return hs
def test_sends_http(self):
"""
The HTTP pusher will send pushes for each message to a HTTP endpoint
when configured to do so.
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("otheruser", "pass")
other_access_token = self.login("otheruser", "pass")
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Create a room
room = self.helper.create_room_as(user_id, tok=access_token)
# The other user joins
self.helper.join(room=room, user=other_user_id, tok=other_access_token)
# The other user sends some messages
self.helper.send(room, body="Hi!", tok=other_access_token)
self.helper.send(room, body="There!", tok=other_access_token)
# Get the stream ordering before it gets sent
pushers = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": user_id})
)
pushers = list(pushers)
self.assertEqual(len(pushers), 1)
last_stream_ordering = pushers[0]["last_stream_ordering"]
# Advance time a bit, so the pusher will register something has happened
self.pump()
# It hasn't succeeded yet, so the stream ordering shouldn't have moved
pushers = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": user_id})
)
pushers = list(pushers)
self.assertEqual(len(pushers), 1)
self.assertEqual(last_stream_ordering, pushers[0]["last_stream_ordering"])
# One push was attempted to be sent -- it'll be the first message
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(
self.push_attempts[0][2]["notification"]["content"]["body"], "Hi!"
)
# Make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# The stream ordering has increased
pushers = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": user_id})
)
pushers = list(pushers)
self.assertEqual(len(pushers), 1)
self.assertTrue(pushers[0]["last_stream_ordering"] > last_stream_ordering)
last_stream_ordering = pushers[0]["last_stream_ordering"]
# Now it'll try and send the second push message, which will be the second one
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(
self.push_attempts[1][2]["notification"]["content"]["body"], "There!"
)
# Make the second push succeed
self.push_attempts[1][0].callback({})
self.pump()
# The stream ordering has increased, again
pushers = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": user_id})
)
pushers = list(pushers)
self.assertEqual(len(pushers), 1)
self.assertTrue(pushers[0]["last_stream_ordering"] > last_stream_ordering)
def test_sends_high_priority_for_encrypted(self):
"""
The HTTP pusher will send pushes at high priority if they correspond
to an encrypted message.
This will happen both in 1:1 rooms and larger rooms.
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("otheruser", "pass")
other_access_token = self.login("otheruser", "pass")
# Register a third user
yet_another_user_id = self.register_user("yetanotheruser", "pass")
yet_another_access_token = self.login("yetanotheruser", "pass")
# Create a room
room = self.helper.create_room_as(user_id, tok=access_token)
# The other user joins
self.helper.join(room=room, user=other_user_id, tok=other_access_token)
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Send an encrypted event
# I know there'd normally be set-up of an encrypted room first
# but this will do for our purposes
self.helper.send_event(
room,
"m.room.encrypted",
content={
"algorithm": "m.megolm.v1.aes-sha2",
"sender_key": "6lImKbzK51MzWLwHh8tUM3UBBSBrLlgup/OOCGTvumM",
"ciphertext": "AwgAErABoRxwpMipdgiwXgu46rHiWQ0DmRj0qUlPrMraBUDk"
"leTnJRljpuc7IOhsYbLY3uo2WI0ab/ob41sV+3JEIhODJPqH"
"TK7cEZaIL+/up9e+dT9VGF5kRTWinzjkeqO8FU5kfdRjm+3w"
"0sy3o1OCpXXCfO+faPhbV/0HuK4ndx1G+myNfK1Nk/CxfMcT"
"BT+zDS/Df/QePAHVbrr9uuGB7fW8ogW/ulnydgZPRluusFGv"
"J3+cg9LoPpZPAmv5Me3ec7NtdlfN0oDZ0gk3TiNkkhsxDG9Y"
"YcNzl78USI0q8+kOV26Bu5dOBpU4WOuojXZHJlP5lMgdzLLl"
"EQ0",
"session_id": "IigqfNWLL+ez/Is+Duwp2s4HuCZhFG9b9CZKTYHtQ4A",
"device_id": "AHQDUSTAAA",
},
tok=other_access_token,
)
# Advance time a bit, so the pusher will register something has happened
self.pump()
# Make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# Check our push made it with high priority
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
# Add yet another person — we want to make this room not a 1:1
# (as encrypted messages in a 1:1 currently have tweaks applied
# so it doesn't properly exercise the condition of all encrypted
# messages need to be high).
self.helper.join(
room=room, user=yet_another_user_id, tok=yet_another_access_token
)
# Check no push notifications are sent regarding the membership changes
# (that would confuse the test)
self.pump()
self.assertEqual(len(self.push_attempts), 1)
# Send another encrypted event
self.helper.send_event(
room,
"m.room.encrypted",
content={
"ciphertext": "AwgAEoABtEuic/2DF6oIpNH+q/PonzlhXOVho8dTv0tzFr5m"
"9vTo50yabx3nxsRlP2WxSqa8I07YftP+EKWCWJvTkg6o7zXq"
"6CK+GVvLQOVgK50SfvjHqJXN+z1VEqj+5mkZVN/cAgJzoxcH"
"zFHkwDPJC8kQs47IHd8EO9KBUK4v6+NQ1uE/BIak4qAf9aS/"
"kI+f0gjn9IY9K6LXlah82A/iRyrIrxkCkE/n0VfvLhaWFecC"
"sAWTcMLoF6fh1Jpke95mljbmFSpsSd/eEQw",
"device_id": "SRCFTWTHXO",
"session_id": "eMA+bhGczuTz1C5cJR1YbmrnnC6Goni4lbvS5vJ1nG4",
"algorithm": "m.megolm.v1.aes-sha2",
"sender_key": "rC/XSIAiYrVGSuaHMop8/pTZbku4sQKBZwRwukgnN1c",
},
tok=other_access_token,
)
# Advance time a bit, so the pusher will register something has happened
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "high")
def test_sends_high_priority_for_one_to_one_only(self):
"""
The HTTP pusher will send pushes at high priority if they correspond
to a message in a one-to-one room.
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("otheruser", "pass")
other_access_token = self.login("otheruser", "pass")
# Register a third user
yet_another_user_id = self.register_user("yetanotheruser", "pass")
yet_another_access_token = self.login("yetanotheruser", "pass")
# Create a room
room = self.helper.create_room_as(user_id, tok=access_token)
# The other user joins
self.helper.join(room=room, user=other_user_id, tok=other_access_token)
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Send a message
self.helper.send(room, body="Hi!", tok=other_access_token)
# Advance time a bit, so the pusher will register something has happened
self.pump()
# Make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# Check our push made it with high priority — this is a one-to-one room
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
# Yet another user joins
self.helper.join(
room=room, user=yet_another_user_id, tok=yet_another_access_token
)
# Check no push notifications are sent regarding the membership changes
# (that would confuse the test)
self.pump()
self.assertEqual(len(self.push_attempts), 1)
# Send another event
self.helper.send(room, body="Welcome!", tok=other_access_token)
# Advance time a bit, so the pusher will register something has happened
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
# check that this is low-priority
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def test_sends_high_priority_for_mention(self):
"""
The HTTP pusher will send pushes at high priority if they correspond
to a message containing the user's display name.
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("otheruser", "pass")
other_access_token = self.login("otheruser", "pass")
# Register a third user
yet_another_user_id = self.register_user("yetanotheruser", "pass")
yet_another_access_token = self.login("yetanotheruser", "pass")
# Create a room
room = self.helper.create_room_as(user_id, tok=access_token)
# The other users join
self.helper.join(room=room, user=other_user_id, tok=other_access_token)
self.helper.join(
room=room, user=yet_another_user_id, tok=yet_another_access_token
)
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Send a message
self.helper.send(room, body="Oh, user, hello!", tok=other_access_token)
# Advance time a bit, so the pusher will register something has happened
self.pump()
# Make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# Check our push made it with high priority
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
# Send another event, this time with no mention
self.helper.send(room, body="Are you there?", tok=other_access_token)
# Advance time a bit, so the pusher will register something has happened
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
# check that this is low-priority
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def test_sends_high_priority_for_atroom(self):
"""
The HTTP pusher will send pushes at high priority if they correspond
to a message that contains @room.
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("otheruser", "pass")
other_access_token = self.login("otheruser", "pass")
# Register a third user
yet_another_user_id = self.register_user("yetanotheruser", "pass")
yet_another_access_token = self.login("yetanotheruser", "pass")
# Create a room (as other_user so the power levels are compatible with
# other_user sending @room).
room = self.helper.create_room_as(other_user_id, tok=other_access_token)
# The other users join
self.helper.join(room=room, user=user_id, tok=access_token)
self.helper.join(
room=room, user=yet_another_user_id, tok=yet_another_access_token
)
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Send a message
self.helper.send(
room,
body="@room eeek! There's a spider on the table!",
tok=other_access_token,
)
# Advance time a bit, so the pusher will register something has happened
self.pump()
# Make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# Check our push made it with high priority
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
# Send another event, this time as someone without the power of @room
self.helper.send(
room, body="@room the spider is gone", tok=yet_another_access_token
)
# Advance time a bit, so the pusher will register something has happened
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
# check that this is low-priority
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def test_push_unread_count_group_by_room(self):
"""
The HTTP pusher will group unread count by number of unread rooms.
"""
# Carry out common push count tests and setup
self._test_push_unread_count()
# Carry out our option-value specific test
#
# This push should still only contain an unread count of 1 (for 1 unread room)
self.assertEqual(
self.push_attempts[5][2]["notification"]["counts"]["unread"], 1
)
@override_config({"push": {"group_unread_count_by_room": False}})
def test_push_unread_count_message_count(self):
"""
The HTTP pusher will send the total unread message count.
"""
# Carry out common push count tests and setup
self._test_push_unread_count()
# Carry out our option-value specific test
#
# We're counting every unread message, so there should now be 4 since the
# last read receipt
self.assertEqual(
self.push_attempts[5][2]["notification"]["counts"]["unread"], 4
)
def _test_push_unread_count(self):
"""
Tests that the correct unread count appears in sent push notifications
Note that:
* Sending messages will cause push notifications to go out to relevant users
* Sending a read receipt will cause a "badge update" notification to go out to
the user that sent the receipt
"""
# Register the user who gets notified
user_id = self.register_user("user", "pass")
access_token = self.login("user", "pass")
# Register the user who sends the message
other_user_id = self.register_user("other_user", "pass")
other_access_token = self.login("other_user", "pass")
# Create a room (as other_user)
room_id = self.helper.create_room_as(other_user_id, tok=other_access_token)
# The user to get notified joins
self.helper.join(room=room_id, user=user_id, tok=access_token)
# Register the pusher
user_tuple = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_tuple.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
# Send a message
response = self.helper.send(
room_id, body="Hello there!", tok=other_access_token
)
# To get an unread count, the user who is getting notified has to have a read
# position in the room. We'll set the read position to this event in a moment
first_message_event_id = response["event_id"]
# Advance time a bit (so the pusher will register something has happened) and
# make the push succeed
self.push_attempts[0][0].callback({})
self.pump()
# Check our push made it
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
# Check that the unread count for the room is 0
#
# The unread count is zero as the user has no read receipt in the room yet
self.assertEqual(
self.push_attempts[0][2]["notification"]["counts"]["unread"], 0
)
# Now set the user's read receipt position to the first event
#
# This will actually trigger a new notification to be sent out so that
# even if the user does not receive another message, their unread
# count goes down
request, channel = self.make_request(
"POST",
"/rooms/%s/receipt/m.read/%s" % (room_id, first_message_event_id),
{},
access_token=access_token,
)
self.assertEqual(channel.code, 200, channel.json_body)
# Advance time and make the push succeed
self.push_attempts[1][0].callback({})
self.pump()
# Unread count is still zero as we've read the only message in the room
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(
self.push_attempts[1][2]["notification"]["counts"]["unread"], 0
)
# Send another message
self.helper.send(
room_id, body="How's the weather today?", tok=other_access_token
)
# Advance time and make the push succeed
self.push_attempts[2][0].callback({})
self.pump()
# This push should contain an unread count of 1 as there's now been one
# message since our last read receipt
self.assertEqual(len(self.push_attempts), 3)
self.assertEqual(
self.push_attempts[2][2]["notification"]["counts"]["unread"], 1
)
# Since we're grouping by room, sending more messages shouldn't increase the
# unread count, as they're all being sent in the same room
self.helper.send(room_id, body="Hello?", tok=other_access_token)
# Advance time and make the push succeed
self.pump()
self.push_attempts[3][0].callback({})
self.helper.send(room_id, body="Hello??", tok=other_access_token)
# Advance time and make the push succeed
self.pump()
self.push_attempts[4][0].callback({})
self.helper.send(room_id, body="HELLO???", tok=other_access_token)
# Advance time and make the push succeed
self.pump()
self.push_attempts[5][0].callback({})
self.assertEqual(len(self.push_attempts), 6)
| open_redirect | {
"code": [
" hs = self.setup_test_homeserver(config=config, proxied_http_client=m)"
],
"line_no": [
52
]
} | {
"code": [
" hs = self.setup_test_homeserver(",
" )"
],
"line_no": [
52,
54
]
} |
from mock import Mock
from twisted.internet.defer import Deferred
import synapse.rest.admin
from synapse.logging.context import .make_deferred_yieldable
from synapse.rest.client.v1 import login, VAR_15
from synapse.rest.client.v2_alpha import receipts
from tests.unittest import HomeserverTestCase, override_config
class CLASS_0(HomeserverTestCase):
VAR_0 = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
VAR_15.register_servlets,
login.register_servlets,
receipts.register_servlets,
]
VAR_1 = True
VAR_2 = False
def FUNC_0(self, VAR_3, VAR_4):
self.push_attempts = []
VAR_5 = Mock()
def FUNC_9(VAR_6, VAR_7):
VAR_25 = Deferred()
self.push_attempts.append((VAR_25, VAR_6, VAR_7))
return make_deferred_yieldable(VAR_25)
VAR_5.post_json_get_json = FUNC_9
VAR_8 = self.default_config()
VAR_8["start_pushers"] = True
VAR_9 = self.setup_test_homeserver(VAR_8=config, proxied_http_client=VAR_5)
return VAR_9
def FUNC_1(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("otheruser", "pass")
VAR_12 = self.login("otheruser", "pass")
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
VAR_15 = self.helper.create_room_as(VAR_1, tok=VAR_10)
self.helper.join(VAR_15=room, user=VAR_11, tok=VAR_12)
self.helper.send(VAR_15, VAR_7="Hi!", tok=VAR_12)
self.helper.send(VAR_15, VAR_7="There!", tok=VAR_12)
VAR_16 = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": VAR_1})
)
VAR_16 = list(VAR_16)
self.assertEqual(len(VAR_16), 1)
VAR_17 = VAR_16[0]["last_stream_ordering"]
self.pump()
VAR_16 = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": VAR_1})
)
VAR_16 = list(VAR_16)
self.assertEqual(len(VAR_16), 1)
self.assertEqual(VAR_17, VAR_16[0]["last_stream_ordering"])
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(
self.push_attempts[0][2]["notification"]["content"]["body"], "Hi!"
)
self.push_attempts[0][0].callback({})
self.pump()
VAR_16 = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": VAR_1})
)
VAR_16 = list(VAR_16)
self.assertEqual(len(VAR_16), 1)
self.assertTrue(VAR_16[0]["last_stream_ordering"] > VAR_17)
VAR_17 = VAR_16[0]["last_stream_ordering"]
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(
self.push_attempts[1][2]["notification"]["content"]["body"], "There!"
)
self.push_attempts[1][0].callback({})
self.pump()
VAR_16 = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": VAR_1})
)
VAR_16 = list(VAR_16)
self.assertEqual(len(VAR_16), 1)
self.assertTrue(VAR_16[0]["last_stream_ordering"] > VAR_17)
def FUNC_2(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("otheruser", "pass")
VAR_12 = self.login("otheruser", "pass")
VAR_18 = self.register_user("yetanotheruser", "pass")
VAR_19 = self.login("yetanotheruser", "pass")
VAR_15 = self.helper.create_room_as(VAR_1, tok=VAR_10)
self.helper.join(VAR_15=room, user=VAR_11, tok=VAR_12)
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
self.helper.send_event(
VAR_15,
"m.room.encrypted",
content={
"algorithm": "m.megolm.v1.aes-sha2",
"sender_key": "6lImKbzK51MzWLwHh8tUM3UBBSBrLlgup/OOCGTvumM",
"ciphertext": "AwgAErABoRxwpMipdgiwXgu46rHiWQ0DmRj0qUlPrMraBUDk"
"leTnJRljpuc7IOhsYbLY3uo2WI0ab/ob41sV+3JEIhODJPqH"
"TK7cEZaIL+/up9e+dT9VGF5kRTWinzjkeqO8FU5kfdRjm+3w"
"0sy3o1OCpXXCfO+faPhbV/0HuK4ndx1G+myNfK1Nk/CxfMcT"
"BT+zDS/Df/QePAHVbrr9uuGB7fW8ogW/ulnydgZPRluusFGv"
"J3+cg9LoPpZPAmv5Me3ec7NtdlfN0oDZ0gk3TiNkkhsxDG9Y"
"YcNzl78USI0q8+kOV26Bu5dOBpU4WOuojXZHJlP5lMgdzLLl"
"EQ0",
"session_id": "IigqfNWLL+ez/Is+Duwp2s4HuCZhFG9b9CZKTYHtQ4A",
"device_id": "AHQDUSTAAA",
},
tok=VAR_12,
)
self.pump()
self.push_attempts[0][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
self.helper.join(
VAR_15=room, user=VAR_18, tok=VAR_19
)
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.helper.send_event(
VAR_15,
"m.room.encrypted",
content={
"ciphertext": "AwgAEoABtEuic/2DF6oIpNH+q/PonzlhXOVho8dTv0tzFr5m"
"9vTo50yabx3nxsRlP2WxSqa8I07YftP+EKWCWJvTkg6o7zXq"
"6CK+GVvLQOVgK50SfvjHqJXN+z1VEqj+5mkZVN/cAgJzoxcH"
"zFHkwDPJC8kQs47IHd8EO9KBUK4v6+NQ1uE/BIak4qAf9aS/"
"kI+f0gjn9IY9K6LXlah82A/iRyrIrxkCkE/n0VfvLhaWFecC"
"sAWTcMLoF6fh1Jpke95mljbmFSpsSd/eEQw",
"device_id": "SRCFTWTHXO",
"session_id": "eMA+bhGczuTz1C5cJR1YbmrnnC6Goni4lbvS5vJ1nG4",
"algorithm": "m.megolm.v1.aes-sha2",
"sender_key": "rC/XSIAiYrVGSuaHMop8/pTZbku4sQKBZwRwukgnN1c",
},
tok=VAR_12,
)
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "high")
def FUNC_3(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("otheruser", "pass")
VAR_12 = self.login("otheruser", "pass")
VAR_18 = self.register_user("yetanotheruser", "pass")
VAR_19 = self.login("yetanotheruser", "pass")
VAR_15 = self.helper.create_room_as(VAR_1, tok=VAR_10)
self.helper.join(VAR_15=room, user=VAR_11, tok=VAR_12)
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
self.helper.send(VAR_15, VAR_7="Hi!", tok=VAR_12)
self.pump()
self.push_attempts[0][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
self.helper.join(
VAR_15=room, user=VAR_18, tok=VAR_19
)
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.helper.send(VAR_15, VAR_7="Welcome!", tok=VAR_12)
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def FUNC_4(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("otheruser", "pass")
VAR_12 = self.login("otheruser", "pass")
VAR_18 = self.register_user("yetanotheruser", "pass")
VAR_19 = self.login("yetanotheruser", "pass")
VAR_15 = self.helper.create_room_as(VAR_1, tok=VAR_10)
self.helper.join(VAR_15=room, user=VAR_11, tok=VAR_12)
self.helper.join(
VAR_15=room, user=VAR_18, tok=VAR_19
)
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
self.helper.send(VAR_15, VAR_7="Oh, user, hello!", tok=VAR_12)
self.pump()
self.push_attempts[0][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
self.helper.send(VAR_15, VAR_7="Are you there?", tok=VAR_12)
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def FUNC_5(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("otheruser", "pass")
VAR_12 = self.login("otheruser", "pass")
VAR_18 = self.register_user("yetanotheruser", "pass")
VAR_19 = self.login("yetanotheruser", "pass")
VAR_15 = self.helper.create_room_as(VAR_11, tok=VAR_12)
self.helper.join(VAR_15=room, user=VAR_1, tok=VAR_10)
self.helper.join(
VAR_15=room, user=VAR_18, tok=VAR_19
)
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
self.helper.send(
VAR_15,
VAR_7="@VAR_15 eeek! There's a spider on the table!",
tok=VAR_12,
)
self.pump()
self.push_attempts[0][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
self.helper.send(
VAR_15, VAR_7="@VAR_15 the spider is gone", tok=VAR_19
)
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def FUNC_6(self):
self._test_push_unread_count()
self.assertEqual(
self.push_attempts[5][2]["notification"]["counts"]["unread"], 1
)
@override_config({"push": {"group_unread_count_by_room": False}})
def FUNC_7(self):
self._test_push_unread_count()
self.assertEqual(
self.push_attempts[5][2]["notification"]["counts"]["unread"], 4
)
def FUNC_8(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("other_user", "pass")
VAR_12 = self.login("other_user", "pass")
VAR_20 = self.helper.create_room_as(VAR_11, tok=VAR_12)
self.helper.join(VAR_15=VAR_20, user=VAR_1, tok=VAR_10)
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
VAR_21 = self.helper.send(
VAR_20, VAR_7="Hello there!", tok=VAR_12
)
VAR_22 = VAR_21["event_id"]
self.push_attempts[0][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(
self.push_attempts[0][2]["notification"]["counts"]["unread"], 0
)
VAR_23, VAR_24 = self.make_request(
"POST",
"/rooms/%s/receipt/VAR_5.read/%s" % (VAR_20, VAR_22),
{},
VAR_10=access_token,
)
self.assertEqual(VAR_24.code, 200, VAR_24.json_body)
self.push_attempts[1][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(
self.push_attempts[1][2]["notification"]["counts"]["unread"], 0
)
self.helper.send(
VAR_20, VAR_7="How's the weather today?", tok=VAR_12
)
self.push_attempts[2][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 3)
self.assertEqual(
self.push_attempts[2][2]["notification"]["counts"]["unread"], 1
)
self.helper.send(VAR_20, VAR_7="Hello?", tok=VAR_12)
self.pump()
self.push_attempts[3][0].callback({})
self.helper.send(VAR_20, VAR_7="Hello??", tok=VAR_12)
self.pump()
self.push_attempts[4][0].callback({})
self.helper.send(VAR_20, VAR_7="HELLO???", tok=VAR_12)
self.pump()
self.push_attempts[5][0].callback({})
self.assertEqual(len(self.push_attempts), 6)
|
from mock import Mock
from twisted.internet.defer import Deferred
import synapse.rest.admin
from synapse.logging.context import .make_deferred_yieldable
from synapse.rest.client.v1 import login, VAR_15
from synapse.rest.client.v2_alpha import receipts
from tests.unittest import HomeserverTestCase, override_config
class CLASS_0(HomeserverTestCase):
VAR_0 = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
VAR_15.register_servlets,
login.register_servlets,
receipts.register_servlets,
]
VAR_1 = True
VAR_2 = False
def FUNC_0(self, VAR_3, VAR_4):
self.push_attempts = []
VAR_5 = Mock()
def FUNC_9(VAR_6, VAR_7):
VAR_25 = Deferred()
self.push_attempts.append((VAR_25, VAR_6, VAR_7))
return make_deferred_yieldable(VAR_25)
VAR_5.post_json_get_json = FUNC_9
VAR_8 = self.default_config()
VAR_8["start_pushers"] = True
VAR_9 = self.setup_test_homeserver(
VAR_8=config, proxied_blacklisted_http_client=VAR_5
)
return VAR_9
def FUNC_1(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("otheruser", "pass")
VAR_12 = self.login("otheruser", "pass")
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
VAR_15 = self.helper.create_room_as(VAR_1, tok=VAR_10)
self.helper.join(VAR_15=room, user=VAR_11, tok=VAR_12)
self.helper.send(VAR_15, VAR_7="Hi!", tok=VAR_12)
self.helper.send(VAR_15, VAR_7="There!", tok=VAR_12)
VAR_16 = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": VAR_1})
)
VAR_16 = list(VAR_16)
self.assertEqual(len(VAR_16), 1)
VAR_17 = VAR_16[0]["last_stream_ordering"]
self.pump()
VAR_16 = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": VAR_1})
)
VAR_16 = list(VAR_16)
self.assertEqual(len(VAR_16), 1)
self.assertEqual(VAR_17, VAR_16[0]["last_stream_ordering"])
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(
self.push_attempts[0][2]["notification"]["content"]["body"], "Hi!"
)
self.push_attempts[0][0].callback({})
self.pump()
VAR_16 = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": VAR_1})
)
VAR_16 = list(VAR_16)
self.assertEqual(len(VAR_16), 1)
self.assertTrue(VAR_16[0]["last_stream_ordering"] > VAR_17)
VAR_17 = VAR_16[0]["last_stream_ordering"]
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(
self.push_attempts[1][2]["notification"]["content"]["body"], "There!"
)
self.push_attempts[1][0].callback({})
self.pump()
VAR_16 = self.get_success(
self.hs.get_datastore().get_pushers_by({"user_name": VAR_1})
)
VAR_16 = list(VAR_16)
self.assertEqual(len(VAR_16), 1)
self.assertTrue(VAR_16[0]["last_stream_ordering"] > VAR_17)
def FUNC_2(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("otheruser", "pass")
VAR_12 = self.login("otheruser", "pass")
VAR_18 = self.register_user("yetanotheruser", "pass")
VAR_19 = self.login("yetanotheruser", "pass")
VAR_15 = self.helper.create_room_as(VAR_1, tok=VAR_10)
self.helper.join(VAR_15=room, user=VAR_11, tok=VAR_12)
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
self.helper.send_event(
VAR_15,
"m.room.encrypted",
content={
"algorithm": "m.megolm.v1.aes-sha2",
"sender_key": "6lImKbzK51MzWLwHh8tUM3UBBSBrLlgup/OOCGTvumM",
"ciphertext": "AwgAErABoRxwpMipdgiwXgu46rHiWQ0DmRj0qUlPrMraBUDk"
"leTnJRljpuc7IOhsYbLY3uo2WI0ab/ob41sV+3JEIhODJPqH"
"TK7cEZaIL+/up9e+dT9VGF5kRTWinzjkeqO8FU5kfdRjm+3w"
"0sy3o1OCpXXCfO+faPhbV/0HuK4ndx1G+myNfK1Nk/CxfMcT"
"BT+zDS/Df/QePAHVbrr9uuGB7fW8ogW/ulnydgZPRluusFGv"
"J3+cg9LoPpZPAmv5Me3ec7NtdlfN0oDZ0gk3TiNkkhsxDG9Y"
"YcNzl78USI0q8+kOV26Bu5dOBpU4WOuojXZHJlP5lMgdzLLl"
"EQ0",
"session_id": "IigqfNWLL+ez/Is+Duwp2s4HuCZhFG9b9CZKTYHtQ4A",
"device_id": "AHQDUSTAAA",
},
tok=VAR_12,
)
self.pump()
self.push_attempts[0][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
self.helper.join(
VAR_15=room, user=VAR_18, tok=VAR_19
)
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.helper.send_event(
VAR_15,
"m.room.encrypted",
content={
"ciphertext": "AwgAEoABtEuic/2DF6oIpNH+q/PonzlhXOVho8dTv0tzFr5m"
"9vTo50yabx3nxsRlP2WxSqa8I07YftP+EKWCWJvTkg6o7zXq"
"6CK+GVvLQOVgK50SfvjHqJXN+z1VEqj+5mkZVN/cAgJzoxcH"
"zFHkwDPJC8kQs47IHd8EO9KBUK4v6+NQ1uE/BIak4qAf9aS/"
"kI+f0gjn9IY9K6LXlah82A/iRyrIrxkCkE/n0VfvLhaWFecC"
"sAWTcMLoF6fh1Jpke95mljbmFSpsSd/eEQw",
"device_id": "SRCFTWTHXO",
"session_id": "eMA+bhGczuTz1C5cJR1YbmrnnC6Goni4lbvS5vJ1nG4",
"algorithm": "m.megolm.v1.aes-sha2",
"sender_key": "rC/XSIAiYrVGSuaHMop8/pTZbku4sQKBZwRwukgnN1c",
},
tok=VAR_12,
)
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "high")
def FUNC_3(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("otheruser", "pass")
VAR_12 = self.login("otheruser", "pass")
VAR_18 = self.register_user("yetanotheruser", "pass")
VAR_19 = self.login("yetanotheruser", "pass")
VAR_15 = self.helper.create_room_as(VAR_1, tok=VAR_10)
self.helper.join(VAR_15=room, user=VAR_11, tok=VAR_12)
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
self.helper.send(VAR_15, VAR_7="Hi!", tok=VAR_12)
self.pump()
self.push_attempts[0][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
self.helper.join(
VAR_15=room, user=VAR_18, tok=VAR_19
)
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.helper.send(VAR_15, VAR_7="Welcome!", tok=VAR_12)
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def FUNC_4(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("otheruser", "pass")
VAR_12 = self.login("otheruser", "pass")
VAR_18 = self.register_user("yetanotheruser", "pass")
VAR_19 = self.login("yetanotheruser", "pass")
VAR_15 = self.helper.create_room_as(VAR_1, tok=VAR_10)
self.helper.join(VAR_15=room, user=VAR_11, tok=VAR_12)
self.helper.join(
VAR_15=room, user=VAR_18, tok=VAR_19
)
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
self.helper.send(VAR_15, VAR_7="Oh, user, hello!", tok=VAR_12)
self.pump()
self.push_attempts[0][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
self.helper.send(VAR_15, VAR_7="Are you there?", tok=VAR_12)
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def FUNC_5(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("otheruser", "pass")
VAR_12 = self.login("otheruser", "pass")
VAR_18 = self.register_user("yetanotheruser", "pass")
VAR_19 = self.login("yetanotheruser", "pass")
VAR_15 = self.helper.create_room_as(VAR_11, tok=VAR_12)
self.helper.join(VAR_15=room, user=VAR_1, tok=VAR_10)
self.helper.join(
VAR_15=room, user=VAR_18, tok=VAR_19
)
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
self.helper.send(
VAR_15,
VAR_7="@VAR_15 eeek! There's a spider on the table!",
tok=VAR_12,
)
self.pump()
self.push_attempts[0][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(self.push_attempts[0][2]["notification"]["prio"], "high")
self.helper.send(
VAR_15, VAR_7="@VAR_15 the spider is gone", tok=VAR_19
)
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(self.push_attempts[1][1], "example.com")
self.assertEqual(self.push_attempts[1][2]["notification"]["prio"], "low")
def FUNC_6(self):
self._test_push_unread_count()
self.assertEqual(
self.push_attempts[5][2]["notification"]["counts"]["unread"], 1
)
@override_config({"push": {"group_unread_count_by_room": False}})
def FUNC_7(self):
self._test_push_unread_count()
self.assertEqual(
self.push_attempts[5][2]["notification"]["counts"]["unread"], 4
)
def FUNC_8(self):
VAR_1 = self.register_user("user", "pass")
VAR_10 = self.login("user", "pass")
VAR_11 = self.register_user("other_user", "pass")
VAR_12 = self.login("other_user", "pass")
VAR_20 = self.helper.create_room_as(VAR_11, tok=VAR_12)
self.helper.join(VAR_15=VAR_20, user=VAR_1, tok=VAR_10)
VAR_13 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_10)
)
VAR_14 = VAR_13.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_1=user_id,
VAR_10=VAR_14,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "example.com"},
)
)
VAR_21 = self.helper.send(
VAR_20, VAR_7="Hello there!", tok=VAR_12
)
VAR_22 = VAR_21["event_id"]
self.push_attempts[0][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 1)
self.assertEqual(self.push_attempts[0][1], "example.com")
self.assertEqual(
self.push_attempts[0][2]["notification"]["counts"]["unread"], 0
)
VAR_23, VAR_24 = self.make_request(
"POST",
"/rooms/%s/receipt/VAR_5.read/%s" % (VAR_20, VAR_22),
{},
VAR_10=access_token,
)
self.assertEqual(VAR_24.code, 200, VAR_24.json_body)
self.push_attempts[1][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 2)
self.assertEqual(
self.push_attempts[1][2]["notification"]["counts"]["unread"], 0
)
self.helper.send(
VAR_20, VAR_7="How's the weather today?", tok=VAR_12
)
self.push_attempts[2][0].callback({})
self.pump()
self.assertEqual(len(self.push_attempts), 3)
self.assertEqual(
self.push_attempts[2][2]["notification"]["counts"]["unread"], 1
)
self.helper.send(VAR_20, VAR_7="Hello?", tok=VAR_12)
self.pump()
self.push_attempts[3][0].callback({})
self.helper.send(VAR_20, VAR_7="Hello??", tok=VAR_12)
self.pump()
self.push_attempts[4][0].callback({})
self.helper.send(VAR_20, VAR_7="HELLO???", tok=VAR_12)
self.pump()
self.push_attempts[5][0].callback({})
self.assertEqual(len(self.push_attempts), 6)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
18,
23,
25,
26,
36,
39,
41,
46,
48,
51,
53,
55,
61,
64,
65,
68,
69,
74,
88,
89,
91,
92,
94,
95,
98,
99,
106,
107,
109,
110,
117,
118,
124,
125,
128,
129,
137,
138,
144,
145,
148,
149,
156,
163,
166,
167,
170,
171,
174,
175,
177,
178,
180,
181,
186,
200,
201,
202,
203,
223,
224,
226,
227,
230,
231,
235,
236,
237,
238,
239,
243,
244,
245,
248,
249,
267,
268,
273,
279,
282,
283,
286,
287,
290,
291,
293,
294,
296,
297,
302,
316,
317,
319,
320,
322,
323,
326,
327,
331,
332,
336,
337,
338,
341,
342,
344,
345,
349,
350,
352,
358,
361,
362,
365,
366,
369,
370,
372,
373,
378,
379,
384,
398,
399,
401,
402,
404,
405,
408,
409,
413,
414,
416,
417,
421,
422,
424,
430,
433,
434,
437,
438,
441,
442,
443,
445,
446,
451,
452,
457,
471,
472,
478,
479,
481,
482,
485,
486,
490,
491,
495,
496,
500,
501,
503,
508,
510,
511,
512,
513,
517,
523,
525,
526,
527,
528,
529,
533,
537,
543,
546,
547,
550,
551,
553,
554,
556,
557,
562,
576,
577,
581,
582,
584,
585,
586,
589,
590,
593,
594,
595,
596,
600,
601,
602,
603,
604,
605,
613,
614,
617,
618,
623,
624,
628,
629,
632,
633,
634,
639,
640,
641,
643,
644,
647,
649,
650,
653,
655,
656,
659,
661,
57,
58,
59,
60,
158,
159,
160,
161,
162,
275,
276,
277,
278,
354,
355,
356,
357,
426,
427,
428,
429,
505,
506,
507,
520,
521,
522,
535,
536,
537,
538,
539,
540,
541,
542
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
18,
23,
25,
26,
36,
39,
41,
46,
48,
51,
55,
57,
63,
66,
67,
70,
71,
76,
90,
91,
93,
94,
96,
97,
100,
101,
108,
109,
111,
112,
119,
120,
126,
127,
130,
131,
139,
140,
146,
147,
150,
151,
158,
165,
168,
169,
172,
173,
176,
177,
179,
180,
182,
183,
188,
202,
203,
204,
205,
225,
226,
228,
229,
232,
233,
237,
238,
239,
240,
241,
245,
246,
247,
250,
251,
269,
270,
275,
281,
284,
285,
288,
289,
292,
293,
295,
296,
298,
299,
304,
318,
319,
321,
322,
324,
325,
328,
329,
333,
334,
338,
339,
340,
343,
344,
346,
347,
351,
352,
354,
360,
363,
364,
367,
368,
371,
372,
374,
375,
380,
381,
386,
400,
401,
403,
404,
406,
407,
410,
411,
415,
416,
418,
419,
423,
424,
426,
432,
435,
436,
439,
440,
443,
444,
445,
447,
448,
453,
454,
459,
473,
474,
480,
481,
483,
484,
487,
488,
492,
493,
497,
498,
502,
503,
505,
510,
512,
513,
514,
515,
519,
525,
527,
528,
529,
530,
531,
535,
539,
545,
548,
549,
552,
553,
555,
556,
558,
559,
564,
578,
579,
583,
584,
586,
587,
588,
591,
592,
595,
596,
597,
598,
602,
603,
604,
605,
606,
607,
615,
616,
619,
620,
625,
626,
630,
631,
634,
635,
636,
641,
642,
643,
645,
646,
649,
651,
652,
655,
657,
658,
661,
663,
59,
60,
61,
62,
160,
161,
162,
163,
164,
277,
278,
279,
280,
356,
357,
358,
359,
428,
429,
430,
431,
507,
508,
509,
522,
523,
524,
537,
538,
539,
540,
541,
542,
543,
544
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file provides some classes for setting up (partially-populated)
# homeservers; either as a full homeserver as a real application, or a small
# partial one for unit test mocking.
# Imports required for the default HomeServer() implementation
import abc
import functools
import logging
import os
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar, cast
import twisted.internet.base
import twisted.internet.tcp
from twisted.mail.smtp import sendmail
from twisted.web.iweb import IPolicyForHTTPS
from synapse.api.auth import Auth
from synapse.api.filtering import Filtering
from synapse.api.ratelimiting import Ratelimiter
from synapse.appservice.api import ApplicationServiceApi
from synapse.appservice.scheduler import ApplicationServiceScheduler
from synapse.config.homeserver import HomeServerConfig
from synapse.crypto import context_factory
from synapse.crypto.context_factory import RegularPolicyForHTTPS
from synapse.crypto.keyring import Keyring
from synapse.events.builder import EventBuilderFactory
from synapse.events.spamcheck import SpamChecker
from synapse.events.third_party_rules import ThirdPartyEventRules
from synapse.events.utils import EventClientSerializer
from synapse.federation.federation_client import FederationClient
from synapse.federation.federation_server import (
FederationHandlerRegistry,
FederationServer,
)
from synapse.federation.send_queue import FederationRemoteSendQueue
from synapse.federation.sender import FederationSender
from synapse.federation.transport.client import TransportLayerClient
from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer
from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler
from synapse.handlers.account_validity import AccountValidityHandler
from synapse.handlers.acme import AcmeHandler
from synapse.handlers.admin import AdminHandler
from synapse.handlers.appservice import ApplicationServicesHandler
from synapse.handlers.auth import AuthHandler, MacaroonGenerator
from synapse.handlers.cas_handler import CasHandler
from synapse.handlers.deactivate_account import DeactivateAccountHandler
from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler
from synapse.handlers.devicemessage import DeviceMessageHandler
from synapse.handlers.directory import DirectoryHandler
from synapse.handlers.e2e_keys import E2eKeysHandler
from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler
from synapse.handlers.events import EventHandler, EventStreamHandler
from synapse.handlers.federation import FederationHandler
from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler
from synapse.handlers.identity import IdentityHandler
from synapse.handlers.initial_sync import InitialSyncHandler
from synapse.handlers.message import EventCreationHandler, MessageHandler
from synapse.handlers.pagination import PaginationHandler
from synapse.handlers.password_policy import PasswordPolicyHandler
from synapse.handlers.presence import PresenceHandler
from synapse.handlers.profile import ProfileHandler
from synapse.handlers.read_marker import ReadMarkerHandler
from synapse.handlers.receipts import ReceiptsHandler
from synapse.handlers.register import RegistrationHandler
from synapse.handlers.room import (
RoomContextHandler,
RoomCreationHandler,
RoomShutdownHandler,
)
from synapse.handlers.room_list import RoomListHandler
from synapse.handlers.room_member import RoomMemberMasterHandler
from synapse.handlers.room_member_worker import RoomMemberWorkerHandler
from synapse.handlers.search import SearchHandler
from synapse.handlers.set_password import SetPasswordHandler
from synapse.handlers.sso import SsoHandler
from synapse.handlers.stats import StatsHandler
from synapse.handlers.sync import SyncHandler
from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler
from synapse.handlers.user_directory import UserDirectoryHandler
from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.module_api import ModuleApi
from synapse.notifier import Notifier
from synapse.push.action_generator import ActionGenerator
from synapse.push.pusherpool import PusherPool
from synapse.replication.tcp.client import ReplicationDataHandler
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.resource import ReplicationStreamer
from synapse.replication.tcp.streams import STREAMS_MAP, Stream
from synapse.rest.media.v1.media_repository import (
MediaRepository,
MediaRepositoryResource,
)
from synapse.secrets import Secrets
from synapse.server_notices.server_notices_manager import ServerNoticesManager
from synapse.server_notices.server_notices_sender import ServerNoticesSender
from synapse.server_notices.worker_server_notices_sender import (
WorkerServerNoticesSender,
)
from synapse.state import StateHandler, StateResolutionHandler
from synapse.storage import Databases, DataStore, Storage
from synapse.streams.events import EventSources
from synapse.types import DomainSpecificString
from synapse.util import Clock
from synapse.util.distributor import Distributor
from synapse.util.ratelimitutils import FederationRateLimiter
from synapse.util.stringutils import random_string
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from synapse.handlers.oidc_handler import OidcHandler
from synapse.handlers.saml_handler import SamlHandler
T = TypeVar("T", bound=Callable[..., Any])
def cache_in_self(builder: T) -> T:
"""Wraps a function called e.g. `get_foo`, checking if `self.foo` exists and
returning if so. If not, calls the given function and sets `self.foo` to it.
Also ensures that dependency cycles throw an exception correctly, rather
than overflowing the stack.
"""
if not builder.__name__.startswith("get_"):
raise Exception(
"@cache_in_self can only be used on functions starting with `get_`"
)
# get_attr -> _attr
depname = builder.__name__[len("get") :]
building = [False]
@functools.wraps(builder)
def _get(self):
try:
return getattr(self, depname)
except AttributeError:
pass
# Prevent cyclic dependencies from deadlocking
if building[0]:
raise ValueError("Cyclic dependency while building %s" % (depname,))
building[0] = True
try:
dep = builder(self)
setattr(self, depname, dep)
finally:
building[0] = False
return dep
# We cast here as we need to tell mypy that `_get` has the same signature as
# `builder`.
return cast(T, _get)
class HomeServer(metaclass=abc.ABCMeta):
"""A basic homeserver object without lazy component builders.
This will need all of the components it requires to either be passed as
constructor arguments, or the relevant methods overriding to create them.
Typically this would only be used for unit tests.
Dependencies should be added by creating a `def get_<depname>(self)`
function, wrapping it in `@cache_in_self`.
Attributes:
config (synapse.config.homeserver.HomeserverConfig):
_listening_services (list[twisted.internet.tcp.Port]): TCP ports that
we are listening on to provide HTTP services.
"""
REQUIRED_ON_BACKGROUND_TASK_STARTUP = [
"account_validity",
"auth",
"deactivate_account",
"message",
"pagination",
"profile",
"stats",
]
# This is overridden in derived application classes
# (such as synapse.app.homeserver.SynapseHomeServer) and gives the class to be
# instantiated during setup() for future return by get_datastore()
DATASTORE_CLASS = abc.abstractproperty()
def __init__(
self,
hostname: str,
config: HomeServerConfig,
reactor=None,
version_string="Synapse",
):
"""
Args:
hostname : The hostname for the server.
config: The full config for the homeserver.
"""
if not reactor:
from twisted.internet import reactor as _reactor
reactor = _reactor
self._reactor = reactor
self.hostname = hostname
# the key we use to sign events and requests
self.signing_key = config.key.signing_key[0]
self.config = config
self._listening_services = [] # type: List[twisted.internet.tcp.Port]
self.start_time = None # type: Optional[int]
self._instance_id = random_string(5)
self._instance_name = config.worker_name or "master"
self.version_string = version_string
self.datastores = None # type: Optional[Databases]
def get_instance_id(self) -> str:
"""A unique ID for this synapse process instance.
This is used to distinguish running instances in worker-based
deployments.
"""
return self._instance_id
def get_instance_name(self) -> str:
"""A unique name for this synapse process.
Used to identify the process over replication and in config. Does not
change over restarts.
"""
return self._instance_name
def setup(self) -> None:
logger.info("Setting up.")
self.start_time = int(self.get_clock().time())
self.datastores = Databases(self.DATASTORE_CLASS, self)
logger.info("Finished setting up.")
# Register background tasks required by this server. This must be done
# somewhat manually due to the background tasks not being registered
# unless handlers are instantiated.
if self.config.run_background_tasks:
self.setup_background_tasks()
def setup_background_tasks(self) -> None:
"""
Some handlers have side effects on instantiation (like registering
background updates). This function causes them to be fetched, and
therefore instantiated, to run those side effects.
"""
for i in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP:
getattr(self, "get_" + i + "_handler")()
def get_reactor(self) -> twisted.internet.base.ReactorBase:
"""
Fetch the Twisted reactor in use by this HomeServer.
"""
return self._reactor
def get_ip_from_request(self, request) -> str:
# X-Forwarded-For is handled by our custom request type.
return request.getClientIP()
def is_mine(self, domain_specific_string: DomainSpecificString) -> bool:
return domain_specific_string.domain == self.hostname
def is_mine_id(self, string: str) -> bool:
return string.split(":", 1)[1] == self.hostname
@cache_in_self
def get_clock(self) -> Clock:
return Clock(self._reactor)
def get_datastore(self) -> DataStore:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores.main
def get_datastores(self) -> Databases:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores
def get_config(self) -> HomeServerConfig:
return self.config
@cache_in_self
def get_distributor(self) -> Distributor:
return Distributor()
@cache_in_self
def get_registration_ratelimiter(self) -> Ratelimiter:
return Ratelimiter(
clock=self.get_clock(),
rate_hz=self.config.rc_registration.per_second,
burst_count=self.config.rc_registration.burst_count,
)
@cache_in_self
def get_federation_client(self) -> FederationClient:
return FederationClient(self)
@cache_in_self
def get_federation_server(self) -> FederationServer:
return FederationServer(self)
@cache_in_self
def get_notifier(self) -> Notifier:
return Notifier(self)
@cache_in_self
def get_auth(self) -> Auth:
return Auth(self)
@cache_in_self
def get_http_client_context_factory(self) -> IPolicyForHTTPS:
return (
InsecureInterceptableContextFactory()
if self.config.use_insecure_ssl_client_just_for_testing_do_not_use
else RegularPolicyForHTTPS()
)
@cache_in_self
def get_simple_http_client(self) -> SimpleHttpClient:
return SimpleHttpClient(self)
@cache_in_self
def get_proxied_http_client(self) -> SimpleHttpClient:
return SimpleHttpClient(
self,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@cache_in_self
def get_room_creation_handler(self) -> RoomCreationHandler:
return RoomCreationHandler(self)
@cache_in_self
def get_room_shutdown_handler(self) -> RoomShutdownHandler:
return RoomShutdownHandler(self)
@cache_in_self
def get_sendmail(self) -> sendmail:
return sendmail
@cache_in_self
def get_state_handler(self) -> StateHandler:
return StateHandler(self)
@cache_in_self
def get_state_resolution_handler(self) -> StateResolutionHandler:
return StateResolutionHandler(self)
@cache_in_self
def get_presence_handler(self) -> PresenceHandler:
return PresenceHandler(self)
@cache_in_self
def get_typing_handler(self):
if self.config.worker.writers.typing == self.get_instance_name():
return TypingWriterHandler(self)
else:
return FollowerTypingHandler(self)
@cache_in_self
def get_sso_handler(self) -> SsoHandler:
return SsoHandler(self)
@cache_in_self
def get_sync_handler(self) -> SyncHandler:
return SyncHandler(self)
@cache_in_self
def get_room_list_handler(self) -> RoomListHandler:
return RoomListHandler(self)
@cache_in_self
def get_auth_handler(self) -> AuthHandler:
return AuthHandler(self)
@cache_in_self
def get_macaroon_generator(self) -> MacaroonGenerator:
return MacaroonGenerator(self)
@cache_in_self
def get_device_handler(self):
if self.config.worker_app:
return DeviceWorkerHandler(self)
else:
return DeviceHandler(self)
@cache_in_self
def get_device_message_handler(self) -> DeviceMessageHandler:
return DeviceMessageHandler(self)
@cache_in_self
def get_directory_handler(self) -> DirectoryHandler:
return DirectoryHandler(self)
@cache_in_self
def get_e2e_keys_handler(self) -> E2eKeysHandler:
return E2eKeysHandler(self)
@cache_in_self
def get_e2e_room_keys_handler(self) -> E2eRoomKeysHandler:
return E2eRoomKeysHandler(self)
@cache_in_self
def get_acme_handler(self) -> AcmeHandler:
return AcmeHandler(self)
@cache_in_self
def get_admin_handler(self) -> AdminHandler:
return AdminHandler(self)
@cache_in_self
def get_application_service_api(self) -> ApplicationServiceApi:
return ApplicationServiceApi(self)
@cache_in_self
def get_application_service_scheduler(self) -> ApplicationServiceScheduler:
return ApplicationServiceScheduler(self)
@cache_in_self
def get_application_service_handler(self) -> ApplicationServicesHandler:
return ApplicationServicesHandler(self)
@cache_in_self
def get_event_handler(self) -> EventHandler:
return EventHandler(self)
@cache_in_self
def get_event_stream_handler(self) -> EventStreamHandler:
return EventStreamHandler(self)
@cache_in_self
def get_federation_handler(self) -> FederationHandler:
return FederationHandler(self)
@cache_in_self
def get_identity_handler(self) -> IdentityHandler:
return IdentityHandler(self)
@cache_in_self
def get_initial_sync_handler(self) -> InitialSyncHandler:
return InitialSyncHandler(self)
@cache_in_self
def get_profile_handler(self):
return ProfileHandler(self)
@cache_in_self
def get_event_creation_handler(self) -> EventCreationHandler:
return EventCreationHandler(self)
@cache_in_self
def get_deactivate_account_handler(self) -> DeactivateAccountHandler:
return DeactivateAccountHandler(self)
@cache_in_self
def get_search_handler(self) -> SearchHandler:
return SearchHandler(self)
@cache_in_self
def get_set_password_handler(self) -> SetPasswordHandler:
return SetPasswordHandler(self)
@cache_in_self
def get_event_sources(self) -> EventSources:
return EventSources(self)
@cache_in_self
def get_keyring(self) -> Keyring:
return Keyring(self)
@cache_in_self
def get_event_builder_factory(self) -> EventBuilderFactory:
return EventBuilderFactory(self)
@cache_in_self
def get_filtering(self) -> Filtering:
return Filtering(self)
@cache_in_self
def get_pusherpool(self) -> PusherPool:
return PusherPool(self)
@cache_in_self
def get_http_client(self) -> MatrixFederationHttpClient:
tls_client_options_factory = context_factory.FederationPolicyForHTTPS(
self.config
)
return MatrixFederationHttpClient(self, tls_client_options_factory)
@cache_in_self
def get_media_repository_resource(self) -> MediaRepositoryResource:
# build the media repo resource. This indirects through the HomeServer
# to ensure that we only have a single instance of
return MediaRepositoryResource(self)
@cache_in_self
def get_media_repository(self) -> MediaRepository:
return MediaRepository(self)
@cache_in_self
def get_federation_transport_client(self) -> TransportLayerClient:
return TransportLayerClient(self)
@cache_in_self
def get_federation_sender(self):
if self.should_send_federation():
return FederationSender(self)
elif not self.config.worker_app:
return FederationRemoteSendQueue(self)
else:
raise Exception("Workers cannot send federation traffic")
@cache_in_self
def get_receipts_handler(self) -> ReceiptsHandler:
return ReceiptsHandler(self)
@cache_in_self
def get_read_marker_handler(self) -> ReadMarkerHandler:
return ReadMarkerHandler(self)
@cache_in_self
def get_tcp_replication(self) -> ReplicationCommandHandler:
return ReplicationCommandHandler(self)
@cache_in_self
def get_action_generator(self) -> ActionGenerator:
return ActionGenerator(self)
@cache_in_self
def get_user_directory_handler(self) -> UserDirectoryHandler:
return UserDirectoryHandler(self)
@cache_in_self
def get_groups_local_handler(self):
if self.config.worker_app:
return GroupsLocalWorkerHandler(self)
else:
return GroupsLocalHandler(self)
@cache_in_self
def get_groups_server_handler(self):
if self.config.worker_app:
return GroupsServerWorkerHandler(self)
else:
return GroupsServerHandler(self)
@cache_in_self
def get_groups_attestation_signing(self) -> GroupAttestationSigning:
return GroupAttestationSigning(self)
@cache_in_self
def get_groups_attestation_renewer(self) -> GroupAttestionRenewer:
return GroupAttestionRenewer(self)
@cache_in_self
def get_secrets(self) -> Secrets:
return Secrets()
@cache_in_self
def get_stats_handler(self) -> StatsHandler:
return StatsHandler(self)
@cache_in_self
def get_spam_checker(self):
return SpamChecker(self)
@cache_in_self
def get_third_party_event_rules(self) -> ThirdPartyEventRules:
return ThirdPartyEventRules(self)
@cache_in_self
def get_room_member_handler(self):
if self.config.worker_app:
return RoomMemberWorkerHandler(self)
return RoomMemberMasterHandler(self)
@cache_in_self
def get_federation_registry(self) -> FederationHandlerRegistry:
return FederationHandlerRegistry(self)
@cache_in_self
def get_server_notices_manager(self):
if self.config.worker_app:
raise Exception("Workers cannot send server notices")
return ServerNoticesManager(self)
@cache_in_self
def get_server_notices_sender(self):
if self.config.worker_app:
return WorkerServerNoticesSender(self)
return ServerNoticesSender(self)
@cache_in_self
def get_message_handler(self) -> MessageHandler:
return MessageHandler(self)
@cache_in_self
def get_pagination_handler(self) -> PaginationHandler:
return PaginationHandler(self)
@cache_in_self
def get_room_context_handler(self) -> RoomContextHandler:
return RoomContextHandler(self)
@cache_in_self
def get_registration_handler(self) -> RegistrationHandler:
return RegistrationHandler(self)
@cache_in_self
def get_account_validity_handler(self) -> AccountValidityHandler:
return AccountValidityHandler(self)
@cache_in_self
def get_cas_handler(self) -> CasHandler:
return CasHandler(self)
@cache_in_self
def get_saml_handler(self) -> "SamlHandler":
from synapse.handlers.saml_handler import SamlHandler
return SamlHandler(self)
@cache_in_self
def get_oidc_handler(self) -> "OidcHandler":
from synapse.handlers.oidc_handler import OidcHandler
return OidcHandler(self)
@cache_in_self
def get_event_client_serializer(self) -> EventClientSerializer:
return EventClientSerializer(self)
@cache_in_self
def get_password_policy_handler(self) -> PasswordPolicyHandler:
return PasswordPolicyHandler(self)
@cache_in_self
def get_storage(self) -> Storage:
return Storage(self, self.get_datastores())
@cache_in_self
def get_replication_streamer(self) -> ReplicationStreamer:
return ReplicationStreamer(self)
@cache_in_self
def get_replication_data_handler(self) -> ReplicationDataHandler:
return ReplicationDataHandler(self)
@cache_in_self
def get_replication_streams(self) -> Dict[str, Stream]:
return {stream.NAME: stream(self) for stream in STREAMS_MAP.values()}
@cache_in_self
def get_federation_ratelimiter(self) -> FederationRateLimiter:
return FederationRateLimiter(self.get_clock(), config=self.config.rc_federation)
@cache_in_self
def get_module_api(self) -> ModuleApi:
return ModuleApi(self, self.get_auth_handler())
async def remove_pusher(self, app_id: str, push_key: str, user_id: str):
return await self.get_pusherpool().remove_pusher(app_id, push_key, user_id)
def should_send_federation(self) -> bool:
"Should this server be sending federation traffic directly?"
return self.config.send_federation and (
not self.config.worker_app
or self.config.worker_app == "synapse.app.federation_sender"
)
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file provides some classes for setting up (partially-populated)
# homeservers; either as a full homeserver as a real application, or a small
# partial one for unit test mocking.
# Imports required for the default HomeServer() implementation
import abc
import functools
import logging
import os
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar, cast
import twisted.internet.base
import twisted.internet.tcp
from twisted.mail.smtp import sendmail
from twisted.web.iweb import IPolicyForHTTPS
from synapse.api.auth import Auth
from synapse.api.filtering import Filtering
from synapse.api.ratelimiting import Ratelimiter
from synapse.appservice.api import ApplicationServiceApi
from synapse.appservice.scheduler import ApplicationServiceScheduler
from synapse.config.homeserver import HomeServerConfig
from synapse.crypto import context_factory
from synapse.crypto.context_factory import RegularPolicyForHTTPS
from synapse.crypto.keyring import Keyring
from synapse.events.builder import EventBuilderFactory
from synapse.events.spamcheck import SpamChecker
from synapse.events.third_party_rules import ThirdPartyEventRules
from synapse.events.utils import EventClientSerializer
from synapse.federation.federation_client import FederationClient
from synapse.federation.federation_server import (
FederationHandlerRegistry,
FederationServer,
)
from synapse.federation.send_queue import FederationRemoteSendQueue
from synapse.federation.sender import FederationSender
from synapse.federation.transport.client import TransportLayerClient
from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer
from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler
from synapse.handlers.account_validity import AccountValidityHandler
from synapse.handlers.acme import AcmeHandler
from synapse.handlers.admin import AdminHandler
from synapse.handlers.appservice import ApplicationServicesHandler
from synapse.handlers.auth import AuthHandler, MacaroonGenerator
from synapse.handlers.cas_handler import CasHandler
from synapse.handlers.deactivate_account import DeactivateAccountHandler
from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler
from synapse.handlers.devicemessage import DeviceMessageHandler
from synapse.handlers.directory import DirectoryHandler
from synapse.handlers.e2e_keys import E2eKeysHandler
from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler
from synapse.handlers.events import EventHandler, EventStreamHandler
from synapse.handlers.federation import FederationHandler
from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler
from synapse.handlers.identity import IdentityHandler
from synapse.handlers.initial_sync import InitialSyncHandler
from synapse.handlers.message import EventCreationHandler, MessageHandler
from synapse.handlers.pagination import PaginationHandler
from synapse.handlers.password_policy import PasswordPolicyHandler
from synapse.handlers.presence import PresenceHandler
from synapse.handlers.profile import ProfileHandler
from synapse.handlers.read_marker import ReadMarkerHandler
from synapse.handlers.receipts import ReceiptsHandler
from synapse.handlers.register import RegistrationHandler
from synapse.handlers.room import (
RoomContextHandler,
RoomCreationHandler,
RoomShutdownHandler,
)
from synapse.handlers.room_list import RoomListHandler
from synapse.handlers.room_member import RoomMemberMasterHandler
from synapse.handlers.room_member_worker import RoomMemberWorkerHandler
from synapse.handlers.search import SearchHandler
from synapse.handlers.set_password import SetPasswordHandler
from synapse.handlers.sso import SsoHandler
from synapse.handlers.stats import StatsHandler
from synapse.handlers.sync import SyncHandler
from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler
from synapse.handlers.user_directory import UserDirectoryHandler
from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.module_api import ModuleApi
from synapse.notifier import Notifier
from synapse.push.action_generator import ActionGenerator
from synapse.push.pusherpool import PusherPool
from synapse.replication.tcp.client import ReplicationDataHandler
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.resource import ReplicationStreamer
from synapse.replication.tcp.streams import STREAMS_MAP, Stream
from synapse.rest.media.v1.media_repository import (
MediaRepository,
MediaRepositoryResource,
)
from synapse.secrets import Secrets
from synapse.server_notices.server_notices_manager import ServerNoticesManager
from synapse.server_notices.server_notices_sender import ServerNoticesSender
from synapse.server_notices.worker_server_notices_sender import (
WorkerServerNoticesSender,
)
from synapse.state import StateHandler, StateResolutionHandler
from synapse.storage import Databases, DataStore, Storage
from synapse.streams.events import EventSources
from synapse.types import DomainSpecificString
from synapse.util import Clock
from synapse.util.distributor import Distributor
from synapse.util.ratelimitutils import FederationRateLimiter
from synapse.util.stringutils import random_string
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from synapse.handlers.oidc_handler import OidcHandler
from synapse.handlers.saml_handler import SamlHandler
T = TypeVar("T", bound=Callable[..., Any])
def cache_in_self(builder: T) -> T:
"""Wraps a function called e.g. `get_foo`, checking if `self.foo` exists and
returning if so. If not, calls the given function and sets `self.foo` to it.
Also ensures that dependency cycles throw an exception correctly, rather
than overflowing the stack.
"""
if not builder.__name__.startswith("get_"):
raise Exception(
"@cache_in_self can only be used on functions starting with `get_`"
)
# get_attr -> _attr
depname = builder.__name__[len("get") :]
building = [False]
@functools.wraps(builder)
def _get(self):
try:
return getattr(self, depname)
except AttributeError:
pass
# Prevent cyclic dependencies from deadlocking
if building[0]:
raise ValueError("Cyclic dependency while building %s" % (depname,))
building[0] = True
try:
dep = builder(self)
setattr(self, depname, dep)
finally:
building[0] = False
return dep
# We cast here as we need to tell mypy that `_get` has the same signature as
# `builder`.
return cast(T, _get)
class HomeServer(metaclass=abc.ABCMeta):
"""A basic homeserver object without lazy component builders.
This will need all of the components it requires to either be passed as
constructor arguments, or the relevant methods overriding to create them.
Typically this would only be used for unit tests.
Dependencies should be added by creating a `def get_<depname>(self)`
function, wrapping it in `@cache_in_self`.
Attributes:
config (synapse.config.homeserver.HomeserverConfig):
_listening_services (list[twisted.internet.tcp.Port]): TCP ports that
we are listening on to provide HTTP services.
"""
REQUIRED_ON_BACKGROUND_TASK_STARTUP = [
"account_validity",
"auth",
"deactivate_account",
"message",
"pagination",
"profile",
"stats",
]
# This is overridden in derived application classes
# (such as synapse.app.homeserver.SynapseHomeServer) and gives the class to be
# instantiated during setup() for future return by get_datastore()
DATASTORE_CLASS = abc.abstractproperty()
def __init__(
self,
hostname: str,
config: HomeServerConfig,
reactor=None,
version_string="Synapse",
):
"""
Args:
hostname : The hostname for the server.
config: The full config for the homeserver.
"""
if not reactor:
from twisted.internet import reactor as _reactor
reactor = _reactor
self._reactor = reactor
self.hostname = hostname
# the key we use to sign events and requests
self.signing_key = config.key.signing_key[0]
self.config = config
self._listening_services = [] # type: List[twisted.internet.tcp.Port]
self.start_time = None # type: Optional[int]
self._instance_id = random_string(5)
self._instance_name = config.worker_name or "master"
self.version_string = version_string
self.datastores = None # type: Optional[Databases]
def get_instance_id(self) -> str:
"""A unique ID for this synapse process instance.
This is used to distinguish running instances in worker-based
deployments.
"""
return self._instance_id
def get_instance_name(self) -> str:
"""A unique name for this synapse process.
Used to identify the process over replication and in config. Does not
change over restarts.
"""
return self._instance_name
def setup(self) -> None:
logger.info("Setting up.")
self.start_time = int(self.get_clock().time())
self.datastores = Databases(self.DATASTORE_CLASS, self)
logger.info("Finished setting up.")
# Register background tasks required by this server. This must be done
# somewhat manually due to the background tasks not being registered
# unless handlers are instantiated.
if self.config.run_background_tasks:
self.setup_background_tasks()
def setup_background_tasks(self) -> None:
"""
Some handlers have side effects on instantiation (like registering
background updates). This function causes them to be fetched, and
therefore instantiated, to run those side effects.
"""
for i in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP:
getattr(self, "get_" + i + "_handler")()
def get_reactor(self) -> twisted.internet.base.ReactorBase:
"""
Fetch the Twisted reactor in use by this HomeServer.
"""
return self._reactor
def get_ip_from_request(self, request) -> str:
# X-Forwarded-For is handled by our custom request type.
return request.getClientIP()
def is_mine(self, domain_specific_string: DomainSpecificString) -> bool:
return domain_specific_string.domain == self.hostname
def is_mine_id(self, string: str) -> bool:
return string.split(":", 1)[1] == self.hostname
@cache_in_self
def get_clock(self) -> Clock:
return Clock(self._reactor)
def get_datastore(self) -> DataStore:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores.main
def get_datastores(self) -> Databases:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores
def get_config(self) -> HomeServerConfig:
return self.config
@cache_in_self
def get_distributor(self) -> Distributor:
return Distributor()
@cache_in_self
def get_registration_ratelimiter(self) -> Ratelimiter:
return Ratelimiter(
clock=self.get_clock(),
rate_hz=self.config.rc_registration.per_second,
burst_count=self.config.rc_registration.burst_count,
)
@cache_in_self
def get_federation_client(self) -> FederationClient:
return FederationClient(self)
@cache_in_self
def get_federation_server(self) -> FederationServer:
return FederationServer(self)
@cache_in_self
def get_notifier(self) -> Notifier:
return Notifier(self)
@cache_in_self
def get_auth(self) -> Auth:
return Auth(self)
@cache_in_self
def get_http_client_context_factory(self) -> IPolicyForHTTPS:
return (
InsecureInterceptableContextFactory()
if self.config.use_insecure_ssl_client_just_for_testing_do_not_use
else RegularPolicyForHTTPS()
)
@cache_in_self
def get_simple_http_client(self) -> SimpleHttpClient:
"""
An HTTP client with no special configuration.
"""
return SimpleHttpClient(self)
@cache_in_self
def get_proxied_http_client(self) -> SimpleHttpClient:
"""
An HTTP client that uses configured HTTP(S) proxies.
"""
return SimpleHttpClient(
self,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@cache_in_self
def get_proxied_blacklisted_http_client(self) -> SimpleHttpClient:
"""
An HTTP client that uses configured HTTP(S) proxies and blacklists IPs
based on the IP range blacklist.
"""
return SimpleHttpClient(
self,
ip_blacklist=self.config.ip_range_blacklist,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@cache_in_self
def get_federation_http_client(self) -> MatrixFederationHttpClient:
"""
An HTTP client for federation.
"""
tls_client_options_factory = context_factory.FederationPolicyForHTTPS(
self.config
)
return MatrixFederationHttpClient(self, tls_client_options_factory)
@cache_in_self
def get_room_creation_handler(self) -> RoomCreationHandler:
return RoomCreationHandler(self)
@cache_in_self
def get_room_shutdown_handler(self) -> RoomShutdownHandler:
return RoomShutdownHandler(self)
@cache_in_self
def get_sendmail(self) -> sendmail:
return sendmail
@cache_in_self
def get_state_handler(self) -> StateHandler:
return StateHandler(self)
@cache_in_self
def get_state_resolution_handler(self) -> StateResolutionHandler:
return StateResolutionHandler(self)
@cache_in_self
def get_presence_handler(self) -> PresenceHandler:
return PresenceHandler(self)
@cache_in_self
def get_typing_handler(self):
if self.config.worker.writers.typing == self.get_instance_name():
return TypingWriterHandler(self)
else:
return FollowerTypingHandler(self)
@cache_in_self
def get_sso_handler(self) -> SsoHandler:
return SsoHandler(self)
@cache_in_self
def get_sync_handler(self) -> SyncHandler:
return SyncHandler(self)
@cache_in_self
def get_room_list_handler(self) -> RoomListHandler:
return RoomListHandler(self)
@cache_in_self
def get_auth_handler(self) -> AuthHandler:
return AuthHandler(self)
@cache_in_self
def get_macaroon_generator(self) -> MacaroonGenerator:
return MacaroonGenerator(self)
@cache_in_self
def get_device_handler(self):
if self.config.worker_app:
return DeviceWorkerHandler(self)
else:
return DeviceHandler(self)
@cache_in_self
def get_device_message_handler(self) -> DeviceMessageHandler:
return DeviceMessageHandler(self)
@cache_in_self
def get_directory_handler(self) -> DirectoryHandler:
return DirectoryHandler(self)
@cache_in_self
def get_e2e_keys_handler(self) -> E2eKeysHandler:
return E2eKeysHandler(self)
@cache_in_self
def get_e2e_room_keys_handler(self) -> E2eRoomKeysHandler:
return E2eRoomKeysHandler(self)
@cache_in_self
def get_acme_handler(self) -> AcmeHandler:
return AcmeHandler(self)
@cache_in_self
def get_admin_handler(self) -> AdminHandler:
return AdminHandler(self)
@cache_in_self
def get_application_service_api(self) -> ApplicationServiceApi:
return ApplicationServiceApi(self)
@cache_in_self
def get_application_service_scheduler(self) -> ApplicationServiceScheduler:
return ApplicationServiceScheduler(self)
@cache_in_self
def get_application_service_handler(self) -> ApplicationServicesHandler:
return ApplicationServicesHandler(self)
@cache_in_self
def get_event_handler(self) -> EventHandler:
return EventHandler(self)
@cache_in_self
def get_event_stream_handler(self) -> EventStreamHandler:
return EventStreamHandler(self)
@cache_in_self
def get_federation_handler(self) -> FederationHandler:
return FederationHandler(self)
@cache_in_self
def get_identity_handler(self) -> IdentityHandler:
return IdentityHandler(self)
@cache_in_self
def get_initial_sync_handler(self) -> InitialSyncHandler:
return InitialSyncHandler(self)
@cache_in_self
def get_profile_handler(self):
return ProfileHandler(self)
@cache_in_self
def get_event_creation_handler(self) -> EventCreationHandler:
return EventCreationHandler(self)
@cache_in_self
def get_deactivate_account_handler(self) -> DeactivateAccountHandler:
return DeactivateAccountHandler(self)
@cache_in_self
def get_search_handler(self) -> SearchHandler:
return SearchHandler(self)
@cache_in_self
def get_set_password_handler(self) -> SetPasswordHandler:
return SetPasswordHandler(self)
@cache_in_self
def get_event_sources(self) -> EventSources:
return EventSources(self)
@cache_in_self
def get_keyring(self) -> Keyring:
return Keyring(self)
@cache_in_self
def get_event_builder_factory(self) -> EventBuilderFactory:
return EventBuilderFactory(self)
@cache_in_self
def get_filtering(self) -> Filtering:
return Filtering(self)
@cache_in_self
def get_pusherpool(self) -> PusherPool:
return PusherPool(self)
@cache_in_self
def get_media_repository_resource(self) -> MediaRepositoryResource:
# build the media repo resource. This indirects through the HomeServer
# to ensure that we only have a single instance of
return MediaRepositoryResource(self)
@cache_in_self
def get_media_repository(self) -> MediaRepository:
return MediaRepository(self)
@cache_in_self
def get_federation_transport_client(self) -> TransportLayerClient:
return TransportLayerClient(self)
@cache_in_self
def get_federation_sender(self):
if self.should_send_federation():
return FederationSender(self)
elif not self.config.worker_app:
return FederationRemoteSendQueue(self)
else:
raise Exception("Workers cannot send federation traffic")
@cache_in_self
def get_receipts_handler(self) -> ReceiptsHandler:
return ReceiptsHandler(self)
@cache_in_self
def get_read_marker_handler(self) -> ReadMarkerHandler:
return ReadMarkerHandler(self)
@cache_in_self
def get_tcp_replication(self) -> ReplicationCommandHandler:
return ReplicationCommandHandler(self)
@cache_in_self
def get_action_generator(self) -> ActionGenerator:
return ActionGenerator(self)
@cache_in_self
def get_user_directory_handler(self) -> UserDirectoryHandler:
return UserDirectoryHandler(self)
@cache_in_self
def get_groups_local_handler(self):
if self.config.worker_app:
return GroupsLocalWorkerHandler(self)
else:
return GroupsLocalHandler(self)
@cache_in_self
def get_groups_server_handler(self):
if self.config.worker_app:
return GroupsServerWorkerHandler(self)
else:
return GroupsServerHandler(self)
@cache_in_self
def get_groups_attestation_signing(self) -> GroupAttestationSigning:
return GroupAttestationSigning(self)
@cache_in_self
def get_groups_attestation_renewer(self) -> GroupAttestionRenewer:
return GroupAttestionRenewer(self)
@cache_in_self
def get_secrets(self) -> Secrets:
return Secrets()
@cache_in_self
def get_stats_handler(self) -> StatsHandler:
return StatsHandler(self)
@cache_in_self
def get_spam_checker(self):
return SpamChecker(self)
@cache_in_self
def get_third_party_event_rules(self) -> ThirdPartyEventRules:
return ThirdPartyEventRules(self)
@cache_in_self
def get_room_member_handler(self):
if self.config.worker_app:
return RoomMemberWorkerHandler(self)
return RoomMemberMasterHandler(self)
@cache_in_self
def get_federation_registry(self) -> FederationHandlerRegistry:
return FederationHandlerRegistry(self)
@cache_in_self
def get_server_notices_manager(self):
if self.config.worker_app:
raise Exception("Workers cannot send server notices")
return ServerNoticesManager(self)
@cache_in_self
def get_server_notices_sender(self):
if self.config.worker_app:
return WorkerServerNoticesSender(self)
return ServerNoticesSender(self)
@cache_in_self
def get_message_handler(self) -> MessageHandler:
return MessageHandler(self)
@cache_in_self
def get_pagination_handler(self) -> PaginationHandler:
return PaginationHandler(self)
@cache_in_self
def get_room_context_handler(self) -> RoomContextHandler:
return RoomContextHandler(self)
@cache_in_self
def get_registration_handler(self) -> RegistrationHandler:
return RegistrationHandler(self)
@cache_in_self
def get_account_validity_handler(self) -> AccountValidityHandler:
return AccountValidityHandler(self)
@cache_in_self
def get_cas_handler(self) -> CasHandler:
return CasHandler(self)
@cache_in_self
def get_saml_handler(self) -> "SamlHandler":
from synapse.handlers.saml_handler import SamlHandler
return SamlHandler(self)
@cache_in_self
def get_oidc_handler(self) -> "OidcHandler":
from synapse.handlers.oidc_handler import OidcHandler
return OidcHandler(self)
@cache_in_self
def get_event_client_serializer(self) -> EventClientSerializer:
return EventClientSerializer(self)
@cache_in_self
def get_password_policy_handler(self) -> PasswordPolicyHandler:
return PasswordPolicyHandler(self)
@cache_in_self
def get_storage(self) -> Storage:
return Storage(self, self.get_datastores())
@cache_in_self
def get_replication_streamer(self) -> ReplicationStreamer:
return ReplicationStreamer(self)
@cache_in_self
def get_replication_data_handler(self) -> ReplicationDataHandler:
return ReplicationDataHandler(self)
@cache_in_self
def get_replication_streams(self) -> Dict[str, Stream]:
return {stream.NAME: stream(self) for stream in STREAMS_MAP.values()}
@cache_in_self
def get_federation_ratelimiter(self) -> FederationRateLimiter:
return FederationRateLimiter(self.get_clock(), config=self.config.rc_federation)
@cache_in_self
def get_module_api(self) -> ModuleApi:
return ModuleApi(self, self.get_auth_handler())
async def remove_pusher(self, app_id: str, push_key: str, user_id: str):
return await self.get_pusherpool().remove_pusher(app_id, push_key, user_id)
def should_send_federation(self) -> bool:
"Should this server be sending federation traffic directly?"
return self.config.send_federation and (
not self.config.worker_app
or self.config.worker_app == "synapse.app.federation_sender"
)
| open_redirect | {
"code": [
" @cache_in_self",
" def get_http_client(self) -> MatrixFederationHttpClient:",
" tls_client_options_factory = context_factory.FederationPolicyForHTTPS(",
" self.config",
" )",
" return MatrixFederationHttpClient(self, tls_client_options_factory)"
],
"line_no": [
517,
518,
519,
520,
521,
522
]
} | {
"code": [
" \"\"\"",
" \"\"\"",
" \"\"\"",
" An HTTP client that uses configured HTTP(S) proxies.",
" return SimpleHttpClient(",
" self,",
" http_proxy=os.getenvb(b\"http_proxy\"),",
" )",
" @cache_in_self",
" \"\"\"",
" An HTTP client that uses configured HTTP(S) proxies and blacklists IPs",
" based on the IP range blacklist.",
" ip_blacklist=self.config.ip_range_blacklist,",
" def get_federation_http_client(self) -> MatrixFederationHttpClient:",
" \"\"\"",
" An HTTP client for federation.",
" tls_client_options_factory = context_factory.FederationPolicyForHTTPS(",
" self.config",
" )",
" return MatrixFederationHttpClient(self, tls_client_options_factory)"
],
"line_no": [
353,
355,
360,
361,
363,
364,
365,
367,
369,
371,
372,
373,
377,
383,
384,
385,
387,
388,
389,
390
]
} |
import abc
import functools
import logging
import os
from typing import .TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar, cast
import twisted.internet.base
import twisted.internet.tcp
from twisted.mail.smtp import sendmail
from twisted.web.iweb import IPolicyForHTTPS
from synapse.api.auth import Auth
from synapse.api.filtering import Filtering
from synapse.api.ratelimiting import Ratelimiter
from synapse.appservice.api import ApplicationServiceApi
from synapse.appservice.scheduler import ApplicationServiceScheduler
from synapse.config.homeserver import .HomeServerConfig
from synapse.crypto import context_factory
from synapse.crypto.context_factory import RegularPolicyForHTTPS
from synapse.crypto.keyring import Keyring
from synapse.events.builder import EventBuilderFactory
from synapse.events.spamcheck import SpamChecker
from synapse.events.third_party_rules import .ThirdPartyEventRules
from synapse.events.utils import EventClientSerializer
from synapse.federation.federation_client import FederationClient
from synapse.federation.federation_server import (
FederationHandlerRegistry,
FederationServer,
)
from synapse.federation.send_queue import FederationRemoteSendQueue
from synapse.federation.sender import FederationSender
from synapse.federation.transport.client import .TransportLayerClient
from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer
from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler
from synapse.handlers.account_validity import AccountValidityHandler
from synapse.handlers.acme import AcmeHandler
from synapse.handlers.admin import AdminHandler
from synapse.handlers.appservice import ApplicationServicesHandler
from synapse.handlers.auth import AuthHandler, MacaroonGenerator
from synapse.handlers.cas_handler import CasHandler
from synapse.handlers.deactivate_account import DeactivateAccountHandler
from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler
from synapse.handlers.devicemessage import DeviceMessageHandler
from synapse.handlers.directory import DirectoryHandler
from synapse.handlers.e2e_keys import E2eKeysHandler
from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler
from synapse.handlers.events import EventHandler, EventStreamHandler
from synapse.handlers.federation import FederationHandler
from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler
from synapse.handlers.identity import IdentityHandler
from synapse.handlers.initial_sync import InitialSyncHandler
from synapse.handlers.message import EventCreationHandler, MessageHandler
from synapse.handlers.pagination import PaginationHandler
from synapse.handlers.password_policy import PasswordPolicyHandler
from synapse.handlers.presence import PresenceHandler
from synapse.handlers.profile import ProfileHandler
from synapse.handlers.read_marker import ReadMarkerHandler
from synapse.handlers.receipts import ReceiptsHandler
from synapse.handlers.register import RegistrationHandler
from synapse.handlers.room import (
RoomContextHandler,
RoomCreationHandler,
RoomShutdownHandler,
)
from synapse.handlers.room_list import RoomListHandler
from synapse.handlers.room_member import RoomMemberMasterHandler
from synapse.handlers.room_member_worker import RoomMemberWorkerHandler
from synapse.handlers.search import SearchHandler
from synapse.handlers.set_password import SetPasswordHandler
from synapse.handlers.sso import SsoHandler
from synapse.handlers.stats import StatsHandler
from synapse.handlers.sync import SyncHandler
from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler
from synapse.handlers.user_directory import UserDirectoryHandler
from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.module_api import ModuleApi
from synapse.notifier import Notifier
from synapse.push.action_generator import ActionGenerator
from synapse.push.pusherpool import PusherPool
from synapse.replication.tcp.client import ReplicationDataHandler
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.resource import ReplicationStreamer
from synapse.replication.tcp.streams import STREAMS_MAP, Stream
from synapse.rest.media.v1.media_repository import (
MediaRepository,
MediaRepositoryResource,
)
from synapse.secrets import Secrets
from synapse.server_notices.server_notices_manager import ServerNoticesManager
from synapse.server_notices.server_notices_sender import ServerNoticesSender
from synapse.server_notices.worker_server_notices_sender import (
WorkerServerNoticesSender,
)
from synapse.state import StateHandler, StateResolutionHandler
from synapse.storage import Databases, DataStore, Storage
from synapse.streams.events import EventSources
from synapse.types import DomainSpecificString
from synapse.util import Clock
from synapse.util.distributor import Distributor
from synapse.util.ratelimitutils import FederationRateLimiter
from synapse.util.stringutils import random_string
VAR_0 = logging.getLogger(__name__)
if TYPE_CHECKING:
from synapse.handlers.oidc_handler import OidcHandler
from synapse.handlers.saml_handler import SamlHandler
VAR_1 = TypeVar("T", bound=Callable[..., Any])
def FUNC_0(VAR_2: VAR_1) -> T:
if not VAR_2.__name__.startswith("get_"):
raise Exception(
"@FUNC_0 can only be used on functions starting with `get_`"
)
VAR_3 = VAR_2.__name__[len("get") :]
VAR_4 = [False]
@functools.wraps(VAR_2)
def FUNC_1(self):
try:
return getattr(self, VAR_3)
except AttributeError:
pass
if VAR_4[0]:
raise ValueError("Cyclic dependency while VAR_4 %s" % (VAR_3,))
VAR_4[0] = True
try:
VAR_18 = VAR_2(self)
setattr(self, VAR_3, VAR_18)
finally:
VAR_4[0] = False
return VAR_18
return cast(VAR_1, FUNC_1)
class CLASS_0(metaclass=abc.ABCMeta):
VAR_5 = [
"account_validity",
"auth",
"deactivate_account",
"message",
"pagination",
"profile",
"stats",
]
VAR_6 = abc.abstractproperty()
def __init__(
self,
VAR_7: str,
VAR_8: HomeServerConfig,
VAR_9=None,
VAR_10="Synapse",
):
if not VAR_9:
from twisted.internet import .reactor as _reactor
VAR_9 = _reactor
self._reactor = VAR_9
self.hostname = VAR_7
self.signing_key = VAR_8.key.signing_key[0]
self.config = VAR_8
self._listening_services = [] # type: List[twisted.internet.tcp.Port]
self.start_time = None # type: Optional[int]
self._instance_id = random_string(5)
self._instance_name = VAR_8.worker_name or "master"
self.version_string = VAR_10
self.datastores = None # type: Optional[Databases]
def FUNC_2(self) -> str:
return self._instance_id
def FUNC_3(self) -> str:
return self._instance_name
def FUNC_4(self) -> None:
VAR_0.info("Setting up.")
self.start_time = int(self.get_clock().time())
self.datastores = Databases(self.DATASTORE_CLASS, self)
VAR_0.info("Finished setting up.")
if self.config.run_background_tasks:
self.setup_background_tasks()
def FUNC_5(self) -> None:
for i in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP:
getattr(self, "get_" + i + "_handler")()
def FUNC_6(self) -> twisted.internet.base.ReactorBase:
return self._reactor
def FUNC_7(self, VAR_11) -> str:
return VAR_11.getClientIP()
def FUNC_8(self, VAR_12: DomainSpecificString) -> bool:
return VAR_12.domain == self.hostname
def FUNC_9(self, VAR_13: str) -> bool:
return VAR_13.split(":", 1)[1] == self.hostname
@FUNC_0
def FUNC_10(self) -> Clock:
return Clock(self._reactor)
def FUNC_11(self) -> DataStore:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores.main
def FUNC_12(self) -> Databases:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores
def FUNC_13(self) -> HomeServerConfig:
return self.config
@FUNC_0
def FUNC_14(self) -> Distributor:
return Distributor()
@FUNC_0
def FUNC_15(self) -> Ratelimiter:
return Ratelimiter(
clock=self.get_clock(),
rate_hz=self.config.rc_registration.per_second,
burst_count=self.config.rc_registration.burst_count,
)
@FUNC_0
def FUNC_16(self) -> FederationClient:
return FederationClient(self)
@FUNC_0
def FUNC_17(self) -> FederationServer:
return FederationServer(self)
@FUNC_0
def FUNC_18(self) -> Notifier:
return Notifier(self)
@FUNC_0
def FUNC_19(self) -> Auth:
return Auth(self)
@FUNC_0
def FUNC_20(self) -> IPolicyForHTTPS:
return (
InsecureInterceptableContextFactory()
if self.config.use_insecure_ssl_client_just_for_testing_do_not_use
else RegularPolicyForHTTPS()
)
@FUNC_0
def FUNC_21(self) -> SimpleHttpClient:
return SimpleHttpClient(self)
@FUNC_0
def FUNC_22(self) -> SimpleHttpClient:
return SimpleHttpClient(
self,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@FUNC_0
def FUNC_23(self) -> RoomCreationHandler:
return RoomCreationHandler(self)
@FUNC_0
def FUNC_24(self) -> RoomShutdownHandler:
return RoomShutdownHandler(self)
@FUNC_0
def FUNC_25(self) -> sendmail:
return sendmail
@FUNC_0
def FUNC_26(self) -> StateHandler:
return StateHandler(self)
@FUNC_0
def FUNC_27(self) -> StateResolutionHandler:
return StateResolutionHandler(self)
@FUNC_0
def FUNC_28(self) -> PresenceHandler:
return PresenceHandler(self)
@FUNC_0
def FUNC_29(self):
if self.config.worker.writers.typing == self.get_instance_name():
return TypingWriterHandler(self)
else:
return FollowerTypingHandler(self)
@FUNC_0
def FUNC_30(self) -> SsoHandler:
return SsoHandler(self)
@FUNC_0
def FUNC_31(self) -> SyncHandler:
return SyncHandler(self)
@FUNC_0
def FUNC_32(self) -> RoomListHandler:
return RoomListHandler(self)
@FUNC_0
def FUNC_33(self) -> AuthHandler:
return AuthHandler(self)
@FUNC_0
def FUNC_34(self) -> MacaroonGenerator:
return MacaroonGenerator(self)
@FUNC_0
def FUNC_35(self):
if self.config.worker_app:
return DeviceWorkerHandler(self)
else:
return DeviceHandler(self)
@FUNC_0
def FUNC_36(self) -> DeviceMessageHandler:
return DeviceMessageHandler(self)
@FUNC_0
def FUNC_37(self) -> DirectoryHandler:
return DirectoryHandler(self)
@FUNC_0
def FUNC_38(self) -> E2eKeysHandler:
return E2eKeysHandler(self)
@FUNC_0
def FUNC_39(self) -> E2eRoomKeysHandler:
return E2eRoomKeysHandler(self)
@FUNC_0
def FUNC_40(self) -> AcmeHandler:
return AcmeHandler(self)
@FUNC_0
def FUNC_41(self) -> AdminHandler:
return AdminHandler(self)
@FUNC_0
def FUNC_42(self) -> ApplicationServiceApi:
return ApplicationServiceApi(self)
@FUNC_0
def FUNC_43(self) -> ApplicationServiceScheduler:
return ApplicationServiceScheduler(self)
@FUNC_0
def FUNC_44(self) -> ApplicationServicesHandler:
return ApplicationServicesHandler(self)
@FUNC_0
def FUNC_45(self) -> EventHandler:
return EventHandler(self)
@FUNC_0
def FUNC_46(self) -> EventStreamHandler:
return EventStreamHandler(self)
@FUNC_0
def FUNC_47(self) -> FederationHandler:
return FederationHandler(self)
@FUNC_0
def FUNC_48(self) -> IdentityHandler:
return IdentityHandler(self)
@FUNC_0
def FUNC_49(self) -> InitialSyncHandler:
return InitialSyncHandler(self)
@FUNC_0
def FUNC_50(self):
return ProfileHandler(self)
@FUNC_0
def FUNC_51(self) -> EventCreationHandler:
return EventCreationHandler(self)
@FUNC_0
def FUNC_52(self) -> DeactivateAccountHandler:
return DeactivateAccountHandler(self)
@FUNC_0
def FUNC_53(self) -> SearchHandler:
return SearchHandler(self)
@FUNC_0
def FUNC_54(self) -> SetPasswordHandler:
return SetPasswordHandler(self)
@FUNC_0
def FUNC_55(self) -> EventSources:
return EventSources(self)
@FUNC_0
def FUNC_56(self) -> Keyring:
return Keyring(self)
@FUNC_0
def FUNC_57(self) -> EventBuilderFactory:
return EventBuilderFactory(self)
@FUNC_0
def FUNC_58(self) -> Filtering:
return Filtering(self)
@FUNC_0
def FUNC_59(self) -> PusherPool:
return PusherPool(self)
@FUNC_0
def FUNC_60(self) -> MatrixFederationHttpClient:
VAR_17 = context_factory.FederationPolicyForHTTPS(
self.config
)
return MatrixFederationHttpClient(self, VAR_17)
@FUNC_0
def FUNC_61(self) -> MediaRepositoryResource:
return MediaRepositoryResource(self)
@FUNC_0
def FUNC_62(self) -> MediaRepository:
return MediaRepository(self)
@FUNC_0
def FUNC_63(self) -> TransportLayerClient:
return TransportLayerClient(self)
@FUNC_0
def FUNC_64(self):
if self.should_send_federation():
return FederationSender(self)
elif not self.config.worker_app:
return FederationRemoteSendQueue(self)
else:
raise Exception("Workers cannot send federation traffic")
@FUNC_0
def FUNC_65(self) -> ReceiptsHandler:
return ReceiptsHandler(self)
@FUNC_0
def FUNC_66(self) -> ReadMarkerHandler:
return ReadMarkerHandler(self)
@FUNC_0
def FUNC_67(self) -> ReplicationCommandHandler:
return ReplicationCommandHandler(self)
@FUNC_0
def FUNC_68(self) -> ActionGenerator:
return ActionGenerator(self)
@FUNC_0
def FUNC_69(self) -> UserDirectoryHandler:
return UserDirectoryHandler(self)
@FUNC_0
def FUNC_70(self):
if self.config.worker_app:
return GroupsLocalWorkerHandler(self)
else:
return GroupsLocalHandler(self)
@FUNC_0
def FUNC_71(self):
if self.config.worker_app:
return GroupsServerWorkerHandler(self)
else:
return GroupsServerHandler(self)
@FUNC_0
def FUNC_72(self) -> GroupAttestationSigning:
return GroupAttestationSigning(self)
@FUNC_0
def FUNC_73(self) -> GroupAttestionRenewer:
return GroupAttestionRenewer(self)
@FUNC_0
def FUNC_74(self) -> Secrets:
return Secrets()
@FUNC_0
def FUNC_75(self) -> StatsHandler:
return StatsHandler(self)
@FUNC_0
def FUNC_76(self):
return SpamChecker(self)
@FUNC_0
def FUNC_77(self) -> ThirdPartyEventRules:
return ThirdPartyEventRules(self)
@FUNC_0
def FUNC_78(self):
if self.config.worker_app:
return RoomMemberWorkerHandler(self)
return RoomMemberMasterHandler(self)
@FUNC_0
def FUNC_79(self) -> FederationHandlerRegistry:
return FederationHandlerRegistry(self)
@FUNC_0
def FUNC_80(self):
if self.config.worker_app:
raise Exception("Workers cannot send server notices")
return ServerNoticesManager(self)
@FUNC_0
def FUNC_81(self):
if self.config.worker_app:
return WorkerServerNoticesSender(self)
return ServerNoticesSender(self)
@FUNC_0
def FUNC_82(self) -> MessageHandler:
return MessageHandler(self)
@FUNC_0
def FUNC_83(self) -> PaginationHandler:
return PaginationHandler(self)
@FUNC_0
def FUNC_84(self) -> RoomContextHandler:
return RoomContextHandler(self)
@FUNC_0
def FUNC_85(self) -> RegistrationHandler:
return RegistrationHandler(self)
@FUNC_0
def FUNC_86(self) -> AccountValidityHandler:
return AccountValidityHandler(self)
@FUNC_0
def FUNC_87(self) -> CasHandler:
return CasHandler(self)
@FUNC_0
def FUNC_88(self) -> "SamlHandler":
from synapse.handlers.saml_handler import SamlHandler
return SamlHandler(self)
@FUNC_0
def FUNC_89(self) -> "OidcHandler":
from synapse.handlers.oidc_handler import OidcHandler
return OidcHandler(self)
@FUNC_0
def FUNC_90(self) -> EventClientSerializer:
return EventClientSerializer(self)
@FUNC_0
def FUNC_91(self) -> PasswordPolicyHandler:
return PasswordPolicyHandler(self)
@FUNC_0
def FUNC_92(self) -> Storage:
return Storage(self, self.get_datastores())
@FUNC_0
def FUNC_93(self) -> ReplicationStreamer:
return ReplicationStreamer(self)
@FUNC_0
def FUNC_94(self) -> ReplicationDataHandler:
return ReplicationDataHandler(self)
@FUNC_0
def FUNC_95(self) -> Dict[str, Stream]:
return {stream.NAME: stream(self) for stream in STREAMS_MAP.values()}
@FUNC_0
def FUNC_96(self) -> FederationRateLimiter:
return FederationRateLimiter(self.get_clock(), VAR_8=self.config.rc_federation)
@FUNC_0
def FUNC_97(self) -> ModuleApi:
return ModuleApi(self, self.get_auth_handler())
async def FUNC_98(self, VAR_14: str, VAR_15: str, VAR_16: str):
return await self.get_pusherpool().remove_pusher(VAR_14, VAR_15, VAR_16)
return self.config.send_federation and (
not self.config.worker_app
or self.config.worker_app == "synapse.app.federation_sender"
)
|
import abc
import functools
import logging
import os
from typing import .TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar, cast
import twisted.internet.base
import twisted.internet.tcp
from twisted.mail.smtp import sendmail
from twisted.web.iweb import IPolicyForHTTPS
from synapse.api.auth import Auth
from synapse.api.filtering import Filtering
from synapse.api.ratelimiting import Ratelimiter
from synapse.appservice.api import ApplicationServiceApi
from synapse.appservice.scheduler import ApplicationServiceScheduler
from synapse.config.homeserver import .HomeServerConfig
from synapse.crypto import context_factory
from synapse.crypto.context_factory import RegularPolicyForHTTPS
from synapse.crypto.keyring import Keyring
from synapse.events.builder import EventBuilderFactory
from synapse.events.spamcheck import SpamChecker
from synapse.events.third_party_rules import .ThirdPartyEventRules
from synapse.events.utils import EventClientSerializer
from synapse.federation.federation_client import FederationClient
from synapse.federation.federation_server import (
FederationHandlerRegistry,
FederationServer,
)
from synapse.federation.send_queue import FederationRemoteSendQueue
from synapse.federation.sender import FederationSender
from synapse.federation.transport.client import .TransportLayerClient
from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer
from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler
from synapse.handlers.account_validity import AccountValidityHandler
from synapse.handlers.acme import AcmeHandler
from synapse.handlers.admin import AdminHandler
from synapse.handlers.appservice import ApplicationServicesHandler
from synapse.handlers.auth import AuthHandler, MacaroonGenerator
from synapse.handlers.cas_handler import CasHandler
from synapse.handlers.deactivate_account import DeactivateAccountHandler
from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler
from synapse.handlers.devicemessage import DeviceMessageHandler
from synapse.handlers.directory import DirectoryHandler
from synapse.handlers.e2e_keys import E2eKeysHandler
from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler
from synapse.handlers.events import EventHandler, EventStreamHandler
from synapse.handlers.federation import FederationHandler
from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler
from synapse.handlers.identity import IdentityHandler
from synapse.handlers.initial_sync import InitialSyncHandler
from synapse.handlers.message import EventCreationHandler, MessageHandler
from synapse.handlers.pagination import PaginationHandler
from synapse.handlers.password_policy import PasswordPolicyHandler
from synapse.handlers.presence import PresenceHandler
from synapse.handlers.profile import ProfileHandler
from synapse.handlers.read_marker import ReadMarkerHandler
from synapse.handlers.receipts import ReceiptsHandler
from synapse.handlers.register import RegistrationHandler
from synapse.handlers.room import (
RoomContextHandler,
RoomCreationHandler,
RoomShutdownHandler,
)
from synapse.handlers.room_list import RoomListHandler
from synapse.handlers.room_member import RoomMemberMasterHandler
from synapse.handlers.room_member_worker import RoomMemberWorkerHandler
from synapse.handlers.search import SearchHandler
from synapse.handlers.set_password import SetPasswordHandler
from synapse.handlers.sso import SsoHandler
from synapse.handlers.stats import StatsHandler
from synapse.handlers.sync import SyncHandler
from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler
from synapse.handlers.user_directory import UserDirectoryHandler
from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.module_api import ModuleApi
from synapse.notifier import Notifier
from synapse.push.action_generator import ActionGenerator
from synapse.push.pusherpool import PusherPool
from synapse.replication.tcp.client import ReplicationDataHandler
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.resource import ReplicationStreamer
from synapse.replication.tcp.streams import STREAMS_MAP, Stream
from synapse.rest.media.v1.media_repository import (
MediaRepository,
MediaRepositoryResource,
)
from synapse.secrets import Secrets
from synapse.server_notices.server_notices_manager import ServerNoticesManager
from synapse.server_notices.server_notices_sender import ServerNoticesSender
from synapse.server_notices.worker_server_notices_sender import (
WorkerServerNoticesSender,
)
from synapse.state import StateHandler, StateResolutionHandler
from synapse.storage import Databases, DataStore, Storage
from synapse.streams.events import EventSources
from synapse.types import DomainSpecificString
from synapse.util import Clock
from synapse.util.distributor import Distributor
from synapse.util.ratelimitutils import FederationRateLimiter
from synapse.util.stringutils import random_string
VAR_0 = logging.getLogger(__name__)
if TYPE_CHECKING:
from synapse.handlers.oidc_handler import OidcHandler
from synapse.handlers.saml_handler import SamlHandler
VAR_1 = TypeVar("T", bound=Callable[..., Any])
def FUNC_0(VAR_2: VAR_1) -> T:
if not VAR_2.__name__.startswith("get_"):
raise Exception(
"@FUNC_0 can only be used on functions starting with `get_`"
)
VAR_3 = VAR_2.__name__[len("get") :]
VAR_4 = [False]
@functools.wraps(VAR_2)
def FUNC_1(self):
try:
return getattr(self, VAR_3)
except AttributeError:
pass
if VAR_4[0]:
raise ValueError("Cyclic dependency while VAR_4 %s" % (VAR_3,))
VAR_4[0] = True
try:
VAR_18 = VAR_2(self)
setattr(self, VAR_3, VAR_18)
finally:
VAR_4[0] = False
return VAR_18
return cast(VAR_1, FUNC_1)
class CLASS_0(metaclass=abc.ABCMeta):
VAR_5 = [
"account_validity",
"auth",
"deactivate_account",
"message",
"pagination",
"profile",
"stats",
]
VAR_6 = abc.abstractproperty()
def __init__(
self,
VAR_7: str,
VAR_8: HomeServerConfig,
VAR_9=None,
VAR_10="Synapse",
):
if not VAR_9:
from twisted.internet import .reactor as _reactor
VAR_9 = _reactor
self._reactor = VAR_9
self.hostname = VAR_7
self.signing_key = VAR_8.key.signing_key[0]
self.config = VAR_8
self._listening_services = [] # type: List[twisted.internet.tcp.Port]
self.start_time = None # type: Optional[int]
self._instance_id = random_string(5)
self._instance_name = VAR_8.worker_name or "master"
self.version_string = VAR_10
self.datastores = None # type: Optional[Databases]
def FUNC_2(self) -> str:
return self._instance_id
def FUNC_3(self) -> str:
return self._instance_name
def FUNC_4(self) -> None:
VAR_0.info("Setting up.")
self.start_time = int(self.get_clock().time())
self.datastores = Databases(self.DATASTORE_CLASS, self)
VAR_0.info("Finished setting up.")
if self.config.run_background_tasks:
self.setup_background_tasks()
def FUNC_5(self) -> None:
for i in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP:
getattr(self, "get_" + i + "_handler")()
def FUNC_6(self) -> twisted.internet.base.ReactorBase:
return self._reactor
def FUNC_7(self, VAR_11) -> str:
return VAR_11.getClientIP()
def FUNC_8(self, VAR_12: DomainSpecificString) -> bool:
return VAR_12.domain == self.hostname
def FUNC_9(self, VAR_13: str) -> bool:
return VAR_13.split(":", 1)[1] == self.hostname
@FUNC_0
def FUNC_10(self) -> Clock:
return Clock(self._reactor)
def FUNC_11(self) -> DataStore:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores.main
def FUNC_12(self) -> Databases:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores
def FUNC_13(self) -> HomeServerConfig:
return self.config
@FUNC_0
def FUNC_14(self) -> Distributor:
return Distributor()
@FUNC_0
def FUNC_15(self) -> Ratelimiter:
return Ratelimiter(
clock=self.get_clock(),
rate_hz=self.config.rc_registration.per_second,
burst_count=self.config.rc_registration.burst_count,
)
@FUNC_0
def FUNC_16(self) -> FederationClient:
return FederationClient(self)
@FUNC_0
def FUNC_17(self) -> FederationServer:
return FederationServer(self)
@FUNC_0
def FUNC_18(self) -> Notifier:
return Notifier(self)
@FUNC_0
def FUNC_19(self) -> Auth:
return Auth(self)
@FUNC_0
def FUNC_20(self) -> IPolicyForHTTPS:
return (
InsecureInterceptableContextFactory()
if self.config.use_insecure_ssl_client_just_for_testing_do_not_use
else RegularPolicyForHTTPS()
)
@FUNC_0
def FUNC_21(self) -> SimpleHttpClient:
return SimpleHttpClient(self)
@FUNC_0
def FUNC_22(self) -> SimpleHttpClient:
return SimpleHttpClient(
self,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@FUNC_0
def FUNC_23(self) -> SimpleHttpClient:
return SimpleHttpClient(
self,
ip_blacklist=self.config.ip_range_blacklist,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@FUNC_0
def FUNC_24(self) -> MatrixFederationHttpClient:
VAR_17 = context_factory.FederationPolicyForHTTPS(
self.config
)
return MatrixFederationHttpClient(self, VAR_17)
@FUNC_0
def FUNC_25(self) -> RoomCreationHandler:
return RoomCreationHandler(self)
@FUNC_0
def FUNC_26(self) -> RoomShutdownHandler:
return RoomShutdownHandler(self)
@FUNC_0
def FUNC_27(self) -> sendmail:
return sendmail
@FUNC_0
def FUNC_28(self) -> StateHandler:
return StateHandler(self)
@FUNC_0
def FUNC_29(self) -> StateResolutionHandler:
return StateResolutionHandler(self)
@FUNC_0
def FUNC_30(self) -> PresenceHandler:
return PresenceHandler(self)
@FUNC_0
def FUNC_31(self):
if self.config.worker.writers.typing == self.get_instance_name():
return TypingWriterHandler(self)
else:
return FollowerTypingHandler(self)
@FUNC_0
def FUNC_32(self) -> SsoHandler:
return SsoHandler(self)
@FUNC_0
def FUNC_33(self) -> SyncHandler:
return SyncHandler(self)
@FUNC_0
def FUNC_34(self) -> RoomListHandler:
return RoomListHandler(self)
@FUNC_0
def FUNC_35(self) -> AuthHandler:
return AuthHandler(self)
@FUNC_0
def FUNC_36(self) -> MacaroonGenerator:
return MacaroonGenerator(self)
@FUNC_0
def FUNC_37(self):
if self.config.worker_app:
return DeviceWorkerHandler(self)
else:
return DeviceHandler(self)
@FUNC_0
def FUNC_38(self) -> DeviceMessageHandler:
return DeviceMessageHandler(self)
@FUNC_0
def FUNC_39(self) -> DirectoryHandler:
return DirectoryHandler(self)
@FUNC_0
def FUNC_40(self) -> E2eKeysHandler:
return E2eKeysHandler(self)
@FUNC_0
def FUNC_41(self) -> E2eRoomKeysHandler:
return E2eRoomKeysHandler(self)
@FUNC_0
def FUNC_42(self) -> AcmeHandler:
return AcmeHandler(self)
@FUNC_0
def FUNC_43(self) -> AdminHandler:
return AdminHandler(self)
@FUNC_0
def FUNC_44(self) -> ApplicationServiceApi:
return ApplicationServiceApi(self)
@FUNC_0
def FUNC_45(self) -> ApplicationServiceScheduler:
return ApplicationServiceScheduler(self)
@FUNC_0
def FUNC_46(self) -> ApplicationServicesHandler:
return ApplicationServicesHandler(self)
@FUNC_0
def FUNC_47(self) -> EventHandler:
return EventHandler(self)
@FUNC_0
def FUNC_48(self) -> EventStreamHandler:
return EventStreamHandler(self)
@FUNC_0
def FUNC_49(self) -> FederationHandler:
return FederationHandler(self)
@FUNC_0
def FUNC_50(self) -> IdentityHandler:
return IdentityHandler(self)
@FUNC_0
def FUNC_51(self) -> InitialSyncHandler:
return InitialSyncHandler(self)
@FUNC_0
def FUNC_52(self):
return ProfileHandler(self)
@FUNC_0
def FUNC_53(self) -> EventCreationHandler:
return EventCreationHandler(self)
@FUNC_0
def FUNC_54(self) -> DeactivateAccountHandler:
return DeactivateAccountHandler(self)
@FUNC_0
def FUNC_55(self) -> SearchHandler:
return SearchHandler(self)
@FUNC_0
def FUNC_56(self) -> SetPasswordHandler:
return SetPasswordHandler(self)
@FUNC_0
def FUNC_57(self) -> EventSources:
return EventSources(self)
@FUNC_0
def FUNC_58(self) -> Keyring:
return Keyring(self)
@FUNC_0
def FUNC_59(self) -> EventBuilderFactory:
return EventBuilderFactory(self)
@FUNC_0
def FUNC_60(self) -> Filtering:
return Filtering(self)
@FUNC_0
def FUNC_61(self) -> PusherPool:
return PusherPool(self)
@FUNC_0
def FUNC_62(self) -> MediaRepositoryResource:
return MediaRepositoryResource(self)
@FUNC_0
def FUNC_63(self) -> MediaRepository:
return MediaRepository(self)
@FUNC_0
def FUNC_64(self) -> TransportLayerClient:
return TransportLayerClient(self)
@FUNC_0
def FUNC_65(self):
if self.should_send_federation():
return FederationSender(self)
elif not self.config.worker_app:
return FederationRemoteSendQueue(self)
else:
raise Exception("Workers cannot send federation traffic")
@FUNC_0
def FUNC_66(self) -> ReceiptsHandler:
return ReceiptsHandler(self)
@FUNC_0
def FUNC_67(self) -> ReadMarkerHandler:
return ReadMarkerHandler(self)
@FUNC_0
def FUNC_68(self) -> ReplicationCommandHandler:
return ReplicationCommandHandler(self)
@FUNC_0
def FUNC_69(self) -> ActionGenerator:
return ActionGenerator(self)
@FUNC_0
def FUNC_70(self) -> UserDirectoryHandler:
return UserDirectoryHandler(self)
@FUNC_0
def FUNC_71(self):
if self.config.worker_app:
return GroupsLocalWorkerHandler(self)
else:
return GroupsLocalHandler(self)
@FUNC_0
def FUNC_72(self):
if self.config.worker_app:
return GroupsServerWorkerHandler(self)
else:
return GroupsServerHandler(self)
@FUNC_0
def FUNC_73(self) -> GroupAttestationSigning:
return GroupAttestationSigning(self)
@FUNC_0
def FUNC_74(self) -> GroupAttestionRenewer:
return GroupAttestionRenewer(self)
@FUNC_0
def FUNC_75(self) -> Secrets:
return Secrets()
@FUNC_0
def FUNC_76(self) -> StatsHandler:
return StatsHandler(self)
@FUNC_0
def FUNC_77(self):
return SpamChecker(self)
@FUNC_0
def FUNC_78(self) -> ThirdPartyEventRules:
return ThirdPartyEventRules(self)
@FUNC_0
def FUNC_79(self):
if self.config.worker_app:
return RoomMemberWorkerHandler(self)
return RoomMemberMasterHandler(self)
@FUNC_0
def FUNC_80(self) -> FederationHandlerRegistry:
return FederationHandlerRegistry(self)
@FUNC_0
def FUNC_81(self):
if self.config.worker_app:
raise Exception("Workers cannot send server notices")
return ServerNoticesManager(self)
@FUNC_0
def FUNC_82(self):
if self.config.worker_app:
return WorkerServerNoticesSender(self)
return ServerNoticesSender(self)
@FUNC_0
def FUNC_83(self) -> MessageHandler:
return MessageHandler(self)
@FUNC_0
def FUNC_84(self) -> PaginationHandler:
return PaginationHandler(self)
@FUNC_0
def FUNC_85(self) -> RoomContextHandler:
return RoomContextHandler(self)
@FUNC_0
def FUNC_86(self) -> RegistrationHandler:
return RegistrationHandler(self)
@FUNC_0
def FUNC_87(self) -> AccountValidityHandler:
return AccountValidityHandler(self)
@FUNC_0
def FUNC_88(self) -> CasHandler:
return CasHandler(self)
@FUNC_0
def FUNC_89(self) -> "SamlHandler":
from synapse.handlers.saml_handler import SamlHandler
return SamlHandler(self)
@FUNC_0
def FUNC_90(self) -> "OidcHandler":
from synapse.handlers.oidc_handler import OidcHandler
return OidcHandler(self)
@FUNC_0
def FUNC_91(self) -> EventClientSerializer:
return EventClientSerializer(self)
@FUNC_0
def FUNC_92(self) -> PasswordPolicyHandler:
return PasswordPolicyHandler(self)
@FUNC_0
def FUNC_93(self) -> Storage:
return Storage(self, self.get_datastores())
@FUNC_0
def FUNC_94(self) -> ReplicationStreamer:
return ReplicationStreamer(self)
@FUNC_0
def FUNC_95(self) -> ReplicationDataHandler:
return ReplicationDataHandler(self)
@FUNC_0
def FUNC_96(self) -> Dict[str, Stream]:
return {stream.NAME: stream(self) for stream in STREAMS_MAP.values()}
@FUNC_0
def FUNC_97(self) -> FederationRateLimiter:
return FederationRateLimiter(self.get_clock(), VAR_8=self.config.rc_federation)
@FUNC_0
def FUNC_98(self) -> ModuleApi:
return ModuleApi(self, self.get_auth_handler())
async def FUNC_99(self, VAR_14: str, VAR_15: str, VAR_16: str):
return await self.get_pusherpool().remove_pusher(VAR_14, VAR_15, VAR_16)
return self.config.send_federation and (
not self.config.worker_app
or self.config.worker_app == "synapse.app.federation_sender"
)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
29,
34,
126,
128,
132,
133,
135,
136,
140,
144,
149,
150,
152,
154,
161,
162,
165,
172,
174,
175,
176,
178,
179,
182,
186,
189,
195,
205,
206,
207,
208,
210,
225,
227,
230,
235,
238,
240,
242,
245,
250,
253,
258,
264,
265,
266,
267,
270,
279,
285,
287,
289,
292,
295,
299,
303,
305,
309,
311,
314,
318,
326,
330,
334,
338,
342,
350,
354,
362,
366,
370,
374,
378,
382,
386,
393,
397,
401,
405,
409,
413,
420,
424,
428,
432,
436,
440,
444,
448,
452,
456,
460,
464,
468,
472,
476,
480,
484,
488,
492,
496,
500,
504,
508,
512,
516,
523,
526,
527,
529,
533,
537,
546,
550,
554,
558,
562,
566,
573,
580,
584,
588,
592,
596,
600,
604,
610,
614,
620,
626,
630,
634,
638,
642,
646,
650,
654,
656,
660,
662,
666,
670,
674,
678,
682,
686,
690,
694,
697,
704,
138,
139,
140,
141,
142,
143,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
194,
218,
219,
220,
221,
222,
244,
245,
246,
247,
248,
252,
253,
254,
255,
256,
272,
273,
274,
275,
276,
281,
282,
283,
698,
699
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
29,
34,
126,
128,
132,
133,
135,
136,
140,
144,
149,
150,
152,
154,
161,
162,
165,
172,
174,
175,
176,
178,
179,
182,
186,
189,
195,
205,
206,
207,
208,
210,
225,
227,
230,
235,
238,
240,
242,
245,
250,
253,
258,
264,
265,
266,
267,
270,
279,
285,
287,
289,
292,
295,
299,
303,
305,
309,
311,
314,
318,
326,
330,
334,
338,
342,
350,
357,
368,
381,
391,
395,
399,
403,
407,
411,
415,
422,
426,
430,
434,
438,
442,
449,
453,
457,
461,
465,
469,
473,
477,
481,
485,
489,
493,
497,
501,
505,
509,
513,
517,
521,
525,
529,
533,
537,
541,
545,
548,
549,
551,
555,
559,
568,
572,
576,
580,
584,
588,
595,
602,
606,
610,
614,
618,
622,
626,
632,
636,
642,
648,
652,
656,
660,
664,
668,
672,
676,
678,
682,
684,
688,
692,
696,
700,
704,
708,
712,
716,
719,
726,
138,
139,
140,
141,
142,
143,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
194,
218,
219,
220,
221,
222,
244,
245,
246,
247,
248,
252,
253,
254,
255,
256,
272,
273,
274,
275,
276,
281,
282,
283,
353,
354,
355,
360,
361,
362,
371,
372,
373,
374,
384,
385,
386,
720,
721
] |
2CWE-601
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponsePermanentRedirect
from djconfig import config
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.paginator import paginate, yt_paginate
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.category.models import Category
from spirit.comment.forms import CommentForm
from spirit.comment.utils import comment_posted
from spirit.comment.models import Comment
from .models import Topic
from .forms import TopicForm
from . import utils
@login_required
@ratelimit(rate='1/10s')
def publish(request, category_id=None):
if category_id:
get_object_or_404(
Category.objects.visible(),
pk=category_id)
user = request.user
form = TopicForm(
user=user,
data=post_data(request),
initial={'category': category_id})
cform = CommentForm(
user=user,
data=post_data(request))
if (is_post(request) and
all([form.is_valid(), cform.is_valid()]) and
not request.is_limited()):
if not user.st.update_post_hash(form.get_topic_hash()):
return redirect(
request.POST.get('next', None) or
form.get_category().get_absolute_url())
# wrap in transaction.atomic?
topic = form.save()
cform.topic = topic
comment = cform.save()
comment_posted(comment=comment, mentions=cform.mentions)
return redirect(topic.get_absolute_url())
return render(
request=request,
template_name='spirit/topic/publish.html',
context={'form': form, 'cform': cform})
@login_required
def update(request, pk):
topic = Topic.objects.for_update_or_404(pk, request.user)
category_id = topic.category_id
form = TopicForm(
user=request.user,
data=post_data(request),
instance=topic)
if is_post(request) and form.is_valid():
topic = form.save()
if topic.category_id != category_id:
Comment.create_moderation_action(
user=request.user, topic=topic, action=Comment.MOVED)
return redirect(request.POST.get('next', topic.get_absolute_url()))
return render(
request=request,
template_name='spirit/topic/update.html',
context={'form': form})
def detail(request, pk, slug):
topic = Topic.objects.get_public_or_404(pk, request.user)
if topic.slug != slug:
return HttpResponsePermanentRedirect(topic.get_absolute_url())
utils.topic_viewed(request=request, topic=topic)
comments = (
Comment.objects
.for_topic(topic=topic)
.with_likes(user=request.user)
.with_polls(user=request.user)
.order_by('date'))
comments = paginate(
comments,
per_page=config.comments_per_page,
page_number=request.GET.get('page', 1))
return render(
request=request,
template_name='spirit/topic/detail.html',
context={
'topic': topic,
'comments': comments})
def index_active(request):
categories = (
Category.objects
.visible()
.parents()
.ordered())
topics = (
Topic.objects
.visible()
.global_()
.with_bookmarks(user=request.user)
.order_by('-is_globally_pinned', '-last_active')
.select_related('category'))
topics = yt_paginate(
topics,
per_page=config.topics_per_page,
page_number=request.GET.get('page', 1))
return render(
request=request,
template_name='spirit/topic/active.html',
context={
'categories': categories,
'topics': topics})
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponsePermanentRedirect
from djconfig import config
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.paginator import paginate, yt_paginate
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.category.models import Category
from spirit.comment.forms import CommentForm
from spirit.comment.utils import comment_posted
from spirit.comment.models import Comment
from .models import Topic
from .forms import TopicForm
from . import utils
@login_required
@ratelimit(rate='1/10s')
def publish(request, category_id=None):
if category_id:
get_object_or_404(
Category.objects.visible(),
pk=category_id)
user = request.user
form = TopicForm(
user=user,
data=post_data(request),
initial={'category': category_id})
cform = CommentForm(
user=user,
data=post_data(request))
if (is_post(request) and
all([form.is_valid(), cform.is_valid()]) and
not request.is_limited()):
if not user.st.update_post_hash(form.get_topic_hash()):
default_url = lambda: form.get_category().get_absolute_url()
return safe_redirect(
request, 'next', default_url, method='POST')
# wrap in transaction.atomic?
topic = form.save()
cform.topic = topic
comment = cform.save()
comment_posted(comment=comment, mentions=cform.mentions)
return redirect(topic.get_absolute_url())
return render(
request=request,
template_name='spirit/topic/publish.html',
context={'form': form, 'cform': cform})
@login_required
def update(request, pk):
topic = Topic.objects.for_update_or_404(pk, request.user)
category_id = topic.category_id
form = TopicForm(
user=request.user,
data=post_data(request),
instance=topic)
if is_post(request) and form.is_valid():
topic = form.save()
if topic.category_id != category_id:
Comment.create_moderation_action(
user=request.user, topic=topic, action=Comment.MOVED)
return safe_redirect(request,'next', topic.get_absolute_url(), method='POST')
return render(
request=request,
template_name='spirit/topic/update.html',
context={'form': form})
def detail(request, pk, slug):
topic = Topic.objects.get_public_or_404(pk, request.user)
if topic.slug != slug:
return HttpResponsePermanentRedirect(topic.get_absolute_url())
utils.topic_viewed(request=request, topic=topic)
comments = (
Comment.objects
.for_topic(topic=topic)
.with_likes(user=request.user)
.with_polls(user=request.user)
.order_by('date'))
comments = paginate(
comments,
per_page=config.comments_per_page,
page_number=request.GET.get('page', 1))
return render(
request=request,
template_name='spirit/topic/detail.html',
context={
'topic': topic,
'comments': comments})
def index_active(request):
categories = (
Category.objects
.visible()
.parents()
.ordered())
topics = (
Topic.objects
.visible()
.global_()
.with_bookmarks(user=request.user)
.order_by('-is_globally_pinned', '-last_active')
.select_related('category'))
topics = yt_paginate(
topics,
per_page=config.topics_per_page,
page_number=request.GET.get('page', 1))
return render(
request=request,
template_name='spirit/topic/active.html',
context={
'categories': categories,
'topics': topics})
| open_redirect | {
"code": [
" return redirect(",
" request.POST.get('next', None) or",
" form.get_category().get_absolute_url())",
" return redirect(request.POST.get('next', topic.get_absolute_url()))"
],
"line_no": [
41,
42,
43,
69
]
} | {
"code": [
"from spirit.core.utils.http import safe_redirect",
" default_url = lambda: form.get_category().get_absolute_url()",
" return safe_redirect(",
" return safe_redirect(request,'next', topic.get_absolute_url(), method='POST')"
],
"line_no": [
9,
42,
43,
70
]
} |
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponsePermanentRedirect
from djconfig import config
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.paginator import paginate, yt_paginate
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.category.models import Category
from spirit.comment.forms import CommentForm
from spirit.comment.utils import .comment_posted
from spirit.comment.models import Comment
from .models import Topic
from .forms import TopicForm
from . import utils
@login_required
@ratelimit(rate='1/10s')
def FUNC_0(VAR_0, VAR_1=None):
if VAR_1:
get_object_or_404(
Category.objects.visible(),
VAR_2=VAR_1)
VAR_4 = VAR_0.user
VAR_5 = TopicForm(
VAR_4=user,
data=post_data(VAR_0),
initial={'category': VAR_1})
VAR_6 = CommentForm(
VAR_4=user,
data=post_data(VAR_0))
if (is_post(VAR_0) and
all([VAR_5.is_valid(), VAR_6.is_valid()]) and
not VAR_0.is_limited()):
if not VAR_4.st.update_post_hash(VAR_5.get_topic_hash()):
return redirect(
VAR_0.POST.get('next', None) or
VAR_5.get_category().get_absolute_url())
VAR_7 = VAR_5.save()
VAR_6.topic = VAR_7
VAR_11 = VAR_6.save()
comment_posted(VAR_11=comment, mentions=VAR_6.mentions)
return redirect(VAR_7.get_absolute_url())
return render(
VAR_0=request,
template_name='spirit/VAR_7/FUNC_0.html',
context={'form': VAR_5, 'cform': VAR_6})
@login_required
def FUNC_1(VAR_0, VAR_2):
VAR_7 = Topic.objects.for_update_or_404(VAR_2, VAR_0.user)
VAR_1 = VAR_7.category_id
VAR_5 = TopicForm(
VAR_4=VAR_0.user,
data=post_data(VAR_0),
instance=VAR_7)
if is_post(VAR_0) and VAR_5.is_valid():
VAR_7 = VAR_5.save()
if VAR_7.category_id != VAR_1:
Comment.create_moderation_action(
VAR_4=VAR_0.user, VAR_7=topic, action=Comment.MOVED)
return redirect(VAR_0.POST.get('next', VAR_7.get_absolute_url()))
return render(
VAR_0=request,
template_name='spirit/VAR_7/FUNC_1.html',
context={'form': VAR_5})
def FUNC_2(VAR_0, VAR_2, VAR_3):
VAR_7 = Topic.objects.get_public_or_404(VAR_2, VAR_0.user)
if VAR_7.slug != VAR_3:
return HttpResponsePermanentRedirect(VAR_7.get_absolute_url())
utils.topic_viewed(VAR_0=request, VAR_7=topic)
VAR_8 = (
Comment.objects
.for_topic(VAR_7=VAR_7)
.with_likes(VAR_4=VAR_0.user)
.with_polls(VAR_4=VAR_0.user)
.order_by('date'))
VAR_8 = paginate(
VAR_8,
per_page=config.comments_per_page,
page_number=VAR_0.GET.get('page', 1))
return render(
VAR_0=request,
template_name='spirit/VAR_7/FUNC_2.html',
context={
'topic': VAR_7,
'comments': VAR_8})
def FUNC_3(VAR_0):
VAR_9 = (
Category.objects
.visible()
.parents()
.ordered())
VAR_10 = (
Topic.objects
.visible()
.global_()
.with_bookmarks(VAR_4=VAR_0.user)
.order_by('-is_globally_pinned', '-last_active')
.select_related('category'))
VAR_10 = yt_paginate(
VAR_10,
per_page=config.topics_per_page,
page_number=VAR_0.GET.get('page', 1))
return render(
VAR_0=request,
template_name='spirit/VAR_7/active.html',
context={
'categories': VAR_9,
'topics': VAR_10})
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponsePermanentRedirect
from djconfig import config
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.paginator import paginate, yt_paginate
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.category.models import Category
from spirit.comment.forms import CommentForm
from spirit.comment.utils import .comment_posted
from spirit.comment.models import Comment
from .models import Topic
from .forms import TopicForm
from . import utils
@login_required
@ratelimit(rate='1/10s')
def FUNC_0(VAR_0, VAR_1=None):
if VAR_1:
get_object_or_404(
Category.objects.visible(),
VAR_2=VAR_1)
VAR_4 = VAR_0.user
VAR_5 = TopicForm(
VAR_4=user,
data=post_data(VAR_0),
initial={'category': VAR_1})
VAR_6 = CommentForm(
VAR_4=user,
data=post_data(VAR_0))
if (is_post(VAR_0) and
all([VAR_5.is_valid(), VAR_6.is_valid()]) and
not VAR_0.is_limited()):
if not VAR_4.st.update_post_hash(VAR_5.get_topic_hash()):
VAR_12 = lambda: VAR_5.get_category().get_absolute_url()
return safe_redirect(
VAR_0, 'next', VAR_12, method='POST')
VAR_7 = VAR_5.save()
VAR_6.topic = VAR_7
VAR_11 = VAR_6.save()
comment_posted(VAR_11=comment, mentions=VAR_6.mentions)
return redirect(VAR_7.get_absolute_url())
return render(
VAR_0=request,
template_name='spirit/VAR_7/FUNC_0.html',
context={'form': VAR_5, 'cform': VAR_6})
@login_required
def FUNC_1(VAR_0, VAR_2):
VAR_7 = Topic.objects.for_update_or_404(VAR_2, VAR_0.user)
VAR_1 = VAR_7.category_id
VAR_5 = TopicForm(
VAR_4=VAR_0.user,
data=post_data(VAR_0),
instance=VAR_7)
if is_post(VAR_0) and VAR_5.is_valid():
VAR_7 = VAR_5.save()
if VAR_7.category_id != VAR_1:
Comment.create_moderation_action(
VAR_4=VAR_0.user, VAR_7=topic, action=Comment.MOVED)
return safe_redirect(VAR_0,'next', VAR_7.get_absolute_url(), method='POST')
return render(
VAR_0=request,
template_name='spirit/VAR_7/FUNC_1.html',
context={'form': VAR_5})
def FUNC_2(VAR_0, VAR_2, VAR_3):
VAR_7 = Topic.objects.get_public_or_404(VAR_2, VAR_0.user)
if VAR_7.slug != VAR_3:
return HttpResponsePermanentRedirect(VAR_7.get_absolute_url())
utils.topic_viewed(VAR_0=request, VAR_7=topic)
VAR_8 = (
Comment.objects
.for_topic(VAR_7=VAR_7)
.with_likes(VAR_4=VAR_0.user)
.with_polls(VAR_4=VAR_0.user)
.order_by('date'))
VAR_8 = paginate(
VAR_8,
per_page=config.comments_per_page,
page_number=VAR_0.GET.get('page', 1))
return render(
VAR_0=request,
template_name='spirit/VAR_7/FUNC_2.html',
context={
'topic': VAR_7,
'comments': VAR_8})
def FUNC_3(VAR_0):
VAR_9 = (
Category.objects
.visible()
.parents()
.ordered())
VAR_10 = (
Topic.objects
.visible()
.global_()
.with_bookmarks(VAR_4=VAR_0.user)
.order_by('-is_globally_pinned', '-last_active')
.select_related('category'))
VAR_10 = yt_paginate(
VAR_10,
per_page=config.topics_per_page,
page_number=VAR_0.GET.get('page', 1))
return render(
VAR_0=request,
template_name='spirit/VAR_7/active.html',
context={
'categories': VAR_9,
'topics': VAR_10})
| [
1,
2,
6,
8,
19,
20,
28,
44,
54,
55,
74,
75,
78,
81,
83,
90,
95,
102,
103,
110,
118,
123,
130
] | [
1,
2,
6,
8,
20,
21,
29,
45,
55,
56,
75,
76,
79,
82,
84,
91,
96,
103,
104,
111,
119,
124,
131
] |
0CWE-22
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from builtins import range, str
import datetime
import json
import os
import subprocess
import sys
from math import ceil
from flask import Blueprint, Response, request, stream_with_context, url_for
from opendiamond.dataretriever.util import DiamondTextAttr
from werkzeug.datastructures import Headers
# IMPORTANT: requires ffmpeg >= 3.3. Lower versions produce incorrect clipping.
BASEURL = 'video'
STYLE = False
INDEXDIR = DATAROOT = None
def init(config):
global INDEXDIR, DATAROOT # pylint: disable=global-statement
INDEXDIR = config.indexdir
DATAROOT = config.dataroot
scope_blueprint = Blueprint('video_store', __name__)
@scope_blueprint.route('/scope/<gididx>')
@scope_blueprint.route('/scope/stride/<int:stride>/span/<int:span>/<gididx>')
def get_scope(gididx, stride=5, span=5):
index = 'GIDIDX' + gididx.upper()
def generate():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if STYLE:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist>\n'
with open(_get_index_absolute_path(index), 'rt') as f:
for line in f:
video = line.strip()
video_path = str(_get_obj_absolute_path(video))
try:
video_meta = _ffprobe(video_path)
length_sec = float(video_meta['format']['duration'])
num_clips = int(ceil(length_sec / stride))
yield '<count adjust="{}"/>\n'.format(num_clips)
for clip in range(num_clips):
yield _get_object_element(start=clip * stride, span=span, video=video) + '\n'
except Exception as e:
print("Error parsing {}. {}. Skip.".format(video, str(e)), file=sys.stderr)
pass
yield '</objectlist>\n'
headers = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
@scope_blueprint.route('/id/start/<int:start>/span/<int:span>/<path:video>')
def get_object_id(start, span, video):
headers = Headers([('Content-Type', 'text/xml')])
return Response(_get_object_element(start, span, video),
"200 OK",
headers=headers)
@scope_blueprint.route('/obj/start/<int:start>/span/<int:span>/<path:video>')
def get_object(start, span, video):
# Reference:
# https://github.com/mikeboers/PyAV/blob/master/tests/test_seek.py
video_path = str(_get_obj_absolute_path(video))
proc = _create_ffmpeg_segment_proc(video_path,
start_sec=start,
duration_sec=span)
def generate():
while True:
data = proc.stdout.read(4096)
if not data:
break
yield data
headers = Headers([('Content-Type', 'video/mp4')])
response = Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
# Cache control
stat = os.stat(video_path)
last_modified = stat.st_mtime
size = stat.st_size
etag = "{}_{}_{}_{}".format(last_modified, size, start, span)
response.last_modified = last_modified
response.set_etag(etag=etag)
response.cache_control.public = True
response.cache_control.max_age = \
datetime.timedelta(days=365).total_seconds()
response.make_conditional(request)
return response
def _get_object_element(start, span, video):
return '<object id="{}" src="{}" />'.format(
url_for('.get_object_id', start=start, span=span, video=video),
url_for('.get_object', start=start, span=span, video=video))
def _get_obj_absolute_path(obj_path):
return os.path.join(DATAROOT, obj_path)
def _get_index_absolute_path(index):
return os.path.join(INDEXDIR, index)
def _ffprobe(video_path):
cmd_l = ['ffprobe', '-v', 'quiet', '-print_format', 'json',
'-show_format', video_path]
proc = subprocess.Popen(cmd_l, stdout=subprocess.PIPE, bufsize=-1)
data = json.load(proc.stdout)
return data
def _create_ffmpeg_segment_proc(video_path, start_sec, duration_sec):
"""
Use ffmpeg to extract a .mp4 segment of the video. Outfile is written to stdout.
Note: requires ffmpeg >= 3.3. Lower versions produce wrong results.
Reference: http://trac.ffmpeg.org/wiki/Seeking
https://stackoverflow.com/questions/34123272/ffmpeg-transmux-mpegts-to-mp4-gives-error-muxer-does-not-support-non-seekable
:param video_path:
:param start_sec:
:param duration_sec:
:return: the subprocess
"""
cmd_l = ['ffmpeg', '-v', 'quiet',
'-ss', str(start_sec),
'-t', str(duration_sec),
'-i', str(video_path),
'-movflags', 'frag_keyframe+empty_moov',
'-c', 'copy',
'-f', 'mp4',
'pipe:1']
proc = subprocess.Popen(cmd_l, stdout=subprocess.PIPE, bufsize=-1)
return proc
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from builtins import range, str
import datetime
import json
import os
import subprocess
import sys
from math import ceil
from flask import Blueprint, Response, request, stream_with_context, url_for
from opendiamond.dataretriever.util import DiamondTextAttr
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
# IMPORTANT: requires ffmpeg >= 3.3. Lower versions produce incorrect clipping.
BASEURL = 'video'
STYLE = False
INDEXDIR = DATAROOT = None
def init(config):
global INDEXDIR, DATAROOT # pylint: disable=global-statement
INDEXDIR = config.indexdir
DATAROOT = config.dataroot
scope_blueprint = Blueprint('video_store', __name__)
@scope_blueprint.route('/scope/<gididx>')
@scope_blueprint.route('/scope/stride/<int:stride>/span/<int:span>/<gididx>')
def get_scope(gididx, stride=5, span=5):
index = 'GIDIDX' + gididx.upper()
def generate():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if STYLE:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist>\n'
with open(_get_index_absolute_path(index), 'rt') as f:
for line in f:
video = line.strip()
video_path = str(_get_obj_absolute_path(video))
try:
video_meta = _ffprobe(video_path)
length_sec = float(video_meta['format']['duration'])
num_clips = int(ceil(length_sec / stride))
yield '<count adjust="{}"/>\n'.format(num_clips)
for clip in range(num_clips):
yield _get_object_element(start=clip * stride, span=span, video=video) + '\n'
except Exception as e:
print("Error parsing {}. {}. Skip.".format(video, str(e)), file=sys.stderr)
pass
yield '</objectlist>\n'
headers = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
@scope_blueprint.route('/id/start/<int:start>/span/<int:span>/<path:video>')
def get_object_id(start, span, video):
headers = Headers([('Content-Type', 'text/xml')])
return Response(_get_object_element(start, span, video),
"200 OK",
headers=headers)
@scope_blueprint.route('/obj/start/<int:start>/span/<int:span>/<path:video>')
def get_object(start, span, video):
# Reference:
# https://github.com/mikeboers/PyAV/blob/master/tests/test_seek.py
video_path = str(_get_obj_absolute_path(video))
proc = _create_ffmpeg_segment_proc(video_path,
start_sec=start,
duration_sec=span)
def generate():
while True:
data = proc.stdout.read(4096)
if not data:
break
yield data
headers = Headers([('Content-Type', 'video/mp4')])
response = Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
# Cache control
stat = os.stat(video_path)
last_modified = stat.st_mtime
size = stat.st_size
etag = "{}_{}_{}_{}".format(last_modified, size, start, span)
response.last_modified = last_modified
response.set_etag(etag=etag)
response.cache_control.public = True
response.cache_control.max_age = \
datetime.timedelta(days=365).total_seconds()
response.make_conditional(request)
return response
def _get_object_element(start, span, video):
return '<object id="{}" src="{}" />'.format(
url_for('.get_object_id', start=start, span=span, video=video),
url_for('.get_object', start=start, span=span, video=video))
def _get_obj_absolute_path(obj_path):
return safe_join(DATAROOT, obj_path)
def _get_index_absolute_path(index):
return safe_join(INDEXDIR, index)
def _ffprobe(video_path):
cmd_l = ['ffprobe', '-v', 'quiet', '-print_format', 'json',
'-show_format', video_path]
proc = subprocess.Popen(cmd_l, stdout=subprocess.PIPE, bufsize=-1)
data = json.load(proc.stdout)
return data
def _create_ffmpeg_segment_proc(video_path, start_sec, duration_sec):
"""
Use ffmpeg to extract a .mp4 segment of the video. Outfile is written to stdout.
Note: requires ffmpeg >= 3.3. Lower versions produce wrong results.
Reference: http://trac.ffmpeg.org/wiki/Seeking
https://stackoverflow.com/questions/34123272/ffmpeg-transmux-mpegts-to-mp4-gives-error-muxer-does-not-support-non-seekable
:param video_path:
:param start_sec:
:param duration_sec:
:return: the subprocess
"""
cmd_l = ['ffmpeg', '-v', 'quiet',
'-ss', str(start_sec),
'-t', str(duration_sec),
'-i', str(video_path),
'-movflags', 'frag_keyframe+empty_moov',
'-c', 'copy',
'-f', 'mp4',
'pipe:1']
proc = subprocess.Popen(cmd_l, stdout=subprocess.PIPE, bufsize=-1)
return proc
| path_disclosure | {
"code": [
" return os.path.join(DATAROOT, obj_path)",
" return os.path.join(INDEXDIR, index)"
],
"line_no": [
125,
129
]
} | {
"code": [
" return safe_join(DATAROOT, obj_path)"
],
"line_no": [
126
]
} |
from builtins import range, str
import datetime
import json
import os
import subprocess
import sys
from math import ceil
from flask import Blueprint, Response, request, stream_with_context, url_for
from opendiamond.dataretriever.util import DiamondTextAttr
from werkzeug.datastructures import Headers
VAR_0 = 'video'
VAR_1 = False
VAR_2 = VAR_3 = None
def FUNC_0(VAR_4):
global VAR_2, VAR_3 # pylint: disable=global-statement
VAR_2 = VAR_4.indexdir
VAR_3 = VAR_4.dataroot
VAR_5 = Blueprint('video_store', __name__)
@VAR_5.route('/scope/<VAR_6>')
@VAR_5.route('/scope/VAR_7/<int:VAR_7>/VAR_8/<int:VAR_8>/<VAR_6>')
def FUNC_1(VAR_6, VAR_7=5, VAR_8=5):
VAR_12 = 'GIDIDX' + VAR_6.upper()
def FUNC_9():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist>\n'
with open(FUNC_6(VAR_12), 'rt') as f:
for line in f:
VAR_10 = line.strip()
VAR_13 = str(FUNC_5(VAR_10))
try:
VAR_25 = FUNC_7(VAR_13)
VAR_26 = float(VAR_25['format']['duration'])
VAR_27 = int(ceil(VAR_26 / VAR_7))
yield '<count adjust="{}"/>\n'.format(VAR_27)
for clip in range(VAR_27):
yield FUNC_4(VAR_9=clip * VAR_7, VAR_8=span, VAR_10=video) + '\n'
except Exception as e:
print("Error parsing {}. {}. Skip.".format(VAR_10, str(e)), file=sys.stderr)
pass
yield '</objectlist>\n'
VAR_16 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_9()),
status="200 OK",
VAR_16=headers)
@VAR_5.route('/id/VAR_9/<int:VAR_9>/VAR_8/<int:VAR_8>/<path:VAR_10>')
def FUNC_2(VAR_9, VAR_8, VAR_10):
VAR_16 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_4(VAR_9, VAR_8, VAR_10),
"200 OK",
VAR_16=headers)
@VAR_5.route('/obj/VAR_9/<int:VAR_9>/VAR_8/<int:VAR_8>/<path:VAR_10>')
def FUNC_3(VAR_9, VAR_8, VAR_10):
VAR_13 = str(FUNC_5(VAR_10))
VAR_17 = FUNC_8(VAR_13,
VAR_14=VAR_9,
VAR_15=VAR_8)
def FUNC_9():
while True:
VAR_24 = VAR_17.stdout.read(4096)
if not VAR_24:
break
yield VAR_24
VAR_16 = Headers([('Content-Type', 'video/mp4')])
VAR_18 = Response(stream_with_context(FUNC_9()),
status="200 OK",
VAR_16=headers)
VAR_19 = os.stat(VAR_13)
VAR_20 = VAR_19.st_mtime
VAR_21 = VAR_19.st_size
VAR_22 = "{}_{}_{}_{}".format(VAR_20, VAR_21, VAR_9, VAR_8)
VAR_18.last_modified = VAR_20
VAR_18.set_etag(VAR_22=etag)
VAR_18.cache_control.public = True
VAR_18.cache_control.max_age = \
datetime.timedelta(days=365).total_seconds()
VAR_18.make_conditional(request)
return VAR_18
def FUNC_4(VAR_9, VAR_8, VAR_10):
return '<object id="{}" src="{}" />'.format(
url_for('.get_object_id', VAR_9=start, VAR_8=span, VAR_10=video),
url_for('.get_object', VAR_9=start, VAR_8=span, VAR_10=video))
def FUNC_5(VAR_11):
return os.path.join(VAR_3, VAR_11)
def FUNC_6(VAR_12):
return os.path.join(VAR_2, VAR_12)
def FUNC_7(VAR_13):
VAR_23 = ['ffprobe', '-v', 'quiet', '-print_format', 'json',
'-show_format', VAR_13]
VAR_17 = subprocess.Popen(VAR_23, stdout=subprocess.PIPE, bufsize=-1)
VAR_24 = json.load(VAR_17.stdout)
return VAR_24
def FUNC_8(VAR_13, VAR_14, VAR_15):
VAR_23 = ['ffmpeg', '-v', 'quiet',
'-ss', str(VAR_14),
'-t', str(VAR_15),
'-i', str(VAR_13),
'-movflags', 'frag_keyframe+empty_moov',
'-c', 'copy',
'-f', 'mp4',
'pipe:1']
VAR_17 = subprocess.Popen(VAR_23, stdout=subprocess.PIPE, bufsize=-1)
return VAR_17
|
from builtins import range, str
import datetime
import json
import os
import subprocess
import sys
from math import ceil
from flask import Blueprint, Response, request, stream_with_context, url_for
from opendiamond.dataretriever.util import DiamondTextAttr
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
VAR_0 = 'video'
VAR_1 = False
VAR_2 = VAR_3 = None
def FUNC_0(VAR_4):
global VAR_2, VAR_3 # pylint: disable=global-statement
VAR_2 = VAR_4.indexdir
VAR_3 = VAR_4.dataroot
VAR_5 = Blueprint('video_store', __name__)
@VAR_5.route('/scope/<VAR_6>')
@VAR_5.route('/scope/VAR_7/<int:VAR_7>/VAR_8/<int:VAR_8>/<VAR_6>')
def FUNC_1(VAR_6, VAR_7=5, VAR_8=5):
VAR_12 = 'GIDIDX' + VAR_6.upper()
def FUNC_9():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist>\n'
with open(FUNC_6(VAR_12), 'rt') as f:
for line in f:
VAR_10 = line.strip()
VAR_13 = str(FUNC_5(VAR_10))
try:
VAR_25 = FUNC_7(VAR_13)
VAR_26 = float(VAR_25['format']['duration'])
VAR_27 = int(ceil(VAR_26 / VAR_7))
yield '<count adjust="{}"/>\n'.format(VAR_27)
for clip in range(VAR_27):
yield FUNC_4(VAR_9=clip * VAR_7, VAR_8=span, VAR_10=video) + '\n'
except Exception as e:
print("Error parsing {}. {}. Skip.".format(VAR_10, str(e)), file=sys.stderr)
pass
yield '</objectlist>\n'
VAR_16 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_9()),
status="200 OK",
VAR_16=headers)
@VAR_5.route('/id/VAR_9/<int:VAR_9>/VAR_8/<int:VAR_8>/<path:VAR_10>')
def FUNC_2(VAR_9, VAR_8, VAR_10):
VAR_16 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_4(VAR_9, VAR_8, VAR_10),
"200 OK",
VAR_16=headers)
@VAR_5.route('/obj/VAR_9/<int:VAR_9>/VAR_8/<int:VAR_8>/<path:VAR_10>')
def FUNC_3(VAR_9, VAR_8, VAR_10):
VAR_13 = str(FUNC_5(VAR_10))
VAR_17 = FUNC_8(VAR_13,
VAR_14=VAR_9,
VAR_15=VAR_8)
def FUNC_9():
while True:
VAR_24 = VAR_17.stdout.read(4096)
if not VAR_24:
break
yield VAR_24
VAR_16 = Headers([('Content-Type', 'video/mp4')])
VAR_18 = Response(stream_with_context(FUNC_9()),
status="200 OK",
VAR_16=headers)
VAR_19 = os.stat(VAR_13)
VAR_20 = VAR_19.st_mtime
VAR_21 = VAR_19.st_size
VAR_22 = "{}_{}_{}_{}".format(VAR_20, VAR_21, VAR_9, VAR_8)
VAR_18.last_modified = VAR_20
VAR_18.set_etag(VAR_22=etag)
VAR_18.cache_control.public = True
VAR_18.cache_control.max_age = \
datetime.timedelta(days=365).total_seconds()
VAR_18.make_conditional(request)
return VAR_18
def FUNC_4(VAR_9, VAR_8, VAR_10):
return '<object id="{}" src="{}" />'.format(
url_for('.get_object_id', VAR_9=start, VAR_8=span, VAR_10=video),
url_for('.get_object', VAR_9=start, VAR_8=span, VAR_10=video))
def FUNC_5(VAR_11):
return safe_join(VAR_3, VAR_11)
def FUNC_6(VAR_12):
return safe_join(VAR_2, VAR_12)
def FUNC_7(VAR_13):
VAR_23 = ['ffprobe', '-v', 'quiet', '-print_format', 'json',
'-show_format', VAR_13]
VAR_17 = subprocess.Popen(VAR_23, stdout=subprocess.PIPE, bufsize=-1)
VAR_24 = json.load(VAR_17.stdout)
return VAR_24
def FUNC_8(VAR_13, VAR_14, VAR_15):
VAR_23 = ['ffmpeg', '-v', 'quiet',
'-ss', str(VAR_14),
'-t', str(VAR_15),
'-i', str(VAR_13),
'-movflags', 'frag_keyframe+empty_moov',
'-c', 'copy',
'-f', 'mp4',
'pipe:1']
VAR_17 = subprocess.Popen(VAR_23, stdout=subprocess.PIPE, bufsize=-1)
return VAR_17
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
13,
20,
24,
25,
26,
30,
31,
36,
37,
39,
44,
49,
51,
66,
68,
73,
74,
81,
82,
85,
86,
91,
98,
103,
114,
116,
117,
122,
123,
126,
127,
130,
131,
135,
138,
140,
141,
161,
164,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
13,
20,
25,
26,
27,
31,
32,
37,
38,
40,
45,
50,
52,
67,
69,
74,
75,
82,
83,
86,
87,
92,
99,
104,
115,
117,
118,
123,
124,
127,
128,
131,
132,
136,
139,
141,
142,
162,
165,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.http.response import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.i18n import set_language
from itertools import chain
from shuup.apps.provides import get_provide_objects
from .views.basket import BasketView
from .views.category import AllCategoriesView, CategoryView
from .views.checkout import get_checkout_view
from .views.dashboard import DashboardView
from .views.index import IndexView
from .views.misc import (
force_anonymous_contact,
force_company_contact,
force_person_contact,
stop_impersonating,
toggle_all_seeing,
)
from .views.order import OrderCompleteView
from .views.payment import ProcessPaymentView
from .views.product import ProductDetailView
from .views.upload import media_upload
# TODO: Check _not_here_yet URLs in this file
def _not_here_yet(request, *args, **kwargs):
return HttpResponse("Not here yet: %s (%r, %r)" % (request.path, args, kwargs), status=410)
# Use a different js catalog function in front urlpatterns to prevent forcing
# the shop language settings in admin js catalog.
def front_javascript_catalog_all(request, domain="djangojs"):
from shuup.utils.i18n import javascript_catalog_all
return javascript_catalog_all(request, domain)
checkout_view = get_checkout_view()
urlpatterns = [
url(r"^set-language/$", csrf_exempt(set_language), name="set-language"),
url(r"^i18n.js$", front_javascript_catalog_all, name="js-catalog"),
url(r"^checkout/$", checkout_view, name="checkout"),
url(r"^checkout/(?P<phase>.+)/$", checkout_view, name="checkout"),
url(r"^basket/$", csrf_exempt(BasketView.as_view()), name="basket"),
url(r"^dashboard/$", login_required(DashboardView.as_view()), name="dashboard"),
url(r"^toggle-allseeing/$", login_required(toggle_all_seeing), name="toggle-all-seeing"),
url(r"^force-anonymous-contact/$", login_required(force_anonymous_contact), name="force-anonymous-contact"),
url(r"^force-company-contact/$", login_required(force_company_contact), name="force-company-contact"),
url(r"^force-person-contact/$", login_required(force_person_contact), name="force-person-contact"),
url(r"^stop-impersonating/$", login_required(stop_impersonating), name="stop-impersonating"),
url(r"^upload-media/$", login_required(media_upload), name="media-upload"),
url(
r"^order/payment/(?P<pk>.+?)/(?P<key>.+?)/$",
csrf_exempt(ProcessPaymentView.as_view()),
kwargs={"mode": "payment"},
name="order_process_payment",
),
url(
r"^order/process-payment/(?P<pk>.+?)/(?P<key>.+?)/$",
csrf_exempt(ProcessPaymentView.as_view()),
kwargs={"mode": "return"},
name="order_process_payment_return",
),
url(
r"^order/payment-canceled/(?P<pk>.+?)/(?P<key>.+?)/$",
ProcessPaymentView.as_view(),
kwargs={"mode": "cancel"},
name="order_payment_canceled",
),
url(r"^order/complete/(?P<pk>.+?)/(?P<key>.+?)/$", csrf_exempt(OrderCompleteView.as_view()), name="order_complete"),
url(r"^order/verification/(?P<pk>.+?)/(?P<key>.+?)/$", _not_here_yet, name="order_requires_verification"),
url(
r"^order/get-attachment/(?P<order_pk>\d+)/(?P<key>.+?)/(?P<att_pk>\d+)/",
_not_here_yet,
name="secure_attachment",
),
url(r"^p/(?P<pk>\d+)-(?P<slug>.*)/$", csrf_exempt(ProductDetailView.as_view()), name="product"),
url(
r"^s/(?P<supplier_pk>\d+)-(?P<pk>\d+)-(?P<slug>.*)/$",
csrf_exempt(ProductDetailView.as_view()),
name="supplier-product",
),
url(r"^c/$", csrf_exempt(AllCategoriesView.as_view()), name="all-categories"),
url(r"^c/(?P<pk>\d+)-(?P<slug>.*)/$", csrf_exempt(CategoryView.as_view()), name="category"),
]
# TODO: Document `front_urls_pre`, `front_urls` and `front_urls_post`.
def _get_extension_urlpatterns(provide_category):
return chain(*get_provide_objects(provide_category))
app_name = "shuup"
urlpatterns = list(
chain(
*(
_get_extension_urlpatterns("front_urls_pre"),
urlpatterns,
_get_extension_urlpatterns("front_urls"),
[url(r"^$", IndexView.as_view(), name="index")],
_get_extension_urlpatterns("front_urls_post"),
)
)
)
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.http.response import HttpResponse
from django.utils.html import escape
from django.views.decorators.csrf import csrf_exempt
from django.views.i18n import set_language
from itertools import chain
from shuup.apps.provides import get_provide_objects
from .views.basket import BasketView
from .views.category import AllCategoriesView, CategoryView
from .views.checkout import get_checkout_view
from .views.dashboard import DashboardView
from .views.index import IndexView
from .views.misc import (
force_anonymous_contact,
force_company_contact,
force_person_contact,
stop_impersonating,
toggle_all_seeing,
)
from .views.order import OrderCompleteView
from .views.payment import ProcessPaymentView
from .views.product import ProductDetailView
from .views.upload import media_upload
# TODO: Check _not_here_yet URLs in this file
def _not_here_yet(request, *args, **kwargs):
return HttpResponse("Not here yet: %s (%r, %r)" % (request.path, escape(args), escape(kwargs)), status=410)
# Use a different js catalog function in front urlpatterns to prevent forcing
# the shop language settings in admin js catalog.
def front_javascript_catalog_all(request, domain="djangojs"):
from shuup.utils.i18n import javascript_catalog_all
return javascript_catalog_all(request, domain)
checkout_view = get_checkout_view()
urlpatterns = [
url(r"^set-language/$", csrf_exempt(set_language), name="set-language"),
url(r"^i18n.js$", front_javascript_catalog_all, name="js-catalog"),
url(r"^checkout/$", checkout_view, name="checkout"),
url(r"^checkout/(?P<phase>.+)/$", checkout_view, name="checkout"),
url(r"^basket/$", csrf_exempt(BasketView.as_view()), name="basket"),
url(r"^dashboard/$", login_required(DashboardView.as_view()), name="dashboard"),
url(r"^toggle-allseeing/$", login_required(toggle_all_seeing), name="toggle-all-seeing"),
url(r"^force-anonymous-contact/$", login_required(force_anonymous_contact), name="force-anonymous-contact"),
url(r"^force-company-contact/$", login_required(force_company_contact), name="force-company-contact"),
url(r"^force-person-contact/$", login_required(force_person_contact), name="force-person-contact"),
url(r"^stop-impersonating/$", login_required(stop_impersonating), name="stop-impersonating"),
url(r"^upload-media/$", login_required(media_upload), name="media-upload"),
url(
r"^order/payment/(?P<pk>.+?)/(?P<key>.+?)/$",
csrf_exempt(ProcessPaymentView.as_view()),
kwargs={"mode": "payment"},
name="order_process_payment",
),
url(
r"^order/process-payment/(?P<pk>.+?)/(?P<key>.+?)/$",
csrf_exempt(ProcessPaymentView.as_view()),
kwargs={"mode": "return"},
name="order_process_payment_return",
),
url(
r"^order/payment-canceled/(?P<pk>.+?)/(?P<key>.+?)/$",
ProcessPaymentView.as_view(),
kwargs={"mode": "cancel"},
name="order_payment_canceled",
),
url(r"^order/complete/(?P<pk>.+?)/(?P<key>.+?)/$", csrf_exempt(OrderCompleteView.as_view()), name="order_complete"),
url(r"^order/verification/(?P<pk>.+?)/(?P<key>.+?)/$", _not_here_yet, name="order_requires_verification"),
url(
r"^order/get-attachment/(?P<order_pk>\d+)/(?P<key>.+?)/(?P<att_pk>\d+)/",
_not_here_yet,
name="secure_attachment",
),
url(r"^p/(?P<pk>\d+)-(?P<slug>.*)/$", csrf_exempt(ProductDetailView.as_view()), name="product"),
url(
r"^s/(?P<supplier_pk>\d+)-(?P<pk>\d+)-(?P<slug>.*)/$",
csrf_exempt(ProductDetailView.as_view()),
name="supplier-product",
),
url(r"^c/$", csrf_exempt(AllCategoriesView.as_view()), name="all-categories"),
url(r"^c/(?P<pk>\d+)-(?P<slug>.*)/$", csrf_exempt(CategoryView.as_view()), name="category"),
]
# TODO: Document `front_urls_pre`, `front_urls` and `front_urls_post`.
def _get_extension_urlpatterns(provide_category):
return chain(*get_provide_objects(provide_category))
app_name = "shuup"
urlpatterns = list(
chain(
*(
_get_extension_urlpatterns("front_urls_pre"),
urlpatterns,
_get_extension_urlpatterns("front_urls"),
[url(r"^$", IndexView.as_view(), name="index")],
_get_extension_urlpatterns("front_urls_post"),
)
)
)
| xss | {
"code": [
" return HttpResponse(\"Not here yet: %s (%r, %r)\" % (request.path, args, kwargs), status=410)"
],
"line_no": [
40
]
} | {
"code": [
"from django.utils.html import escape"
],
"line_no": [
13
]
} |
from __future__ import unicode_literals
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.http.response import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.i18n import set_language
from itertools import chain
from shuup.apps.provides import get_provide_objects
from .views.basket import BasketView
from .views.category import AllCategoriesView, CategoryView
from .views.checkout import get_checkout_view
from .views.dashboard import DashboardView
from .views.index import IndexView
from .views.misc import (
force_anonymous_contact,
force_company_contact,
force_person_contact,
stop_impersonating,
toggle_all_seeing,
)
from .views.order import OrderCompleteView
from .views.payment import ProcessPaymentView
from .views.product import ProductDetailView
from .views.upload import media_upload
def FUNC_0(VAR_0, *VAR_1, **VAR_2):
return HttpResponse("Not here yet: %s (%r, %r)" % (VAR_0.path, VAR_1, VAR_2), status=410)
def FUNC_1(VAR_0, VAR_3="djangojs"):
from shuup.utils.i18n import javascript_catalog_all
return javascript_catalog_all(VAR_0, VAR_3)
VAR_4 = get_checkout_view()
VAR_5 = [
url(r"^set-language/$", csrf_exempt(set_language), name="set-language"),
url(r"^i18n.js$", FUNC_1, name="js-catalog"),
url(r"^checkout/$", VAR_4, name="checkout"),
url(r"^checkout/(?P<phase>.+)/$", VAR_4, name="checkout"),
url(r"^basket/$", csrf_exempt(BasketView.as_view()), name="basket"),
url(r"^dashboard/$", login_required(DashboardView.as_view()), name="dashboard"),
url(r"^toggle-allseeing/$", login_required(toggle_all_seeing), name="toggle-all-seeing"),
url(r"^force-anonymous-contact/$", login_required(force_anonymous_contact), name="force-anonymous-contact"),
url(r"^force-company-contact/$", login_required(force_company_contact), name="force-company-contact"),
url(r"^force-person-contact/$", login_required(force_person_contact), name="force-person-contact"),
url(r"^stop-impersonating/$", login_required(stop_impersonating), name="stop-impersonating"),
url(r"^upload-media/$", login_required(media_upload), name="media-upload"),
url(
r"^order/payment/(?P<pk>.+?)/(?P<key>.+?)/$",
csrf_exempt(ProcessPaymentView.as_view()),
VAR_2={"mode": "payment"},
name="order_process_payment",
),
url(
r"^order/process-payment/(?P<pk>.+?)/(?P<key>.+?)/$",
csrf_exempt(ProcessPaymentView.as_view()),
VAR_2={"mode": "return"},
name="order_process_payment_return",
),
url(
r"^order/payment-canceled/(?P<pk>.+?)/(?P<key>.+?)/$",
ProcessPaymentView.as_view(),
VAR_2={"mode": "cancel"},
name="order_payment_canceled",
),
url(r"^order/complete/(?P<pk>.+?)/(?P<key>.+?)/$", csrf_exempt(OrderCompleteView.as_view()), name="order_complete"),
url(r"^order/verification/(?P<pk>.+?)/(?P<key>.+?)/$", FUNC_0, name="order_requires_verification"),
url(
r"^order/get-attachment/(?P<order_pk>\d+)/(?P<key>.+?)/(?P<att_pk>\d+)/",
FUNC_0,
name="secure_attachment",
),
url(r"^p/(?P<pk>\d+)-(?P<slug>.*)/$", csrf_exempt(ProductDetailView.as_view()), name="product"),
url(
r"^s/(?P<supplier_pk>\d+)-(?P<pk>\d+)-(?P<slug>.*)/$",
csrf_exempt(ProductDetailView.as_view()),
name="supplier-product",
),
url(r"^c/$", csrf_exempt(AllCategoriesView.as_view()), name="all-categories"),
url(r"^c/(?P<pk>\d+)-(?P<slug>.*)/$", csrf_exempt(CategoryView.as_view()), name="category"),
]
def FUNC_2(VAR_6):
return chain(*get_provide_objects(VAR_6))
VAR_7 = "shuup"
VAR_5 = list(
chain(
*(
FUNC_2("front_urls_pre"),
VAR_5,
FUNC_2("front_urls"),
[url(r"^$", IndexView.as_view(), name="index")],
FUNC_2("front_urls_post"),
)
)
)
|
from __future__ import unicode_literals
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.http.response import HttpResponse
from django.utils.html import escape
from django.views.decorators.csrf import csrf_exempt
from django.views.i18n import set_language
from itertools import chain
from shuup.apps.provides import get_provide_objects
from .views.basket import BasketView
from .views.category import AllCategoriesView, CategoryView
from .views.checkout import get_checkout_view
from .views.dashboard import DashboardView
from .views.index import IndexView
from .views.misc import (
force_anonymous_contact,
force_company_contact,
force_person_contact,
stop_impersonating,
toggle_all_seeing,
)
from .views.order import OrderCompleteView
from .views.payment import ProcessPaymentView
from .views.product import ProductDetailView
from .views.upload import media_upload
def FUNC_0(VAR_0, *VAR_1, **VAR_2):
return HttpResponse("Not here yet: %s (%r, %r)" % (VAR_0.path, escape(VAR_1), escape(VAR_2)), status=410)
def FUNC_1(VAR_0, VAR_3="djangojs"):
from shuup.utils.i18n import javascript_catalog_all
return javascript_catalog_all(VAR_0, VAR_3)
VAR_4 = get_checkout_view()
VAR_5 = [
url(r"^set-language/$", csrf_exempt(set_language), name="set-language"),
url(r"^i18n.js$", FUNC_1, name="js-catalog"),
url(r"^checkout/$", VAR_4, name="checkout"),
url(r"^checkout/(?P<phase>.+)/$", VAR_4, name="checkout"),
url(r"^basket/$", csrf_exempt(BasketView.as_view()), name="basket"),
url(r"^dashboard/$", login_required(DashboardView.as_view()), name="dashboard"),
url(r"^toggle-allseeing/$", login_required(toggle_all_seeing), name="toggle-all-seeing"),
url(r"^force-anonymous-contact/$", login_required(force_anonymous_contact), name="force-anonymous-contact"),
url(r"^force-company-contact/$", login_required(force_company_contact), name="force-company-contact"),
url(r"^force-person-contact/$", login_required(force_person_contact), name="force-person-contact"),
url(r"^stop-impersonating/$", login_required(stop_impersonating), name="stop-impersonating"),
url(r"^upload-media/$", login_required(media_upload), name="media-upload"),
url(
r"^order/payment/(?P<pk>.+?)/(?P<key>.+?)/$",
csrf_exempt(ProcessPaymentView.as_view()),
VAR_2={"mode": "payment"},
name="order_process_payment",
),
url(
r"^order/process-payment/(?P<pk>.+?)/(?P<key>.+?)/$",
csrf_exempt(ProcessPaymentView.as_view()),
VAR_2={"mode": "return"},
name="order_process_payment_return",
),
url(
r"^order/payment-canceled/(?P<pk>.+?)/(?P<key>.+?)/$",
ProcessPaymentView.as_view(),
VAR_2={"mode": "cancel"},
name="order_payment_canceled",
),
url(r"^order/complete/(?P<pk>.+?)/(?P<key>.+?)/$", csrf_exempt(OrderCompleteView.as_view()), name="order_complete"),
url(r"^order/verification/(?P<pk>.+?)/(?P<key>.+?)/$", FUNC_0, name="order_requires_verification"),
url(
r"^order/get-attachment/(?P<order_pk>\d+)/(?P<key>.+?)/(?P<att_pk>\d+)/",
FUNC_0,
name="secure_attachment",
),
url(r"^p/(?P<pk>\d+)-(?P<slug>.*)/$", csrf_exempt(ProductDetailView.as_view()), name="product"),
url(
r"^s/(?P<supplier_pk>\d+)-(?P<pk>\d+)-(?P<slug>.*)/$",
csrf_exempt(ProductDetailView.as_view()),
name="supplier-product",
),
url(r"^c/$", csrf_exempt(AllCategoriesView.as_view()), name="all-categories"),
url(r"^c/(?P<pk>\d+)-(?P<slug>.*)/$", csrf_exempt(CategoryView.as_view()), name="category"),
]
def FUNC_2(VAR_6):
return chain(*get_provide_objects(VAR_6))
VAR_7 = "shuup"
VAR_5 = list(
chain(
*(
FUNC_2("front_urls_pre"),
VAR_5,
FUNC_2("front_urls"),
[url(r"^$", IndexView.as_view(), name="index")],
FUNC_2("front_urls_post"),
)
)
)
| [
1,
2,
3,
4,
5,
6,
7,
9,
16,
18,
35,
36,
37,
38,
41,
42,
43,
44,
47,
49,
50,
52,
53,
101,
102,
103,
104,
107,
108,
121
] | [
1,
2,
3,
4,
5,
6,
7,
9,
17,
19,
36,
37,
38,
39,
42,
43,
44,
45,
48,
50,
51,
53,
54,
102,
103,
104,
105,
108,
109,
122
] |
2CWE-601
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.views.decorators.http import require_POST
from django.contrib import messages
from .models import TopicFavorite
from .forms import FavoriteForm
from ..models import Topic
from ...core import utils
@require_POST
@login_required
def create(request, topic_id):
topic = get_object_or_404(Topic, pk=topic_id)
form = FavoriteForm(user=request.user, topic=topic, data=request.POST)
if form.is_valid():
form.save()
else:
messages.error(request, utils.render_form_errors(form))
return redirect(request.POST.get('next', topic.get_absolute_url()))
@require_POST
@login_required
def delete(request, pk):
favorite = get_object_or_404(TopicFavorite, pk=pk, user=request.user)
favorite.delete()
return redirect(request.POST.get('next', favorite.topic.get_absolute_url()))
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from .models import TopicFavorite
from .forms import FavoriteForm
from ..models import Topic
from spirit.core import utils
from spirit.core.utils.http import safe_redirect
@require_POST
@login_required
def create(request, topic_id):
topic = get_object_or_404(Topic, pk=topic_id)
form = FavoriteForm(user=request.user, topic=topic, data=request.POST)
if form.is_valid():
form.save()
else:
messages.error(request, utils.render_form_errors(form))
return safe_redirect(request, 'next', topic.get_absolute_url(), method='POST')
@require_POST
@login_required
def delete(request, pk):
favorite = get_object_or_404(TopicFavorite, pk=pk, user=request.user)
favorite.delete()
return safe_redirect(request, 'next', favorite.topic.get_absolute_url(), method='POST')
| open_redirect | {
"code": [
"from django.shortcuts import redirect",
"from ...core import utils",
" return redirect(request.POST.get('next', topic.get_absolute_url()))",
" return redirect(request.POST.get('next', favorite.topic.get_absolute_url()))"
],
"line_no": [
5,
12,
26,
34
]
} | {
"code": [
"from spirit.core import utils",
"from spirit.core.utils.http import safe_redirect",
" return safe_redirect(request, 'next', topic.get_absolute_url(), method='POST')",
" return safe_redirect(request, 'next', favorite.topic.get_absolute_url(), method='POST')"
],
"line_no": [
11,
12,
26,
34
]
} |
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.views.decorators.http import require_POST
from django.contrib import messages
from .models import TopicFavorite
from .forms import FavoriteForm
from ..models import Topic
from ...core import utils
@require_POST
@login_required
def FUNC_0(VAR_0, VAR_1):
VAR_3 = get_object_or_404(Topic, VAR_2=VAR_1)
VAR_4 = FavoriteForm(user=VAR_0.user, VAR_3=topic, data=VAR_0.POST)
if VAR_4.is_valid():
VAR_4.save()
else:
messages.error(VAR_0, utils.render_form_errors(VAR_4))
return redirect(VAR_0.POST.get('next', VAR_3.get_absolute_url()))
@require_POST
@login_required
def FUNC_1(VAR_0, VAR_2):
VAR_5 = get_object_or_404(TopicFavorite, VAR_2=pk, user=VAR_0.user)
VAR_5.delete()
return redirect(VAR_0.POST.get('next', VAR_5.topic.get_absolute_url()))
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from .models import TopicFavorite
from .forms import FavoriteForm
from ..models import Topic
from spirit.core import utils
from spirit.core.utils.http import safe_redirect
@require_POST
@login_required
def FUNC_0(VAR_0, VAR_1):
VAR_3 = get_object_or_404(Topic, VAR_2=VAR_1)
VAR_4 = FavoriteForm(user=VAR_0.user, VAR_3=topic, data=VAR_0.POST)
if VAR_4.is_valid():
VAR_4.save()
else:
messages.error(VAR_0, utils.render_form_errors(VAR_4))
return safe_redirect(VAR_0, 'next', VAR_3.get_absolute_url(), method='POST')
@require_POST
@login_required
def FUNC_1(VAR_0, VAR_2):
VAR_5 = get_object_or_404(TopicFavorite, VAR_2=pk, user=VAR_0.user)
VAR_5.delete()
return safe_redirect(VAR_0, 'next', VAR_5.topic.get_absolute_url(), method='POST')
| [
1,
2,
8,
13,
14,
20,
25,
27,
28,
35
] | [
1,
2,
7,
13,
14,
20,
25,
27,
28,
35
] |
2CWE-601
| #!/bin/python
# -*- coding: utf-8 -*-
"""
Unit tests for gluon.tools
"""
import os
import sys
import shutil
import tempfile
import smtplib
import datetime
import unittest
DEFAULT_URI = os.getenv('DB', 'sqlite:memory')
from gluon.dal import DAL, Field
from pydal.objects import Table
from gluon import tools
from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose
from gluon._compat import PY2, to_bytes
from gluon.globals import Request, Response, Session
from gluon.storage import Storage
from gluon.languages import TranslatorFactory
from gluon.http import HTTP
from gluon import SPAN, H3, TABLE, TR, TD, A, URL, current
IS_IMAP = "imap" in DEFAULT_URI
class TestMail(unittest.TestCase):
"""
Test the Mail class.
"""
class Message(object):
def __init__(self, sender, to, payload):
self.sender = sender
self.to = to
self.payload = payload
self._parsed_payload = None
@property
def parsed_payload(self):
if self._parsed_payload is None:
import email
self._parsed_payload = email.message_from_string(self.payload)
return self._parsed_payload
class DummySMTP(object):
"""
Dummy smtp server
NOTE: Test methods should take care of always leaving inbox and users empty when they finish.
"""
inbox = []
users = {}
def __init__(self, address, port, **kwargs):
self.address = address
self.port = port
self.has_quit = False
self.tls = False
def login(self, username, password):
if username not in self.users or self.users[username] != password:
raise smtplib.SMTPAuthenticationError
self.username = username
self.password = password
def sendmail(self, sender, to, payload):
self.inbox.append(TestMail.Message(sender, to, payload))
def quit(self):
self.has_quit = True
def ehlo(self, hostname=None):
pass
def starttls(self):
self.tls = True
def setUp(self):
self.original_SMTP = smtplib.SMTP
self.original_SMTP_SSL = smtplib.SMTP_SSL
smtplib.SMTP = TestMail.DummySMTP
smtplib.SMTP_SSL = TestMail.DummySMTP
def tearDown(self):
smtplib.SMTP = self.original_SMTP
smtplib.SMTP_SSL = self.original_SMTP_SSL
def test_hello_world(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='world'))
message = TestMail.DummySMTP.inbox.pop()
self.assertEqual(message.sender, mail.settings.sender)
self.assertEqual(message.to, ['somebody@example.com'])
header = "To: somebody@example.com\nReply-To: us@example.com\nSubject: hello\n"
self.assertTrue(header in message.payload)
self.assertTrue(message.payload.endswith('world'))
def test_failed_login(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
mail.settings.login = 'username:password'
self.assertFalse(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='world'))
def test_login(self):
TestMail.DummySMTP.users['username'] = 'password'
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
mail.settings.login = 'username:password'
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='world'))
del TestMail.DummySMTP.users['username']
TestMail.DummySMTP.inbox.pop()
def test_html(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='<html><head></head><body></body></html>'))
message = TestMail.DummySMTP.inbox.pop()
self.assertTrue('Content-Type: text/html' in message.payload)
def test_alternative(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
self.assertTrue(mail.send(to=['somebody@example.com'],
message=('Text only', '<html><pre>HTML Only</pre></html>')))
message = TestMail.DummySMTP.inbox.pop()
self.assertTrue(message.parsed_payload.is_multipart())
self.assertTrue(message.parsed_payload.get_content_type() == 'multipart/alternative')
parts = message.parsed_payload.get_payload()
self.assertTrue('Text only' in parts[0].as_string())
self.assertTrue('<html><pre>HTML Only</pre></html>' in parts[1].as_string())
def test_ssl(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
mail.settings.ssl = True
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='world'))
TestMail.DummySMTP.inbox.pop()
def test_tls(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
mail.settings.tls = True
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='world'))
TestMail.DummySMTP.inbox.pop()
def test_attachment(self):
module_file = os.path.abspath(__file__)
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
message='world',
attachments=Mail.Attachment(module_file)))
message = TestMail.DummySMTP.inbox.pop()
attachment = message.parsed_payload.get_payload(1).get_payload(decode=True)
with open(module_file, 'rb') as mf:
self.assertEqual(to_bytes(attachment), to_bytes(mf.read()))
# Test missing attachment name error
stream = open(module_file)
self.assertRaises(Exception, lambda *args, **kwargs: Mail.Attachment(*args, **kwargs), stream)
stream.close()
# Test you can define content-id and content type
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
message='world',
attachments=Mail.Attachment(module_file, content_id='trololo', content_type='tra/lala')))
message = TestMail.DummySMTP.inbox.pop()
self.assertTrue('Content-Type: tra/lala' in message.payload)
self.assertTrue('Content-Id: <trololo>' in message.payload)
# TODO: class TestAuthJWT(unittest.TestCase):
class TestAuthJWT(unittest.TestCase):
def setUp(self):
from gluon.tools import AuthJWT
from gluon import current
self.request = Request(env={})
self.request.application = 'a'
self.request.controller = 'c'
self.request.function = 'f'
self.request.folder = 'applications/admin'
self.current = current
self.current.request = self.request
self.db = DAL(DEFAULT_URI, check_reserved=['all'])
self.auth = Auth(self.db)
self.auth.define_tables(username=True, signature=False)
self.user_data = dict(username='jwtuser', password='jwtuser123')
self.db.auth_user.insert(username=self.user_data['username'],
password=str(
self.db.auth_user.password.requires[0](
self.user_data['password'])[0]))
self.jwtauth = AuthJWT(self.auth, secret_key='secret', verify_expiration=True)
def test_jwt_token_manager(self):
import gluon.serializers
self.request.vars.update(self.user_data)
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
del self.request.vars['username']
del self.request.vars['password']
self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
def test_allows_jwt(self):
import gluon.serializers
self.request.vars.update(self.user_data)
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
del self.request.vars['username']
del self.request.vars['password']
self.token = self.jwtauth.jwt_token_manager()
self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
@self.jwtauth.allows_jwt()
def optional_auth():
self.assertEqual(self.user_data['username'], self.auth.user.username)
optional_auth()
@unittest.skipIf(IS_IMAP, "TODO: Imap raises 'Connection refused'")
# class TestAuth(unittest.TestCase):
#
# def setUp(self):
# request = Request(env={})
# request.application = 'a'
# request.controller = 'c'
# request.function = 'f'
# request.folder = 'applications/admin'
# response = Response()
# session = Session()
# T = TranslatorFactory('', 'en')
# session.connect(request, response)
# from gluon.globals import current
# current.request = request
# current.response = response
# current.session = session
# current.T = T
# self.db = DAL(DEFAULT_URI, check_reserved=['all'])
# self.auth = Auth(self.db)
# self.auth.define_tables(username=True, signature=False)
# self.db.define_table('t0', Field('tt'), self.auth.signature)
# self.auth.enable_record_versioning(self.db)
# # Create a user
# self.auth.get_or_create_user(dict(first_name='Bart',
# last_name='Simpson',
# username='bart',
# email='bart@simpson.com',
# password='bart_password',
# registration_key='bart',
# registration_id=''
# ))
# # self.auth.settings.registration_requires_verification = False
# # self.auth.settings.registration_requires_approval = False
#
# def test_assert_setup(self):
# self.assertEqual(self.db(self.db.auth_user.username == 'bart').select().first()['username'], 'bart')
# self.assertTrue('auth_user' in self.db)
# self.assertTrue('auth_group' in self.db)
# self.assertTrue('auth_membership' in self.db)
# self.assertTrue('auth_permission' in self.db)
# self.assertTrue('auth_event' in self.db)
#
# def test_enable_record_versioning(self):
# self.assertTrue('t0_archive' in self.db)
#
# def test_basic_blank_forms(self):
# for f in ['login', 'retrieve_password',
# 'retrieve_username',
# # 'register' # register complain about : client_side=self.settings.client_side
# ]:
# html_form = getattr(self.auth, f)().xml()
# self.assertTrue('name="_formkey"' in html_form)
#
# # NOTE: Not sure it is the proper way to logout_bare() as there is not methods for that and auth.logout() failed
# self.auth.logout_bare()
# # self.assertTrue(self.auth.is_logged_in())
#
# for f in ['logout', 'verify_email', 'reset_password',
# 'change_password', 'profile', 'groups']:
# self.assertRaisesRegexp(HTTP, "303*", getattr(self.auth, f))
#
# self.assertRaisesRegexp(HTTP, "401*", self.auth.impersonate)
#
# try:
# for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
# 'auth_membership', 'auth_permission', 'auth_group',
# 'auth_user']:
# self.db[t].drop()
# except SyntaxError as e:
# # GAE doesn't support drop
# pass
# return
#
# def test_get_or_create_user(self):
# self.db.auth_user.insert(email='user1@test.com', username='user1', password='password_123')
# self.db.commit()
# # True case
# self.assertEqual(self.auth.get_or_create_user({'email': 'user1@test.com',
# 'username': 'user1',
# 'password': 'password_123'
# })['username'], 'user1')
# # user2 doesn't exist yet and get created
# self.assertEqual(self.auth.get_or_create_user({'email': 'user2@test.com',
# 'username': 'user2'})['username'], 'user2')
# # user3 for corner case
# self.assertEqual(self.auth.get_or_create_user({'first_name': 'Omer',
# 'last_name': 'Simpson',
# 'email': 'user3@test.com',
# 'registration_id': 'user3',
# 'username': 'user3'})['username'], 'user3')
# # False case
# self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
# self.db.auth_user.truncate()
# self.db.commit()
#
# def test_login_bare(self):
# # The following test case should succeed but failed as I never received the user record but False
# self.auth.login_bare(username='bart@simpson.com', password='bart_password')
# self.assertTrue(self.auth.is_logged_in())
# # Failing login because bad_password
# self.assertEqual(self.auth.login_bare(username='bart', password='wrong_password'), False)
# self.db.auth_user.truncate()
#
# def test_register_bare(self):
# # corner case empty register call register_bare without args
# self.assertRaises(ValueError, self.auth.register_bare)
# # failing register_bare user already exist
# self.assertEqual(self.auth.register_bare(username='bart', password='wrong_password'), False)
# # successful register_bare
# self.assertEqual(self.auth.register_bare(username='user2',
# email='user2@test.com',
# password='password_123')['username'], 'user2')
# # raise ValueError
# self.assertRaises(ValueError, self.auth.register_bare,
# **dict(wrong_field_name='user3', password='password_123'))
# # raise ValueError wrong email
# self.assertRaises(ValueError, self.auth.register_bare,
# **dict(email='user4@', password='password_123'))
# self.db.auth_user.truncate()
# self.db.commit()
#
# def test_bulk_register(self):
# self.auth.login_bare(username='bart', password='bart_password')
# self.auth.settings.bulk_register_enabled = True
# bulk_register_form = self.auth.bulk_register(max_emails=10).xml()
# self.assertTrue('name="_formkey"' in bulk_register_form)
#
# def test_change_password(self):
# self.auth.login_bare(username='bart', password='bart_password')
# change_password_form = getattr(self.auth, 'change_password')().xml()
# self.assertTrue('name="_formkey"' in change_password_form)
#
# def test_profile(self):
# self.auth.login_bare(username='bart', password='bart_password')
# profile_form = getattr(self.auth, 'profile')().xml()
# self.assertTrue('name="_formkey"' in profile_form)
#
# # def test_impersonate(self):
# # # Create a user to be impersonated
# # self.auth.get_or_create_user(dict(first_name='Omer',
# # last_name='Simpson',
# # username='omer',
# # email='omer@test.com',
# # password='password_omer',
# # registration_key='',
# # registration_id=''))
# # # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
# # self.auth.add_group('impersonate')
# # self.auth.add_membership(user_id=1,
# # group_id=self.db(self.db.auth_user.username == 'bart'
# # ).select(self.db.auth_user.id).first().id)
# # self.auth.add_permission(group_id=self.db(self.db.auth_group.role == 'impersonate'
# # ).select(self.db.auth_group.id).first().id,
# # name='impersonate',
# # table_name='auth_user',
# # record_id=0)
# # # Bart login
# # self.auth.login_bare(username='bart', password='bart_password')
# # self.assertTrue(self.auth.is_logged_in())
# # # Bart impersonate Omer
# # omer_id = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
# # impersonate_form = self.auth.impersonate(user_id=omer_id)
# # self.assertTrue(self.auth.is_impersonating())
# # self.assertEqual(impersonate_form, 'test')
#
# # def test_impersonate(self):
# # request = Request(env={})
# # request.application = 'a'
# # request.controller = 'c'
# # request.function = 'f'
# # request.folder = 'applications/admin'
# # response = Response()
# # session = Session()
# # T = TranslatorFactory('', 'en')
# # session.connect(request, response)
# # from gluon.globals import current
# # current.request = request
# # current.response = response
# # current.session = session
# # current.T = T
# # db = DAL(DEFAULT_URI, check_reserved=['all'])
# # auth = Auth(db)
# # auth.define_tables(username=True, signature=False)
# # db.define_table('t0', Field('tt'), auth.signature)
# # auth.enable_record_versioning(db)
# # # Create a user
# # auth.get_or_create_user(dict(first_name='Bart',
# # last_name='Simpson',
# # username='bart',
# # email='bart@simpson.com',
# # password='bart_password',
# # registration_key='bart',
# # registration_id=''
# # ))
# # # Create a user to be impersonated
# # auth.get_or_create_user(dict(first_name='Omer',
# # last_name='Simpson',
# # username='omer',
# # email='omer@test.com',
# # password='password_omer',
# # registration_key='',
# # registration_id=''))
# # # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
# # auth.add_group('impersonate')
# # auth.add_membership(user_id=1,
# # group_id=db(db.auth_user.username == 'bart'
# # ).select(db.auth_user.id).first().id)
# # auth.add_permission(group_id=db(db.auth_group.role == 'impersonate'
# # ).select(db.auth_group.id).first().id,
# # name='impersonate',
# # table_name='auth_user',
# # record_id=0)
# # # Bart login
# # auth.login_bare(username='bart', password='bart_password')
# # # Bart impersonate Omer
# # omer_id = db(db.auth_user.username == 'omer').select(db.auth_user.id).first().id
# # impersonate_form = auth.impersonate(user_id=omer_id)
# # self.assertTrue(auth.is_impersonating())
# # self.assertEqual(impersonate_form, 'test')
class TestAuth(unittest.TestCase):
    """Integration-style tests for gluon.tools.Auth backed by a scratch DAL.

    setUp() builds a fresh request/response/session context, defines the
    auth tables plus one versioned table 't0', and registers a single user
    'bart' that most tests log in via Auth.login_user() (bypassing forms).
    """
    def myassertRaisesRegex(self, *args, **kwargs):
        """Py2/Py3 shim: dispatch to assertRaisesRegexp or assertRaisesRegex."""
        if PY2:
            return getattr(self, 'assertRaisesRegexp')(*args, **kwargs)
        return getattr(self, 'assertRaisesRegex')(*args, **kwargs)
    def setUp(self):
        """Wire up a fake web2py environment and create the 'bart' user."""
        self.request = Request(env={})
        self.request.application = 'a'
        self.request.controller = 'c'
        self.request.function = 'f'
        self.request.folder = 'applications/admin'
        self.response = Response()
        self.session = Session()
        T = TranslatorFactory('', 'en')
        self.session.connect(self.request, self.response)
        from gluon.globals import current
        self.current = current
        self.current.request = self.request
        self.current.response = self.response
        self.current.session = self.session
        self.current.T = T
        self.db = DAL(DEFAULT_URI, check_reserved=['all'])
        self.auth = Auth(self.db)
        self.auth.define_tables(username=True, signature=False)
        self.db.define_table('t0', Field('tt'), self.auth.signature)
        self.auth.enable_record_versioning(self.db)
        self.auth.settings.registration_requires_verification = False
        self.auth.settings.registration_requires_approval = False
        # Create a user
        # Note: get_or_create_user() doesn't seem to create the user properly, which
        # makes the login_bare() test fail; it is better to use register_bare().
        # Inserting the user manually via db doesn't work properly either.
        # Not working
        # self.auth.get_or_create_user(dict(first_name='Bart',
        #                              last_name='Simpson',
        #                              username='bart',
        #                              email='bart@simpson.com',
        #                              password='bart_password',
        #                              # registration_key=None,
        #                              #registration_id='bart@simpson.com'
        #                              ),
        #                         login=False)
        # Not working
        # self.db.auth_user.insert(first_name='Bart',
        #                          last_name='Simpson',
        #                          username='bart',
        #                          email='bart@simpson.com',
        #                          password='bart_password')
        # self.db.commit()
        self.auth.register_bare(first_name='Bart',
                                last_name='Simpson',
                                username='bart',
                                email='bart@simpson.com',
                                password='bart_password')
    def test_assert_setup(self):
        """Sanity-check that setUp created the auth tables and user 'bart'."""
        self.assertTrue('auth_user' in self.db)
        self.assertTrue('auth_group' in self.db)
        self.assertTrue('auth_membership' in self.db)
        self.assertTrue('auth_permission' in self.db)
        self.assertTrue('auth_event' in self.db)
        bart_record = self.db(self.db.auth_user.username == 'bart').select().first()
        self.assertEqual(bart_record['username'], 'bart')
        self.assertEqual(bart_record['registration_key'], '')
        bart_id = self.db(self.db.auth_user.username == 'bart').select().first().id
        bart_group_id = self.db(self.db.auth_group.role == 'user_{0}'.format(bart_id)).select().first().id
        self.assertTrue(self.db((self.db.auth_membership.group_id == bart_group_id) &
                                (self.db.auth_membership.user_id == bart_id)).select().first())
    # Just calling many form functions
    def test_basic_blank_forms(self):
        for f in ['login', 'retrieve_password', 'retrieve_username', 'register']:
            html_form = getattr(self.auth, f)().xml()
            self.assertTrue(b'name="_formkey"' in html_form)
        for f in ['logout', 'verify_email', 'reset_password', 'change_password', 'profile', 'groups']:
            self.myassertRaisesRegex(HTTP, "303*", getattr(self.auth, f))
        self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate)
        try:
            for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
                      'auth_membership', 'auth_permission', 'auth_group',
                      'auth_user']:
                self.db[t].drop()
        except SyntaxError as e:
            # GAE doesn't support drop
            pass
        return
    def test_get_vars_next(self):
        self.current.request.vars._next = 'next_test'
        self.assertEqual(self.auth.get_vars_next(), 'next_test')
    # TODO: def test_navbar(self):
    # TODO: def test___get_migrate(self):
    def test_enable_record_versioning(self):
        self.assertTrue('t0_archive' in self.db)
    # TODO: def test_define_signature(self):
    # TODO: def test_define_signature(self):
    # TODO: def test_define_table(self):
    def test_log_event(self):
        """Exercise log_event() for logged-in, logged-out and logging-disabled cases."""
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        # user logged in
        self.auth.log_event(description='some_log_event_description_%(var1)s',
                            vars={"var1": "var1"},
                            origin='log_event_test_1')
        rtn = self.db(self.db.auth_event.origin == 'log_event_test_1'
                      ).select(*[self.db.auth_event[f]
                                 for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
        self.assertEqual(set(rtn.items()), set({'origin': 'log_event_test_1',
                                                'client_ip': None,
                                                'user_id': bart_id,
                                                'description': 'some_log_event_description_var1'}.items()))
        # user not logged
        self.auth.logout_bare()
        self.auth.log_event(description='some_log_event_description_%(var2)s',
                            vars={"var2": "var2"},
                            origin='log_event_test_2')
        rtn = self.db(self.db.auth_event.origin == 'log_event_test_2'
                      ).select(*[self.db.auth_event[f]
                                 for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
        self.assertEqual(set(rtn.items()), set({'origin': 'log_event_test_2',
                                                'client_ip': None,
                                                'user_id': None,
                                                'description': 'some_log_event_description_var2'}.items()))
        # no logging tests
        self.auth.settings.logging_enabled = False
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        self.auth.log_event(description='some_log_event_description_%(var3)s',
                            vars={"var3": "var3"},
                            origin='log_event_test_3')
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        self.auth.settings.logging_enabled = True
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        self.auth.log_event(description=None,
                            vars={"var4": "var4"},
                            origin='log_event_test_4')
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        # TODO: Corner case translated description...
    def test_get_or_create_user(self):
        self.db.auth_user.insert(email='user1@test.com', username='user1', password='password_123')
        self.db.commit()
        # True case
        self.assertEqual(self.auth.get_or_create_user({'email': 'user1@test.com',
                                                       'username': 'user1',
                                                       'password': 'password_123'
                                                       })['username'], 'user1')
        # user2 doesn't exist yet and get created
        self.assertEqual(self.auth.get_or_create_user({'email': 'user2@test.com',
                                                       'username': 'user2'})['username'], 'user2')
        # user3 for corner case
        self.assertEqual(self.auth.get_or_create_user({'first_name': 'Omer',
                                                       'last_name': 'Simpson',
                                                       'email': 'user3@test.com',
                                                       'registration_id': 'user3',
                                                       'username': 'user3'})['username'], 'user3')
        # False case
        self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
        self.db.auth_user.truncate()
        self.db.commit()
    # TODO: def test_basic(self):
    # TODO: def test_login_user(self):
    # TODO: def test__get_login_settings(self):
    def test_login_bare(self):
        self.auth.login_bare(username='bart', password='bart_password')
        self.assertTrue(self.auth.is_logged_in())
        self.auth.logout_bare()
        # Failing login because wrong_password
        self.assertFalse(self.auth.login_bare(username='bart', password='wrong_password'))
        # NOTE : The following failed for some reason, but I can't find out why
        # self.auth = Auth(self.db)
        # self.auth.define_tables(username=False, signature=False)
        # self.auth.settings.registration_requires_verification = False
        # self.auth.settings.registration_requires_approval = False
        # self.auth.register_bare(first_name='Omer',
        #                         last_name='Simpson',
        #                         # no username field passed, failed with :
        #                         # ValueError('register_bare: userfield not provided or invalid')
        #                         # Or
        #                         # username='omer',
        #                         # Or
        #                         # username='omer@simpson.com',
        #                         # In either previous cases, it failed with :
        #                         # self.assertTrue(self.auth.is_logged_in()) AssertionError: False is not true
        #                         email='omer@simpson.com',
        #                         password='omer_password')
        # self.auth.login_bare(username='omer@sympson.com', password='omer_password')
        # self.assertTrue(self.auth.is_logged_in())
    def test_register_bare(self):
        # corner case empty register call register_bare without args
        self.assertRaises(ValueError, self.auth.register_bare)
        # failing register_bare user already exist
        self.assertEqual(self.auth.register_bare(username='bart', password='wrong_password'), False)
        # successful register_bare
        self.assertEqual(self.auth.register_bare(username='user2',
                                                 email='user2@test.com',
                                                 password='password_123')['username'], 'user2')
        # raise ValueError
        self.assertRaises(ValueError, self.auth.register_bare,
                          **dict(wrong_field_name='user3', password='password_123'))
        # raise ValueError wrong email
        self.assertRaises(ValueError, self.auth.register_bare,
                          **dict(email='user4@', password='password_123'))
        self.db.auth_user.truncate()
        self.db.commit()
    # TODO: def test_cas_login(self):
    # TODO: def test_cas_validate(self):
    # TODO: def test__reset_two_factor_auth(self):
    # TODO: def test_when_is_logged_in_bypass_next_in_url(self):
    # TODO: def test_login(self):
    # TODO: def test_logout(self):
    def test_logout_bare(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        self.assertTrue(self.auth.is_logged_in())
        self.auth.logout_bare()
        self.assertFalse(self.auth.is_logged_in())
    # TODO: def test_register(self):
    def test_is_logged_in(self):
        self.auth.user = 'logged_in'
        self.assertTrue(self.auth.is_logged_in())
        self.auth.user = None
        self.assertFalse(self.auth.is_logged_in())
    # TODO: def test_verify_email(self):
    # TODO: def test_retrieve_username(self):
    def test_random_password(self):
        # let just check that the function is callable
        self.assertTrue(self.auth.random_password())
    # TODO: def test_reset_password_deprecated(self):
    # TODO: def test_confirm_registration(self):
    # TODO: def test_email_registration(self):
    def test_bulk_register(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        self.auth.settings.bulk_register_enabled = True
        bulk_register_form = self.auth.bulk_register(max_emails=10).xml()
        self.assertTrue(b'name="_formkey"' in bulk_register_form)
    # TODO: def test_manage_tokens(self):
    # TODO: def test_reset_password(self):
    # TODO: def test_request_reset_password(self):
    # TODO: def test_email_reset_password(self):
    # TODO: def test_retrieve_password(self):
    def test_change_password(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        change_password_form = getattr(self.auth, 'change_password')().xml()
        self.assertTrue(b'name="_formkey"' in change_password_form)
    def test_profile(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        profile_form = getattr(self.auth, 'profile')().xml()
        self.assertTrue(b'name="_formkey"' in profile_form)
    # TODO: def test_run_login_onaccept(self):
    # TODO: def test_jwt(self):
    # TODO: def test_is_impersonating(self):
    def test_impersonate(self):
        """Happy path and failure modes of Auth.impersonate().

        Covers: rendering the picker form, self-impersonation (no-op),
        impersonating another user, lacking the 'impersonate' permission (403),
        a non-existing target user (401), and un-impersonating via user_id=0.
        """
        # Create a user to be impersonated
        self.auth.get_or_create_user(dict(first_name='Omer',
                                          last_name='Simpson',
                                          username='omer',
                                          email='omer@test.com',
                                          password='password_omer',
                                          registration_key='',
                                          registration_id=''),
                                     login=False)
        self.db.commit()
        self.assertFalse(self.auth.is_logged_in())
        # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
        group_id = self.auth.add_group('impersonate')
        self.auth.add_membership(user_id=self.db(self.db.auth_user.username == 'bart'
                                                 ).select(self.db.auth_user.id).first().id,
                                 group_id=group_id)
        self.auth.add_permission(group_id=group_id,
                                 name='impersonate',
                                 table_name='auth_user',
                                 record_id=0)
        # Bart login
        # self.auth.login_bare(username='bart', password='bart_password')
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        self.assertTrue(self.auth.is_logged_in())
        bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        self.assertEqual(self.auth.user_id, bart_id)
        # self.session.auth = self.auth
        # self.assertTrue(self.session.auth)
        # basic impersonate() test that return a read form
        self.assertEqual(self.auth.impersonate().xml(),
                         b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="no_table_user_id__row"><td class="w2p_fl"><label class="" for="no_table_user_id" id="no_table_user_id__label">User Id: </label></td><td class="w2p_fw"><input class="integer" id="no_table_user_id" name="user_id" type="text" value="" /></td><td class="w2p_fc"></td></tr><tr id="submit_record__row"><td class="w2p_fl"></td><td class="w2p_fw"><input type="submit" value="Submit" /></td><td class="w2p_fc"></td></tr></table></form>')
        # bart impersonate itself
        self.assertEqual(self.auth.impersonate(bart_id), None)
        self.assertFalse(self.auth.is_impersonating())  # User shouldn't impersonate itself?
        # Bart impersonate Omer
        omer_id = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
        impersonate_form = self.auth.impersonate(user_id=omer_id)
        self.assertTrue(self.auth.is_impersonating())
        self.assertEqual(self.auth.user_id, omer_id)  # we make it really sure
        self.assertEqual(impersonate_form.xml(),
                         b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="auth_user_id__row"><td class="w2p_fl"><label class="readonly" for="auth_user_id" id="auth_user_id__label">Id: </label></td><td class="w2p_fw"><span id="auth_user_id">2</span></td><td class="w2p_fc"></td></tr><tr id="auth_user_first_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_first_name" id="auth_user_first_name__label">First name: </label></td><td class="w2p_fw">Omer</td><td class="w2p_fc"></td></tr><tr id="auth_user_last_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_last_name" id="auth_user_last_name__label">Last name: </label></td><td class="w2p_fw">Simpson</td><td class="w2p_fc"></td></tr><tr id="auth_user_email__row"><td class="w2p_fl"><label class="readonly" for="auth_user_email" id="auth_user_email__label">E-mail: </label></td><td class="w2p_fw">omer@test.com</td><td class="w2p_fc"></td></tr><tr id="auth_user_username__row"><td class="w2p_fl"><label class="readonly" for="auth_user_username" id="auth_user_username__label">Username: </label></td><td class="w2p_fw">omer</td><td class="w2p_fc"></td></tr></table><div style="display:none;"><input name="id" type="hidden" value="2" /></div></form>')
        self.auth.logout_bare()
        # Failing impersonation
        # User lacking impersonate membership
        self.auth.login_user(self.db(self.db.auth_user.username == 'omer').select().first())  # bypass login_bare()
        # self.assertTrue(self.auth.is_logged_in())  # For developing test
        # self.assertFalse(self.auth.is_impersonating())  # For developing test
        self.myassertRaisesRegex(HTTP, "403*", self.auth.impersonate, bart_id)
        self.auth.logout_bare()
        # Try impersonate a non existing user
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        # self.assertTrue(self.auth.is_logged_in())  # For developing test
        # self.assertFalse(self.auth.is_impersonating())  # For developing test
        self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate, 1000)  # user with id 1000 shouldn't exist
        # Try impersonate user with id = 0 or '0' when bart impersonating omer
        self.auth.impersonate(user_id=omer_id)
        self.assertTrue(self.auth.is_impersonating())
        self.assertEqual(self.auth.impersonate(user_id=0), None)
    # TODO: def test_update_groups(self):
    def test_groups(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        self.assertEqual(self.auth.groups().xml(),
                         b'<table><tr><td><h3>user_1(1)</h3></td></tr><tr><td><p></p></td></tr></table>')
    def test_not_authorized(self):
        self.current.request.ajax = 'facke_ajax_request'
        self.myassertRaisesRegex(HTTP, "403*", self.auth.not_authorized)
        self.current.request.ajax = None
        self.assertEqual(self.auth.not_authorized(), self.auth.messages.access_denied)
    def test_allows_jwt(self):
        self.myassertRaisesRegex(HTTP, "400*", self.auth.allows_jwt)
    # TODO: def test_requires(self):
    # def test_login(self):
    # Basic testing above in "test_basic_blank_forms()" could be refined here
    # TODO: def test_requires_login_or_token(self):
    # TODO: def test_requires_membership(self):
    # TODO: def test_requires_permission(self):
    # TODO: def test_requires_signature(self):
    def test_add_group(self):
        self.assertEqual(self.auth.add_group(role='a_group', description='a_group_role_description'),
                         self.db(self.db.auth_group.role == 'a_group').select(self.db.auth_group.id).first().id)
    def test_del_group(self):
        bart_group_id = 1  # Should be group 1, 'user_1'
        self.assertEqual(self.auth.del_group(group_id=bart_group_id), None)
    def test_id_group(self):
        self.assertEqual(self.auth.id_group(role='user_1'), 1)
        # If role don't exist it return None
        self.assertEqual(self.auth.id_group(role='non_existing_role_name'), None)
    def test_user_group(self):
        self.assertEqual(self.auth.user_group(user_id=1), 1)
        # Bart should be user 1 and it unique group should be 1, 'user_1'
    def test_user_group_role(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        user_group_role = 'user_%s' % self.db(self.db.auth_user.username == 'bart'
                                              ).select(self.db.auth_user.id).first().id
        self.assertEqual(self.auth.user_group_role(), user_group_role)
        self.auth.logout_bare()
        # with user_id args
        self.assertEqual(self.auth.user_group_role(user_id=1), 'user_1')
        # test None
        self.auth.settings.create_user_groups = None
        self.assertEqual(self.auth.user_group_role(user_id=1), None)
    def test_has_membership(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        self.assertTrue(self.auth.has_membership('user_1'))
        self.assertFalse(self.auth.has_membership('user_555'))
        self.assertTrue(self.auth.has_membership(group_id=1))
        self.auth.logout_bare()
        self.assertTrue(self.auth.has_membership(role='user_1', user_id=1))
        self.assertTrue(self.auth.has_membership(group_id=1, user_id=1))
        # check that event is logged
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        self.assertTrue(self.auth.has_membership(group_id=1, user_id=1))
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
    def test_add_membership(self):
        """add/del membership by group_id, role and default positional arg,
        plus ValueError corner cases and idempotent re-add."""
        user = self.db(self.db.auth_user.username == 'bart').select().first()  # bypass login_bare()
        user_id = user.id
        role_name = 'test_add_membership_group'
        group_id = self.auth.add_group(role_name)
        self.assertFalse(self.auth.has_membership(role_name))
        self.auth.add_membership(group_id=group_id, user_id=user_id)
        self.assertTrue(self.auth.has_membership(group_id, user_id=user_id))
        self.auth.del_membership(group_id=group_id, user_id=user_id)
        self.assertFalse(self.auth.has_membership(group_id, user_id=user_id))
        self.auth.add_membership(role=role_name, user_id=user_id)
        self.assertTrue(self.auth.has_membership(group_id, user_id=user_id))
        self.auth.del_membership(group_id=group_id, user_id=user_id)
        self.assertFalse(self.auth.has_membership(group_id, user_id=user_id))
        with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
            self.auth.add_membership(group_id='not_existing_group_name', user_id=user_id)
        with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
            self.auth.add_membership(role='not_existing_role_name', user_id=user_id)
        with self.myassertRaisesRegex(ValueError, '^user_id not provided or invalid$'):
            self.auth.add_membership(group_id=group_id, user_id=None)
        with self.myassertRaisesRegex(ValueError, '^user_id not provided or invalid$'):
            self.auth.add_membership(role=role_name, user_id=None)
        self.auth.login_user(user)
        self.auth.add_membership(group_id=group_id)
        self.assertTrue(self.auth.has_membership(group_id))
        self.auth.del_membership(group_id=group_id)
        self.assertFalse(self.auth.has_membership(group_id))
        self.auth.add_membership(role=role_name)
        self.assertTrue(self.auth.has_membership(group_id))
        self.auth.del_membership(group_id=group_id)
        self.assertFalse(self.auth.has_membership(group_id))
        # default usage (group_id=role_name)
        self.auth.add_membership(role_name)
        self.assertTrue(self.auth.has_membership(group_id))
        self.auth.del_membership(group_id=group_id)
        self.assertFalse(self.auth.has_membership(group_id))
        # re-adding a membership should return the existing membership
        record0_id = self.auth.add_membership(group_id)
        self.assertTrue(self.auth.has_membership(group_id))
        record1_id = self.auth.add_membership(group_id)
        self.assertEqual(record0_id, record1_id)
        self.auth.del_membership(group_id=group_id)
        self.assertFalse(self.auth.has_membership(group_id))
        with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
            self.auth.add_membership(group_id='not_existing_group_name')
        with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
            self.auth.add_membership(role='not_existing_role_name')
    def test_del_membership(self):
        """del_membership for the logged-in user, then for an explicit user_id."""
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        user_1_role_id = self.db(self.db.auth_membership.group_id == self.auth.id_group('user_1')
                                 ).select(self.db.auth_membership.id).first().id
        self.assertEqual(self.auth.del_membership('user_1'), user_1_role_id)
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        # check that event is logged
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        # not logged in test case
        group_id = self.auth.add_group('some_test_group')
        membership_id = self.auth.add_membership('some_test_group')
        self.assertEqual(self.auth.user_groups[group_id], 'some_test_group')
        self.auth.logout_bare()
        # not deleted
        self.assertFalse(self.auth.del_membership('some_test_group'))
        self.assertEqual(set(self.db.auth_membership(membership_id).as_dict().items()),
                         set({'group_id': 2, 'user_id': 1, 'id': 2}.items()))  # is not deleted
        # deleted
        bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        self.assertTrue(self.auth.del_membership('some_test_group', user_id=bart_id))
        self.assertEqual(self.db.auth_membership(membership_id), None)  # is really deleted
    def test_has_permission(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        self.auth.add_permission(group_id=self.auth.id_group('user_1'),
                                 name='some_permission',
                                 table_name='auth_user',
                                 record_id=0,
                                 )
        # True case
        self.assertTrue(self.auth.has_permission(name='some_permission',
                                                 table_name='auth_user',
                                                 record_id=0,
                                                 user_id=bart_id,
                                                 group_id=self.auth.id_group('user_1')))
        # False case
        self.assertFalse(self.auth.has_permission(name='some_other_permission',
                                                  table_name='auth_user',
                                                  record_id=0,
                                                  user_id=bart_id,
                                                  group_id=self.auth.id_group('user_1')))
    def test_add_permission(self):
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        permission_id = \
            self.auth.add_permission(group_id=self.auth.id_group('user_1'),
                                     name='some_permission',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        # check that event is logged
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        # True case
        permission_count = \
            self.db(self.db.auth_permission.id == permission_id).count()
        self.assertTrue(permission_count)
        # False case
        permission_count = \
            self.db((self.db.auth_permission.group_id == self.auth.id_group('user_1')) &
                    (self.db.auth_permission.name == 'no_permission') &
                    (self.db.auth_permission.table_name == 'no_table') &
                    (self.db.auth_permission.record_id == 0)).count()
        self.assertFalse(permission_count)
        # corner case
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        permission_id = \
            self.auth.add_permission(group_id=0,
                                     name='user_1_permission',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        permission_name = \
            self.db(self.db.auth_permission.id == permission_id).select(self.db.auth_permission.name).first().name
        self.assertEqual(permission_name, 'user_1_permission')
        # add an existing permission
        permission_id =\
            self.auth.add_permission(group_id=0,
                                     name='user_1_permission',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        self.assertTrue(permission_id)
    def test_del_permission(self):
        permission_id = \
            self.auth.add_permission(group_id=self.auth.id_group('user_1'),
                                     name='del_permission_test',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        self.assertTrue(self.auth.del_permission(group_id=self.auth.id_group('user_1'),
                                                 name='del_permission_test',
                                                 table_name='auth_user',
                                                 record_id=0,))
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        # check that event is logged
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        # really deleted
        permission_count = \
            self.db(self.db.auth_permission.id == permission_id).count()
        self.assertFalse(permission_count)
    # TODO: def test_accessible_query(self):
    # TODO: def test_archive(self):
    # TODO: def test_wiki(self):
    # TODO: def test_wikimenu(self):
# End Auth test
# TODO: class TestCrud(unittest.TestCase):
# It deprecated so far from a priority
# TODO: class TestService(unittest.TestCase):
# TODO: class TestPluginManager(unittest.TestCase):
# TODO: class TestWiki(unittest.TestCase):
# TODO: class TestConfig(unittest.TestCase):
class TestToolsFunctions(unittest.TestCase):
    """
    Test suite for all the tools.py functions
    """
    def test_prettydate(self):
        """Drive prettydate() through past, date-only, future, UTC and invalid inputs."""
        now = datetime.datetime.now()
        self.assertEqual(prettydate(d=now), 'now')
        # (offset into the past, expected human-readable label)
        past_cases = [
            (datetime.timedelta(seconds=1), '1 second ago'),
            (datetime.timedelta(seconds=2), '2 seconds ago'),
            (datetime.timedelta(seconds=60), '1 minute ago'),
            (datetime.timedelta(seconds=61), '1 minute ago'),
            (datetime.timedelta(seconds=120), '2 minutes ago'),
            (datetime.timedelta(seconds=121), '2 minutes ago'),
            (datetime.timedelta(seconds=60 * 60), '1 hour ago'),
            (datetime.timedelta(seconds=3601), '1 hour ago'),
            (datetime.timedelta(seconds=2 * 60 * 60), '2 hours ago'),
            (datetime.timedelta(seconds=2 * 60 * 60 + 1), '2 hours ago'),
            (datetime.timedelta(days=1), '1 day ago'),
            (datetime.timedelta(days=2), '2 days ago'),
            (datetime.timedelta(days=7), '1 week ago'),
            (datetime.timedelta(days=8), '1 week ago'),
            (datetime.timedelta(days=14), '2 weeks ago'),
            (datetime.timedelta(days=15), '2 weeks ago'),
            (datetime.timedelta(days=21), '3 weeks ago'),
            (datetime.timedelta(days=27), '1 month ago'),
            (datetime.timedelta(days=28), '1 month ago'),
            (datetime.timedelta(days=60), '2 months ago'),
            (datetime.timedelta(days=90), '3 months ago'),
            (datetime.timedelta(days=365), '1 year ago'),
            (datetime.timedelta(days=366), '1 year ago'),
            (datetime.timedelta(days=2 * 365), '2 years ago'),
            (datetime.timedelta(days=2 * 365 + 1), '2 years ago'),
        ]
        for offset, expected in past_cases:
            self.assertEqual(prettydate(d=now - offset), expected)
        # date() values (no time component)
        today = now.date()
        self.assertEqual(prettydate(d=today), 'now')
        self.assertEqual(prettydate(d=today - datetime.timedelta(days=1)), '1 day ago')
        self.assertEqual(prettydate(d=today - datetime.timedelta(days=2)), '2 days ago')
        # from now
        # from now is picky depending of the execution time, so we can't use sharp value like 1 second or 1 day
        future_cases = [
            (datetime.timedelta(seconds=-65), '1 minute from now'),
            (datetime.timedelta(hours=-23.5), '23 hours from now'),
            (datetime.timedelta(days=-366), '1 year from now'),
        ]
        for offset, expected in future_cases:
            self.assertEqual(prettydate(d=now - offset), expected)
        # utc=True
        utc_now = datetime.datetime.utcnow()
        self.assertEqual(prettydate(d=utc_now, utc=True), 'now')
        self.assertEqual(prettydate(d=utc_now - datetime.timedelta(seconds=1), utc=True), '1 second ago')
        # not d or invalid date
        self.assertEqual(prettydate(d=None), '')
        self.assertEqual(prettydate(d='invalid_date'), '[invalid date]')
# Short alias used throughout the Expose tests below.
pjoin = os.path.join

def have_symlinks():
    """Return True when the platform supports symlinks (POSIX systems only)."""
    return 'posix' == os.name
class Test_Expose__in_base(unittest.TestCase):
    """Unit tests for the name-mangled Expose.__in_base path-containment helper."""
    def test_in_base(self):
        # each pair is (sub, base) where sub must be reported as under base
        positive_pairs = (
            ('/foo/bar', '/foo'),
            ('/foo', '/foo'),
            ('/foo', '/'),
            ('/', '/'),
        )
        for sub, base in positive_pairs:
            self.assertTrue(
                Expose._Expose__in_base(subdir=sub, basedir=base, sep='/'),
                '%s is not under %s' % (sub, base))
    def test_not_in_base(self):
        # each pair is (sub, base) where sub must NOT be reported as under base
        negative_pairs = (
            ('/foobar', '/foo'),
            ('/foo', '/foo/bar'),
            ('/bar', '/foo'),
            ('/foo/bar', '/bar'),
            ('/', '/x'),
        )
        for sub, base in negative_pairs:
            self.assertFalse(
                Expose._Expose__in_base(subdir=sub, basedir=base, sep='/'),
                '%s should not be under %s' % (sub, base))
class TestExpose(unittest.TestCase):
    def setUp(self):
        """Build a scratch directory tree (diagrammed below) and patch tools.URL.

        tearDown() removes the tree and restores tools.URL.
        """
        self.base_dir = tempfile.mkdtemp()
        self.make_dirs()
        self.touch_files()
        self.make_readme()
        # symlink fixtures only exist on POSIX platforms
        if have_symlinks():
            self.make_symlinks()
        # $BASE/
        # |-- inside/
        # |   |-- dir1/
        # |   |   |-- file1
        # |   |   `-- file2
        # |   |-- dir2/
        # |   |   |-- link_to_dir1/@ -> $BASE/inside/dir1/
        # |   |   `-- link_to_file1@ -> $BASE/inside/dir1/file1
        # |   |-- link_to_outside/@ -> $BASE/outside/
        # |   |-- link_to_file3@ -> $BASE/outside/file3
        # |   `-- README
        # `-- outside/
        #     `-- file3
        self.set_expectations()
        # patch the URL helper so Expose renders deterministic links
        tools.URL = lambda args: URL(a='a', c='c', f='f', args=args)
def tearDown(self):
tools.URL = URL
shutil.rmtree(self.base_dir)
def make_dirs(self):
"""setup directory structure"""
for d in (['inside'],
['inside', 'dir1'],
['inside', 'dir2'],
['outside']):
os.mkdir(pjoin(self.base_dir, *d))
def touch_files(self):
"""create some files"""
for f in (['inside', 'dir1', 'file1'],
['inside', 'dir1', 'file2'],
['outside', 'file3']):
with open(pjoin(self.base_dir, *f), 'a'):
pass
def make_readme(self):
with open(pjoin(self.base_dir, 'inside', 'README'), 'w') as f:
f.write('README content')
def make_symlinks(self):
"""setup extension for posix systems"""
# inside links
os.symlink(
pjoin(self.base_dir, 'inside', 'dir1'),
pjoin(self.base_dir, 'inside', 'dir2', 'link_to_dir1'))
os.symlink(
pjoin(self.base_dir, 'inside', 'dir1', 'file1'),
pjoin(self.base_dir, 'inside', 'dir2', 'link_to_file1'))
# outside links
os.symlink(
pjoin(self.base_dir, 'outside'),
pjoin(self.base_dir, 'inside', 'link_to_outside'))
os.symlink(
pjoin(self.base_dir, 'outside', 'file3'),
pjoin(self.base_dir, 'inside', 'link_to_file3'))
    def set_expectations(self):
        """Precompute the expected folder/file DOM fragments per shown directory.

        Keys of expected_folders/expected_files are the relative paths passed
        to make_expose(); symlink entries appear only where have_symlinks().
        """
        url = lambda args: URL('a', 'c', 'f', args=args)
        self.expected_folders = {}
        self.expected_folders['inside'] = SPAN(H3('Folders'), TABLE(
            TR(TD(A('dir1', _href=url(args=['dir1'])))),
            TR(TD(A('dir2', _href=url(args=['dir2'])))),
            _class='table',
        ))
        self.expected_folders[pjoin('inside', 'dir1')] = ''
        if have_symlinks():
            self.expected_folders[pjoin('inside', 'dir2')] = SPAN(H3('Folders'), TABLE(
                TR(TD(A('link_to_dir1', _href=url(args=['dir2', 'link_to_dir1'])))),
                _class='table',
            ))
        else:
            self.expected_folders[pjoin('inside', 'dir2')] = ''
        self.expected_files = {}
        self.expected_files['inside'] = SPAN(H3('Files'), TABLE(
            TR(TD(A('README', _href=url(args=['README']))), TD('')),
            _class='table',
        ))
        self.expected_files[pjoin('inside', 'dir1')] = SPAN(H3('Files'), TABLE(
            TR(TD(A('file1', _href=url(args=['dir1', 'file1']))), TD('')),
            TR(TD(A('file2', _href=url(args=['dir1', 'file2']))), TD('')),
            _class='table',
        ))
        if have_symlinks():
            self.expected_files[pjoin('inside', 'dir2')] = SPAN(H3('Files'), TABLE(
                TR(TD(A('link_to_file1', _href=url(args=['dir2', 'link_to_file1']))), TD('')),
                _class='table',
            ))
        else:
            self.expected_files[pjoin('inside', 'dir2')] = ''
def make_expose(self, base, show='', follow_symlink_out=False):
current.request = Request(env={})
current.request.raw_args = show
current.request.args = show.split('/')
return Expose(base=pjoin(self.base_dir, base),
basename=base,
follow_symlink_out=follow_symlink_out)
def test_expose_inside_state(self):
expose = self.make_expose(base='inside', show='')
self.assertEqual(expose.args, [])
self.assertEqual(expose.folders, ['dir1', 'dir2'])
self.assertEqual(expose.filenames, ['README'])
@unittest.skipUnless(have_symlinks(), 'requires symlinks')
def test_expose_inside_state_floow_symlink_out(self):
expose = self.make_expose(base='inside', show='',
follow_symlink_out=True)
self.assertEqual(expose.args, [])
self.assertEqual(expose.folders, ['dir1', 'dir2', 'link_to_outside'])
self.assertEqual(expose.filenames, ['README', 'link_to_file3'])
def test_expose_inside_dir1_state(self):
expose = self.make_expose(base='inside', show='dir1')
self.assertEqual(expose.args, ['dir1'])
self.assertEqual(expose.folders, [])
self.assertEqual(expose.filenames, ['file1', 'file2'])
def test_expose_inside_dir2_state(self):
expose = self.make_expose(base='inside', show='dir2')
self.assertEqual(expose.args, ['dir2'])
if have_symlinks():
self.assertEqual(expose.folders, ['link_to_dir1'])
self.assertEqual(expose.filenames, ['link_to_file1'])
else:
self.assertEqual(expose.folders, [])
self.assertEqual(expose.filenames, [])
def test_expose_base_inside_state(self):
expose = self.make_expose(base='', show='inside')
self.assertEqual(expose.args, ['inside'])
if have_symlinks():
self.assertEqual(expose.folders, ['dir1', 'dir2', 'link_to_outside'])
self.assertEqual(expose.filenames, ['README', 'link_to_file3'])
else:
self.assertEqual(expose.folders, ['dir1', 'dir2'])
self.assertEqual(expose.filenames, ['README'])
def test_expose_base_inside_dir2_state(self):
expose = self.make_expose(base='', show='inside/dir2')
self.assertEqual(expose.args, ['inside', 'dir2'])
if have_symlinks():
self.assertEqual(expose.folders, ['link_to_dir1'])
self.assertEqual(expose.filenames, ['link_to_file1'])
else:
self.assertEqual(expose.folders, [])
self.assertEqual(expose.filenames, [])
def assertSameXML(self, a, b):
self.assertEqual(a if isinstance(a, str) else a.xml(),
b if isinstance(b, str) else b.xml())
def run_test_xml_for(self, base, show):
expose = self.make_expose(base, show)
path = pjoin(base, show).rstrip(os.path.sep)
request = Request(env={})
self.assertSameXML(expose.table_files(), self.expected_files[path])
self.assertSameXML(expose.table_folders(), self.expected_folders[path])
    def test_xml_inside(self):
        """Rendered tables for the exposed root match the expectations."""
        self.run_test_xml_for(base='inside', show='')
    def test_xml_dir1(self):
        """Rendered tables for inside/dir1 match the expectations."""
        self.run_test_xml_for(base='inside', show='dir1')
    def test_xml_dir2(self):
        """Rendered tables for inside/dir2 match the expectations."""
        self.run_test_xml_for(base='inside', show='dir2')
def test_file_not_found(self):
with self.assertRaises(HTTP):
self.make_expose(base='inside', show='dir1/file_not_found')
def test_not_authorized(self):
with self.assertRaises(HTTP):
self.make_expose(base='inside', show='link_to_file3')
| #!/bin/python
# -*- coding: utf-8 -*-
"""
Unit tests for gluon.tools
"""
import os
import sys
import shutil
import tempfile
import smtplib
import datetime
import unittest
# Database URI for the test DAL; override with the DB environment variable.
DEFAULT_URI = os.getenv('DB', 'sqlite:memory')
from gluon.dal import DAL, Field
from pydal.objects import Table
from gluon import tools
from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose, prevent_open_redirect
from gluon._compat import PY2, to_bytes
from gluon.globals import Request, Response, Session
from gluon.storage import Storage
from gluon.languages import TranslatorFactory
from gluon.http import HTTP
from gluon import SPAN, H3, TABLE, TR, TD, A, URL, current
# Some tests are skipped when running against an IMAP "database" backend.
IS_IMAP = "imap" in DEFAULT_URI
class TestMail(unittest.TestCase):
    """
    Test the Mail class.

    setUp() monkeypatches smtplib.SMTP / SMTP_SSL with DummySMTP, so no
    real network traffic happens; tearDown() restores the originals.
    """

    class Message(object):
        # Lightweight record of a single message "delivered" to DummySMTP.

        def __init__(self, sender, to, payload):
            self.sender = sender
            self.to = to
            self.payload = payload
            self._parsed_payload = None

        @property
        def parsed_payload(self):
            # Parse the raw payload lazily, only if a test inspects it.
            if self._parsed_payload is None:
                import email
                self._parsed_payload = email.message_from_string(self.payload)
            return self._parsed_payload

    class DummySMTP(object):
        """
        Dummy smtp server

        NOTE: Test methods should take care of always leaving inbox and users empty when they finish.
        """
        # Class-level state: shared by every instance and every test.
        inbox = []
        users = {}

        def __init__(self, address, port, **kwargs):
            self.address = address
            self.port = port
            self.has_quit = False
            self.tls = False

        def login(self, username, password):
            # Mimic smtplib's failure mode on bad credentials.
            # NOTE(review): raising the class without (code, msg) args may
            # itself error; Mail.send() apparently treats any exception as a
            # failed send — confirm before relying on the exact exception.
            if username not in self.users or self.users[username] != password:
                raise smtplib.SMTPAuthenticationError
            self.username = username
            self.password = password

        def sendmail(self, sender, to, payload):
            self.inbox.append(TestMail.Message(sender, to, payload))

        def quit(self):
            self.has_quit = True

        def ehlo(self, hostname=None):
            pass

        def starttls(self):
            self.tls = True

    def setUp(self):
        # Swap in the dummy transport; restored in tearDown().
        self.original_SMTP = smtplib.SMTP
        self.original_SMTP_SSL = smtplib.SMTP_SSL
        smtplib.SMTP = TestMail.DummySMTP
        smtplib.SMTP_SSL = TestMail.DummySMTP

    def tearDown(self):
        smtplib.SMTP = self.original_SMTP
        smtplib.SMTP_SSL = self.original_SMTP_SSL

    def test_hello_world(self):
        # Plain-text send: verify envelope, headers and body.
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  # If reply_to is omitted, then mail.settings.sender is used
                                  reply_to='us@example.com',
                                  message='world'))
        message = TestMail.DummySMTP.inbox.pop()
        self.assertEqual(message.sender, mail.settings.sender)
        self.assertEqual(message.to, ['somebody@example.com'])
        header = "To: somebody@example.com\nReply-To: us@example.com\nSubject: hello\n"
        self.assertTrue(header in message.payload)
        self.assertTrue(message.payload.endswith('world'))

    def test_failed_login(self):
        # No user registered in DummySMTP.users -> login fails -> send False.
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        mail.settings.login = 'username:password'
        self.assertFalse(mail.send(to=['somebody@example.com'],
                                   subject='hello',
                                   # If reply_to is omitted, then mail.settings.sender is used
                                   reply_to='us@example.com',
                                   message='world'))

    def test_login(self):
        # Register credentials so DummySMTP.login() succeeds.
        TestMail.DummySMTP.users['username'] = 'password'
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        mail.settings.login = 'username:password'
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  # If reply_to is omitted, then mail.settings.sender is used
                                  reply_to='us@example.com',
                                  message='world'))
        # Leave the shared class-level state clean for other tests.
        del TestMail.DummySMTP.users['username']
        TestMail.DummySMTP.inbox.pop()

    def test_html(self):
        # An HTML body must be sent with a text/html content type.
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  # If reply_to is omitted, then mail.settings.sender is used
                                  reply_to='us@example.com',
                                  message='<html><head></head><body></body></html>'))
        message = TestMail.DummySMTP.inbox.pop()
        self.assertTrue('Content-Type: text/html' in message.payload)

    def test_alternative(self):
        # (text, html) tuple message -> multipart/alternative payload.
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  message=('Text only', '<html><pre>HTML Only</pre></html>')))
        message = TestMail.DummySMTP.inbox.pop()
        self.assertTrue(message.parsed_payload.is_multipart())
        self.assertTrue(message.parsed_payload.get_content_type() == 'multipart/alternative')
        parts = message.parsed_payload.get_payload()
        self.assertTrue('Text only' in parts[0].as_string())
        self.assertTrue('<html><pre>HTML Only</pre></html>' in parts[1].as_string())

    def test_ssl(self):
        # ssl=True path: DummySMTP stands in for SMTP_SSL as well.
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        mail.settings.ssl = True
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  # If reply_to is omitted, then mail.settings.sender is used
                                  reply_to='us@example.com',
                                  message='world'))
        TestMail.DummySMTP.inbox.pop()

    def test_tls(self):
        # tls=True path: Mail should call starttls() on the transport.
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        mail.settings.tls = True
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  # If reply_to is omitted, then mail.settings.sender is used
                                  reply_to='us@example.com',
                                  message='world'))
        TestMail.DummySMTP.inbox.pop()

    def test_attachment(self):
        # Use this very module as the attachment fixture.
        module_file = os.path.abspath(__file__)
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  message='world',
                                  attachments=Mail.Attachment(module_file)))
        message = TestMail.DummySMTP.inbox.pop()
        attachment = message.parsed_payload.get_payload(1).get_payload(decode=True)
        with open(module_file, 'rb') as mf:
            self.assertEqual(to_bytes(attachment), to_bytes(mf.read()))
        # Test missing attachment name error
        stream = open(module_file)
        self.assertRaises(Exception, lambda *args, **kwargs: Mail.Attachment(*args, **kwargs), stream)
        stream.close()
        # Test you can define content-id and content type
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  message='world',
                                  attachments=Mail.Attachment(module_file, content_id='trololo', content_type='tra/lala')))
        message = TestMail.DummySMTP.inbox.pop()
        self.assertTrue('Content-Type: tra/lala' in message.payload)
        self.assertTrue('Content-Id: <trololo>' in message.payload)
# TODO: extend TestAuthJWT coverage (basic tests exist below)
class TestAuthJWT(unittest.TestCase):
    """Tests for gluon.tools.AuthJWT token issuing and validation."""

    def setUp(self):
        from gluon.tools import AuthJWT
        from gluon import current
        request = Request(env={})
        request.application = 'a'
        request.controller = 'c'
        request.function = 'f'
        request.folder = 'applications/admin'
        self.request = request
        self.current = current
        self.current.request = request
        self.db = DAL(DEFAULT_URI, check_reserved=['all'])
        self.auth = Auth(self.db)
        self.auth.define_tables(username=True, signature=False)
        self.user_data = {'username': 'jwtuser', 'password': 'jwtuser123'}
        # Store the user with the validated (hashed) form of the password.
        hashed = str(self.db.auth_user.password.requires[0](
            self.user_data['password'])[0])
        self.db.auth_user.insert(username=self.user_data['username'],
                                 password=hashed)
        self.jwtauth = AuthJWT(self.auth, secret_key='secret',
                               verify_expiration=True)

    def test_jwt_token_manager(self):
        """A token is issued for valid credentials and can be refreshed."""
        import gluon.serializers
        self.request.vars.update(self.user_data)
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)
        del self.request.vars['username']
        del self.request.vars['password']
        # Refresh: present the previous token instead of credentials.
        self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)

    def test_allows_jwt(self):
        """A callable decorated with allows_jwt() sees the token's user."""
        import gluon.serializers
        self.request.vars.update(self.user_data)
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)
        del self.request.vars['username']
        del self.request.vars['password']
        self.token = self.jwtauth.jwt_token_manager()
        self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']

        @self.jwtauth.allows_jwt()
        def optional_auth():
            self.assertEqual(self.user_data['username'], self.auth.user.username)
        optional_auth()
@unittest.skipIf(IS_IMAP, "TODO: Imap raises 'Connection refused'")
# class TestAuth(unittest.TestCase):
#
# def setUp(self):
# request = Request(env={})
# request.application = 'a'
# request.controller = 'c'
# request.function = 'f'
# request.folder = 'applications/admin'
# response = Response()
# session = Session()
# T = TranslatorFactory('', 'en')
# session.connect(request, response)
# from gluon.globals import current
# current.request = request
# current.response = response
# current.session = session
# current.T = T
# self.db = DAL(DEFAULT_URI, check_reserved=['all'])
# self.auth = Auth(self.db)
# self.auth.define_tables(username=True, signature=False)
# self.db.define_table('t0', Field('tt'), self.auth.signature)
# self.auth.enable_record_versioning(self.db)
# # Create a user
# self.auth.get_or_create_user(dict(first_name='Bart',
# last_name='Simpson',
# username='bart',
# email='bart@simpson.com',
# password='bart_password',
# registration_key='bart',
# registration_id=''
# ))
# # self.auth.settings.registration_requires_verification = False
# # self.auth.settings.registration_requires_approval = False
#
# def test_assert_setup(self):
# self.assertEqual(self.db(self.db.auth_user.username == 'bart').select().first()['username'], 'bart')
# self.assertTrue('auth_user' in self.db)
# self.assertTrue('auth_group' in self.db)
# self.assertTrue('auth_membership' in self.db)
# self.assertTrue('auth_permission' in self.db)
# self.assertTrue('auth_event' in self.db)
#
# def test_enable_record_versioning(self):
# self.assertTrue('t0_archive' in self.db)
#
# def test_basic_blank_forms(self):
# for f in ['login', 'retrieve_password',
# 'retrieve_username',
# # 'register' # register complain about : client_side=self.settings.client_side
# ]:
# html_form = getattr(self.auth, f)().xml()
# self.assertTrue('name="_formkey"' in html_form)
#
# # NOTE: Not sure it is the proper way to logout_bare() as there is not methods for that and auth.logout() failed
# self.auth.logout_bare()
# # self.assertTrue(self.auth.is_logged_in())
#
# for f in ['logout', 'verify_email', 'reset_password',
# 'change_password', 'profile', 'groups']:
# self.assertRaisesRegexp(HTTP, "303*", getattr(self.auth, f))
#
# self.assertRaisesRegexp(HTTP, "401*", self.auth.impersonate)
#
# try:
# for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
# 'auth_membership', 'auth_permission', 'auth_group',
# 'auth_user']:
# self.db[t].drop()
# except SyntaxError as e:
# # GAE doesn't support drop
# pass
# return
#
# def test_get_or_create_user(self):
# self.db.auth_user.insert(email='user1@test.com', username='user1', password='password_123')
# self.db.commit()
# # True case
# self.assertEqual(self.auth.get_or_create_user({'email': 'user1@test.com',
# 'username': 'user1',
# 'password': 'password_123'
# })['username'], 'user1')
# # user2 doesn't exist yet and get created
# self.assertEqual(self.auth.get_or_create_user({'email': 'user2@test.com',
# 'username': 'user2'})['username'], 'user2')
# # user3 for corner case
# self.assertEqual(self.auth.get_or_create_user({'first_name': 'Omer',
# 'last_name': 'Simpson',
# 'email': 'user3@test.com',
# 'registration_id': 'user3',
# 'username': 'user3'})['username'], 'user3')
# # False case
# self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
# self.db.auth_user.truncate()
# self.db.commit()
#
# def test_login_bare(self):
# # The following test case should succeed but failed as I never received the user record but False
# self.auth.login_bare(username='bart@simpson.com', password='bart_password')
# self.assertTrue(self.auth.is_logged_in())
# # Failing login because bad_password
# self.assertEqual(self.auth.login_bare(username='bart', password='wrong_password'), False)
# self.db.auth_user.truncate()
#
# def test_register_bare(self):
# # corner case empty register call register_bare without args
# self.assertRaises(ValueError, self.auth.register_bare)
# # failing register_bare user already exist
# self.assertEqual(self.auth.register_bare(username='bart', password='wrong_password'), False)
# # successful register_bare
# self.assertEqual(self.auth.register_bare(username='user2',
# email='user2@test.com',
# password='password_123')['username'], 'user2')
# # raise ValueError
# self.assertRaises(ValueError, self.auth.register_bare,
# **dict(wrong_field_name='user3', password='password_123'))
# # raise ValueError wrong email
# self.assertRaises(ValueError, self.auth.register_bare,
# **dict(email='user4@', password='password_123'))
# self.db.auth_user.truncate()
# self.db.commit()
#
# def test_bulk_register(self):
# self.auth.login_bare(username='bart', password='bart_password')
# self.auth.settings.bulk_register_enabled = True
# bulk_register_form = self.auth.bulk_register(max_emails=10).xml()
# self.assertTrue('name="_formkey"' in bulk_register_form)
#
# def test_change_password(self):
# self.auth.login_bare(username='bart', password='bart_password')
# change_password_form = getattr(self.auth, 'change_password')().xml()
# self.assertTrue('name="_formkey"' in change_password_form)
#
# def test_profile(self):
# self.auth.login_bare(username='bart', password='bart_password')
# profile_form = getattr(self.auth, 'profile')().xml()
# self.assertTrue('name="_formkey"' in profile_form)
#
# # def test_impersonate(self):
# # # Create a user to be impersonated
# # self.auth.get_or_create_user(dict(first_name='Omer',
# # last_name='Simpson',
# # username='omer',
# # email='omer@test.com',
# # password='password_omer',
# # registration_key='',
# # registration_id=''))
# # # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
# # self.auth.add_group('impersonate')
# # self.auth.add_membership(user_id=1,
# # group_id=self.db(self.db.auth_user.username == 'bart'
# # ).select(self.db.auth_user.id).first().id)
# # self.auth.add_permission(group_id=self.db(self.db.auth_group.role == 'impersonate'
# # ).select(self.db.auth_group.id).first().id,
# # name='impersonate',
# # table_name='auth_user',
# # record_id=0)
# # # Bart login
# # self.auth.login_bare(username='bart', password='bart_password')
# # self.assertTrue(self.auth.is_logged_in())
# # # Bart impersonate Omer
# # omer_id = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
# # impersonate_form = self.auth.impersonate(user_id=omer_id)
# # self.assertTrue(self.auth.is_impersonating())
# # self.assertEqual(impersonate_form, 'test')
#
# # def test_impersonate(self):
# # request = Request(env={})
# # request.application = 'a'
# # request.controller = 'c'
# # request.function = 'f'
# # request.folder = 'applications/admin'
# # response = Response()
# # session = Session()
# # T = TranslatorFactory('', 'en')
# # session.connect(request, response)
# # from gluon.globals import current
# # current.request = request
# # current.response = response
# # current.session = session
# # current.T = T
# # db = DAL(DEFAULT_URI, check_reserved=['all'])
# # auth = Auth(db)
# # auth.define_tables(username=True, signature=False)
# # db.define_table('t0', Field('tt'), auth.signature)
# # auth.enable_record_versioning(db)
# # # Create a user
# # auth.get_or_create_user(dict(first_name='Bart',
# # last_name='Simpson',
# # username='bart',
# # email='bart@simpson.com',
# # password='bart_password',
# # registration_key='bart',
# # registration_id=''
# # ))
# # # Create a user to be impersonated
# # auth.get_or_create_user(dict(first_name='Omer',
# # last_name='Simpson',
# # username='omer',
# # email='omer@test.com',
# # password='password_omer',
# # registration_key='',
# # registration_id=''))
# # # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
# # auth.add_group('impersonate')
# # auth.add_membership(user_id=1,
# # group_id=db(db.auth_user.username == 'bart'
# # ).select(db.auth_user.id).first().id)
# # auth.add_permission(group_id=db(db.auth_group.role == 'impersonate'
# # ).select(db.auth_group.id).first().id,
# # name='impersonate',
# # table_name='auth_user',
# # record_id=0)
# # # Bart login
# # auth.login_bare(username='bart', password='bart_password')
# # # Bart impersonate Omer
# # omer_id = db(db.auth_user.username == 'omer').select(db.auth_user.id).first().id
# # impersonate_form = auth.impersonate(user_id=omer_id)
# # self.assertTrue(auth.is_impersonating())
# # self.assertEqual(impersonate_form, 'test')
class TestAuth(unittest.TestCase):
def myassertRaisesRegex(self, *args, **kwargs):
if PY2:
return getattr(self, 'assertRaisesRegexp')(*args, **kwargs)
return getattr(self, 'assertRaisesRegex')(*args, **kwargs)
    def setUp(self):
        """Build a full request/response/session context, an auth-enabled
        DAL, and register the test user 'bart'."""
        self.request = Request(env={})
        self.request.application = 'a'
        self.request.controller = 'c'
        self.request.function = 'f'
        self.request.folder = 'applications/admin'
        self.response = Response()
        self.session = Session()
        T = TranslatorFactory('', 'en')
        self.session.connect(self.request, self.response)
        from gluon.globals import current
        self.current = current
        self.current.request = self.request
        self.current.response = self.response
        self.current.session = self.session
        self.current.T = T
        self.db = DAL(DEFAULT_URI, check_reserved=['all'])
        self.auth = Auth(self.db)
        self.auth.define_tables(username=True, signature=False)
        self.db.define_table('t0', Field('tt'), self.auth.signature)
        self.auth.enable_record_versioning(self.db)
        # Disable verification/approval so register_bare() below yields an
        # immediately usable account.
        self.auth.settings.registration_requires_verification = False
        self.auth.settings.registration_requires_approval = False
        # Create a user.
        # NOTE: get_or_create_user() and a raw db.auth_user.insert() were
        # both tried here and do not create the user in a state that lets
        # the later login_bare() tests succeed; register_bare() is the
        # reliable path.
        self.auth.register_bare(first_name='Bart',
                                last_name='Simpson',
                                username='bart',
                                email='bart@simpson.com',
                                password='bart_password')
def test_assert_setup(self):
self.assertTrue('auth_user' in self.db)
self.assertTrue('auth_group' in self.db)
self.assertTrue('auth_membership' in self.db)
self.assertTrue('auth_permission' in self.db)
self.assertTrue('auth_event' in self.db)
bart_record = self.db(self.db.auth_user.username == 'bart').select().first()
self.assertEqual(bart_record['username'], 'bart')
self.assertEqual(bart_record['registration_key'], '')
bart_id = self.db(self.db.auth_user.username == 'bart').select().first().id
bart_group_id = self.db(self.db.auth_group.role == 'user_{0}'.format(bart_id)).select().first().id
self.assertTrue(self.db((self.db.auth_membership.group_id == bart_group_id) &
(self.db.auth_membership.user_id == bart_id)).select().first())
# Just calling many form functions
def test_basic_blank_forms(self):
for f in ['login', 'retrieve_password', 'retrieve_username', 'register']:
html_form = getattr(self.auth, f)().xml()
self.assertTrue(b'name="_formkey"' in html_form)
for f in ['logout', 'verify_email', 'reset_password', 'change_password', 'profile', 'groups']:
self.myassertRaisesRegex(HTTP, "303*", getattr(self.auth, f))
self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate)
try:
for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
'auth_membership', 'auth_permission', 'auth_group',
'auth_user']:
self.db[t].drop()
except SyntaxError as e:
# GAE doesn't support drop
pass
return
    def test_get_vars_next(self):
        """get_vars_next() returns the request's _next variable."""
        self.current.request.vars._next = 'next_test'
        self.assertEqual(self.auth.get_vars_next(), 'next_test')
# TODO: def test_navbar(self):
# TODO: def test___get_migrate(self):
    def test_enable_record_versioning(self):
        """enable_record_versioning() in setUp created the archive table."""
        self.assertTrue('t0_archive' in self.db)
# TODO: def test_define_signature(self):
# TODO: def test_define_signature(self):
# TODO: def test_define_table(self):
    def test_log_event(self):
        """auth.log_event() writes auth_event rows (with or without a
        logged-in user) and skips logging when disabled or description-less."""
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        # user logged in: user_id is recorded and %(var)s is interpolated
        self.auth.log_event(description='some_log_event_description_%(var1)s',
                            vars={"var1": "var1"},
                            origin='log_event_test_1')
        rtn = self.db(self.db.auth_event.origin == 'log_event_test_1'
                      ).select(*[self.db.auth_event[f]
                                 for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
        self.assertEqual(set(rtn.items()), set({'origin': 'log_event_test_1',
                                                'client_ip': None,
                                                'user_id': bart_id,
                                                'description': 'some_log_event_description_var1'}.items()))
        # user not logged: user_id is None
        self.auth.logout_bare()
        self.auth.log_event(description='some_log_event_description_%(var2)s',
                            vars={"var2": "var2"},
                            origin='log_event_test_2')
        rtn = self.db(self.db.auth_event.origin == 'log_event_test_2'
                      ).select(*[self.db.auth_event[f]
                                 for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
        self.assertEqual(set(rtn.items()), set({'origin': 'log_event_test_2',
                                                'client_ip': None,
                                                'user_id': None,
                                                'description': 'some_log_event_description_var2'}.items()))
        # logging disabled: the row count must not change
        self.auth.settings.logging_enabled = False
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        self.auth.log_event(description='some_log_event_description_%(var3)s',
                            vars={"var3": "var3"},
                            origin='log_event_test_3')
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        # logging enabled but description=None: nothing is recorded either
        self.auth.settings.logging_enabled = True
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        self.auth.log_event(description=None,
                            vars={"var4": "var4"},
                            origin='log_event_test_4')
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        # TODO: Corner case translated description...
def test_get_or_create_user(self):
self.db.auth_user.insert(email='user1@test.com', username='user1', password='password_123')
self.db.commit()
# True case
self.assertEqual(self.auth.get_or_create_user({'email': 'user1@test.com',
'username': 'user1',
'password': 'password_123'
})['username'], 'user1')
# user2 doesn't exist yet and get created
self.assertEqual(self.auth.get_or_create_user({'email': 'user2@test.com',
'username': 'user2'})['username'], 'user2')
# user3 for corner case
self.assertEqual(self.auth.get_or_create_user({'first_name': 'Omer',
'last_name': 'Simpson',
'email': 'user3@test.com',
'registration_id': 'user3',
'username': 'user3'})['username'], 'user3')
# False case
self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
self.db.auth_user.truncate()
self.db.commit()
# TODO: def test_basic(self):
# TODO: def test_login_user(self):
# TODO: def test__get_login_settings(self):
def test_login_bare(self):
self.auth.login_bare(username='bart', password='bart_password')
self.assertTrue(self.auth.is_logged_in())
self.auth.logout_bare()
# Failing login because wrong_password
self.assertFalse(self.auth.login_bare(username='bart', password='wrong_password'))
# NOTE : The following failed for some reason, but I can't find out why
# self.auth = Auth(self.db)
# self.auth.define_tables(username=False, signature=False)
# self.auth.settings.registration_requires_verification = False
# self.auth.settings.registration_requires_approval = False
# self.auth.register_bare(first_name='Omer',
# last_name='Simpson',
# # no username field passed, failed with :
# # ValueError('register_bare: userfield not provided or invalid')
# # Or
# # username='omer',
# # Or
# # username='omer@simpson.com',
# # In either previous cases, it failed with :
# # self.assertTrue(self.auth.is_logged_in()) AssertionError: False is not true
# email='omer@simpson.com',
# password='omer_password')
# self.auth.login_bare(username='omer@sympson.com', password='omer_password')
# self.assertTrue(self.auth.is_logged_in())
def test_register_bare(self):
# corner case empty register call register_bare without args
self.assertRaises(ValueError, self.auth.register_bare)
# failing register_bare user already exist
self.assertEqual(self.auth.register_bare(username='bart', password='wrong_password'), False)
# successful register_bare
self.assertEqual(self.auth.register_bare(username='user2',
email='user2@test.com',
password='password_123')['username'], 'user2')
# raise ValueError
self.assertRaises(ValueError, self.auth.register_bare,
**dict(wrong_field_name='user3', password='password_123'))
# raise ValueError wrong email
self.assertRaises(ValueError, self.auth.register_bare,
**dict(email='user4@', password='password_123'))
self.db.auth_user.truncate()
self.db.commit()
# TODO: def test_cas_login(self):
# TODO: def test_cas_validate(self):
# TODO: def test__reset_two_factor_auth(self):
# TODO: def test_when_is_logged_in_bypass_next_in_url(self):
# TODO: def test_login(self):
# TODO: def test_logout(self):
def test_logout_bare(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertTrue(self.auth.is_logged_in())
self.auth.logout_bare()
self.assertFalse(self.auth.is_logged_in())
# TODO: def test_register(self):
    def test_is_logged_in(self):
        """is_logged_in() simply reflects whether auth.user is truthy."""
        self.auth.user = 'logged_in'
        self.assertTrue(self.auth.is_logged_in())
        self.auth.user = None
        self.assertFalse(self.auth.is_logged_in())
# TODO: def test_verify_email(self):
# TODO: def test_retrieve_username(self):
    def test_random_password(self):
        """random_password() returns a truthy (non-empty) value."""
        # let just check that the function is callable
        self.assertTrue(self.auth.random_password())
# TODO: def test_reset_password_deprecated(self):
# TODO: def test_confirm_registration(self):
# TODO: def test_email_registration(self):
def test_bulk_register(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.auth.settings.bulk_register_enabled = True
bulk_register_form = self.auth.bulk_register(max_emails=10).xml()
self.assertTrue(b'name="_formkey"' in bulk_register_form)
# TODO: def test_manage_tokens(self):
# TODO: def test_reset_password(self):
# TODO: def test_request_reset_password(self):
# TODO: def test_email_reset_password(self):
# TODO: def test_retrieve_password(self):
def test_change_password(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
change_password_form = getattr(self.auth, 'change_password')().xml()
self.assertTrue(b'name="_formkey"' in change_password_form)
def test_profile(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
profile_form = getattr(self.auth, 'profile')().xml()
self.assertTrue(b'name="_formkey"' in profile_form)
# TODO: def test_run_login_onaccept(self):
# TODO: def test_jwt(self):
# TODO: def test_is_impersonating(self):
    def test_impersonate(self):
        """Permission-gated impersonation: selection form, self-impersonation,
        successful impersonation, 403 without permission, 401 for an unknown
        user, and un-impersonation via user_id=0."""
        # Create a user to be impersonated
        self.auth.get_or_create_user(dict(first_name='Omer',
                                          last_name='Simpson',
                                          username='omer',
                                          email='omer@test.com',
                                          password='password_omer',
                                          registration_key='',
                                          registration_id=''),
                                     login=False)
        self.db.commit()
        self.assertFalse(self.auth.is_logged_in())
        # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
        group_id = self.auth.add_group('impersonate')
        self.auth.add_membership(user_id=self.db(self.db.auth_user.username == 'bart'
                                                 ).select(self.db.auth_user.id).first().id,
                                 group_id=group_id)
        self.auth.add_permission(group_id=group_id,
                                 name='impersonate',
                                 table_name='auth_user',
                                 record_id=0)
        # Bart login
        # self.auth.login_bare(username='bart', password='bart_password')
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        self.assertTrue(self.auth.is_logged_in())
        bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        self.assertEqual(self.auth.user_id, bart_id)
        # self.session.auth = self.auth
        # self.assertTrue(self.session.auth)
        # With no target, impersonate() returns the user-selection form.
        self.assertEqual(self.auth.impersonate().xml(),
                         b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="no_table_user_id__row"><td class="w2p_fl"><label class="" for="no_table_user_id" id="no_table_user_id__label">User Id: </label></td><td class="w2p_fw"><input class="integer" id="no_table_user_id" name="user_id" type="text" value="" /></td><td class="w2p_fc"></td></tr><tr id="submit_record__row"><td class="w2p_fl"></td><td class="w2p_fw"><input type="submit" value="Submit" /></td><td class="w2p_fc"></td></tr></table></form>')
        # bart impersonate itself
        self.assertEqual(self.auth.impersonate(bart_id), None)
        self.assertFalse(self.auth.is_impersonating())  # User shouldn't impersonate itself?
        # Bart impersonate Omer
        omer_id = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
        impersonate_form = self.auth.impersonate(user_id=omer_id)
        self.assertTrue(self.auth.is_impersonating())
        self.assertEqual(self.auth.user_id, omer_id)  # we make it really sure
        self.assertEqual(impersonate_form.xml(),
                         b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="auth_user_id__row"><td class="w2p_fl"><label class="readonly" for="auth_user_id" id="auth_user_id__label">Id: </label></td><td class="w2p_fw"><span id="auth_user_id">2</span></td><td class="w2p_fc"></td></tr><tr id="auth_user_first_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_first_name" id="auth_user_first_name__label">First name: </label></td><td class="w2p_fw">Omer</td><td class="w2p_fc"></td></tr><tr id="auth_user_last_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_last_name" id="auth_user_last_name__label">Last name: </label></td><td class="w2p_fw">Simpson</td><td class="w2p_fc"></td></tr><tr id="auth_user_email__row"><td class="w2p_fl"><label class="readonly" for="auth_user_email" id="auth_user_email__label">E-mail: </label></td><td class="w2p_fw">omer@test.com</td><td class="w2p_fc"></td></tr><tr id="auth_user_username__row"><td class="w2p_fl"><label class="readonly" for="auth_user_username" id="auth_user_username__label">Username: </label></td><td class="w2p_fw">omer</td><td class="w2p_fc"></td></tr></table><div style="display:none;"><input name="id" type="hidden" value="2" /></div></form>')
        self.auth.logout_bare()
        # Failing impersonation
        # User lacking impersonate membership
        self.auth.login_user(self.db(self.db.auth_user.username == 'omer').select().first())  # bypass login_bare()
        # self.assertTrue(self.auth.is_logged_in())  # For developing test
        # self.assertFalse(self.auth.is_impersonating())  # For developing test
        self.myassertRaisesRegex(HTTP, "403*", self.auth.impersonate, bart_id)
        self.auth.logout_bare()
        # Try impersonate a non existing user
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
        # self.assertTrue(self.auth.is_logged_in())  # For developing test
        # self.assertFalse(self.auth.is_impersonating())  # For developing test
        self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate, 1000)  # user with id 1000 shouldn't exist
        # Try impersonate user with id = 0 or '0' when bart impersonating omer
        self.auth.impersonate(user_id=omer_id)
        self.assertTrue(self.auth.is_impersonating())
        self.assertEqual(self.auth.impersonate(user_id=0), None)
# TODO: def test_update_groups(self):
def test_groups(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertEqual(self.auth.groups().xml(),
b'<table><tr><td><h3>user_1(1)</h3></td></tr><tr><td><p></p></td></tr></table>')
def test_not_authorized(self):
self.current.request.ajax = 'facke_ajax_request'
self.myassertRaisesRegex(HTTP, "403*", self.auth.not_authorized)
self.current.request.ajax = None
self.assertEqual(self.auth.not_authorized(), self.auth.messages.access_denied)
def test_allows_jwt(self):
self.myassertRaisesRegex(HTTP, "400*", self.auth.allows_jwt)
# TODO: def test_requires(self):
# def test_login(self):
# Basic testing above in "test_basic_blank_forms()" could be refined here
# TODO: def test_requires_login_or_token(self):
# TODO: def test_requires_membership(self):
# TODO: def test_requires_permission(self):
# TODO: def test_requires_signature(self):
def test_add_group(self):
self.assertEqual(self.auth.add_group(role='a_group', description='a_group_role_description'),
self.db(self.db.auth_group.role == 'a_group').select(self.db.auth_group.id).first().id)
def test_del_group(self):
bart_group_id = 1 # Should be group 1, 'user_1'
self.assertEqual(self.auth.del_group(group_id=bart_group_id), None)
def test_id_group(self):
self.assertEqual(self.auth.id_group(role='user_1'), 1)
# If role don't exist it return None
self.assertEqual(self.auth.id_group(role='non_existing_role_name'), None)
def test_user_group(self):
self.assertEqual(self.auth.user_group(user_id=1), 1)
# Bart should be user 1 and it unique group should be 1, 'user_1'
def test_user_group_role(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
user_group_role = 'user_%s' % self.db(self.db.auth_user.username == 'bart'
).select(self.db.auth_user.id).first().id
self.assertEqual(self.auth.user_group_role(), user_group_role)
self.auth.logout_bare()
# with user_id args
self.assertEqual(self.auth.user_group_role(user_id=1), 'user_1')
# test None
self.auth.settings.create_user_groups = None
self.assertEqual(self.auth.user_group_role(user_id=1), None)
def test_has_membership(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertTrue(self.auth.has_membership('user_1'))
self.assertFalse(self.auth.has_membership('user_555'))
self.assertTrue(self.auth.has_membership(group_id=1))
self.auth.logout_bare()
self.assertTrue(self.auth.has_membership(role='user_1', user_id=1))
self.assertTrue(self.auth.has_membership(group_id=1, user_id=1))
# check that event is logged
count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
self.assertTrue(self.auth.has_membership(group_id=1, user_id=1))
count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(count_log_event_test_after, count_log_event_test_before)
def test_add_membership(self):
user = self.db(self.db.auth_user.username == 'bart').select().first() # bypass login_bare()
user_id = user.id
role_name = 'test_add_membership_group'
group_id = self.auth.add_group(role_name)
self.assertFalse(self.auth.has_membership(role_name))
self.auth.add_membership(group_id=group_id, user_id=user_id)
self.assertTrue(self.auth.has_membership(group_id, user_id=user_id))
self.auth.del_membership(group_id=group_id, user_id=user_id)
self.assertFalse(self.auth.has_membership(group_id, user_id=user_id))
self.auth.add_membership(role=role_name, user_id=user_id)
self.assertTrue(self.auth.has_membership(group_id, user_id=user_id))
self.auth.del_membership(group_id=group_id, user_id=user_id)
self.assertFalse(self.auth.has_membership(group_id, user_id=user_id))
with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
self.auth.add_membership(group_id='not_existing_group_name', user_id=user_id)
with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
self.auth.add_membership(role='not_existing_role_name', user_id=user_id)
with self.myassertRaisesRegex(ValueError, '^user_id not provided or invalid$'):
self.auth.add_membership(group_id=group_id, user_id=None)
with self.myassertRaisesRegex(ValueError, '^user_id not provided or invalid$'):
self.auth.add_membership(role=role_name, user_id=None)
self.auth.login_user(user)
self.auth.add_membership(group_id=group_id)
self.assertTrue(self.auth.has_membership(group_id))
self.auth.del_membership(group_id=group_id)
self.assertFalse(self.auth.has_membership(group_id))
self.auth.add_membership(role=role_name)
self.assertTrue(self.auth.has_membership(group_id))
self.auth.del_membership(group_id=group_id)
self.assertFalse(self.auth.has_membership(group_id))
# default usage (group_id=role_name)
self.auth.add_membership(role_name)
self.assertTrue(self.auth.has_membership(group_id))
self.auth.del_membership(group_id=group_id)
self.assertFalse(self.auth.has_membership(group_id))
# re-adding a membership should return the existing membership
record0_id = self.auth.add_membership(group_id)
self.assertTrue(self.auth.has_membership(group_id))
record1_id = self.auth.add_membership(group_id)
self.assertEqual(record0_id, record1_id)
self.auth.del_membership(group_id=group_id)
self.assertFalse(self.auth.has_membership(group_id))
with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
self.auth.add_membership(group_id='not_existing_group_name')
with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
self.auth.add_membership(role='not_existing_role_name')
def test_del_membership(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
user_1_role_id = self.db(self.db.auth_membership.group_id == self.auth.id_group('user_1')
).select(self.db.auth_membership.id).first().id
self.assertEqual(self.auth.del_membership('user_1'), user_1_role_id)
count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
# check that event is logged
self.assertEqual(count_log_event_test_after, count_log_event_test_before)
# not logged in test case
group_id = self.auth.add_group('some_test_group')
membership_id = self.auth.add_membership('some_test_group')
self.assertEqual(self.auth.user_groups[group_id], 'some_test_group')
self.auth.logout_bare()
# not deleted
self.assertFalse(self.auth.del_membership('some_test_group'))
self.assertEqual(set(self.db.auth_membership(membership_id).as_dict().items()),
set({'group_id': 2, 'user_id': 1, 'id': 2}.items())) # is not deleted
# deleted
bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.assertTrue(self.auth.del_membership('some_test_group', user_id=bart_id))
self.assertEqual(self.db.auth_membership(membership_id), None) # is really deleted
def test_has_permission(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.auth.add_permission(group_id=self.auth.id_group('user_1'),
name='some_permission',
table_name='auth_user',
record_id=0,
)
# True case
self.assertTrue(self.auth.has_permission(name='some_permission',
table_name='auth_user',
record_id=0,
user_id=bart_id,
group_id=self.auth.id_group('user_1')))
# False case
self.assertFalse(self.auth.has_permission(name='some_other_permission',
table_name='auth_user',
record_id=0,
user_id=bart_id,
group_id=self.auth.id_group('user_1')))
def test_add_permission(self):
count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
permission_id = \
self.auth.add_permission(group_id=self.auth.id_group('user_1'),
name='some_permission',
table_name='auth_user',
record_id=0,
)
count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
# check that event is logged
self.assertEqual(count_log_event_test_after, count_log_event_test_before)
# True case
permission_count = \
self.db(self.db.auth_permission.id == permission_id).count()
self.assertTrue(permission_count)
# False case
permission_count = \
self.db((self.db.auth_permission.group_id == self.auth.id_group('user_1')) &
(self.db.auth_permission.name == 'no_permission') &
(self.db.auth_permission.table_name == 'no_table') &
(self.db.auth_permission.record_id == 0)).count()
self.assertFalse(permission_count)
# corner case
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
permission_id = \
self.auth.add_permission(group_id=0,
name='user_1_permission',
table_name='auth_user',
record_id=0,
)
permission_name = \
self.db(self.db.auth_permission.id == permission_id).select(self.db.auth_permission.name).first().name
self.assertEqual(permission_name, 'user_1_permission')
# add an existing permission
permission_id =\
self.auth.add_permission(group_id=0,
name='user_1_permission',
table_name='auth_user',
record_id=0,
)
self.assertTrue(permission_id)
def test_del_permission(self):
permission_id = \
self.auth.add_permission(group_id=self.auth.id_group('user_1'),
name='del_permission_test',
table_name='auth_user',
record_id=0,
)
count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
self.assertTrue(self.auth.del_permission(group_id=self.auth.id_group('user_1'),
name='del_permission_test',
table_name='auth_user',
record_id=0,))
count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
# check that event is logged
self.assertEqual(count_log_event_test_after, count_log_event_test_before)
# really deleted
permission_count = \
self.db(self.db.auth_permission.id == permission_id).count()
self.assertFalse(permission_count)
# TODO: def test_accessible_query(self):
# TODO: def test_archive(self):
# TODO: def test_wiki(self):
# TODO: def test_wikimenu(self):
# End Auth test
# TODO: class TestCrud(unittest.TestCase):
# It deprecated so far from a priority
# TODO: class TestService(unittest.TestCase):
# TODO: class TestPluginManager(unittest.TestCase):
# TODO: class TestWiki(unittest.TestCase):
# TODO: class TestConfig(unittest.TestCase):
class TestToolsFunctions(unittest.TestCase):
    """
    Test suite for all the tools.py functions
    """

    def test_prettydate(self):
        """prettydate() humanizes datetimes/dates relative to the current time."""
        now = datetime.datetime.now()
        # past datetimes: (offset subtracted from now, expected label)
        past_cases = [
            (datetime.timedelta(), 'now'),
            (datetime.timedelta(seconds=1), '1 second ago'),
            (datetime.timedelta(seconds=2), '2 seconds ago'),
            (datetime.timedelta(seconds=60), '1 minute ago'),
            (datetime.timedelta(seconds=61), '1 minute ago'),
            (datetime.timedelta(seconds=120), '2 minutes ago'),
            (datetime.timedelta(seconds=121), '2 minutes ago'),
            (datetime.timedelta(seconds=60 * 60), '1 hour ago'),
            (datetime.timedelta(seconds=3601), '1 hour ago'),
            (datetime.timedelta(seconds=2 * 60 * 60), '2 hours ago'),
            (datetime.timedelta(seconds=2 * 60 * 60 + 1), '2 hours ago'),
            (datetime.timedelta(days=1), '1 day ago'),
            (datetime.timedelta(days=2), '2 days ago'),
            (datetime.timedelta(days=7), '1 week ago'),
            (datetime.timedelta(days=8), '1 week ago'),
            (datetime.timedelta(days=14), '2 weeks ago'),
            (datetime.timedelta(days=15), '2 weeks ago'),
            (datetime.timedelta(days=21), '3 weeks ago'),
            (datetime.timedelta(days=27), '1 month ago'),
            (datetime.timedelta(days=28), '1 month ago'),
            (datetime.timedelta(days=60), '2 months ago'),
            (datetime.timedelta(days=90), '3 months ago'),
            (datetime.timedelta(days=365), '1 year ago'),
            (datetime.timedelta(days=366), '1 year ago'),
            (datetime.timedelta(days=2 * 365), '2 years ago'),
            (datetime.timedelta(days=2 * 365 + 1), '2 years ago'),
        ]
        for offset, expected in past_cases:
            self.assertEqual(prettydate(d=now - offset), expected)
        # plain date() inputs
        today = now.date()
        self.assertEqual(prettydate(d=today), 'now')
        self.assertEqual(prettydate(d=today - datetime.timedelta(days=1)), '1 day ago')
        self.assertEqual(prettydate(d=today - datetime.timedelta(days=2)), '2 days ago')
        # future datetimes; sharp boundaries are unreliable because of
        # execution-time jitter, hence the slightly padded offsets
        future_cases = [
            (datetime.timedelta(seconds=-65), '1 minute from now'),
            (datetime.timedelta(hours=-23.5), '23 hours from now'),
            (datetime.timedelta(days=-366), '1 year from now'),
        ]
        for offset, expected in future_cases:
            self.assertEqual(prettydate(d=now - offset), expected)
        # utc=True
        utc_now = datetime.datetime.utcnow()
        self.assertEqual(prettydate(d=utc_now, utc=True), 'now')
        self.assertEqual(prettydate(d=utc_now - datetime.timedelta(seconds=1), utc=True),
                         '1 second ago')
        # missing or invalid input
        self.assertEqual(prettydate(d=None), '')
        self.assertEqual(prettydate(d='invalid_date'), '[invalid date]')
# Shorthand used throughout the Expose fixtures below.
pjoin = os.path.join


def have_symlinks():
    """Symlink-dependent fixtures are only created/checked on POSIX."""
    return os.name == 'posix'
class Test_Expose__in_base(unittest.TestCase):
    """Unit tests for the private Expose.__in_base path-containment helper."""

    def test_in_base(self):
        """Every (sub, base) pair here must be reported as contained."""
        for sub, base in (('/foo/bar', '/foo'),
                          ('/foo', '/foo'),
                          ('/foo', '/'),
                          ('/', '/')):
            self.assertTrue(Expose._Expose__in_base(subdir=sub, basedir=base, sep='/'),
                            '%s is not under %s' % (sub, base))

    def test_not_in_base(self):
        """Prefix collisions and siblings must not count as contained."""
        for sub, base in (('/foobar', '/foo'),
                          ('/foo', '/foo/bar'),
                          ('/bar', '/foo'),
                          ('/foo/bar', '/bar'),
                          ('/', '/x')):
            self.assertFalse(Expose._Expose__in_base(subdir=sub, basedir=base, sep='/'),
                             '%s should not be under %s' % (sub, base))
class TestExpose(unittest.TestCase):
    """Integration tests for gluon.tools.Expose over a real temporary tree.

    The fixture builds two sibling trees ('inside' and 'outside'); Expose is
    rooted at (or above) 'inside', so entries reachable only via symlinks
    into 'outside' must stay hidden unless follow_symlink_out is enabled.
    """

    def setUp(self):
        """Create the fixture tree (diagram below) and stub out tools.URL."""
        self.base_dir = tempfile.mkdtemp()
        self.make_dirs()
        self.touch_files()
        self.make_readme()
        if have_symlinks():
            self.make_symlinks()
        # $BASE/
        # |-- inside/
        # |   |-- dir1/
        # |   |   |-- file1
        # |   |   `-- file2
        # |   |-- dir2/
        # |   |   |-- link_to_dir1/@ -> $BASE/inside/dir1/
        # |   |   `-- link_to_file1@ -> $BASE/inside/dir1/file1
        # |   |-- link_to_outside/@ -> $BASE/outside/
        # |   |-- link_to_file3@ -> $BASE/outside/file3
        # |   `-- README
        # `-- outside/
        #     `-- file3
        self.set_expectations()
        # Expose builds hrefs through tools.URL; pin it to a predictable a/c/f.
        tools.URL = lambda args: URL(a='a', c='c', f='f', args=args)

    def tearDown(self):
        """Undo the URL stub and remove the temporary tree."""
        tools.URL = URL
        shutil.rmtree(self.base_dir)

    def make_dirs(self):
        """setup directory structure"""
        for d in (['inside'],
                  ['inside', 'dir1'],
                  ['inside', 'dir2'],
                  ['outside']):
            os.mkdir(pjoin(self.base_dir, *d))

    def touch_files(self):
        """create some files"""
        for f in (['inside', 'dir1', 'file1'],
                  ['inside', 'dir1', 'file2'],
                  ['outside', 'file3']):
            with open(pjoin(self.base_dir, *f), 'a'):
                pass

    def make_readme(self):
        """Write a small README inside the exposed tree."""
        with open(pjoin(self.base_dir, 'inside', 'README'), 'w') as f:
            f.write('README content')

    def make_symlinks(self):
        """setup extension for posix systems"""
        # inside links
        os.symlink(
            pjoin(self.base_dir, 'inside', 'dir1'),
            pjoin(self.base_dir, 'inside', 'dir2', 'link_to_dir1'))
        os.symlink(
            pjoin(self.base_dir, 'inside', 'dir1', 'file1'),
            pjoin(self.base_dir, 'inside', 'dir2', 'link_to_file1'))
        # outside links
        os.symlink(
            pjoin(self.base_dir, 'outside'),
            pjoin(self.base_dir, 'inside', 'link_to_outside'))
        os.symlink(
            pjoin(self.base_dir, 'outside', 'file3'),
            pjoin(self.base_dir, 'inside', 'link_to_file3'))

    def set_expectations(self):
        """Precompute the folder/file HTML tables Expose should render."""
        url = lambda args: URL('a', 'c', 'f', args=args)
        self.expected_folders = {}
        self.expected_folders['inside'] = SPAN(H3('Folders'), TABLE(
            TR(TD(A('dir1', _href=url(args=['dir1'])))),
            TR(TD(A('dir2', _href=url(args=['dir2'])))),
            _class='table',
        ))
        self.expected_folders[pjoin('inside', 'dir1')] = ''
        if have_symlinks():
            # dir2 only has visible entries when the symlink fixtures exist
            self.expected_folders[pjoin('inside', 'dir2')] = SPAN(H3('Folders'), TABLE(
                TR(TD(A('link_to_dir1', _href=url(args=['dir2', 'link_to_dir1'])))),
                _class='table',
            ))
        else:
            self.expected_folders[pjoin('inside', 'dir2')] = ''
        self.expected_files = {}
        self.expected_files['inside'] = SPAN(H3('Files'), TABLE(
            TR(TD(A('README', _href=url(args=['README']))), TD('')),
            _class='table',
        ))
        self.expected_files[pjoin('inside', 'dir1')] = SPAN(H3('Files'), TABLE(
            TR(TD(A('file1', _href=url(args=['dir1', 'file1']))), TD('')),
            TR(TD(A('file2', _href=url(args=['dir1', 'file2']))), TD('')),
            _class='table',
        ))
        if have_symlinks():
            self.expected_files[pjoin('inside', 'dir2')] = SPAN(H3('Files'), TABLE(
                TR(TD(A('link_to_file1', _href=url(args=['dir2', 'link_to_file1']))), TD('')),
                _class='table',
            ))
        else:
            self.expected_files[pjoin('inside', 'dir2')] = ''

    def make_expose(self, base, show='', follow_symlink_out=False):
        """Return an Expose rooted at base_dir/base, showing sub-path 'show'."""
        current.request = Request(env={})
        current.request.raw_args = show
        current.request.args = show.split('/')
        return Expose(base=pjoin(self.base_dir, base),
                      basename=base,
                      follow_symlink_out=follow_symlink_out)

    def test_expose_inside_state(self):
        """Top of 'inside': symlinks leaving the base are hidden by default."""
        expose = self.make_expose(base='inside', show='')
        self.assertEqual(expose.args, [])
        self.assertEqual(expose.folders, ['dir1', 'dir2'])
        self.assertEqual(expose.filenames, ['README'])

    @unittest.skipUnless(have_symlinks(), 'requires symlinks')
    def test_expose_inside_state_floow_symlink_out(self):
        """follow_symlink_out=True also lists links that leave the base."""
        expose = self.make_expose(base='inside', show='',
                                  follow_symlink_out=True)
        self.assertEqual(expose.args, [])
        self.assertEqual(expose.folders, ['dir1', 'dir2', 'link_to_outside'])
        self.assertEqual(expose.filenames, ['README', 'link_to_file3'])

    def test_expose_inside_dir1_state(self):
        """dir1 has only plain files."""
        expose = self.make_expose(base='inside', show='dir1')
        self.assertEqual(expose.args, ['dir1'])
        self.assertEqual(expose.folders, [])
        self.assertEqual(expose.filenames, ['file1', 'file2'])

    def test_expose_inside_dir2_state(self):
        """dir2 holds only symlinks, so listings depend on platform support."""
        expose = self.make_expose(base='inside', show='dir2')
        self.assertEqual(expose.args, ['dir2'])
        if have_symlinks():
            self.assertEqual(expose.folders, ['link_to_dir1'])
            self.assertEqual(expose.filenames, ['link_to_file1'])
        else:
            self.assertEqual(expose.folders, [])
            self.assertEqual(expose.filenames, [])

    def test_expose_base_inside_state(self):
        """Rooted at $BASE itself, 'inside' links no longer point outside the base."""
        expose = self.make_expose(base='', show='inside')
        self.assertEqual(expose.args, ['inside'])
        if have_symlinks():
            self.assertEqual(expose.folders, ['dir1', 'dir2', 'link_to_outside'])
            self.assertEqual(expose.filenames, ['README', 'link_to_file3'])
        else:
            self.assertEqual(expose.folders, ['dir1', 'dir2'])
            self.assertEqual(expose.filenames, ['README'])

    def test_expose_base_inside_dir2_state(self):
        """Same as dir2 above but rooted at $BASE."""
        expose = self.make_expose(base='', show='inside/dir2')
        self.assertEqual(expose.args, ['inside', 'dir2'])
        if have_symlinks():
            self.assertEqual(expose.folders, ['link_to_dir1'])
            self.assertEqual(expose.filenames, ['link_to_file1'])
        else:
            self.assertEqual(expose.folders, [])
            self.assertEqual(expose.filenames, [])

    def assertSameXML(self, a, b):
        """Compare two helpers (or plain strings) by their serialized XML."""
        self.assertEqual(a if isinstance(a, str) else a.xml(),
                         b if isinstance(b, str) else b.xml())

    def run_test_xml_for(self, base, show):
        """Assert rendered file/folder tables match the precomputed expectations."""
        expose = self.make_expose(base, show)
        path = pjoin(base, show).rstrip(os.path.sep)
        request = Request(env={})  # NOTE(review): appears unused — confirm before removing
        self.assertSameXML(expose.table_files(), self.expected_files[path])
        self.assertSameXML(expose.table_folders(), self.expected_folders[path])

    def test_xml_inside(self):
        self.run_test_xml_for(base='inside', show='')

    def test_xml_dir1(self):
        self.run_test_xml_for(base='inside', show='dir1')

    def test_xml_dir2(self):
        self.run_test_xml_for(base='inside', show='dir2')

    def test_file_not_found(self):
        """A missing path must raise HTTP (not leak a traceback)."""
        with self.assertRaises(HTTP):
            self.make_expose(base='inside', show='dir1/file_not_found')

    def test_not_authorized(self):
        """Following a symlink out of the base without permission raises HTTP."""
        with self.assertRaises(HTTP):
            self.make_expose(base='inside', show='link_to_file3')
class Test_OpenRedirectPrevention(unittest.TestCase):
    """prevent_open_redirect() must block off-site redirect targets."""

    def test_open_redirect(self):
        """Bad targets come back as None; safe relative targets pass through.

        BUG FIX: the original list was missing commas after two entries
        ("/\\example.com" and "~/example.com"), so adjacent string literals
        were silently concatenated and those cases were never exercised
        individually.
        """
        bad_urls = [
            "/",
            "//",
            "~/",
            "//example.com",
            "/\\example.com",
            "~/example.com",
            "//example.com/a/b/c",
            "//example.com/a/b/c",
            "~/example.com/a/b/c",
        ]
        good_urls = [
            "a/b/c",
            "/a",
            "/a/b",
            "/a/b/c",
        ]
        # Every URL is also tried behind each scheme prefix.
        prefixes = ["", ":", "http:", "https:", "ftp:"]
        for prefix in prefixes:
            for url in bad_urls:
                self.assertEqual(prevent_open_redirect(prefix + url), None)
        for prefix in prefixes:
            for url in good_urls:
                self.assertEqual(prevent_open_redirect(prefix + url), prefix + url)
| open_redirect | {
"code": [
"from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose"
],
"line_no": [
20
]
} | {
"code": [
"from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose, prevent_open_redirect",
"class Test_OpenRedirectPrevention(unittest.TestCase):",
" def test_open_redirect(self):",
" bad_urls = [",
" \"/\",",
" \"//\",",
" \"~/\",",
" \"//example.com\",",
" \"/\\example.com\"",
" \"~/example.com\"",
" \"//example.com/a/b/c\",",
" \"//example.com/a/b/c\",",
" \"~/example.com/a/b/c\"",
" ]",
" good_urls = [",
" \"a/b/c\",",
" \"/a\",",
" \"/a/b\",",
" \"/a/b/c\",",
" ]",
" prefixes = [\"\", \":\", \"http:\", \"https:\", \"ftp:\"]",
" for prefix in prefixes:",
" for url in bad_urls:",
" self.assertEqual(prevent_open_redirect(prefix + url), None)",
" for prefix in prefixes:",
" for url in good_urls:",
" self.assertEqual(prevent_open_redirect(prefix + url), prefix + url)"
],
"line_no": [
20,
1385,
1387,
1388,
1389,
1390,
1391,
1392,
1393,
1394,
1395,
1396,
1397,
1398,
1399,
1400,
1401,
1402,
1403,
1404,
1405,
1406,
1407,
1408,
1409,
1410,
1411
]
} |
import os
import sys
import shutil
import tempfile
import smtplib
import .datetime
import unittest
VAR_0 = os.getenv('DB', 'sqlite:memory')
from gluon.dal import DAL, Field
from pydal.objects import .Table
from gluon import .tools
from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose
from gluon._compat import PY2, to_bytes
from gluon.globals import Request, Response, Session
from gluon.storage import Storage
from gluon.languages import .TranslatorFactory
from gluon.http import HTTP
from gluon import SPAN, H3, TABLE, TR, TD, A, URL, current
VAR_1 = "imap" in VAR_0
class VAR_23(unittest.TestCase):
class CLASS_6(object):
def __init__(self, VAR_10, VAR_11, VAR_12):
self.sender = VAR_10
self.to = VAR_11
self.payload = VAR_12
self._parsed_payload = None
@property
def FUNC_63(self):
if self._parsed_payload is None:
import email
self._parsed_payload = email.message_from_string(self.payload)
return self._parsed_payload
class CLASS_7(object):
VAR_13 = []
VAR_14 = {}
def __init__(self, VAR_15, VAR_16, **VAR_4):
self.address = VAR_15
self.port = VAR_16
self.has_quit = False
self.tls = False
def FUNC_64(self, VAR_17, VAR_18):
if VAR_17 not in self.users or self.users[VAR_17] != VAR_18:
raise smtplib.SMTPAuthenticationError
self.username = VAR_17
self.password = VAR_18
def FUNC_65(self, VAR_10, VAR_11, VAR_12):
self.inbox.append(VAR_23.Message(VAR_10, VAR_11, VAR_12))
def FUNC_66(self):
self.has_quit = True
def FUNC_67(self, VAR_19=None):
pass
def FUNC_68(self):
self.tls = True
def FUNC_1(self):
self.original_SMTP = smtplib.SMTP
self.original_SMTP_SSL = smtplib.SMTP_SSL
smtplib.SMTP = VAR_23.DummySMTP
smtplib.SMTP_SSL = VAR_23.DummySMTP
def FUNC_2(self):
smtplib.SMTP = self.original_SMTP
smtplib.SMTP_SSL = self.original_SMTP_SSL
def FUNC_3(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertEqual(VAR_21.sender, VAR_20.settings.sender)
self.assertEqual(VAR_21.to, ['somebody@example.com'])
VAR_22 = "To: somebody@example.com\nReply-To: us@example.com\nSubject: hello\n"
self.assertTrue(VAR_22 in VAR_21.payload)
self.assertTrue(VAR_21.payload.endswith('world'))
def FUNC_4(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.login = 'username:password'
self.assertFalse(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
def FUNC_5(self):
VAR_23.DummySMTP.users['username'] = 'password'
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.login = 'username:password'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
del VAR_23.DummySMTP.users['username']
VAR_23.DummySMTP.inbox.pop()
def FUNC_6(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='<html><head></head><body></body></html>'))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertTrue('Content-Type: text/html' in VAR_21.payload)
def FUNC_7(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
VAR_21=('Text only', '<html><pre>HTML Only</pre></html>')))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertTrue(VAR_21.parsed_payload.is_multipart())
self.assertTrue(VAR_21.parsed_payload.get_content_type() == 'multipart/alternative')
VAR_24 = VAR_21.parsed_payload.get_payload()
self.assertTrue('Text only' in VAR_24[0].as_string())
self.assertTrue('<html><pre>HTML Only</pre></html>' in VAR_24[1].as_string())
def FUNC_8(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.ssl = True
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
VAR_23.DummySMTP.inbox.pop()
def FUNC_9(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.tls = True
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
VAR_23.DummySMTP.inbox.pop()
def FUNC_10(self):
VAR_25 = os.path.abspath(__file__)
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
VAR_21='world',
attachments=Mail.Attachment(VAR_25)))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
VAR_26 = VAR_21.parsed_payload.get_payload(1).get_payload(decode=True)
with open(VAR_25, 'rb') as mf:
self.assertEqual(to_bytes(VAR_26), to_bytes(mf.read()))
VAR_27 = open(VAR_25)
self.assertRaises(Exception, lambda *VAR_3, **VAR_4: Mail.Attachment(*VAR_3, **VAR_4), VAR_27)
stream.close()
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
VAR_21='world',
attachments=Mail.Attachment(VAR_25, content_id='trololo', content_type='tra/lala')))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertTrue('Content-Type: tra/lala' in VAR_21.payload)
self.assertTrue('Content-Id: <trololo>' in VAR_21.payload)
class CLASS_1(unittest.TestCase):
def FUNC_1(self):
from gluon.tools import AuthJWT
from gluon import current
self.request = Request(env={})
self.request.application = 'a'
self.request.controller = 'c'
self.request.function = 'f'
self.request.folder = 'applications/admin'
self.current = current
self.current.request = self.request
self.db = DAL(VAR_0, check_reserved=['all'])
self.auth = Auth(self.db)
self.auth.define_tables(VAR_17=True, signature=False)
self.user_data = dict(VAR_17='jwtuser', VAR_18='jwtuser123')
self.db.auth_user.insert(VAR_17=self.user_data['username'],
VAR_18=str(
self.db.auth_user.password.requires[0](
self.user_data['password'])[0]))
self.jwtauth = AuthJWT(self.auth, secret_key='secret', verify_expiration=True)
def FUNC_11(self):
import gluon.serializers
self.request.vars.update(self.user_data)
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
del self.request.vars['username']
del self.request.vars['password']
self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
def FUNC_12(self):
import gluon.serializers
self.request.vars.update(self.user_data)
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
del self.request.vars['username']
del self.request.vars['password']
self.token = self.jwtauth.jwt_token_manager()
self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
@self.jwtauth.allows_jwt()
def FUNC_69():
self.assertEqual(self.user_data['username'], self.auth.user.username)
FUNC_69()
@unittest.skipIf(VAR_1, "TODO: Imap raises 'Connection refused'")
class CLASS_2(unittest.TestCase):
def FUNC_13(self, *VAR_3, **VAR_4):
if PY2:
return getattr(self, 'assertRaisesRegexp')(*VAR_3, **VAR_4)
return getattr(self, 'assertRaisesRegex')(*VAR_3, **VAR_4)
def FUNC_1(self):
self.request = Request(env={})
self.request.application = 'a'
self.request.controller = 'c'
self.request.function = 'f'
self.request.folder = 'applications/admin'
self.response = Response()
self.session = Session()
VAR_28 = TranslatorFactory('', 'en')
self.session.connect(self.request, self.response)
from gluon.globals import current
self.current = current
self.current.request = self.request
self.current.response = self.response
self.current.session = self.session
self.current.T = VAR_28
self.db = DAL(VAR_0, check_reserved=['all'])
self.auth = Auth(self.db)
self.auth.define_tables(VAR_17=True, signature=False)
self.db.define_table('t0', Field('tt'), self.auth.signature)
self.auth.enable_record_versioning(self.db)
self.auth.settings.registration_requires_verification = False
self.auth.settings.registration_requires_approval = False
self.auth.register_bare(first_name='Bart',
last_name='Simpson',
VAR_17='bart',
email='bart@simpson.com',
VAR_18='bart_password')
def FUNC_14(self):
self.assertTrue('auth_user' in self.db)
self.assertTrue('auth_group' in self.db)
self.assertTrue('auth_membership' in self.db)
self.assertTrue('auth_permission' in self.db)
self.assertTrue('auth_event' in self.db)
VAR_29 = self.db(self.db.auth_user.username == 'bart').select().first()
self.assertEqual(VAR_29['username'], 'bart')
self.assertEqual(VAR_29['registration_key'], '')
VAR_30 = self.db(self.db.auth_user.username == 'bart').select().first().id
VAR_31 = self.db(self.db.auth_group.role == 'user_{0}'.format(VAR_30)).select().first().id
self.assertTrue(self.db((self.db.auth_membership.group_id == VAR_31) &
(self.db.auth_membership.user_id == VAR_30)).select().first())
def FUNC_15(self):
for f in ['login', 'retrieve_password', 'retrieve_username', 'register']:
VAR_89 = getattr(self.auth, f)().xml()
self.assertTrue(b'name="_formkey"' in VAR_89)
for f in ['logout', 'verify_email', 'reset_password', 'change_password', 'profile', 'groups']:
self.myassertRaisesRegex(HTTP, "303*", getattr(self.auth, f))
self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate)
try:
for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
'auth_membership', 'auth_permission', 'auth_group',
'auth_user']:
self.db[t].drop()
except SyntaxError as e:
pass
return
def FUNC_16(self):
self.current.request.vars._next = 'next_test'
self.assertEqual(self.auth.get_vars_next(), 'next_test')
def FUNC_17(self):
self.assertTrue('t0_archive' in self.db)
def FUNC_18(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.auth.log_event(description='some_log_event_description_%(var1)s',
vars={"var1": "var1"},
origin='log_event_test_1')
VAR_32 = self.db(self.db.auth_event.origin == 'log_event_test_1'
).select(*[self.db.auth_event[f]
for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
self.assertEqual(set(VAR_32.items()), set({'origin': 'log_event_test_1',
'client_ip': None,
'user_id': VAR_30,
'description': 'some_log_event_description_var1'}.items()))
self.auth.logout_bare()
self.auth.log_event(description='some_log_event_description_%(var2)s',
vars={"var2": "var2"},
origin='log_event_test_2')
VAR_32 = self.db(self.db.auth_event.origin == 'log_event_test_2'
).select(*[self.db.auth_event[f]
for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
self.assertEqual(set(VAR_32.items()), set({'origin': 'log_event_test_2',
'client_ip': None,
'user_id': None,
'description': 'some_log_event_description_var2'}.items()))
self.auth.settings.logging_enabled = False
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.auth.log_event(description='some_log_event_description_%(var3)s',
vars={"var3": "var3"},
origin='log_event_test_3')
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
self.auth.settings.logging_enabled = True
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.auth.log_event(description=None,
vars={"var4": "var4"},
origin='log_event_test_4')
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
def FUNC_19(self):
self.db.auth_user.insert(email='user1@test.com', VAR_17='user1', VAR_18='password_123')
self.db.commit()
self.assertEqual(self.auth.get_or_create_user({'email': 'user1@test.com',
'username': 'user1',
'password': 'password_123'
})['username'], 'user1')
self.assertEqual(self.auth.get_or_create_user({'email': 'user2@test.com',
'username': 'user2'})['username'], 'user2')
self.assertEqual(self.auth.get_or_create_user({'first_name': 'Omer',
'last_name': 'Simpson',
'email': 'user3@test.com',
'registration_id': 'user3',
'username': 'user3'})['username'], 'user3')
self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
self.db.auth_user.truncate()
self.db.commit()
def FUNC_20(self):
self.auth.login_bare(VAR_17='bart', VAR_18='bart_password')
self.assertTrue(self.auth.is_logged_in())
self.auth.logout_bare()
self.assertFalse(self.auth.login_bare(VAR_17='bart', VAR_18='wrong_password'))
def FUNC_21(self):
self.assertRaises(ValueError, self.auth.register_bare)
self.assertEqual(self.auth.register_bare(VAR_17='bart', VAR_18='wrong_password'), False)
self.assertEqual(self.auth.register_bare(VAR_17='user2',
email='user2@test.com',
VAR_18='password_123')['username'], 'user2')
self.assertRaises(ValueError, self.auth.register_bare,
**dict(wrong_field_name='user3', VAR_18='password_123'))
self.assertRaises(ValueError, self.auth.register_bare,
**dict(email='user4@', VAR_18='password_123'))
self.db.auth_user.truncate()
self.db.commit()
def FUNC_22(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertTrue(self.auth.is_logged_in())
self.auth.logout_bare()
self.assertFalse(self.auth.is_logged_in())
def FUNC_23(self):
self.auth.user = 'logged_in'
self.assertTrue(self.auth.is_logged_in())
self.auth.user = None
self.assertFalse(self.auth.is_logged_in())
def FUNC_24(self):
self.assertTrue(self.auth.random_password())
def FUNC_25(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.auth.settings.bulk_register_enabled = True
VAR_35 = self.auth.bulk_register(max_emails=10).xml()
self.assertTrue(b'name="_formkey"' in VAR_35)
def FUNC_26(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_36 = getattr(self.auth, 'change_password')().xml()
self.assertTrue(b'name="_formkey"' in VAR_36)
def FUNC_27(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_37 = getattr(self.auth, 'profile')().xml()
self.assertTrue(b'name="_formkey"' in VAR_37)
def FUNC_28(self):
self.auth.get_or_create_user(dict(first_name='Omer',
last_name='Simpson',
VAR_17='omer',
email='omer@test.com',
VAR_18='password_omer',
registration_key='',
registration_id=''),
FUNC_64=False)
self.db.commit()
self.assertFalse(self.auth.is_logged_in())
VAR_38 = self.auth.add_group('impersonate')
self.auth.add_membership(VAR_43=self.db(self.db.auth_user.username == 'bart'
).select(self.db.auth_user.id).first().id,
VAR_38=group_id)
self.auth.add_permission(VAR_38=group_id,
name='impersonate',
table_name='auth_user',
record_id=0)
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertTrue(self.auth.is_logged_in())
VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.assertEqual(self.auth.user_id, VAR_30)
self.assertEqual(self.auth.impersonate().xml(),
b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="no_table_user_id__row"><td class="w2p_fl"><label class="" for="no_table_user_id" id="no_table_user_id__label">User Id: </label></td><td class="w2p_fw"><input class="integer" id="no_table_user_id" name="user_id" type="text" value="" /></td><td class="w2p_fc"></td></tr><tr id="submit_record__row"><td class="w2p_fl"></td><td class="w2p_fw"><input type="submit" value="Submit" /></td><td class="w2p_fc"></td></tr></table></form>')
self.assertEqual(self.auth.impersonate(VAR_30), None)
self.assertFalse(self.auth.is_impersonating()) # User shouldn't impersonate itself?
VAR_39 = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
VAR_40 = self.auth.impersonate(VAR_43=VAR_39)
self.assertTrue(self.auth.is_impersonating())
self.assertEqual(self.auth.user_id, VAR_39) # we make it really sure
self.assertEqual(VAR_40.xml(),
b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="auth_user_id__row"><td class="w2p_fl"><label class="readonly" for="auth_user_id" id="auth_user_id__label">Id: </label></td><td class="w2p_fw"><span id="auth_user_id">2</span></td><td class="w2p_fc"></td></tr><tr id="auth_user_first_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_first_name" id="auth_user_first_name__label">First name: </label></td><td class="w2p_fw">Omer</td><td class="w2p_fc"></td></tr><tr id="auth_user_last_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_last_name" id="auth_user_last_name__label">Last name: </label></td><td class="w2p_fw">Simpson</td><td class="w2p_fc"></td></tr><tr id="auth_user_email__row"><td class="w2p_fl"><label class="readonly" for="auth_user_email" id="auth_user_email__label">E-VAR_20: </label></td><td class="w2p_fw">omer@test.com</td><td class="w2p_fc"></td></tr><tr id="auth_user_username__row"><td class="w2p_fl"><label class="readonly" for="auth_user_username" id="auth_user_username__label">Username: </label></td><td class="w2p_fw">omer</td><td class="w2p_fc"></td></tr></table><div style="display:none;"><input name="id" type="hidden" value="2" /></div></form>')
self.auth.logout_bare()
self.auth.login_user(self.db(self.db.auth_user.username == 'omer').select().first()) # bypass login_bare()
self.myassertRaisesRegex(HTTP, "403*", self.auth.impersonate, VAR_30)
self.auth.logout_bare()
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate, 1000) # VAR_42 with id 1000 shouldn't exist
self.auth.impersonate(VAR_43=VAR_39)
self.assertTrue(self.auth.is_impersonating())
self.assertEqual(self.auth.impersonate(VAR_43=0), None)
def FUNC_29(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertEqual(self.auth.groups().xml(),
b'<table><tr><td><h3>user_1(1)</h3></td></tr><tr><td><p></p></td></tr></table>')
def FUNC_30(self):
self.current.request.ajax = 'facke_ajax_request'
self.myassertRaisesRegex(HTTP, "403*", self.auth.not_authorized)
self.current.request.ajax = None
self.assertEqual(self.auth.not_authorized(), self.auth.messages.access_denied)
def FUNC_12(self):
self.myassertRaisesRegex(HTTP, "400*", self.auth.allows_jwt)
def FUNC_31(self):
self.assertEqual(self.auth.add_group(role='a_group', description='a_group_role_description'),
self.db(self.db.auth_group.role == 'a_group').select(self.db.auth_group.id).first().id)
def FUNC_32(self):
VAR_31 = 1 # Should be group 1, 'user_1'
self.assertEqual(self.auth.del_group(VAR_38=VAR_31), None)
def FUNC_33(self):
self.assertEqual(self.auth.id_group(role='user_1'), 1)
self.assertEqual(self.auth.id_group(role='non_existing_role_name'), None)
def FUNC_34(self):
self.assertEqual(self.auth.user_group(VAR_43=1), 1)
def FUNC_35(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_41 = 'user_%s' % self.db(self.db.auth_user.username == 'bart'
).select(self.db.auth_user.id).first().id
self.assertEqual(self.auth.user_group_role(), VAR_41)
self.auth.logout_bare()
self.assertEqual(self.auth.user_group_role(VAR_43=1), 'user_1')
self.auth.settings.create_user_groups = None
self.assertEqual(self.auth.user_group_role(VAR_43=1), None)
def FUNC_36(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertTrue(self.auth.has_membership('user_1'))
self.assertFalse(self.auth.has_membership('user_555'))
self.assertTrue(self.auth.has_membership(VAR_38=1))
self.auth.logout_bare()
self.assertTrue(self.auth.has_membership(role='user_1', VAR_43=1))
self.assertTrue(self.auth.has_membership(VAR_38=1, VAR_43=1))
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.assertTrue(self.auth.has_membership(VAR_38=1, VAR_43=1))
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
def FUNC_37(self):
VAR_42 = self.db(self.db.auth_user.username == 'bart').select().first() # bypass login_bare()
VAR_43 = VAR_42.id
VAR_44 = 'test_add_membership_group'
VAR_38 = self.auth.add_group(VAR_44)
self.assertFalse(self.auth.has_membership(VAR_44))
self.auth.add_membership(VAR_38=group_id, VAR_43=user_id)
self.assertTrue(self.auth.has_membership(VAR_38, VAR_43=user_id))
self.auth.del_membership(VAR_38=group_id, VAR_43=user_id)
self.assertFalse(self.auth.has_membership(VAR_38, VAR_43=user_id))
self.auth.add_membership(role=VAR_44, VAR_43=user_id)
self.assertTrue(self.auth.has_membership(VAR_38, VAR_43=user_id))
self.auth.del_membership(VAR_38=group_id, VAR_43=user_id)
self.assertFalse(self.auth.has_membership(VAR_38, VAR_43=user_id))
with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
self.auth.add_membership(VAR_38='not_existing_group_name', VAR_43=user_id)
with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
self.auth.add_membership(role='not_existing_role_name', VAR_43=user_id)
with self.myassertRaisesRegex(ValueError, '^VAR_43 not provided or invalid$'):
self.auth.add_membership(VAR_38=group_id, VAR_43=None)
with self.myassertRaisesRegex(ValueError, '^VAR_43 not provided or invalid$'):
self.auth.add_membership(role=VAR_44, VAR_43=None)
self.auth.login_user(VAR_42)
self.auth.add_membership(VAR_38=group_id)
self.assertTrue(self.auth.has_membership(VAR_38))
self.auth.del_membership(VAR_38=group_id)
self.assertFalse(self.auth.has_membership(VAR_38))
self.auth.add_membership(role=VAR_44)
self.assertTrue(self.auth.has_membership(VAR_38))
self.auth.del_membership(VAR_38=group_id)
self.assertFalse(self.auth.has_membership(VAR_38))
self.auth.add_membership(VAR_44)
self.assertTrue(self.auth.has_membership(VAR_38))
self.auth.del_membership(VAR_38=group_id)
self.assertFalse(self.auth.has_membership(VAR_38))
VAR_45 = self.auth.add_membership(VAR_38)
self.assertTrue(self.auth.has_membership(VAR_38))
VAR_46 = self.auth.add_membership(VAR_38)
self.assertEqual(VAR_45, VAR_46)
self.auth.del_membership(VAR_38=group_id)
self.assertFalse(self.auth.has_membership(VAR_38))
with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
self.auth.add_membership(VAR_38='not_existing_group_name')
with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
self.auth.add_membership(role='not_existing_role_name')
def FUNC_38(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_33 = self.db(self.db.auth_event.id > 0).count()
VAR_47 = self.db(self.db.auth_membership.group_id == self.auth.id_group('user_1')
).select(self.db.auth_membership.id).first().id
self.assertEqual(self.auth.del_membership('user_1'), VAR_47)
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
VAR_38 = self.auth.add_group('some_test_group')
VAR_48 = self.auth.add_membership('some_test_group')
self.assertEqual(self.auth.user_groups[VAR_38], 'some_test_group')
self.auth.logout_bare()
self.assertFalse(self.auth.del_membership('some_test_group'))
self.assertEqual(set(self.db.auth_membership(VAR_48).as_dict().items()),
set({'group_id': 2, 'user_id': 1, 'id': 2}.items())) # is not deleted
VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.assertTrue(self.auth.del_membership('some_test_group', VAR_43=VAR_30))
self.assertEqual(self.db.auth_membership(VAR_48), None) # is really deleted
def FUNC_39(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
name='some_permission',
table_name='auth_user',
record_id=0,
)
self.assertTrue(self.auth.has_permission(name='some_permission',
table_name='auth_user',
record_id=0,
VAR_43=VAR_30,
VAR_38=self.auth.id_group('user_1')))
self.assertFalse(self.auth.has_permission(name='some_other_permission',
table_name='auth_user',
record_id=0,
VAR_43=VAR_30,
VAR_38=self.auth.id_group('user_1')))
def FUNC_40(self):
VAR_33 = self.db(self.db.auth_event.id > 0).count()
VAR_49 = \
self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
name='some_permission',
table_name='auth_user',
record_id=0,
)
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
VAR_50 = \
self.db(self.db.auth_permission.id == VAR_49).count()
self.assertTrue(VAR_50)
VAR_50 = \
self.db((self.db.auth_permission.group_id == self.auth.id_group('user_1')) &
(self.db.auth_permission.name == 'no_permission') &
(self.db.auth_permission.table_name == 'no_table') &
(self.db.auth_permission.record_id == 0)).count()
self.assertFalse(VAR_50)
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_49 = \
self.auth.add_permission(VAR_38=0,
name='user_1_permission',
table_name='auth_user',
record_id=0,
)
VAR_51 = \
self.db(self.db.auth_permission.id == VAR_49).select(self.db.auth_permission.name).first().name
self.assertEqual(VAR_51, 'user_1_permission')
VAR_49 =\
self.auth.add_permission(VAR_38=0,
name='user_1_permission',
table_name='auth_user',
record_id=0,
)
self.assertTrue(VAR_49)
def FUNC_41(self):
VAR_49 = \
self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
name='del_permission_test',
table_name='auth_user',
record_id=0,
)
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.assertTrue(self.auth.del_permission(VAR_38=self.auth.id_group('user_1'),
name='del_permission_test',
table_name='auth_user',
record_id=0,))
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
VAR_50 = \
self.db(self.db.auth_permission.id == VAR_49).count()
self.assertFalse(VAR_50)
class CLASS_3(unittest.TestCase):
def FUNC_42(self):
VAR_52 = datetime.datetime.now()
self.assertEqual(prettydate(VAR_78=VAR_52), 'now')
VAR_53 = VAR_52 - datetime.timedelta(seconds=1)
self.assertEqual(prettydate(VAR_78=VAR_53), '1 second ago')
VAR_54 = VAR_52 - datetime.timedelta(seconds=2)
self.assertEqual(prettydate(VAR_78=VAR_54), '2 seconds ago')
VAR_55 = VAR_52 - datetime.timedelta(seconds=60)
self.assertEqual(prettydate(VAR_78=VAR_55), '1 minute ago')
VAR_56 = VAR_52 - datetime.timedelta(seconds=61)
self.assertEqual(prettydate(VAR_78=VAR_56), '1 minute ago')
VAR_57 = VAR_52 - datetime.timedelta(seconds=120)
self.assertEqual(prettydate(VAR_78=VAR_57), '2 minutes ago')
VAR_58 = VAR_52 - datetime.timedelta(seconds=121)
self.assertEqual(prettydate(VAR_78=VAR_58), '2 minutes ago')
VAR_59 = VAR_52 - datetime.timedelta(seconds=60 * 60)
self.assertEqual(prettydate(VAR_78=VAR_59), '1 hour ago')
VAR_60 = VAR_52 - datetime.timedelta(seconds=3601)
self.assertEqual(prettydate(VAR_78=VAR_60), '1 hour ago')
VAR_61 = VAR_52 - datetime.timedelta(seconds=2 * 60 * 60)
self.assertEqual(prettydate(VAR_78=VAR_61), '2 hours ago')
VAR_62 = VAR_52 - datetime.timedelta(seconds=2 * 60 * 60 + 1)
self.assertEqual(prettydate(VAR_78=VAR_62), '2 hours ago')
VAR_63 = VAR_52 - datetime.timedelta(days=1)
self.assertEqual(prettydate(VAR_78=VAR_63), '1 day ago')
VAR_64 = VAR_52 - datetime.timedelta(days=2)
self.assertEqual(prettydate(VAR_78=VAR_64), '2 days ago')
VAR_65 = VAR_52 - datetime.timedelta(days=7)
self.assertEqual(prettydate(VAR_78=VAR_65), '1 week ago')
VAR_66 = VAR_52 - datetime.timedelta(days=8)
self.assertEqual(prettydate(VAR_78=VAR_66), '1 week ago')
VAR_67 = VAR_52 - datetime.timedelta(days=14)
self.assertEqual(prettydate(VAR_78=VAR_67), '2 weeks ago')
VAR_68 = VAR_52 - datetime.timedelta(days=15)
self.assertEqual(prettydate(VAR_78=VAR_68), '2 weeks ago')
VAR_69 = VAR_52 - datetime.timedelta(days=21)
self.assertEqual(prettydate(VAR_78=VAR_69), '3 weeks ago')
VAR_70 = VAR_52 - datetime.timedelta(days=27)
self.assertEqual(prettydate(VAR_78=VAR_70), '1 month ago')
VAR_71 = VAR_52 - datetime.timedelta(days=28)
self.assertEqual(prettydate(VAR_78=VAR_71), '1 month ago')
VAR_72 = VAR_52 - datetime.timedelta(days=60)
self.assertEqual(prettydate(VAR_78=VAR_72), '2 months ago')
VAR_73 = VAR_52 - datetime.timedelta(days=90)
self.assertEqual(prettydate(VAR_78=VAR_73), '3 months ago')
VAR_74 = VAR_52 - datetime.timedelta(days=365)
self.assertEqual(prettydate(VAR_78=VAR_74), '1 year ago')
VAR_75 = VAR_52 - datetime.timedelta(days=366)
self.assertEqual(prettydate(VAR_78=VAR_75), '1 year ago')
VAR_76 = VAR_52 - datetime.timedelta(days=2 * 365)
self.assertEqual(prettydate(VAR_78=VAR_76), '2 years ago')
VAR_77 = VAR_52 - datetime.timedelta(days=2 * 365 + 1)
self.assertEqual(prettydate(VAR_78=VAR_77), '2 years ago')
VAR_78 = VAR_52.date()
self.assertEqual(prettydate(VAR_78=d), 'now')
VAR_63 = VAR_52.date() - datetime.timedelta(days=1)
self.assertEqual(prettydate(VAR_78=VAR_63), '1 day ago')
VAR_79 = VAR_52.date() - datetime.timedelta(days=2)
self.assertEqual(prettydate(VAR_78=VAR_79), '2 days ago')
VAR_80 = VAR_52 - datetime.timedelta(seconds=-65)
self.assertEqual(prettydate(VAR_78=VAR_80), '1 minute from now')
VAR_81 = VAR_52 - datetime.timedelta(hours=-23.5)
self.assertEqual(prettydate(VAR_78=VAR_81), '23 hours from now')
VAR_82 = VAR_52 - datetime.timedelta(days=-366)
self.assertEqual(prettydate(VAR_78=VAR_82), '1 year from now')
VAR_52 = datetime.datetime.utcnow()
self.assertEqual(prettydate(VAR_78=VAR_52, utc=True), 'now')
VAR_53 = VAR_52 - datetime.timedelta(seconds=1)
self.assertEqual(prettydate(VAR_78=VAR_53, utc=True), '1 second ago')
self.assertEqual(prettydate(VAR_78=None), '')
self.assertEqual(prettydate(VAR_78='invalid_date'), '[invalid date]')
VAR_2 = os.path.join
def FUNC_0():
return os.name == 'posix'
class CLASS_4(unittest.TestCase):
def FUNC_43(self):
VAR_83 = [
('/foo/bar', '/foo'),
('/foo', '/foo'),
('/foo', '/'),
('/', '/'),
]
for sub, VAR_5 in VAR_83:
self.assertTrue(Expose._Expose__in_base(subdir=sub, basedir=VAR_5, sep='/'),
'%s is not under %s' % (sub, VAR_5))
def FUNC_44(self):
VAR_84 = [
('/foobar', '/foo'),
('/foo', '/foo/bar'),
('/bar', '/foo'),
('/foo/bar', '/bar'),
('/', '/x'),
]
for sub, VAR_5 in VAR_84:
self.assertFalse(Expose._Expose__in_base(subdir=sub, basedir=VAR_5, sep='/'),
'%s should not be under %s' % (sub, VAR_5))
class CLASS_5(unittest.TestCase):
def FUNC_1(self):
self.base_dir = tempfile.mkdtemp()
self.make_dirs()
self.touch_files()
self.make_readme()
if FUNC_0():
self.make_symlinks()
self.set_expectations()
tools.URL = lambda VAR_3: URL(VAR_8='a', c='c', f='f', VAR_3=args)
def FUNC_2(self):
tools.URL = URL
shutil.rmtree(self.base_dir)
def FUNC_45(self):
for VAR_78 in (['inside'],
['inside', 'dir1'],
['inside', 'dir2'],
['outside']):
os.mkdir(VAR_2(self.base_dir, *VAR_78))
def FUNC_46(self):
for f in (['inside', 'dir1', 'file1'],
['inside', 'dir1', 'file2'],
['outside', 'file3']):
with open(VAR_2(self.base_dir, *f), 'a'):
pass
def FUNC_47(self):
with open(VAR_2(self.base_dir, 'inside', 'README'), 'w') as f:
f.write('README content')
def FUNC_48(self):
os.symlink(
VAR_2(self.base_dir, 'inside', 'dir1'),
VAR_2(self.base_dir, 'inside', 'dir2', 'link_to_dir1'))
os.symlink(
VAR_2(self.base_dir, 'inside', 'dir1', 'file1'),
VAR_2(self.base_dir, 'inside', 'dir2', 'link_to_file1'))
os.symlink(
VAR_2(self.base_dir, 'outside'),
VAR_2(self.base_dir, 'inside', 'link_to_outside'))
os.symlink(
VAR_2(self.base_dir, 'outside', 'file3'),
VAR_2(self.base_dir, 'inside', 'link_to_file3'))
def FUNC_49(self):
VAR_85 = lambda VAR_3: URL('a', 'c', 'f', VAR_3=args)
self.expected_folders = {}
self.expected_folders['inside'] = SPAN(H3('Folders'), TABLE(
TR(TD(A('dir1', _href=VAR_85(VAR_3=['dir1'])))),
TR(TD(A('dir2', _href=VAR_85(VAR_3=['dir2'])))),
_class='table',
))
self.expected_folders[VAR_2('inside', 'dir1')] = ''
if FUNC_0():
self.expected_folders[VAR_2('inside', 'dir2')] = SPAN(H3('Folders'), TABLE(
TR(TD(A('link_to_dir1', _href=VAR_85(VAR_3=['dir2', 'link_to_dir1'])))),
_class='table',
))
else:
self.expected_folders[VAR_2('inside', 'dir2')] = ''
self.expected_files = {}
self.expected_files['inside'] = SPAN(H3('Files'), TABLE(
TR(TD(A('README', _href=VAR_85(VAR_3=['README']))), TD('')),
_class='table',
))
self.expected_files[VAR_2('inside', 'dir1')] = SPAN(H3('Files'), TABLE(
TR(TD(A('file1', _href=VAR_85(VAR_3=['dir1', 'file1']))), TD('')),
TR(TD(A('file2', _href=VAR_85(VAR_3=['dir1', 'file2']))), TD('')),
_class='table',
))
if FUNC_0():
self.expected_files[VAR_2('inside', 'dir2')] = SPAN(H3('Files'), TABLE(
TR(TD(A('link_to_file1', _href=VAR_85(VAR_3=['dir2', 'link_to_file1']))), TD('')),
_class='table',
))
else:
self.expected_files[VAR_2('inside', 'dir2')] = ''
def FUNC_50(self, VAR_5, VAR_6='', VAR_7=False):
current.request = Request(env={})
current.request.raw_args = VAR_6
current.request.args = VAR_6.split('/')
return Expose(VAR_5=VAR_2(self.base_dir, VAR_5),
basename=VAR_5,
VAR_7=follow_symlink_out)
def FUNC_51(self):
VAR_86 = self.make_expose(VAR_5='inside', VAR_6='')
self.assertEqual(VAR_86.args, [])
self.assertEqual(VAR_86.folders, ['dir1', 'dir2'])
self.assertEqual(VAR_86.filenames, ['README'])
@unittest.skipUnless(FUNC_0(), 'requires symlinks')
def FUNC_52(self):
VAR_86 = self.make_expose(VAR_5='inside', VAR_6='',
VAR_7=True)
self.assertEqual(VAR_86.args, [])
self.assertEqual(VAR_86.folders, ['dir1', 'dir2', 'link_to_outside'])
self.assertEqual(VAR_86.filenames, ['README', 'link_to_file3'])
def FUNC_53(self):
VAR_86 = self.make_expose(VAR_5='inside', VAR_6='dir1')
self.assertEqual(VAR_86.args, ['dir1'])
self.assertEqual(VAR_86.folders, [])
self.assertEqual(VAR_86.filenames, ['file1', 'file2'])
def FUNC_54(self):
VAR_86 = self.make_expose(VAR_5='inside', VAR_6='dir2')
self.assertEqual(VAR_86.args, ['dir2'])
if FUNC_0():
self.assertEqual(VAR_86.folders, ['link_to_dir1'])
self.assertEqual(VAR_86.filenames, ['link_to_file1'])
else:
self.assertEqual(VAR_86.folders, [])
self.assertEqual(VAR_86.filenames, [])
def FUNC_55(self):
VAR_86 = self.make_expose(VAR_5='', VAR_6='inside')
self.assertEqual(VAR_86.args, ['inside'])
if FUNC_0():
self.assertEqual(VAR_86.folders, ['dir1', 'dir2', 'link_to_outside'])
self.assertEqual(VAR_86.filenames, ['README', 'link_to_file3'])
else:
self.assertEqual(VAR_86.folders, ['dir1', 'dir2'])
self.assertEqual(VAR_86.filenames, ['README'])
def FUNC_56(self):
VAR_86 = self.make_expose(VAR_5='', VAR_6='inside/dir2')
self.assertEqual(VAR_86.args, ['inside', 'dir2'])
if FUNC_0():
self.assertEqual(VAR_86.folders, ['link_to_dir1'])
self.assertEqual(VAR_86.filenames, ['link_to_file1'])
else:
self.assertEqual(VAR_86.folders, [])
self.assertEqual(VAR_86.filenames, [])
def FUNC_57(self, VAR_8, VAR_9):
self.assertEqual(VAR_8 if isinstance(VAR_8, str) else VAR_8.xml(),
VAR_9 if isinstance(VAR_9, str) else VAR_9.xml())
def FUNC_58(self, VAR_5, VAR_6):
VAR_86 = self.make_expose(VAR_5, VAR_6)
VAR_87 = VAR_2(VAR_5, VAR_6).rstrip(os.path.sep)
VAR_88 = Request(env={})
self.assertSameXML(VAR_86.table_files(), self.expected_files[VAR_87])
self.assertSameXML(VAR_86.table_folders(), self.expected_folders[VAR_87])
def FUNC_59(self):
self.run_test_xml_for(VAR_5='inside', VAR_6='')
def FUNC_60(self):
self.run_test_xml_for(VAR_5='inside', VAR_6='dir1')
def FUNC_61(self):
self.run_test_xml_for(VAR_5='inside', VAR_6='dir2')
def FUNC_62(self):
with self.assertRaises(HTTP):
self.make_expose(VAR_5='inside', VAR_6='dir1/file_not_found')
def FUNC_30(self):
with self.assertRaises(HTTP):
self.make_expose(VAR_5='inside', VAR_6='link_to_file3')
|
import os
import sys
import shutil
import tempfile
import smtplib
import .datetime
import unittest
VAR_0 = os.getenv('DB', 'sqlite:memory')
from gluon.dal import DAL, Field
from pydal.objects import .Table
from gluon import .tools
from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose, prevent_open_redirect
from gluon._compat import PY2, to_bytes
from gluon.globals import Request, Response, Session
from gluon.storage import Storage
from gluon.languages import .TranslatorFactory
from gluon.http import HTTP
from gluon import SPAN, H3, TABLE, TR, TD, A, URL, current
VAR_1 = "imap" in VAR_0
class VAR_23(unittest.TestCase):
class CLASS_7(object):
def __init__(self, VAR_10, VAR_11, VAR_12):
self.sender = VAR_10
self.to = VAR_11
self.payload = VAR_12
self._parsed_payload = None
@property
def FUNC_64(self):
if self._parsed_payload is None:
import email
self._parsed_payload = email.message_from_string(self.payload)
return self._parsed_payload
class CLASS_8(object):
VAR_13 = []
VAR_14 = {}
def __init__(self, VAR_15, VAR_16, **VAR_4):
self.address = VAR_15
self.port = VAR_16
self.has_quit = False
self.tls = False
def FUNC_65(self, VAR_17, VAR_18):
if VAR_17 not in self.users or self.users[VAR_17] != VAR_18:
raise smtplib.SMTPAuthenticationError
self.username = VAR_17
self.password = VAR_18
def FUNC_66(self, VAR_10, VAR_11, VAR_12):
self.inbox.append(VAR_23.Message(VAR_10, VAR_11, VAR_12))
def FUNC_67(self):
self.has_quit = True
def FUNC_68(self, VAR_19=None):
pass
def FUNC_69(self):
self.tls = True
def FUNC_1(self):
self.original_SMTP = smtplib.SMTP
self.original_SMTP_SSL = smtplib.SMTP_SSL
smtplib.SMTP = VAR_23.DummySMTP
smtplib.SMTP_SSL = VAR_23.DummySMTP
def FUNC_2(self):
smtplib.SMTP = self.original_SMTP
smtplib.SMTP_SSL = self.original_SMTP_SSL
def FUNC_3(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertEqual(VAR_21.sender, VAR_20.settings.sender)
self.assertEqual(VAR_21.to, ['somebody@example.com'])
VAR_22 = "To: somebody@example.com\nReply-To: us@example.com\nSubject: hello\n"
self.assertTrue(VAR_22 in VAR_21.payload)
self.assertTrue(VAR_21.payload.endswith('world'))
def FUNC_4(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.login = 'username:password'
self.assertFalse(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
def FUNC_5(self):
VAR_23.DummySMTP.users['username'] = 'password'
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.login = 'username:password'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
del VAR_23.DummySMTP.users['username']
VAR_23.DummySMTP.inbox.pop()
def FUNC_6(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='<html><head></head><body></body></html>'))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertTrue('Content-Type: text/html' in VAR_21.payload)
def FUNC_7(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
VAR_21=('Text only', '<html><pre>HTML Only</pre></html>')))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertTrue(VAR_21.parsed_payload.is_multipart())
self.assertTrue(VAR_21.parsed_payload.get_content_type() == 'multipart/alternative')
VAR_24 = VAR_21.parsed_payload.get_payload()
self.assertTrue('Text only' in VAR_24[0].as_string())
self.assertTrue('<html><pre>HTML Only</pre></html>' in VAR_24[1].as_string())
def FUNC_8(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.ssl = True
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
VAR_23.DummySMTP.inbox.pop()
def FUNC_9(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.tls = True
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
VAR_23.DummySMTP.inbox.pop()
def FUNC_10(self):
VAR_25 = os.path.abspath(__file__)
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
VAR_21='world',
attachments=Mail.Attachment(VAR_25)))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
VAR_26 = VAR_21.parsed_payload.get_payload(1).get_payload(decode=True)
with open(VAR_25, 'rb') as mf:
self.assertEqual(to_bytes(VAR_26), to_bytes(mf.read()))
VAR_27 = open(VAR_25)
self.assertRaises(Exception, lambda *VAR_3, **VAR_4: Mail.Attachment(*VAR_3, **VAR_4), VAR_27)
stream.close()
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
VAR_21='world',
attachments=Mail.Attachment(VAR_25, content_id='trololo', content_type='tra/lala')))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertTrue('Content-Type: tra/lala' in VAR_21.payload)
self.assertTrue('Content-Id: <trololo>' in VAR_21.payload)
class CLASS_1(unittest.TestCase):
# Tests for AuthJWT (gluon.tools): issuing a token from credentials,
# refreshing it from an existing token, and the allows_jwt() decorator.
def FUNC_1(self):
# Build a minimal fake request/DAL environment with one known user.
from gluon.tools import AuthJWT
from gluon import current
self.request = Request(env={})
self.request.application = 'a'
self.request.controller = 'c'
self.request.function = 'f'
self.request.folder = 'applications/admin'
self.current = current
self.current.request = self.request
self.db = DAL(VAR_0, check_reserved=['all'])
self.auth = Auth(self.db)
self.auth.define_tables(VAR_17=True, signature=False)
self.user_data = dict(VAR_17='jwtuser', VAR_18='jwtuser123')
# Run the password through the field's own validator so it is stored
# hashed exactly the way Auth expects to find it at login time.
self.db.auth_user.insert(VAR_17=self.user_data['username'],
VAR_18=str(
self.db.auth_user.password.requires[0](
self.user_data['password'])[0]))
self.jwtauth = AuthJWT(self.auth, secret_key='secret', verify_expiration=True)
def FUNC_11(self):
# A token is issued for valid credentials; it can then be refreshed by
# presenting the token itself instead of username/password.
import gluon.serializers
self.request.vars.update(self.user_data)
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
del self.request.vars['username']
del self.request.vars['password']
self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
def FUNC_12(self):
# A function decorated with allows_jwt() sees the authenticated user
# when a valid token is supplied via request.vars._token.
import gluon.serializers
self.request.vars.update(self.user_data)
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
del self.request.vars['username']
del self.request.vars['password']
self.token = self.jwtauth.jwt_token_manager()
self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
@self.jwtauth.allows_jwt()
def FUNC_70():
self.assertEqual(self.user_data['username'], self.auth.user.username)
FUNC_70()
@unittest.skipIf(VAR_1, "TODO: Imap raises 'Connection refused'")
class CLASS_2(unittest.TestCase):
def FUNC_13(self, *VAR_3, **VAR_4):
# Py2/Py3 compatibility shim: unittest renamed assertRaisesRegexp (Py2)
# to assertRaisesRegex (Py3); dispatch to whichever exists.
if PY2:
return getattr(self, 'assertRaisesRegexp')(*VAR_3, **VAR_4)
return getattr(self, 'assertRaisesRegex')(*VAR_3, **VAR_4)
def FUNC_1(self):
# Full web2py environment for the Auth tests: fake request/response/
# session, translator, a DAL with the auth tables plus one versioned
# table, and a single pre-registered user ('bart').
self.request = Request(env={})
self.request.application = 'a'
self.request.controller = 'c'
self.request.function = 'f'
self.request.folder = 'applications/admin'
self.response = Response()
self.session = Session()
VAR_28 = TranslatorFactory('', 'en')
self.session.connect(self.request, self.response)
from gluon.globals import current
self.current = current
self.current.request = self.request
self.current.response = self.response
self.current.session = self.session
self.current.T = VAR_28
self.db = DAL(VAR_0, check_reserved=['all'])
self.auth = Auth(self.db)
self.auth.define_tables(VAR_17=True, signature=False)
self.db.define_table('t0', Field('tt'), self.auth.signature)
self.auth.enable_record_versioning(self.db)
# Disable verification/approval so register_bare() activates the
# account immediately.
self.auth.settings.registration_requires_verification = False
self.auth.settings.registration_requires_approval = False
self.auth.register_bare(first_name='Bart',
last_name='Simpson',
VAR_17='bart',
email='bart@simpson.com',
VAR_18='bart_password')
def FUNC_14(self):
self.assertTrue('auth_user' in self.db)
self.assertTrue('auth_group' in self.db)
self.assertTrue('auth_membership' in self.db)
self.assertTrue('auth_permission' in self.db)
self.assertTrue('auth_event' in self.db)
VAR_29 = self.db(self.db.auth_user.username == 'bart').select().first()
self.assertEqual(VAR_29['username'], 'bart')
self.assertEqual(VAR_29['registration_key'], '')
VAR_30 = self.db(self.db.auth_user.username == 'bart').select().first().id
VAR_31 = self.db(self.db.auth_group.role == 'user_{0}'.format(VAR_30)).select().first().id
self.assertTrue(self.db((self.db.auth_membership.group_id == VAR_31) &
(self.db.auth_membership.user_id == VAR_30)).select().first())
def FUNC_15(self):
    """Every basic Auth form renders with a CSRF _formkey; actions that
    need a session redirect (303), impersonate without rights denies (401)."""
    for f in ['login', 'retrieve_password', 'retrieve_username', 'register']:
        html = getattr(self.auth, f)().xml()
        self.assertTrue(b'name="_formkey"' in html)
    for f in ['logout', 'verify_email', 'reset_password', 'change_password', 'profile', 'groups']:
        self.myassertRaisesRegex(HTTP, "303*", getattr(self.auth, f))
    self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate)
    # Best-effort cleanup: drop the auth tables so later tests can recreate
    # them; ignore drop failures. (Removed the unused `as e` binding and the
    # redundant trailing `return`.)
    try:
        for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
                  'auth_membership', 'auth_permission', 'auth_group',
                  'auth_user']:
            self.db[t].drop()
    except SyntaxError:
        pass
def FUNC_16(self):
self.current.request.vars._next = 'next_test'
self.assertEqual(self.auth.get_vars_next(), 'next_test')
def FUNC_17(self):
self.assertTrue('t0_archive' in self.db)
def FUNC_18(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.auth.log_event(description='some_log_event_description_%(var1)s',
vars={"var1": "var1"},
origin='log_event_test_1')
VAR_32 = self.db(self.db.auth_event.origin == 'log_event_test_1'
).select(*[self.db.auth_event[f]
for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
self.assertEqual(set(VAR_32.items()), set({'origin': 'log_event_test_1',
'client_ip': None,
'user_id': VAR_30,
'description': 'some_log_event_description_var1'}.items()))
self.auth.logout_bare()
self.auth.log_event(description='some_log_event_description_%(var2)s',
vars={"var2": "var2"},
origin='log_event_test_2')
VAR_32 = self.db(self.db.auth_event.origin == 'log_event_test_2'
).select(*[self.db.auth_event[f]
for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
self.assertEqual(set(VAR_32.items()), set({'origin': 'log_event_test_2',
'client_ip': None,
'user_id': None,
'description': 'some_log_event_description_var2'}.items()))
self.auth.settings.logging_enabled = False
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.auth.log_event(description='some_log_event_description_%(var3)s',
vars={"var3": "var3"},
origin='log_event_test_3')
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
self.auth.settings.logging_enabled = True
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.auth.log_event(description=None,
vars={"var4": "var4"},
origin='log_event_test_4')
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
def FUNC_19(self):
self.db.auth_user.insert(email='user1@test.com', VAR_17='user1', VAR_18='password_123')
self.db.commit()
self.assertEqual(self.auth.get_or_create_user({'email': 'user1@test.com',
'username': 'user1',
'password': 'password_123'
})['username'], 'user1')
self.assertEqual(self.auth.get_or_create_user({'email': 'user2@test.com',
'username': 'user2'})['username'], 'user2')
self.assertEqual(self.auth.get_or_create_user({'first_name': 'Omer',
'last_name': 'Simpson',
'email': 'user3@test.com',
'registration_id': 'user3',
'username': 'user3'})['username'], 'user3')
self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
self.db.auth_user.truncate()
self.db.commit()
def FUNC_20(self):
self.auth.login_bare(VAR_17='bart', VAR_18='bart_password')
self.assertTrue(self.auth.is_logged_in())
self.auth.logout_bare()
self.assertFalse(self.auth.login_bare(VAR_17='bart', VAR_18='wrong_password'))
def FUNC_21(self):
self.assertRaises(ValueError, self.auth.register_bare)
self.assertEqual(self.auth.register_bare(VAR_17='bart', VAR_18='wrong_password'), False)
self.assertEqual(self.auth.register_bare(VAR_17='user2',
email='user2@test.com',
VAR_18='password_123')['username'], 'user2')
self.assertRaises(ValueError, self.auth.register_bare,
**dict(wrong_field_name='user3', VAR_18='password_123'))
self.assertRaises(ValueError, self.auth.register_bare,
**dict(email='user4@', VAR_18='password_123'))
self.db.auth_user.truncate()
self.db.commit()
def FUNC_22(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertTrue(self.auth.is_logged_in())
self.auth.logout_bare()
self.assertFalse(self.auth.is_logged_in())
def FUNC_23(self):
self.auth.user = 'logged_in'
self.assertTrue(self.auth.is_logged_in())
self.auth.user = None
self.assertFalse(self.auth.is_logged_in())
def FUNC_24(self):
self.assertTrue(self.auth.random_password())
def FUNC_25(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.auth.settings.bulk_register_enabled = True
VAR_35 = self.auth.bulk_register(max_emails=10).xml()
self.assertTrue(b'name="_formkey"' in VAR_35)
def FUNC_26(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_36 = getattr(self.auth, 'change_password')().xml()
self.assertTrue(b'name="_formkey"' in VAR_36)
def FUNC_27(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_37 = getattr(self.auth, 'profile')().xml()
self.assertTrue(b'name="_formkey"' in VAR_37)
def FUNC_28(self):
    """Impersonation: a user holding the 'impersonate' permission can act
    as another user; others get 403, and unknown target ids get 401."""
    # Create a second, logged-out account to impersonate later.
    self.auth.get_or_create_user(dict(first_name='Omer',
                                      last_name='Simpson',
                                      VAR_17='omer',
                                      email='omer@test.com',
                                      VAR_18='password_omer',
                                      registration_key='',
                                      registration_id=''),
                                 FUNC_65=False)
    self.db.commit()
    self.assertFalse(self.auth.is_logged_in())
    # Give bart membership of a group that carries the 'impersonate'
    # permission. BUG FIX: the two keyword values below previously
    # referenced the undefined name `group_id` (NameError) instead of the
    # local VAR_38 returned by add_group().
    VAR_38 = self.auth.add_group('impersonate')
    self.auth.add_membership(VAR_43=self.db(self.db.auth_user.username == 'bart'
                                            ).select(self.db.auth_user.id).first().id,
                             VAR_38=VAR_38)
    self.auth.add_permission(VAR_38=VAR_38,
                             name='impersonate',
                             table_name='auth_user',
                             record_id=0)
    self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
    self.assertTrue(self.auth.is_logged_in())
    VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
    self.assertEqual(self.auth.user_id, VAR_30)
    # Without a target, impersonate() renders the user-picker form.
    self.assertEqual(self.auth.impersonate().xml(),
                     b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="no_table_user_id__row"><td class="w2p_fl"><label class="" for="no_table_user_id" id="no_table_user_id__label">User Id: </label></td><td class="w2p_fw"><input class="integer" id="no_table_user_id" name="user_id" type="text" value="" /></td><td class="w2p_fc"></td></tr><tr id="submit_record__row"><td class="w2p_fl"></td><td class="w2p_fw"><input type="submit" value="Submit" /></td><td class="w2p_fc"></td></tr></table></form>')
    self.assertEqual(self.auth.impersonate(VAR_30), None)
    self.assertFalse(self.auth.is_impersonating())  # User shouldn't impersonate itself?
    VAR_39 = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
    VAR_40 = self.auth.impersonate(VAR_43=VAR_39)
    self.assertTrue(self.auth.is_impersonating())
    self.assertEqual(self.auth.user_id, VAR_39)  # we make it really sure
    self.assertEqual(VAR_40.xml(),
                     b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="auth_user_id__row"><td class="w2p_fl"><label class="readonly" for="auth_user_id" id="auth_user_id__label">Id: </label></td><td class="w2p_fw"><span id="auth_user_id">2</span></td><td class="w2p_fc"></td></tr><tr id="auth_user_first_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_first_name" id="auth_user_first_name__label">First name: </label></td><td class="w2p_fw">Omer</td><td class="w2p_fc"></td></tr><tr id="auth_user_last_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_last_name" id="auth_user_last_name__label">Last name: </label></td><td class="w2p_fw">Simpson</td><td class="w2p_fc"></td></tr><tr id="auth_user_email__row"><td class="w2p_fl"><label class="readonly" for="auth_user_email" id="auth_user_email__label">E-VAR_20: </label></td><td class="w2p_fw">omer@test.com</td><td class="w2p_fc"></td></tr><tr id="auth_user_username__row"><td class="w2p_fl"><label class="readonly" for="auth_user_username" id="auth_user_username__label">Username: </label></td><td class="w2p_fw">omer</td><td class="w2p_fc"></td></tr></table><div style="display:none;"><input name="id" type="hidden" value="2" /></div></form>')
    self.auth.logout_bare()
    # A user without the permission must be refused (403).
    self.auth.login_user(self.db(self.db.auth_user.username == 'omer').select().first())  # bypass login_bare()
    self.myassertRaisesRegex(HTTP, "403*", self.auth.impersonate, VAR_30)
    self.auth.logout_bare()
    self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
    self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate, 1000)  # VAR_42 with id 1000 shouldn't exist
    # Impersonating user id 0 ends the impersonation.
    self.auth.impersonate(VAR_43=VAR_39)
    self.assertTrue(self.auth.is_impersonating())
    self.assertEqual(self.auth.impersonate(VAR_43=0), None)
def FUNC_29(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertEqual(self.auth.groups().xml(),
b'<table><tr><td><h3>user_1(1)</h3></td></tr><tr><td><p></p></td></tr></table>')
def FUNC_30(self):
self.current.request.ajax = 'facke_ajax_request'
self.myassertRaisesRegex(HTTP, "403*", self.auth.not_authorized)
self.current.request.ajax = None
self.assertEqual(self.auth.not_authorized(), self.auth.messages.access_denied)
def FUNC_12(self):
self.myassertRaisesRegex(HTTP, "400*", self.auth.allows_jwt)
def FUNC_31(self):
self.assertEqual(self.auth.add_group(role='a_group', description='a_group_role_description'),
self.db(self.db.auth_group.role == 'a_group').select(self.db.auth_group.id).first().id)
def FUNC_32(self):
VAR_31 = 1 # Should be group 1, 'user_1'
self.assertEqual(self.auth.del_group(VAR_38=VAR_31), None)
def FUNC_33(self):
self.assertEqual(self.auth.id_group(role='user_1'), 1)
self.assertEqual(self.auth.id_group(role='non_existing_role_name'), None)
def FUNC_34(self):
self.assertEqual(self.auth.user_group(VAR_43=1), 1)
def FUNC_35(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_41 = 'user_%s' % self.db(self.db.auth_user.username == 'bart'
).select(self.db.auth_user.id).first().id
self.assertEqual(self.auth.user_group_role(), VAR_41)
self.auth.logout_bare()
self.assertEqual(self.auth.user_group_role(VAR_43=1), 'user_1')
self.auth.settings.create_user_groups = None
self.assertEqual(self.auth.user_group_role(VAR_43=1), None)
def FUNC_36(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertTrue(self.auth.has_membership('user_1'))
self.assertFalse(self.auth.has_membership('user_555'))
self.assertTrue(self.auth.has_membership(VAR_38=1))
self.auth.logout_bare()
self.assertTrue(self.auth.has_membership(role='user_1', VAR_43=1))
self.assertTrue(self.auth.has_membership(VAR_38=1, VAR_43=1))
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.assertTrue(self.auth.has_membership(VAR_38=1, VAR_43=1))
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
def FUNC_37(self):
    """add_membership()/del_membership() accept a group id, a role name,
    or the logged-in user implicitly, validate their inputs, and are
    idempotent.

    BUG FIX: the original passed the undefined names `group_id`/`user_id`
    as keyword values throughout (NameError); they are replaced with the
    locals VAR_38/VAR_43 defined at the top of the test.
    """
    VAR_42 = self.db(self.db.auth_user.username == 'bart').select().first()  # bypass login_bare()
    VAR_43 = VAR_42.id
    VAR_44 = 'test_add_membership_group'
    VAR_38 = self.auth.add_group(VAR_44)
    self.assertFalse(self.auth.has_membership(VAR_44))
    # Explicit user id: add/remove by group id, then by role name.
    self.auth.add_membership(VAR_38=VAR_38, VAR_43=VAR_43)
    self.assertTrue(self.auth.has_membership(VAR_38, VAR_43=VAR_43))
    self.auth.del_membership(VAR_38=VAR_38, VAR_43=VAR_43)
    self.assertFalse(self.auth.has_membership(VAR_38, VAR_43=VAR_43))
    self.auth.add_membership(role=VAR_44, VAR_43=VAR_43)
    self.assertTrue(self.auth.has_membership(VAR_38, VAR_43=VAR_43))
    self.auth.del_membership(VAR_38=VAR_38, VAR_43=VAR_43)
    self.assertFalse(self.auth.has_membership(VAR_38, VAR_43=VAR_43))
    # Invalid group, role, or user inputs must raise ValueError.
    with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
        self.auth.add_membership(VAR_38='not_existing_group_name', VAR_43=VAR_43)
    with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
        self.auth.add_membership(role='not_existing_role_name', VAR_43=VAR_43)
    with self.myassertRaisesRegex(ValueError, '^VAR_43 not provided or invalid$'):
        self.auth.add_membership(VAR_38=VAR_38, VAR_43=None)
    with self.myassertRaisesRegex(ValueError, '^VAR_43 not provided or invalid$'):
        self.auth.add_membership(role=VAR_44, VAR_43=None)
    # Implicit user: the logged-in account is used when no user is given.
    self.auth.login_user(VAR_42)
    self.auth.add_membership(VAR_38=VAR_38)
    self.assertTrue(self.auth.has_membership(VAR_38))
    self.auth.del_membership(VAR_38=VAR_38)
    self.assertFalse(self.auth.has_membership(VAR_38))
    self.auth.add_membership(role=VAR_44)
    self.assertTrue(self.auth.has_membership(VAR_38))
    self.auth.del_membership(VAR_38=VAR_38)
    self.assertFalse(self.auth.has_membership(VAR_38))
    self.auth.add_membership(VAR_44)
    self.assertTrue(self.auth.has_membership(VAR_38))
    self.auth.del_membership(VAR_38=VAR_38)
    self.assertFalse(self.auth.has_membership(VAR_38))
    # Adding the same membership twice returns the same record id.
    VAR_45 = self.auth.add_membership(VAR_38)
    self.assertTrue(self.auth.has_membership(VAR_38))
    VAR_46 = self.auth.add_membership(VAR_38)
    self.assertEqual(VAR_45, VAR_46)
    self.auth.del_membership(VAR_38=VAR_38)
    self.assertFalse(self.auth.has_membership(VAR_38))
    with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
        self.auth.add_membership(VAR_38='not_existing_group_name')
    with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
        self.auth.add_membership(role='not_existing_role_name')
def FUNC_38(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_33 = self.db(self.db.auth_event.id > 0).count()
VAR_47 = self.db(self.db.auth_membership.group_id == self.auth.id_group('user_1')
).select(self.db.auth_membership.id).first().id
self.assertEqual(self.auth.del_membership('user_1'), VAR_47)
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
VAR_38 = self.auth.add_group('some_test_group')
VAR_48 = self.auth.add_membership('some_test_group')
self.assertEqual(self.auth.user_groups[VAR_38], 'some_test_group')
self.auth.logout_bare()
self.assertFalse(self.auth.del_membership('some_test_group'))
self.assertEqual(set(self.db.auth_membership(VAR_48).as_dict().items()),
set({'group_id': 2, 'user_id': 1, 'id': 2}.items())) # is not deleted
VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.assertTrue(self.auth.del_membership('some_test_group', VAR_43=VAR_30))
self.assertEqual(self.db.auth_membership(VAR_48), None) # is really deleted
def FUNC_39(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
name='some_permission',
table_name='auth_user',
record_id=0,
)
self.assertTrue(self.auth.has_permission(name='some_permission',
table_name='auth_user',
record_id=0,
VAR_43=VAR_30,
VAR_38=self.auth.id_group('user_1')))
self.assertFalse(self.auth.has_permission(name='some_other_permission',
table_name='auth_user',
record_id=0,
VAR_43=VAR_30,
VAR_38=self.auth.id_group('user_1')))
def FUNC_40(self):
VAR_33 = self.db(self.db.auth_event.id > 0).count()
VAR_49 = \
self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
name='some_permission',
table_name='auth_user',
record_id=0,
)
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
VAR_50 = \
self.db(self.db.auth_permission.id == VAR_49).count()
self.assertTrue(VAR_50)
VAR_50 = \
self.db((self.db.auth_permission.group_id == self.auth.id_group('user_1')) &
(self.db.auth_permission.name == 'no_permission') &
(self.db.auth_permission.table_name == 'no_table') &
(self.db.auth_permission.record_id == 0)).count()
self.assertFalse(VAR_50)
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_49 = \
self.auth.add_permission(VAR_38=0,
name='user_1_permission',
table_name='auth_user',
record_id=0,
)
VAR_51 = \
self.db(self.db.auth_permission.id == VAR_49).select(self.db.auth_permission.name).first().name
self.assertEqual(VAR_51, 'user_1_permission')
VAR_49 =\
self.auth.add_permission(VAR_38=0,
name='user_1_permission',
table_name='auth_user',
record_id=0,
)
self.assertTrue(VAR_49)
def FUNC_41(self):
VAR_49 = \
self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
name='del_permission_test',
table_name='auth_user',
record_id=0,
)
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.assertTrue(self.auth.del_permission(VAR_38=self.auth.id_group('user_1'),
name='del_permission_test',
table_name='auth_user',
record_id=0,))
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
VAR_50 = \
self.db(self.db.auth_permission.id == VAR_49).count()
self.assertFalse(VAR_50)
class CLASS_3(unittest.TestCase):
    """Tests for prettydate()'s humanised time-difference strings."""

    def FUNC_42(self):
        """Spot-check every formatting bucket, date (not datetime) inputs,
        future offsets, UTC mode, and degenerate inputs.

        BUG FIX: the date-input 'now' assertion previously called
        prettydate(VAR_78=d) with the undefined name `d`; it now uses the
        local VAR_78. The long run of duplicated assert pairs is collapsed
        into table-driven loops with identical coverage.
        """
        VAR_52 = datetime.datetime.now()
        self.assertEqual(prettydate(VAR_78=VAR_52), 'now')
        # (delta, expected) pairs covering each past-tense bucket, including
        # both boundary values of each bucket.
        past_cases = [
            (datetime.timedelta(seconds=1), '1 second ago'),
            (datetime.timedelta(seconds=2), '2 seconds ago'),
            (datetime.timedelta(seconds=60), '1 minute ago'),
            (datetime.timedelta(seconds=61), '1 minute ago'),
            (datetime.timedelta(seconds=120), '2 minutes ago'),
            (datetime.timedelta(seconds=121), '2 minutes ago'),
            (datetime.timedelta(seconds=60 * 60), '1 hour ago'),
            (datetime.timedelta(seconds=3601), '1 hour ago'),
            (datetime.timedelta(seconds=2 * 60 * 60), '2 hours ago'),
            (datetime.timedelta(seconds=2 * 60 * 60 + 1), '2 hours ago'),
            (datetime.timedelta(days=1), '1 day ago'),
            (datetime.timedelta(days=2), '2 days ago'),
            (datetime.timedelta(days=7), '1 week ago'),
            (datetime.timedelta(days=8), '1 week ago'),
            (datetime.timedelta(days=14), '2 weeks ago'),
            (datetime.timedelta(days=15), '2 weeks ago'),
            (datetime.timedelta(days=21), '3 weeks ago'),
            (datetime.timedelta(days=27), '1 month ago'),
            (datetime.timedelta(days=28), '1 month ago'),
            (datetime.timedelta(days=60), '2 months ago'),
            (datetime.timedelta(days=90), '3 months ago'),
            (datetime.timedelta(days=365), '1 year ago'),
            (datetime.timedelta(days=366), '1 year ago'),
            (datetime.timedelta(days=2 * 365), '2 years ago'),
            (datetime.timedelta(days=2 * 365 + 1), '2 years ago'),
        ]
        for delta, expected in past_cases:
            self.assertEqual(prettydate(VAR_78=VAR_52 - delta), expected)
        # date (not datetime) inputs.
        VAR_78 = VAR_52.date()
        self.assertEqual(prettydate(VAR_78=VAR_78), 'now')
        self.assertEqual(prettydate(VAR_78=VAR_52.date() - datetime.timedelta(days=1)), '1 day ago')
        self.assertEqual(prettydate(VAR_78=VAR_52.date() - datetime.timedelta(days=2)), '2 days ago')
        # Future timestamps (negative deltas).
        future_cases = [
            (datetime.timedelta(seconds=-65), '1 minute from now'),
            (datetime.timedelta(hours=-23.5), '23 hours from now'),
            (datetime.timedelta(days=-366), '1 year from now'),
        ]
        for delta, expected in future_cases:
            self.assertEqual(prettydate(VAR_78=VAR_52 - delta), expected)
        # UTC mode.
        VAR_52 = datetime.datetime.utcnow()
        self.assertEqual(prettydate(VAR_78=VAR_52, utc=True), 'now')
        self.assertEqual(prettydate(VAR_78=VAR_52 - datetime.timedelta(seconds=1), utc=True), '1 second ago')
        # Degenerate inputs: None and non-date values.
        self.assertEqual(prettydate(VAR_78=None), '')
        self.assertEqual(prettydate(VAR_78='invalid_date'), '[invalid date]')
VAR_2 = os.path.join  # short path-join alias used throughout the Expose tests

def FUNC_0():
    """Return True when running on a POSIX OS (symlink tests need this)."""
    return 'posix' == os.name
class CLASS_4(unittest.TestCase):
# Unit tests for Expose.__in_base, the path-containment predicate that
# keeps exposed paths inside the configured base directory (the guard
# against directory-traversal escapes).
def FUNC_43(self):
# The base directory itself and any descendant must count as inside.
VAR_83 = [
('/foo/bar', '/foo'),
('/foo', '/foo'),
('/foo', '/'),
('/', '/'),
]
for sub, VAR_5 in VAR_83:
self.assertTrue(Expose._Expose__in_base(subdir=sub, basedir=VAR_5, sep='/'),
'%s is not under %s' % (sub, VAR_5))
def FUNC_44(self):
# Prefix lookalikes ('/foobar' vs '/foo'), ancestors, and unrelated
# paths must count as outside.
VAR_84 = [
('/foobar', '/foo'),
('/foo', '/foo/bar'),
('/bar', '/foo'),
('/foo/bar', '/bar'),
('/', '/x'),
]
for sub, VAR_5 in VAR_84:
self.assertFalse(Expose._Expose__in_base(subdir=sub, basedir=VAR_5, sep='/'),
'%s should not be under %s' % (sub, VAR_5))
class CLASS_5(unittest.TestCase):
def FUNC_1(self):
    """Build the sandbox directory tree used by the Expose() tests and
    install a deterministic URL stub."""
    self.base_dir = tempfile.mkdtemp()
    self.make_dirs()
    self.touch_files()
    self.make_readme()
    if FUNC_0():
        self.make_symlinks()
    self.set_expectations()
    # BUG FIX: the stub lambda previously forwarded the undefined name
    # `args` (NameError when called) instead of its own VAR_3 parameter.
    tools.URL = lambda VAR_3: URL(VAR_8='a', c='c', f='f', VAR_3=VAR_3)
def FUNC_2(self):
tools.URL = URL
shutil.rmtree(self.base_dir)
def FUNC_45(self):
for VAR_78 in (['inside'],
['inside', 'dir1'],
['inside', 'dir2'],
['outside']):
os.mkdir(VAR_2(self.base_dir, *VAR_78))
def FUNC_46(self):
for f in (['inside', 'dir1', 'file1'],
['inside', 'dir1', 'file2'],
['outside', 'file3']):
with open(VAR_2(self.base_dir, *f), 'a'):
pass
def FUNC_47(self):
with open(VAR_2(self.base_dir, 'inside', 'README'), 'w') as f:
f.write('README content')
def FUNC_48(self):
os.symlink(
VAR_2(self.base_dir, 'inside', 'dir1'),
VAR_2(self.base_dir, 'inside', 'dir2', 'link_to_dir1'))
os.symlink(
VAR_2(self.base_dir, 'inside', 'dir1', 'file1'),
VAR_2(self.base_dir, 'inside', 'dir2', 'link_to_file1'))
os.symlink(
VAR_2(self.base_dir, 'outside'),
VAR_2(self.base_dir, 'inside', 'link_to_outside'))
os.symlink(
VAR_2(self.base_dir, 'outside', 'file3'),
VAR_2(self.base_dir, 'inside', 'link_to_file3'))
def FUNC_49(self):
    """Precompute the HTML fragments Expose() is expected to render for
    each sandbox directory: a 'Folders' table and a 'Files' table."""
    # BUG FIX: the lambda previously forwarded the undefined name `args`
    # (NameError when called) instead of its own VAR_3 parameter.
    VAR_85 = lambda VAR_3: URL('a', 'c', 'f', VAR_3=VAR_3)
    self.expected_folders = {}
    self.expected_folders['inside'] = SPAN(H3('Folders'), TABLE(
        TR(TD(A('dir1', _href=VAR_85(VAR_3=['dir1'])))),
        TR(TD(A('dir2', _href=VAR_85(VAR_3=['dir2'])))),
        _class='table',
    ))
    self.expected_folders[VAR_2('inside', 'dir1')] = ''
    # dir2 only has visible entries when symlinks were created (POSIX).
    if FUNC_0():
        self.expected_folders[VAR_2('inside', 'dir2')] = SPAN(H3('Folders'), TABLE(
            TR(TD(A('link_to_dir1', _href=VAR_85(VAR_3=['dir2', 'link_to_dir1'])))),
            _class='table',
        ))
    else:
        self.expected_folders[VAR_2('inside', 'dir2')] = ''
    self.expected_files = {}
    self.expected_files['inside'] = SPAN(H3('Files'), TABLE(
        TR(TD(A('README', _href=VAR_85(VAR_3=['README']))), TD('')),
        _class='table',
    ))
    self.expected_files[VAR_2('inside', 'dir1')] = SPAN(H3('Files'), TABLE(
        TR(TD(A('file1', _href=VAR_85(VAR_3=['dir1', 'file1']))), TD('')),
        TR(TD(A('file2', _href=VAR_85(VAR_3=['dir1', 'file2']))), TD('')),
        _class='table',
    ))
    if FUNC_0():
        self.expected_files[VAR_2('inside', 'dir2')] = SPAN(H3('Files'), TABLE(
            TR(TD(A('link_to_file1', _href=VAR_85(VAR_3=['dir2', 'link_to_file1']))), TD('')),
            _class='table',
        ))
    else:
        self.expected_files[VAR_2('inside', 'dir2')] = ''
def FUNC_50(self, VAR_5, VAR_6='', VAR_7=False):
    """Create an Expose() instance rooted inside the sandbox.

    VAR_5 is the base directory relative to the sandbox, VAR_6 the raw
    request args path, and VAR_7 controls whether symlinks pointing
    outside the base directory are followed.

    BUG FIX: the VAR_7 flag was previously forwarded as the undefined
    name `follow_symlink_out` (NameError), so the parameter never
    reached Expose(); it now passes the parameter itself.
    """
    current.request = Request(env={})
    current.request.raw_args = VAR_6
    current.request.args = VAR_6.split('/')
    return Expose(VAR_5=VAR_2(self.base_dir, VAR_5),
                  basename=VAR_5,
                  VAR_7=VAR_7)
def FUNC_51(self):
VAR_86 = self.make_expose(VAR_5='inside', VAR_6='')
self.assertEqual(VAR_86.args, [])
self.assertEqual(VAR_86.folders, ['dir1', 'dir2'])
self.assertEqual(VAR_86.filenames, ['README'])
@unittest.skipUnless(FUNC_0(), 'requires symlinks')
def FUNC_52(self):
VAR_86 = self.make_expose(VAR_5='inside', VAR_6='',
VAR_7=True)
self.assertEqual(VAR_86.args, [])
self.assertEqual(VAR_86.folders, ['dir1', 'dir2', 'link_to_outside'])
self.assertEqual(VAR_86.filenames, ['README', 'link_to_file3'])
def FUNC_53(self):
VAR_86 = self.make_expose(VAR_5='inside', VAR_6='dir1')
self.assertEqual(VAR_86.args, ['dir1'])
self.assertEqual(VAR_86.folders, [])
self.assertEqual(VAR_86.filenames, ['file1', 'file2'])
def FUNC_54(self):
VAR_86 = self.make_expose(VAR_5='inside', VAR_6='dir2')
self.assertEqual(VAR_86.args, ['dir2'])
if FUNC_0():
self.assertEqual(VAR_86.folders, ['link_to_dir1'])
self.assertEqual(VAR_86.filenames, ['link_to_file1'])
else:
self.assertEqual(VAR_86.folders, [])
self.assertEqual(VAR_86.filenames, [])
def FUNC_55(self):
VAR_86 = self.make_expose(VAR_5='', VAR_6='inside')
self.assertEqual(VAR_86.args, ['inside'])
if FUNC_0():
self.assertEqual(VAR_86.folders, ['dir1', 'dir2', 'link_to_outside'])
self.assertEqual(VAR_86.filenames, ['README', 'link_to_file3'])
else:
self.assertEqual(VAR_86.folders, ['dir1', 'dir2'])
self.assertEqual(VAR_86.filenames, ['README'])
def FUNC_56(self):
VAR_86 = self.make_expose(VAR_5='', VAR_6='inside/dir2')
self.assertEqual(VAR_86.args, ['inside', 'dir2'])
if FUNC_0():
self.assertEqual(VAR_86.folders, ['link_to_dir1'])
self.assertEqual(VAR_86.filenames, ['link_to_file1'])
else:
self.assertEqual(VAR_86.folders, [])
self.assertEqual(VAR_86.filenames, [])
def FUNC_57(self, VAR_8, VAR_9):
self.assertEqual(VAR_8 if isinstance(VAR_8, str) else VAR_8.xml(),
VAR_9 if isinstance(VAR_9, str) else VAR_9.xml())
def FUNC_58(self, VAR_5, VAR_6):
VAR_86 = self.make_expose(VAR_5, VAR_6)
VAR_87 = VAR_2(VAR_5, VAR_6).rstrip(os.path.sep)
VAR_88 = Request(env={})
self.assertSameXML(VAR_86.table_files(), self.expected_files[VAR_87])
self.assertSameXML(VAR_86.table_folders(), self.expected_folders[VAR_87])
def FUNC_59(self):
self.run_test_xml_for(VAR_5='inside', VAR_6='')
def FUNC_60(self):
self.run_test_xml_for(VAR_5='inside', VAR_6='dir1')
def FUNC_61(self):
self.run_test_xml_for(VAR_5='inside', VAR_6='dir2')
def FUNC_62(self):
with self.assertRaises(HTTP):
self.make_expose(VAR_5='inside', VAR_6='dir1/file_not_found')
def FUNC_30(self):
with self.assertRaises(HTTP):
self.make_expose(VAR_5='inside', VAR_6='link_to_file3')
class CLASS_6(unittest.TestCase):
    """Tests for prevent_open_redirect(): scheme-relative, backslash and
    tilde URLs must be rejected (None); plain local paths pass through."""

    def FUNC_63(self):
        # BUG FIX: the original list was missing commas after three entries,
        # so implicit string-literal concatenation collapsed four intended
        # bad URLs into a single string and they were never tested
        # individually. Each URL is now its own list element (the backslash
        # entry uses a raw string for the same literal bytes).
        VAR_89 = [
            "/",
            "//",
            "~/",
            "//example.com",
            r"/\example.com",
            "~/example.com",
            "//example.com/VAR_8/VAR_9/c",
            "//example.com/VAR_8/VAR_9/c",
            "~/example.com/VAR_8/VAR_9/c",
        ]
        VAR_90 = [
            "a/VAR_9/c",
            "/a",
            "/VAR_8/b",
            "/VAR_8/VAR_9/c",
        ]
        VAR_91 = ["", ":", "http:", "https:", "ftp:"]
        # Every bad URL is rejected regardless of scheme prefix.
        for prefix in VAR_91:
            for VAR_85 in VAR_89:
                self.assertEqual(prevent_open_redirect(prefix + VAR_85), None)
        # Every good URL is returned unchanged, with its prefix.
        for prefix in VAR_91:
            for VAR_85 in VAR_90:
                self.assertEqual(prevent_open_redirect(prefix + VAR_85), prefix + VAR_85)
| [
1,
2,
3,
14,
16,
27,
29,
30,
35,
37,
43,
50,
54,
59,
65,
71,
74,
77,
80,
83,
89,
93,
100,
109,
117,
120,
129,
134,
141,
146,
159,
167,
171,
179,
183,
197,
201,
209,
210,
211,
215,
217,
225,
235,
246,
256,
261,
262,
264,
265,
266,
267,
268,
269,
270,
271,
272,
273,
274,
275,
276,
277,
278,
279,
280,
281,
282,
283,
284,
285,
286,
287,
288,
289,
290,
291,
292,
293,
294,
295,
296,
297,
298,
299,
300,
301,
302,
303,
304,
305,
306,
307,
308,
309,
310,
311,
312,
313,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
326,
327,
328,
329,
330,
331,
332,
333,
334,
335,
336,
337,
338,
339,
340,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
358,
359,
360,
361,
362,
363,
364,
365,
366,
367,
368,
369,
370,
371,
372,
373,
374,
375,
376,
377,
378,
379,
380,
381,
382,
383,
384,
385,
386,
387,
388,
389,
390,
391,
392,
393,
394,
395,
396,
397,
398,
399,
400,
401,
402,
403,
404,
405,
406,
407,
408,
409,
410,
411,
412,
413,
414,
415,
416,
417,
418,
419,
420,
421,
422,
423,
424,
425,
426,
427,
428,
429,
430,
431,
432,
433,
434,
435,
436,
437,
438,
439,
440,
441,
442,
443,
444,
445,
446,
447,
448,
449,
450,
451,
452,
453,
454,
455,
456,
457,
458,
459,
460,
461,
462,
463,
464,
465,
466,
467,
468,
469,
470,
471,
472,
473,
474,
475,
476,
477,
478,
479,
480,
481,
482,
484,
489,
513,
514,
515,
516,
517,
518,
519,
520,
521,
522,
523,
524,
525,
526,
527,
528,
529,
530,
531,
532,
538,
552,
553,
558,
561,
563,
570,
573,
577,
578,
579,
580,
583,
584,
585,
586,
587,
591,
602,
614,
629,
630,
634,
639,
642,
648,
652,
653,
654,
655,
656,
661,
663,
664,
665,
666,
667,
668,
669,
670,
671,
672,
673,
674,
675,
676,
677,
678,
679,
680,
681,
682,
684,
686,
688,
692,
695,
700,
701,
702,
703,
704,
705,
706,
707,
713,
714,
715,
721,
722,
723,
724,
726,
728,
729,
730,
731,
732,
738,
739,
740,
741,
742,
743,
744,
749,
754,
755,
756,
757,
758,
760,
771,
780,
781,
786,
787,
788,
789,
792,
795,
803,
804,
806,
807,
810,
812,
813,
815,
819,
820,
821,
826,
832,
835,
836,
837,
838,
839,
840,
841,
842,
843,
844,
845,
849,
853,
856,
858,
861,
862,
869,
871,
874,
883,
888,
895,
900,
905,
914,
916,
921,
926,
927,
932,
933,
940,
945,
953,
955,
960,
964,
968,
977,
983,
989,
999,
1001,
1005,
1012,
1023,
1031,
1045,
1047,
1051,
1052,
1053,
1054,
1055,
1056,
1057,
1058,
1059,
1060,
1061,
1062,
1063,
1064,
1065,
1066,
1067,
1068,
1069,
1070,
1071,
1072,
1073,
1074,
1080,
1133,
1140,
1141,
1148,
1153,
1156,
1157,
1159,
1160,
1163,
1164,
1166,
1169,
1178,
1181,
1191,
1192,
1194,
1197,
1203,
1204,
1205,
1206,
1207,
1208,
1209,
1210,
1211,
1212,
1213,
1214,
1215,
1216,
1217,
1220,
1224,
1232,
1240,
1244,
1247,
1254,
1261,
1264,
1279,
1297,
1305,
1311,
1319,
1325,
1335,
1345,
1355,
1359,
1366,
1369,
1372,
1375,
1379,
1383,
4,
5,
6,
32,
33,
34,
1076,
1077,
1078,
52,
53,
54,
55,
56,
1226,
1234,
1246
] | [
1,
2,
3,
14,
16,
27,
29,
30,
35,
37,
43,
50,
54,
59,
65,
71,
74,
77,
80,
83,
89,
93,
100,
109,
117,
120,
129,
134,
141,
146,
159,
167,
171,
179,
183,
197,
201,
209,
210,
211,
215,
217,
225,
235,
246,
256,
261,
262,
264,
265,
266,
267,
268,
269,
270,
271,
272,
273,
274,
275,
276,
277,
278,
279,
280,
281,
282,
283,
284,
285,
286,
287,
288,
289,
290,
291,
292,
293,
294,
295,
296,
297,
298,
299,
300,
301,
302,
303,
304,
305,
306,
307,
308,
309,
310,
311,
312,
313,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
326,
327,
328,
329,
330,
331,
332,
333,
334,
335,
336,
337,
338,
339,
340,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
358,
359,
360,
361,
362,
363,
364,
365,
366,
367,
368,
369,
370,
371,
372,
373,
374,
375,
376,
377,
378,
379,
380,
381,
382,
383,
384,
385,
386,
387,
388,
389,
390,
391,
392,
393,
394,
395,
396,
397,
398,
399,
400,
401,
402,
403,
404,
405,
406,
407,
408,
409,
410,
411,
412,
413,
414,
415,
416,
417,
418,
419,
420,
421,
422,
423,
424,
425,
426,
427,
428,
429,
430,
431,
432,
433,
434,
435,
436,
437,
438,
439,
440,
441,
442,
443,
444,
445,
446,
447,
448,
449,
450,
451,
452,
453,
454,
455,
456,
457,
458,
459,
460,
461,
462,
463,
464,
465,
466,
467,
468,
469,
470,
471,
472,
473,
474,
475,
476,
477,
478,
479,
480,
481,
482,
484,
489,
513,
514,
515,
516,
517,
518,
519,
520,
521,
522,
523,
524,
525,
526,
527,
528,
529,
530,
531,
532,
538,
552,
553,
558,
561,
563,
570,
573,
577,
578,
579,
580,
583,
584,
585,
586,
587,
591,
602,
614,
629,
630,
634,
639,
642,
648,
652,
653,
654,
655,
656,
661,
663,
664,
665,
666,
667,
668,
669,
670,
671,
672,
673,
674,
675,
676,
677,
678,
679,
680,
681,
682,
684,
686,
688,
692,
695,
700,
701,
702,
703,
704,
705,
706,
707,
713,
714,
715,
721,
722,
723,
724,
726,
728,
729,
730,
731,
732,
738,
739,
740,
741,
742,
743,
744,
749,
754,
755,
756,
757,
758,
760,
771,
780,
781,
786,
787,
788,
789,
792,
795,
803,
804,
806,
807,
810,
812,
813,
815,
819,
820,
821,
826,
832,
835,
836,
837,
838,
839,
840,
841,
842,
843,
844,
845,
849,
853,
856,
858,
861,
862,
869,
871,
874,
883,
888,
895,
900,
905,
914,
916,
921,
926,
927,
932,
933,
940,
945,
953,
955,
960,
964,
968,
977,
983,
989,
999,
1001,
1005,
1012,
1023,
1031,
1045,
1047,
1051,
1052,
1053,
1054,
1055,
1056,
1057,
1058,
1059,
1060,
1061,
1062,
1063,
1064,
1065,
1066,
1067,
1068,
1069,
1070,
1071,
1072,
1073,
1074,
1080,
1133,
1140,
1141,
1148,
1153,
1156,
1157,
1159,
1160,
1163,
1164,
1166,
1169,
1178,
1181,
1191,
1192,
1194,
1197,
1203,
1204,
1205,
1206,
1207,
1208,
1209,
1210,
1211,
1212,
1213,
1214,
1215,
1216,
1217,
1220,
1224,
1232,
1240,
1244,
1247,
1254,
1261,
1264,
1279,
1297,
1305,
1311,
1319,
1325,
1335,
1345,
1355,
1359,
1366,
1369,
1372,
1375,
1379,
1383,
1384,
1386,
1412,
1413,
1414,
1415,
4,
5,
6,
32,
33,
34,
1076,
1077,
1078,
52,
53,
54,
55,
56,
1226,
1234,
1246
] |
1CWE-79
| """
Form Widget classes specific to the Django admin site.
"""
from __future__ import unicode_literals
import copy
from django import forms
from django.contrib.admin.templatetags.admin_static import static
from django.core.urlresolvers import reverse
from django.forms.widgets import RadioFieldRenderer
from django.forms.util import flatatt
from django.utils.html import escape, format_html, format_html_join, smart_urlquote
from django.utils.text import Truncator
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
from django.utils.encoding import force_text
from django.utils import six
class FilteredSelectMultiple(forms.SelectMultiple):
"""
A SelectMultiple with a JavaScript filter interface.
Note that the resulting JavaScript assumes that the jsi18n
catalog has been loaded in the page
"""
@property
def media(self):
js = ["core.js", "SelectBox.js", "SelectFilter2.js"]
return forms.Media(js=[static("admin/js/%s" % path) for path in js])
def __init__(self, verbose_name, is_stacked, attrs=None, choices=()):
self.verbose_name = verbose_name
self.is_stacked = is_stacked
super(FilteredSelectMultiple, self).__init__(attrs, choices)
def render(self, name, value, attrs=None, choices=()):
if attrs is None:
attrs = {}
attrs['class'] = 'selectfilter'
if self.is_stacked:
attrs['class'] += 'stacked'
output = [super(FilteredSelectMultiple, self).render(name, value, attrs, choices)]
output.append('<script type="text/javascript">addEvent(window, "load", function(e) {')
# TODO: "id_" is hard-coded here. This should instead use the correct
# API to determine the ID dynamically.
output.append('SelectFilter.init("id_%s", "%s", %s, "%s"); });</script>\n'
% (name, self.verbose_name.replace('"', '\\"'), int(self.is_stacked), static('admin/')))
return mark_safe(''.join(output))
class AdminDateWidget(forms.DateInput):
@property
def media(self):
js = ["calendar.js", "admin/DateTimeShortcuts.js"]
return forms.Media(js=[static("admin/js/%s" % path) for path in js])
def __init__(self, attrs=None, format=None):
final_attrs = {'class': 'vDateField', 'size': '10'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminDateWidget, self).__init__(attrs=final_attrs, format=format)
class AdminTimeWidget(forms.TimeInput):
@property
def media(self):
js = ["calendar.js", "admin/DateTimeShortcuts.js"]
return forms.Media(js=[static("admin/js/%s" % path) for path in js])
def __init__(self, attrs=None, format=None):
final_attrs = {'class': 'vTimeField', 'size': '8'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminTimeWidget, self).__init__(attrs=final_attrs, format=format)
class AdminSplitDateTime(forms.SplitDateTimeWidget):
"""
A SplitDateTime Widget that has some admin-specific styling.
"""
def __init__(self, attrs=None):
widgets = [AdminDateWidget, AdminTimeWidget]
# Note that we're calling MultiWidget, not SplitDateTimeWidget, because
# we want to define widgets.
forms.MultiWidget.__init__(self, widgets, attrs)
def format_output(self, rendered_widgets):
return format_html('<p class="datetime">{0} {1}<br />{2} {3}</p>',
_('Date:'), rendered_widgets[0],
_('Time:'), rendered_widgets[1])
class AdminRadioFieldRenderer(RadioFieldRenderer):
def render(self):
"""Outputs a <ul> for this set of radio fields."""
return format_html('<ul{0}>\n{1}\n</ul>',
flatatt(self.attrs),
format_html_join('\n', '<li>{0}</li>',
((force_text(w),) for w in self)))
class AdminRadioSelect(forms.RadioSelect):
renderer = AdminRadioFieldRenderer
class AdminFileWidget(forms.ClearableFileInput):
template_with_initial = ('<p class="file-upload">%s</p>'
% forms.ClearableFileInput.template_with_initial)
template_with_clear = ('<span class="clearable-file-input">%s</span>'
% forms.ClearableFileInput.template_with_clear)
def url_params_from_lookup_dict(lookups):
"""
Converts the type of lookups specified in a ForeignKey limit_choices_to
attribute to a dictionary of query parameters
"""
params = {}
if lookups and hasattr(lookups, 'items'):
items = []
for k, v in lookups.items():
if callable(v):
v = v()
if isinstance(v, (tuple, list)):
v = ','.join([str(x) for x in v])
elif isinstance(v, bool):
# See django.db.fields.BooleanField.get_prep_lookup
v = ('0', '1')[v]
else:
v = six.text_type(v)
items.append((k, v))
params.update(dict(items))
return params
class ForeignKeyRawIdWidget(forms.TextInput):
"""
A Widget for displaying ForeignKeys in the "raw_id" interface rather than
in a <select> box.
"""
def __init__(self, rel, admin_site, attrs=None, using=None):
self.rel = rel
self.admin_site = admin_site
self.db = using
super(ForeignKeyRawIdWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
rel_to = self.rel.to
if attrs is None:
attrs = {}
extra = []
if rel_to in self.admin_site._registry:
# The related object is registered with the same AdminSite
related_url = reverse('admin:%s_%s_changelist' %
(rel_to._meta.app_label,
rel_to._meta.model_name),
current_app=self.admin_site.name)
params = self.url_parameters()
if params:
url = '?' + '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
else:
url = ''
if "class" not in attrs:
attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript code looks for this hook.
# TODO: "lookup_id_" is hard-coded here. This should instead use
# the correct API to determine the ID dynamically.
extra.append('<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> '
% (related_url, url, name))
extra.append('<img src="%s" width="16" height="16" alt="%s" /></a>'
% (static('admin/img/selector-search.gif'), _('Lookup')))
output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)] + extra
if value:
output.append(self.label_for_value(value))
return mark_safe(''.join(output))
def base_url_parameters(self):
return url_params_from_lookup_dict(self.rel.limit_choices_to)
def url_parameters(self):
from django.contrib.admin.views.main import TO_FIELD_VAR
params = self.base_url_parameters()
params.update({TO_FIELD_VAR: self.rel.get_related_field().name})
return params
def label_for_value(self, value):
key = self.rel.get_related_field().name
try:
obj = self.rel.to._default_manager.using(self.db).get(**{key: value})
return ' <strong>%s</strong>' % escape(Truncator(obj).words(14, truncate='...'))
except (ValueError, self.rel.to.DoesNotExist):
return ''
class ManyToManyRawIdWidget(ForeignKeyRawIdWidget):
"""
A Widget for displaying ManyToMany ids in the "raw_id" interface rather than
in a <select multiple> box.
"""
def render(self, name, value, attrs=None):
if attrs is None:
attrs = {}
if self.rel.to in self.admin_site._registry:
# The related object is registered with the same AdminSite
attrs['class'] = 'vManyToManyRawIdAdminField'
if value:
value = ','.join([force_text(v) for v in value])
else:
value = ''
return super(ManyToManyRawIdWidget, self).render(name, value, attrs)
def url_parameters(self):
return self.base_url_parameters()
def label_for_value(self, value):
return ''
def value_from_datadict(self, data, files, name):
value = data.get(name)
if value:
return value.split(',')
class RelatedFieldWidgetWrapper(forms.Widget):
"""
This class is a wrapper to a given widget to add the add icon for the
admin interface.
"""
def __init__(self, widget, rel, admin_site, can_add_related=None):
self.is_hidden = widget.is_hidden
self.needs_multipart_form = widget.needs_multipart_form
self.attrs = widget.attrs
self.choices = widget.choices
self.widget = widget
self.rel = rel
# Backwards compatible check for whether a user can add related
# objects.
if can_add_related is None:
can_add_related = rel.to in admin_site._registry
self.can_add_related = can_add_related
# so we can check if the related object is registered with this AdminSite
self.admin_site = admin_site
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.widget = copy.deepcopy(self.widget, memo)
obj.attrs = self.widget.attrs
memo[id(self)] = obj
return obj
@property
def media(self):
return self.widget.media
def render(self, name, value, *args, **kwargs):
rel_to = self.rel.to
info = (rel_to._meta.app_label, rel_to._meta.model_name)
self.widget.choices = self.choices
output = [self.widget.render(name, value, *args, **kwargs)]
if self.can_add_related:
related_url = reverse('admin:%s_%s_add' % info, current_app=self.admin_site.name)
# TODO: "add_id_" is hard-coded here. This should instead use the
# correct API to determine the ID dynamically.
output.append('<a href="%s" class="add-another" id="add_id_%s" onclick="return showAddAnotherPopup(this);"> '
% (related_url, name))
output.append('<img src="%s" width="10" height="10" alt="%s"/></a>'
% (static('admin/img/icon_addlink.gif'), _('Add Another')))
return mark_safe(''.join(output))
def build_attrs(self, extra_attrs=None, **kwargs):
"Helper function for building an attribute dictionary."
self.attrs = self.widget.build_attrs(extra_attrs=None, **kwargs)
return self.attrs
def value_from_datadict(self, data, files, name):
return self.widget.value_from_datadict(data, files, name)
def id_for_label(self, id_):
return self.widget.id_for_label(id_)
class AdminTextareaWidget(forms.Textarea):
def __init__(self, attrs=None):
final_attrs = {'class': 'vLargeTextField'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminTextareaWidget, self).__init__(attrs=final_attrs)
class AdminTextInputWidget(forms.TextInput):
def __init__(self, attrs=None):
final_attrs = {'class': 'vTextField'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminTextInputWidget, self).__init__(attrs=final_attrs)
class AdminEmailInputWidget(forms.EmailInput):
def __init__(self, attrs=None):
final_attrs = {'class': 'vTextField'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminEmailInputWidget, self).__init__(attrs=final_attrs)
class AdminURLFieldWidget(forms.URLInput):
def __init__(self, attrs=None):
final_attrs = {'class': 'vURLField'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminURLFieldWidget, self).__init__(attrs=final_attrs)
def render(self, name, value, attrs=None):
html = super(AdminURLFieldWidget, self).render(name, value, attrs)
if value:
value = force_text(self._format_value(value))
final_attrs = {'href': mark_safe(smart_urlquote(value))}
html = format_html(
'<p class="url">{0} <a {1}>{2}</a><br />{3} {4}</p>',
_('Currently:'), flatatt(final_attrs), value,
_('Change:'), html
)
return html
class AdminIntegerFieldWidget(forms.TextInput):
class_name = 'vIntegerField'
def __init__(self, attrs=None):
final_attrs = {'class': self.class_name}
if attrs is not None:
final_attrs.update(attrs)
super(AdminIntegerFieldWidget, self).__init__(attrs=final_attrs)
class AdminBigIntegerFieldWidget(AdminIntegerFieldWidget):
class_name = 'vBigIntegerField'
class AdminCommaSeparatedIntegerFieldWidget(forms.TextInput):
def __init__(self, attrs=None):
final_attrs = {'class': 'vCommaSeparatedIntegerField'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminCommaSeparatedIntegerFieldWidget, self).__init__(attrs=final_attrs)
| """
Form Widget classes specific to the Django admin site.
"""
from __future__ import unicode_literals
import copy
from django import forms
from django.contrib.admin.templatetags.admin_static import static
from django.core.urlresolvers import reverse
from django.forms.widgets import RadioFieldRenderer
from django.forms.util import flatatt
from django.utils.html import escape, format_html, format_html_join, smart_urlquote
from django.utils.text import Truncator
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
from django.utils.encoding import force_text
from django.utils import six
class FilteredSelectMultiple(forms.SelectMultiple):
"""
A SelectMultiple with a JavaScript filter interface.
Note that the resulting JavaScript assumes that the jsi18n
catalog has been loaded in the page
"""
@property
def media(self):
js = ["core.js", "SelectBox.js", "SelectFilter2.js"]
return forms.Media(js=[static("admin/js/%s" % path) for path in js])
def __init__(self, verbose_name, is_stacked, attrs=None, choices=()):
self.verbose_name = verbose_name
self.is_stacked = is_stacked
super(FilteredSelectMultiple, self).__init__(attrs, choices)
def render(self, name, value, attrs=None, choices=()):
if attrs is None:
attrs = {}
attrs['class'] = 'selectfilter'
if self.is_stacked:
attrs['class'] += 'stacked'
output = [super(FilteredSelectMultiple, self).render(name, value, attrs, choices)]
output.append('<script type="text/javascript">addEvent(window, "load", function(e) {')
# TODO: "id_" is hard-coded here. This should instead use the correct
# API to determine the ID dynamically.
output.append('SelectFilter.init("id_%s", "%s", %s, "%s"); });</script>\n'
% (name, self.verbose_name.replace('"', '\\"'), int(self.is_stacked), static('admin/')))
return mark_safe(''.join(output))
class AdminDateWidget(forms.DateInput):
@property
def media(self):
js = ["calendar.js", "admin/DateTimeShortcuts.js"]
return forms.Media(js=[static("admin/js/%s" % path) for path in js])
def __init__(self, attrs=None, format=None):
final_attrs = {'class': 'vDateField', 'size': '10'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminDateWidget, self).__init__(attrs=final_attrs, format=format)
class AdminTimeWidget(forms.TimeInput):
@property
def media(self):
js = ["calendar.js", "admin/DateTimeShortcuts.js"]
return forms.Media(js=[static("admin/js/%s" % path) for path in js])
def __init__(self, attrs=None, format=None):
final_attrs = {'class': 'vTimeField', 'size': '8'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminTimeWidget, self).__init__(attrs=final_attrs, format=format)
class AdminSplitDateTime(forms.SplitDateTimeWidget):
"""
A SplitDateTime Widget that has some admin-specific styling.
"""
def __init__(self, attrs=None):
widgets = [AdminDateWidget, AdminTimeWidget]
# Note that we're calling MultiWidget, not SplitDateTimeWidget, because
# we want to define widgets.
forms.MultiWidget.__init__(self, widgets, attrs)
def format_output(self, rendered_widgets):
return format_html('<p class="datetime">{0} {1}<br />{2} {3}</p>',
_('Date:'), rendered_widgets[0],
_('Time:'), rendered_widgets[1])
class AdminRadioFieldRenderer(RadioFieldRenderer):
def render(self):
"""Outputs a <ul> for this set of radio fields."""
return format_html('<ul{0}>\n{1}\n</ul>',
flatatt(self.attrs),
format_html_join('\n', '<li>{0}</li>',
((force_text(w),) for w in self)))
class AdminRadioSelect(forms.RadioSelect):
renderer = AdminRadioFieldRenderer
class AdminFileWidget(forms.ClearableFileInput):
template_with_initial = ('<p class="file-upload">%s</p>'
% forms.ClearableFileInput.template_with_initial)
template_with_clear = ('<span class="clearable-file-input">%s</span>'
% forms.ClearableFileInput.template_with_clear)
def url_params_from_lookup_dict(lookups):
"""
Converts the type of lookups specified in a ForeignKey limit_choices_to
attribute to a dictionary of query parameters
"""
params = {}
if lookups and hasattr(lookups, 'items'):
items = []
for k, v in lookups.items():
if callable(v):
v = v()
if isinstance(v, (tuple, list)):
v = ','.join([str(x) for x in v])
elif isinstance(v, bool):
# See django.db.fields.BooleanField.get_prep_lookup
v = ('0', '1')[v]
else:
v = six.text_type(v)
items.append((k, v))
params.update(dict(items))
return params
class ForeignKeyRawIdWidget(forms.TextInput):
"""
A Widget for displaying ForeignKeys in the "raw_id" interface rather than
in a <select> box.
"""
def __init__(self, rel, admin_site, attrs=None, using=None):
self.rel = rel
self.admin_site = admin_site
self.db = using
super(ForeignKeyRawIdWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
rel_to = self.rel.to
if attrs is None:
attrs = {}
extra = []
if rel_to in self.admin_site._registry:
# The related object is registered with the same AdminSite
related_url = reverse('admin:%s_%s_changelist' %
(rel_to._meta.app_label,
rel_to._meta.model_name),
current_app=self.admin_site.name)
params = self.url_parameters()
if params:
url = '?' + '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
else:
url = ''
if "class" not in attrs:
attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript code looks for this hook.
# TODO: "lookup_id_" is hard-coded here. This should instead use
# the correct API to determine the ID dynamically.
extra.append('<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> '
% (related_url, url, name))
extra.append('<img src="%s" width="16" height="16" alt="%s" /></a>'
% (static('admin/img/selector-search.gif'), _('Lookup')))
output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)] + extra
if value:
output.append(self.label_for_value(value))
return mark_safe(''.join(output))
def base_url_parameters(self):
return url_params_from_lookup_dict(self.rel.limit_choices_to)
def url_parameters(self):
from django.contrib.admin.views.main import TO_FIELD_VAR
params = self.base_url_parameters()
params.update({TO_FIELD_VAR: self.rel.get_related_field().name})
return params
def label_for_value(self, value):
key = self.rel.get_related_field().name
try:
obj = self.rel.to._default_manager.using(self.db).get(**{key: value})
return ' <strong>%s</strong>' % escape(Truncator(obj).words(14, truncate='...'))
except (ValueError, self.rel.to.DoesNotExist):
return ''
class ManyToManyRawIdWidget(ForeignKeyRawIdWidget):
"""
A Widget for displaying ManyToMany ids in the "raw_id" interface rather than
in a <select multiple> box.
"""
def render(self, name, value, attrs=None):
if attrs is None:
attrs = {}
if self.rel.to in self.admin_site._registry:
# The related object is registered with the same AdminSite
attrs['class'] = 'vManyToManyRawIdAdminField'
if value:
value = ','.join([force_text(v) for v in value])
else:
value = ''
return super(ManyToManyRawIdWidget, self).render(name, value, attrs)
def url_parameters(self):
return self.base_url_parameters()
def label_for_value(self, value):
return ''
def value_from_datadict(self, data, files, name):
value = data.get(name)
if value:
return value.split(',')
class RelatedFieldWidgetWrapper(forms.Widget):
"""
This class is a wrapper to a given widget to add the add icon for the
admin interface.
"""
def __init__(self, widget, rel, admin_site, can_add_related=None):
self.is_hidden = widget.is_hidden
self.needs_multipart_form = widget.needs_multipart_form
self.attrs = widget.attrs
self.choices = widget.choices
self.widget = widget
self.rel = rel
# Backwards compatible check for whether a user can add related
# objects.
if can_add_related is None:
can_add_related = rel.to in admin_site._registry
self.can_add_related = can_add_related
# so we can check if the related object is registered with this AdminSite
self.admin_site = admin_site
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.widget = copy.deepcopy(self.widget, memo)
obj.attrs = self.widget.attrs
memo[id(self)] = obj
return obj
@property
def media(self):
return self.widget.media
def render(self, name, value, *args, **kwargs):
rel_to = self.rel.to
info = (rel_to._meta.app_label, rel_to._meta.model_name)
self.widget.choices = self.choices
output = [self.widget.render(name, value, *args, **kwargs)]
if self.can_add_related:
related_url = reverse('admin:%s_%s_add' % info, current_app=self.admin_site.name)
# TODO: "add_id_" is hard-coded here. This should instead use the
# correct API to determine the ID dynamically.
output.append('<a href="%s" class="add-another" id="add_id_%s" onclick="return showAddAnotherPopup(this);"> '
% (related_url, name))
output.append('<img src="%s" width="10" height="10" alt="%s"/></a>'
% (static('admin/img/icon_addlink.gif'), _('Add Another')))
return mark_safe(''.join(output))
def build_attrs(self, extra_attrs=None, **kwargs):
"Helper function for building an attribute dictionary."
self.attrs = self.widget.build_attrs(extra_attrs=None, **kwargs)
return self.attrs
def value_from_datadict(self, data, files, name):
return self.widget.value_from_datadict(data, files, name)
def id_for_label(self, id_):
return self.widget.id_for_label(id_)
class AdminTextareaWidget(forms.Textarea):
def __init__(self, attrs=None):
final_attrs = {'class': 'vLargeTextField'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminTextareaWidget, self).__init__(attrs=final_attrs)
class AdminTextInputWidget(forms.TextInput):
def __init__(self, attrs=None):
final_attrs = {'class': 'vTextField'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminTextInputWidget, self).__init__(attrs=final_attrs)
class AdminEmailInputWidget(forms.EmailInput):
def __init__(self, attrs=None):
final_attrs = {'class': 'vTextField'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminEmailInputWidget, self).__init__(attrs=final_attrs)
class AdminURLFieldWidget(forms.URLInput):
def __init__(self, attrs=None):
final_attrs = {'class': 'vURLField'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminURLFieldWidget, self).__init__(attrs=final_attrs)
def render(self, name, value, attrs=None):
html = super(AdminURLFieldWidget, self).render(name, value, attrs)
if value:
value = force_text(self._format_value(value))
final_attrs = {'href': smart_urlquote(value)}
html = format_html(
'<p class="url">{0} <a{1}>{2}</a><br />{3} {4}</p>',
_('Currently:'), flatatt(final_attrs), value,
_('Change:'), html
)
return html
class AdminIntegerFieldWidget(forms.TextInput):
class_name = 'vIntegerField'
def __init__(self, attrs=None):
final_attrs = {'class': self.class_name}
if attrs is not None:
final_attrs.update(attrs)
super(AdminIntegerFieldWidget, self).__init__(attrs=final_attrs)
class AdminBigIntegerFieldWidget(AdminIntegerFieldWidget):
class_name = 'vBigIntegerField'
class AdminCommaSeparatedIntegerFieldWidget(forms.TextInput):
def __init__(self, attrs=None):
final_attrs = {'class': 'vCommaSeparatedIntegerField'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminCommaSeparatedIntegerFieldWidget, self).__init__(attrs=final_attrs)
| xss | {
"code": [
" final_attrs = {'href': mark_safe(smart_urlquote(value))}",
" '<p class=\"url\">{0} <a {1}>{2}</a><br />{3} {4}</p>',"
],
"line_no": [
308,
310
]
} | {
"code": [
" final_attrs = {'href': smart_urlquote(value)}",
" '<p class=\"url\">{0} <a{1}>{2}</a><br />{3} {4}</p>',"
],
"line_no": [
308,
310
]
} |
from __future__ import unicode_literals
import copy
from django import forms
from django.contrib.admin.templatetags.admin_static import static
from django.core.urlresolvers import reverse
from django.forms.widgets import RadioFieldRenderer
from django.forms.util import flatatt
from django.utils.html import escape, format_html, format_html_join, smart_urlquote
from django.utils.text import Truncator
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
from django.utils.encoding import force_text
from django.utils import six
class CLASS_0(forms.SelectMultiple):
@property
def FUNC_1(self):
VAR_26 = ["core.js", "SelectBox.js", "SelectFilter2.js"]
return forms.Media(VAR_26=[static("admin/VAR_26/%s" % path) for path in VAR_26])
def __init__(self, VAR_1, VAR_2, VAR_3=None, VAR_4=()):
self.verbose_name = VAR_1
self.is_stacked = VAR_2
super(CLASS_0, self).__init__(VAR_3, VAR_4)
def FUNC_2(self, VAR_5, VAR_6, VAR_3=None, VAR_4=()):
if VAR_3 is None:
VAR_3 = {}
attrs['class'] = 'selectfilter'
if self.is_stacked:
VAR_3['class'] += 'stacked'
VAR_27 = [super(CLASS_0, self).render(VAR_5, VAR_6, VAR_3, VAR_4)]
VAR_27.append('<script type="text/javascript">addEvent(window, "load", function(e) {')
VAR_27.append('SelectFilter.init("id_%s", "%s", %s, "%s"); });</script>\n'
% (VAR_5, self.verbose_name.replace('"', '\\"'), int(self.is_stacked), static('admin/')))
return mark_safe(''.join(VAR_27))
class CLASS_1(forms.DateInput):
@property
def FUNC_1(self):
VAR_26 = ["calendar.js", "admin/DateTimeShortcuts.js"]
return forms.Media(VAR_26=[static("admin/VAR_26/%s" % path) for path in VAR_26])
def __init__(self, VAR_3=None, VAR_7=None):
VAR_28 = {'class': 'vDateField', 'size': '10'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_1, self).__init__(VAR_3=VAR_28, VAR_7=format)
class CLASS_2(forms.TimeInput):
@property
def FUNC_1(self):
VAR_26 = ["calendar.js", "admin/DateTimeShortcuts.js"]
return forms.Media(VAR_26=[static("admin/VAR_26/%s" % path) for path in VAR_26])
def __init__(self, VAR_3=None, VAR_7=None):
VAR_28 = {'class': 'vTimeField', 'size': '8'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_2, self).__init__(VAR_3=VAR_28, VAR_7=format)
class CLASS_3(forms.SplitDateTimeWidget):
def __init__(self, VAR_3=None):
VAR_29 = [CLASS_1, CLASS_2]
forms.MultiWidget.__init__(self, VAR_29, VAR_3)
def FUNC_3(self, VAR_8):
return format_html('<p class="datetime">{0} {1}<br />{2} {3}</p>',
_('Date:'), VAR_8[0],
_('Time:'), VAR_8[1])
class CLASS_4(RadioFieldRenderer):
def FUNC_2(self):
return format_html('<ul{0}>\n{1}\n</ul>',
flatatt(self.attrs),
format_html_join('\n', '<li>{0}</li>',
((force_text(w),) for w in self)))
class CLASS_5(forms.RadioSelect):
VAR_9 = CLASS_4
class CLASS_6(forms.ClearableFileInput):
VAR_10 = ('<p class="file-upload">%s</p>'
% forms.ClearableFileInput.template_with_initial)
VAR_11 = ('<span class="clearable-file-input">%s</span>'
% forms.ClearableFileInput.template_with_clear)
def FUNC_0(VAR_0):
VAR_12 = {}
if VAR_0 and hasattr(VAR_0, 'items'):
VAR_30 = []
for k, VAR_39 in VAR_0.items():
if callable(VAR_39):
VAR_39 = VAR_39()
if isinstance(VAR_39, (tuple, list)):
VAR_39 = ','.join([str(x) for x in VAR_39])
elif isinstance(VAR_39, bool):
VAR_39 = ('0', '1')[VAR_39]
else:
VAR_39 = six.text_type(VAR_39)
VAR_30.append((k, VAR_39))
VAR_12.update(dict(VAR_30))
return VAR_12
class CLASS_7(forms.TextInput):
def __init__(self, VAR_13, VAR_14, VAR_3=None, VAR_15=None):
self.rel = VAR_13
self.admin_site = VAR_14
self.db = VAR_15
super(CLASS_7, self).__init__(VAR_3)
def FUNC_2(self, VAR_5, VAR_6, VAR_3=None):
VAR_31 = self.rel.to
if VAR_3 is None:
VAR_3 = {}
VAR_32 = []
if VAR_31 in self.admin_site._registry:
VAR_38 = reverse('admin:%s_%s_changelist' %
(VAR_31._meta.app_label,
VAR_31._meta.model_name),
current_app=self.admin_site.name)
VAR_12 = self.url_parameters()
if VAR_12:
VAR_40 = '?' + '&'.join(['%s=%s' % (k, VAR_39) for k, VAR_39 in VAR_12.items()])
else:
VAR_40 = ''
if "class" not in VAR_3:
attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript code looks for this hook.
VAR_32.append('<a href="%s%s" class="related-lookup" VAR_35="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> '
% (VAR_38, VAR_40, VAR_5))
VAR_32.append('<img src="%s" width="16" height="16" alt="%s" /></a>'
% (static('admin/img/selector-search.gif'), _('Lookup')))
VAR_27 = [super(CLASS_7, self).render(VAR_5, VAR_6, VAR_3)] + VAR_32
if VAR_6:
VAR_27.append(self.label_for_value(VAR_6))
return mark_safe(''.join(VAR_27))
def FUNC_4(self):
return FUNC_0(self.rel.limit_choices_to)
def FUNC_5(self):
from django.contrib.admin.views.main import TO_FIELD_VAR
VAR_12 = self.base_url_parameters()
VAR_12.update({TO_FIELD_VAR: self.rel.get_related_field().name})
return VAR_12
def FUNC_6(self, VAR_6):
VAR_33 = self.rel.get_related_field().name
try:
VAR_34 = self.rel.to._default_manager.using(self.db).get(**{VAR_33: VAR_6})
return ' <strong>%s</strong>' % escape(Truncator(VAR_34).words(14, truncate='...'))
except (ValueError, self.rel.to.DoesNotExist):
return ''
class CLASS_8(CLASS_7):
def FUNC_2(self, VAR_5, VAR_6, VAR_3=None):
if VAR_3 is None:
VAR_3 = {}
if self.rel.to in self.admin_site._registry:
VAR_3['class'] = 'vManyToManyRawIdAdminField'
if VAR_6:
VAR_6 = ','.join([force_text(VAR_39) for VAR_39 in VAR_6])
else:
VAR_6 = ''
return super(CLASS_8, self).render(VAR_5, VAR_6, VAR_3)
def FUNC_5(self):
return self.base_url_parameters()
def FUNC_6(self, VAR_6):
return ''
def FUNC_7(self, VAR_16, VAR_17, VAR_5):
VAR_6 = VAR_16.get(VAR_5)
if VAR_6:
return VAR_6.split(',')
class CLASS_9(forms.Widget):
def __init__(self, VAR_18, VAR_13, VAR_14, VAR_19=None):
self.is_hidden = VAR_18.is_hidden
self.needs_multipart_form = VAR_18.needs_multipart_form
self.attrs = VAR_18.attrs
self.choices = VAR_18.choices
self.widget = VAR_18
self.rel = VAR_13
if VAR_19 is None:
VAR_19 = VAR_13.to in VAR_14._registry
self.can_add_related = VAR_19
self.admin_site = VAR_14
def __deepcopy__(self, VAR_20):
VAR_34 = copy.copy(self)
VAR_34.widget = copy.deepcopy(self.widget, VAR_20)
VAR_34.attrs = self.widget.attrs
VAR_20[VAR_35(self)] = VAR_34
return VAR_34
@property
def FUNC_1(self):
return self.widget.media
def FUNC_2(self, VAR_5, VAR_6, *VAR_21, **VAR_22):
VAR_31 = self.rel.to
VAR_36 = (VAR_31._meta.app_label, VAR_31._meta.model_name)
self.widget.choices = self.choices
VAR_27 = [self.widget.render(VAR_5, VAR_6, *VAR_21, **VAR_22)]
if self.can_add_related:
VAR_38 = reverse('admin:%s_%s_add' % VAR_36, current_app=self.admin_site.name)
VAR_27.append('<a href="%s" class="add-another" VAR_35="add_id_%s" onclick="return showAddAnotherPopup(this);"> '
% (VAR_38, VAR_5))
VAR_27.append('<img src="%s" width="10" height="10" alt="%s"/></a>'
% (static('admin/img/icon_addlink.gif'), _('Add Another')))
return mark_safe(''.join(VAR_27))
self.attrs = self.widget.build_attrs(VAR_23=None, **VAR_22)
return self.attrs
def FUNC_7(self, VAR_16, VAR_17, VAR_5):
return self.widget.value_from_datadict(VAR_16, VAR_17, VAR_5)
def FUNC_9(self, VAR_24):
return self.widget.id_for_label(VAR_24)
class CLASS_10(forms.Textarea):
def __init__(self, VAR_3=None):
VAR_28 = {'class': 'vLargeTextField'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_10, self).__init__(VAR_3=VAR_28)
class CLASS_11(forms.TextInput):
def __init__(self, VAR_3=None):
VAR_28 = {'class': 'vTextField'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_11, self).__init__(VAR_3=VAR_28)
class CLASS_12(forms.EmailInput):
def __init__(self, VAR_3=None):
VAR_28 = {'class': 'vTextField'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_12, self).__init__(VAR_3=VAR_28)
class CLASS_13(forms.URLInput):
def __init__(self, VAR_3=None):
VAR_28 = {'class': 'vURLField'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_13, self).__init__(VAR_3=VAR_28)
def FUNC_2(self, VAR_5, VAR_6, VAR_3=None):
VAR_37 = super(CLASS_13, self).render(VAR_5, VAR_6, VAR_3)
if VAR_6:
VAR_6 = force_text(self._format_value(VAR_6))
VAR_28 = {'href': mark_safe(smart_urlquote(VAR_6))}
VAR_37 = format_html(
'<p class="url">{0} <a {1}>{2}</a><br />{3} {4}</p>',
_('Currently:'), flatatt(VAR_28), VAR_6,
_('Change:'), VAR_37
)
return VAR_37
class CLASS_14(forms.TextInput):
VAR_25 = 'vIntegerField'
def __init__(self, VAR_3=None):
VAR_28 = {'class': self.class_name}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_14, self).__init__(VAR_3=VAR_28)
class CLASS_15(CLASS_14):
VAR_25 = 'vBigIntegerField'
class CLASS_16(forms.TextInput):
def __init__(self, VAR_3=None):
VAR_28 = {'class': 'vCommaSeparatedIntegerField'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_16, self).__init__(VAR_3=VAR_28)
|
from __future__ import unicode_literals
import copy
from django import forms
from django.contrib.admin.templatetags.admin_static import static
from django.core.urlresolvers import reverse
from django.forms.widgets import RadioFieldRenderer
from django.forms.util import flatatt
from django.utils.html import escape, format_html, format_html_join, smart_urlquote
from django.utils.text import Truncator
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
from django.utils.encoding import force_text
from django.utils import six
class CLASS_0(forms.SelectMultiple):
@property
def FUNC_1(self):
VAR_26 = ["core.js", "SelectBox.js", "SelectFilter2.js"]
return forms.Media(VAR_26=[static("admin/VAR_26/%s" % path) for path in VAR_26])
def __init__(self, VAR_1, VAR_2, VAR_3=None, VAR_4=()):
self.verbose_name = VAR_1
self.is_stacked = VAR_2
super(CLASS_0, self).__init__(VAR_3, VAR_4)
def FUNC_2(self, VAR_5, VAR_6, VAR_3=None, VAR_4=()):
if VAR_3 is None:
VAR_3 = {}
attrs['class'] = 'selectfilter'
if self.is_stacked:
VAR_3['class'] += 'stacked'
VAR_27 = [super(CLASS_0, self).render(VAR_5, VAR_6, VAR_3, VAR_4)]
VAR_27.append('<script type="text/javascript">addEvent(window, "load", function(e) {')
VAR_27.append('SelectFilter.init("id_%s", "%s", %s, "%s"); });</script>\n'
% (VAR_5, self.verbose_name.replace('"', '\\"'), int(self.is_stacked), static('admin/')))
return mark_safe(''.join(VAR_27))
class CLASS_1(forms.DateInput):
@property
def FUNC_1(self):
VAR_26 = ["calendar.js", "admin/DateTimeShortcuts.js"]
return forms.Media(VAR_26=[static("admin/VAR_26/%s" % path) for path in VAR_26])
def __init__(self, VAR_3=None, VAR_7=None):
VAR_28 = {'class': 'vDateField', 'size': '10'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_1, self).__init__(VAR_3=VAR_28, VAR_7=format)
class CLASS_2(forms.TimeInput):
@property
def FUNC_1(self):
VAR_26 = ["calendar.js", "admin/DateTimeShortcuts.js"]
return forms.Media(VAR_26=[static("admin/VAR_26/%s" % path) for path in VAR_26])
def __init__(self, VAR_3=None, VAR_7=None):
VAR_28 = {'class': 'vTimeField', 'size': '8'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_2, self).__init__(VAR_3=VAR_28, VAR_7=format)
class CLASS_3(forms.SplitDateTimeWidget):
def __init__(self, VAR_3=None):
VAR_29 = [CLASS_1, CLASS_2]
forms.MultiWidget.__init__(self, VAR_29, VAR_3)
def FUNC_3(self, VAR_8):
return format_html('<p class="datetime">{0} {1}<br />{2} {3}</p>',
_('Date:'), VAR_8[0],
_('Time:'), VAR_8[1])
class CLASS_4(RadioFieldRenderer):
def FUNC_2(self):
return format_html('<ul{0}>\n{1}\n</ul>',
flatatt(self.attrs),
format_html_join('\n', '<li>{0}</li>',
((force_text(w),) for w in self)))
class CLASS_5(forms.RadioSelect):
VAR_9 = CLASS_4
class CLASS_6(forms.ClearableFileInput):
VAR_10 = ('<p class="file-upload">%s</p>'
% forms.ClearableFileInput.template_with_initial)
VAR_11 = ('<span class="clearable-file-input">%s</span>'
% forms.ClearableFileInput.template_with_clear)
def FUNC_0(VAR_0):
VAR_12 = {}
if VAR_0 and hasattr(VAR_0, 'items'):
VAR_30 = []
for k, VAR_39 in VAR_0.items():
if callable(VAR_39):
VAR_39 = VAR_39()
if isinstance(VAR_39, (tuple, list)):
VAR_39 = ','.join([str(x) for x in VAR_39])
elif isinstance(VAR_39, bool):
VAR_39 = ('0', '1')[VAR_39]
else:
VAR_39 = six.text_type(VAR_39)
VAR_30.append((k, VAR_39))
VAR_12.update(dict(VAR_30))
return VAR_12
class CLASS_7(forms.TextInput):
def __init__(self, VAR_13, VAR_14, VAR_3=None, VAR_15=None):
self.rel = VAR_13
self.admin_site = VAR_14
self.db = VAR_15
super(CLASS_7, self).__init__(VAR_3)
def FUNC_2(self, VAR_5, VAR_6, VAR_3=None):
VAR_31 = self.rel.to
if VAR_3 is None:
VAR_3 = {}
VAR_32 = []
if VAR_31 in self.admin_site._registry:
VAR_38 = reverse('admin:%s_%s_changelist' %
(VAR_31._meta.app_label,
VAR_31._meta.model_name),
current_app=self.admin_site.name)
VAR_12 = self.url_parameters()
if VAR_12:
VAR_40 = '?' + '&'.join(['%s=%s' % (k, VAR_39) for k, VAR_39 in VAR_12.items()])
else:
VAR_40 = ''
if "class" not in VAR_3:
attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript code looks for this hook.
VAR_32.append('<a href="%s%s" class="related-lookup" VAR_35="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> '
% (VAR_38, VAR_40, VAR_5))
VAR_32.append('<img src="%s" width="16" height="16" alt="%s" /></a>'
% (static('admin/img/selector-search.gif'), _('Lookup')))
VAR_27 = [super(CLASS_7, self).render(VAR_5, VAR_6, VAR_3)] + VAR_32
if VAR_6:
VAR_27.append(self.label_for_value(VAR_6))
return mark_safe(''.join(VAR_27))
def FUNC_4(self):
return FUNC_0(self.rel.limit_choices_to)
def FUNC_5(self):
from django.contrib.admin.views.main import TO_FIELD_VAR
VAR_12 = self.base_url_parameters()
VAR_12.update({TO_FIELD_VAR: self.rel.get_related_field().name})
return VAR_12
def FUNC_6(self, VAR_6):
VAR_33 = self.rel.get_related_field().name
try:
VAR_34 = self.rel.to._default_manager.using(self.db).get(**{VAR_33: VAR_6})
return ' <strong>%s</strong>' % escape(Truncator(VAR_34).words(14, truncate='...'))
except (ValueError, self.rel.to.DoesNotExist):
return ''
class CLASS_8(CLASS_7):
def FUNC_2(self, VAR_5, VAR_6, VAR_3=None):
if VAR_3 is None:
VAR_3 = {}
if self.rel.to in self.admin_site._registry:
VAR_3['class'] = 'vManyToManyRawIdAdminField'
if VAR_6:
VAR_6 = ','.join([force_text(VAR_39) for VAR_39 in VAR_6])
else:
VAR_6 = ''
return super(CLASS_8, self).render(VAR_5, VAR_6, VAR_3)
def FUNC_5(self):
return self.base_url_parameters()
def FUNC_6(self, VAR_6):
return ''
def FUNC_7(self, VAR_16, VAR_17, VAR_5):
VAR_6 = VAR_16.get(VAR_5)
if VAR_6:
return VAR_6.split(',')
class CLASS_9(forms.Widget):
def __init__(self, VAR_18, VAR_13, VAR_14, VAR_19=None):
self.is_hidden = VAR_18.is_hidden
self.needs_multipart_form = VAR_18.needs_multipart_form
self.attrs = VAR_18.attrs
self.choices = VAR_18.choices
self.widget = VAR_18
self.rel = VAR_13
if VAR_19 is None:
VAR_19 = VAR_13.to in VAR_14._registry
self.can_add_related = VAR_19
self.admin_site = VAR_14
def __deepcopy__(self, VAR_20):
VAR_34 = copy.copy(self)
VAR_34.widget = copy.deepcopy(self.widget, VAR_20)
VAR_34.attrs = self.widget.attrs
VAR_20[VAR_35(self)] = VAR_34
return VAR_34
@property
def FUNC_1(self):
return self.widget.media
def FUNC_2(self, VAR_5, VAR_6, *VAR_21, **VAR_22):
VAR_31 = self.rel.to
VAR_36 = (VAR_31._meta.app_label, VAR_31._meta.model_name)
self.widget.choices = self.choices
VAR_27 = [self.widget.render(VAR_5, VAR_6, *VAR_21, **VAR_22)]
if self.can_add_related:
VAR_38 = reverse('admin:%s_%s_add' % VAR_36, current_app=self.admin_site.name)
VAR_27.append('<a href="%s" class="add-another" VAR_35="add_id_%s" onclick="return showAddAnotherPopup(this);"> '
% (VAR_38, VAR_5))
VAR_27.append('<img src="%s" width="10" height="10" alt="%s"/></a>'
% (static('admin/img/icon_addlink.gif'), _('Add Another')))
return mark_safe(''.join(VAR_27))
self.attrs = self.widget.build_attrs(VAR_23=None, **VAR_22)
return self.attrs
def FUNC_7(self, VAR_16, VAR_17, VAR_5):
return self.widget.value_from_datadict(VAR_16, VAR_17, VAR_5)
def FUNC_9(self, VAR_24):
return self.widget.id_for_label(VAR_24)
class CLASS_10(forms.Textarea):
def __init__(self, VAR_3=None):
VAR_28 = {'class': 'vLargeTextField'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_10, self).__init__(VAR_3=VAR_28)
class CLASS_11(forms.TextInput):
def __init__(self, VAR_3=None):
VAR_28 = {'class': 'vTextField'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_11, self).__init__(VAR_3=VAR_28)
class CLASS_12(forms.EmailInput):
def __init__(self, VAR_3=None):
VAR_28 = {'class': 'vTextField'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_12, self).__init__(VAR_3=VAR_28)
class CLASS_13(forms.URLInput):
def __init__(self, VAR_3=None):
VAR_28 = {'class': 'vURLField'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_13, self).__init__(VAR_3=VAR_28)
def FUNC_2(self, VAR_5, VAR_6, VAR_3=None):
VAR_37 = super(CLASS_13, self).render(VAR_5, VAR_6, VAR_3)
if VAR_6:
VAR_6 = force_text(self._format_value(VAR_6))
VAR_28 = {'href': smart_urlquote(VAR_6)}
VAR_37 = format_html(
'<p class="url">{0} <a{1}>{2}</a><br />{3} {4}</p>',
_('Currently:'), flatatt(VAR_28), VAR_6,
_('Change:'), VAR_37
)
return VAR_37
class CLASS_14(forms.TextInput):
VAR_25 = 'vIntegerField'
def __init__(self, VAR_3=None):
VAR_28 = {'class': self.class_name}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_14, self).__init__(VAR_3=VAR_28)
class CLASS_15(CLASS_14):
VAR_25 = 'vBigIntegerField'
class CLASS_16(forms.TextInput):
def __init__(self, VAR_3=None):
VAR_28 = {'class': 'vCommaSeparatedIntegerField'}
if VAR_3 is not None:
VAR_28.update(VAR_3)
super(CLASS_16, self).__init__(VAR_3=VAR_28)
| [
5,
7,
19,
20,
24,
32,
37,
46,
47,
51,
53,
58,
64,
66,
71,
77,
84,
85,
87,
92,
100,
103,
109,
124,
131,
142,
149,
154,
162,
163,
172,
175,
181,
189,
199,
206,
209,
212,
217,
218,
231,
232,
236,
238,
245,
249,
257,
258,
264,
269,
272,
275,
282,
289,
296,
303,
315,
316,
319,
325,
328,
335,
1,
2,
3,
22,
23,
24,
25,
26,
27,
79,
80,
81,
111,
112,
113,
114,
133,
134,
135,
136,
191,
192,
193,
194,
220,
221,
222,
223,
95,
265,
266
] | [
5,
7,
19,
20,
24,
32,
37,
46,
47,
51,
53,
58,
64,
66,
71,
77,
84,
85,
87,
92,
100,
103,
109,
124,
131,
142,
149,
154,
162,
163,
172,
175,
181,
189,
199,
206,
209,
212,
217,
218,
231,
232,
236,
238,
245,
249,
257,
258,
264,
269,
272,
275,
282,
289,
296,
303,
315,
316,
319,
325,
328,
335,
1,
2,
3,
22,
23,
24,
25,
26,
27,
79,
80,
81,
111,
112,
113,
114,
133,
134,
135,
136,
191,
192,
193,
194,
220,
221,
222,
223,
95,
265,
266
] |
1CWE-79
| # this is a package
__version__ = "4.6.2"
def get_include():
"""
Returns a list of header include paths (for lxml itself, libxml2
and libxslt) needed to compile C code against lxml if it was built
with statically linked libraries.
"""
import os
lxml_path = __path__[0]
include_path = os.path.join(lxml_path, 'includes')
includes = [include_path, lxml_path]
for name in os.listdir(include_path):
path = os.path.join(include_path, name)
if os.path.isdir(path):
includes.append(path)
return includes
| # this is a package
__version__ = "4.6.3"
def get_include():
"""
Returns a list of header include paths (for lxml itself, libxml2
and libxslt) needed to compile C code against lxml if it was built
with statically linked libraries.
"""
import os
lxml_path = __path__[0]
include_path = os.path.join(lxml_path, 'includes')
includes = [include_path, lxml_path]
for name in os.listdir(include_path):
path = os.path.join(include_path, name)
if os.path.isdir(path):
includes.append(path)
return includes
| xss | {
"code": [
"__version__ = \"4.6.2\""
],
"line_no": [
3
]
} | {
"code": [
"__version__ = \"4.6.3\""
],
"line_no": [
3
]
} |
__version__ = "4.6.2"
def FUNC_0():
import os
VAR_0 = __path__[0]
VAR_1 = os.path.join(VAR_0, 'includes')
VAR_2 = [VAR_1, VAR_0]
for name in os.listdir(VAR_1):
VAR_3 = os.path.join(VAR_1, name)
if os.path.isdir(VAR_3):
VAR_2.append(VAR_3)
return VAR_2
|
__version__ = "4.6.3"
def FUNC_0():
import os
VAR_0 = __path__[0]
VAR_1 = os.path.join(VAR_0, 'includes')
VAR_2 = [VAR_1, VAR_0]
for name in os.listdir(VAR_1):
VAR_3 = os.path.join(VAR_1, name)
if os.path.isdir(VAR_3):
VAR_2.append(VAR_3)
return VAR_2
| [
1,
2,
4,
5,
16,
21,
23,
24,
7,
8,
9,
10,
11
] | [
1,
2,
4,
5,
16,
21,
23,
24,
7,
8,
9,
10,
11
] |
1CWE-79
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# Search
from __future__ import unicode_literals
import frappe, json
from frappe.utils import cstr, unique, cint
from frappe.permissions import has_permission
from frappe.handler import is_whitelisted
from frappe import _
from six import string_types
import re
import wrapt
UNTRANSLATED_DOCTYPES = ["DocType", "Role"]
def sanitize_searchfield(searchfield):
blacklisted_keywords = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']
def _raise_exception(searchfield):
frappe.throw(_('Invalid Search Field {0}').format(searchfield), frappe.DataError)
if len(searchfield) == 1:
# do not allow special characters to pass as searchfields
regex = re.compile(r'^.*[=;*,\'"$\-+%#@()_].*')
if regex.match(searchfield):
_raise_exception(searchfield)
if len(searchfield) >= 3:
# to avoid 1=1
if '=' in searchfield:
_raise_exception(searchfield)
# in mysql -- is used for commenting the query
elif ' --' in searchfield:
_raise_exception(searchfield)
# to avoid and, or and like
elif any(' {0} '.format(keyword) in searchfield.split() for keyword in blacklisted_keywords):
_raise_exception(searchfield)
# to avoid select, delete, drop, update and case
elif any(keyword in searchfield.split() for keyword in blacklisted_keywords):
_raise_exception(searchfield)
else:
regex = re.compile(r'^.*[=;*,\'"$\-+%#@()].*')
if any(regex.match(f) for f in searchfield.split()):
_raise_exception(searchfield)
# this is called by the Link Field
@frappe.whitelist()
def search_link(doctype, txt, query=None, filters=None, page_length=20, searchfield=None, reference_doctype=None, ignore_user_permissions=False):
search_widget(doctype, txt.strip(), query, searchfield=searchfield, page_length=page_length, filters=filters, reference_doctype=reference_doctype, ignore_user_permissions=ignore_user_permissions)
frappe.response['results'] = build_for_autosuggest(frappe.response["values"])
del frappe.response["values"]
# this is called by the search box
@frappe.whitelist()
def search_widget(doctype, txt, query=None, searchfield=None, start=0,
page_length=20, filters=None, filter_fields=None, as_dict=False, reference_doctype=None, ignore_user_permissions=False):
start = cint(start)
if isinstance(filters, string_types):
filters = json.loads(filters)
if searchfield:
sanitize_searchfield(searchfield)
if not searchfield:
searchfield = "name"
standard_queries = frappe.get_hooks().standard_queries or {}
if query and query.split()[0].lower()!="select":
# by method
try:
is_whitelisted(frappe.get_attr(query))
frappe.response["values"] = frappe.call(query, doctype, txt,
searchfield, start, page_length, filters, as_dict=as_dict)
except frappe.exceptions.PermissionError as e:
if frappe.local.conf.developer_mode:
raise e
else:
frappe.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
except Exception as e:
raise e
elif not query and doctype in standard_queries:
# from standard queries
search_widget(doctype, txt, standard_queries[doctype][0],
searchfield, start, page_length, filters)
else:
meta = frappe.get_meta(doctype)
if query:
frappe.throw(_("This query style is discontinued"))
# custom query
# frappe.response["values"] = frappe.db.sql(scrub_custom_query(query, searchfield, txt))
else:
if isinstance(filters, dict):
filters_items = filters.items()
filters = []
for f in filters_items:
if isinstance(f[1], (list, tuple)):
filters.append([doctype, f[0], f[1][0], f[1][1]])
else:
filters.append([doctype, f[0], "=", f[1]])
if filters==None:
filters = []
or_filters = []
# build from doctype
if txt:
search_fields = ["name"]
if meta.title_field:
search_fields.append(meta.title_field)
if meta.search_fields:
search_fields.extend(meta.get_search_fields())
for f in search_fields:
fmeta = meta.get_field(f.strip())
if (doctype not in UNTRANSLATED_DOCTYPES) and (f == "name" or (fmeta and fmeta.fieldtype in ["Data", "Text", "Small Text", "Long Text",
"Link", "Select", "Read Only", "Text Editor"])):
or_filters.append([doctype, f.strip(), "like", "%{0}%".format(txt)])
if meta.get("fields", {"fieldname":"enabled", "fieldtype":"Check"}):
filters.append([doctype, "enabled", "=", 1])
if meta.get("fields", {"fieldname":"disabled", "fieldtype":"Check"}):
filters.append([doctype, "disabled", "!=", 1])
# format a list of fields combining search fields and filter fields
fields = get_std_fields_list(meta, searchfield or "name")
if filter_fields:
fields = list(set(fields + json.loads(filter_fields)))
formatted_fields = ['`tab%s`.`%s`' % (meta.name, f.strip()) for f in fields]
# find relevance as location of search term from the beginning of string `name`. used for sorting results.
formatted_fields.append("""locate({_txt}, `tab{doctype}`.`name`) as `_relevance`""".format(
_txt=frappe.db.escape((txt or "").replace("%", "").replace("@", "")), doctype=doctype))
# In order_by, `idx` gets second priority, because it stores link count
from frappe.model.db_query import get_order_by
order_by_based_on_meta = get_order_by(doctype, meta)
# 2 is the index of _relevance column
order_by = "_relevance, {0}, `tab{1}`.idx desc".format(order_by_based_on_meta, doctype)
ptype = 'select' if frappe.only_has_select_perm(doctype) else 'read'
ignore_permissions = True if doctype == "DocType" else (cint(ignore_user_permissions) and has_permission(doctype, ptype=ptype))
if doctype in UNTRANSLATED_DOCTYPES:
page_length = None
values = frappe.get_list(doctype,
filters=filters,
fields=formatted_fields,
or_filters=or_filters,
limit_start=start,
limit_page_length=page_length,
order_by=order_by,
ignore_permissions=ignore_permissions,
reference_doctype=reference_doctype,
as_list=not as_dict,
strict=False)
if doctype in UNTRANSLATED_DOCTYPES:
values = tuple([v for v in list(values) if re.search(re.escape(txt)+".*", (_(v.name) if as_dict else _(v[0])), re.IGNORECASE)])
# remove _relevance from results
if as_dict:
for r in values:
r.pop("_relevance")
frappe.response["values"] = values
else:
frappe.response["values"] = [r[:-1] for r in values]
def get_std_fields_list(meta, key):
# get additional search fields
sflist = ["name"]
if meta.search_fields:
for d in meta.search_fields.split(","):
if d.strip() not in sflist:
sflist.append(d.strip())
if meta.title_field and meta.title_field not in sflist:
sflist.append(meta.title_field)
if key not in sflist:
sflist.append(key)
return sflist
def build_for_autosuggest(res):
results = []
for r in res:
out = {"value": r[0], "description": ", ".join(unique(cstr(d) for d in r if d)[1:])}
results.append(out)
return results
def scrub_custom_query(query, key, txt):
if '%(key)s' in query:
query = query.replace('%(key)s', key)
if '%s' in query:
query = query.replace('%s', ((txt or '') + '%'))
return query
@wrapt.decorator
def validate_and_sanitize_search_inputs(fn, instance, args, kwargs):
kwargs.update(dict(zip(fn.__code__.co_varnames, args)))
sanitize_searchfield(kwargs['searchfield'])
kwargs['start'] = cint(kwargs['start'])
kwargs['page_len'] = cint(kwargs['page_len'])
if kwargs['doctype'] and not frappe.db.exists('DocType', kwargs['doctype']):
return []
return fn(**kwargs) | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# Search
from __future__ import unicode_literals
import frappe, json
from frappe.utils import cstr, unique, cint
from frappe.permissions import has_permission
from frappe import _, is_whitelisted
from six import string_types
import re
import wrapt
UNTRANSLATED_DOCTYPES = ["DocType", "Role"]
def sanitize_searchfield(searchfield):
blacklisted_keywords = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']
def _raise_exception(searchfield):
frappe.throw(_('Invalid Search Field {0}').format(searchfield), frappe.DataError)
if len(searchfield) == 1:
# do not allow special characters to pass as searchfields
regex = re.compile(r'^.*[=;*,\'"$\-+%#@()_].*')
if regex.match(searchfield):
_raise_exception(searchfield)
if len(searchfield) >= 3:
# to avoid 1=1
if '=' in searchfield:
_raise_exception(searchfield)
# in mysql -- is used for commenting the query
elif ' --' in searchfield:
_raise_exception(searchfield)
# to avoid and, or and like
elif any(' {0} '.format(keyword) in searchfield.split() for keyword in blacklisted_keywords):
_raise_exception(searchfield)
# to avoid select, delete, drop, update and case
elif any(keyword in searchfield.split() for keyword in blacklisted_keywords):
_raise_exception(searchfield)
else:
regex = re.compile(r'^.*[=;*,\'"$\-+%#@()].*')
if any(regex.match(f) for f in searchfield.split()):
_raise_exception(searchfield)
# this is called by the Link Field
@frappe.whitelist()
def search_link(doctype, txt, query=None, filters=None, page_length=20, searchfield=None, reference_doctype=None, ignore_user_permissions=False):
search_widget(doctype, txt.strip(), query, searchfield=searchfield, page_length=page_length, filters=filters, reference_doctype=reference_doctype, ignore_user_permissions=ignore_user_permissions)
frappe.response['results'] = build_for_autosuggest(frappe.response["values"])
del frappe.response["values"]
# this is called by the search box
@frappe.whitelist()
def search_widget(doctype, txt, query=None, searchfield=None, start=0,
page_length=20, filters=None, filter_fields=None, as_dict=False, reference_doctype=None, ignore_user_permissions=False):
start = cint(start)
if isinstance(filters, string_types):
filters = json.loads(filters)
if searchfield:
sanitize_searchfield(searchfield)
if not searchfield:
searchfield = "name"
standard_queries = frappe.get_hooks().standard_queries or {}
if query and query.split()[0].lower()!="select":
# by method
try:
is_whitelisted(frappe.get_attr(query))
frappe.response["values"] = frappe.call(query, doctype, txt,
searchfield, start, page_length, filters, as_dict=as_dict)
except frappe.exceptions.PermissionError as e:
if frappe.local.conf.developer_mode:
raise e
else:
frappe.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
except Exception as e:
raise e
elif not query and doctype in standard_queries:
# from standard queries
search_widget(doctype, txt, standard_queries[doctype][0],
searchfield, start, page_length, filters)
else:
meta = frappe.get_meta(doctype)
if query:
frappe.throw(_("This query style is discontinued"))
# custom query
# frappe.response["values"] = frappe.db.sql(scrub_custom_query(query, searchfield, txt))
else:
if isinstance(filters, dict):
filters_items = filters.items()
filters = []
for f in filters_items:
if isinstance(f[1], (list, tuple)):
filters.append([doctype, f[0], f[1][0], f[1][1]])
else:
filters.append([doctype, f[0], "=", f[1]])
if filters==None:
filters = []
or_filters = []
# build from doctype
if txt:
search_fields = ["name"]
if meta.title_field:
search_fields.append(meta.title_field)
if meta.search_fields:
search_fields.extend(meta.get_search_fields())
for f in search_fields:
fmeta = meta.get_field(f.strip())
if (doctype not in UNTRANSLATED_DOCTYPES) and (f == "name" or (fmeta and fmeta.fieldtype in ["Data", "Text", "Small Text", "Long Text",
"Link", "Select", "Read Only", "Text Editor"])):
or_filters.append([doctype, f.strip(), "like", "%{0}%".format(txt)])
if meta.get("fields", {"fieldname":"enabled", "fieldtype":"Check"}):
filters.append([doctype, "enabled", "=", 1])
if meta.get("fields", {"fieldname":"disabled", "fieldtype":"Check"}):
filters.append([doctype, "disabled", "!=", 1])
# format a list of fields combining search fields and filter fields
fields = get_std_fields_list(meta, searchfield or "name")
if filter_fields:
fields = list(set(fields + json.loads(filter_fields)))
formatted_fields = ['`tab%s`.`%s`' % (meta.name, f.strip()) for f in fields]
# find relevance as location of search term from the beginning of string `name`. used for sorting results.
formatted_fields.append("""locate({_txt}, `tab{doctype}`.`name`) as `_relevance`""".format(
_txt=frappe.db.escape((txt or "").replace("%", "").replace("@", "")), doctype=doctype))
# In order_by, `idx` gets second priority, because it stores link count
from frappe.model.db_query import get_order_by
order_by_based_on_meta = get_order_by(doctype, meta)
# 2 is the index of _relevance column
order_by = "_relevance, {0}, `tab{1}`.idx desc".format(order_by_based_on_meta, doctype)
ptype = 'select' if frappe.only_has_select_perm(doctype) else 'read'
ignore_permissions = True if doctype == "DocType" else (cint(ignore_user_permissions) and has_permission(doctype, ptype=ptype))
if doctype in UNTRANSLATED_DOCTYPES:
page_length = None
values = frappe.get_list(doctype,
filters=filters,
fields=formatted_fields,
or_filters=or_filters,
limit_start=start,
limit_page_length=page_length,
order_by=order_by,
ignore_permissions=ignore_permissions,
reference_doctype=reference_doctype,
as_list=not as_dict,
strict=False)
if doctype in UNTRANSLATED_DOCTYPES:
values = tuple([v for v in list(values) if re.search(re.escape(txt)+".*", (_(v.name) if as_dict else _(v[0])), re.IGNORECASE)])
# remove _relevance from results
if as_dict:
for r in values:
r.pop("_relevance")
frappe.response["values"] = values
else:
frappe.response["values"] = [r[:-1] for r in values]
def get_std_fields_list(meta, key):
# get additional search fields
sflist = ["name"]
if meta.search_fields:
for d in meta.search_fields.split(","):
if d.strip() not in sflist:
sflist.append(d.strip())
if meta.title_field and meta.title_field not in sflist:
sflist.append(meta.title_field)
if key not in sflist:
sflist.append(key)
return sflist
def build_for_autosuggest(res):
results = []
for r in res:
out = {"value": r[0], "description": ", ".join(unique(cstr(d) for d in r if d)[1:])}
results.append(out)
return results
def scrub_custom_query(query, key, txt):
if '%(key)s' in query:
query = query.replace('%(key)s', key)
if '%s' in query:
query = query.replace('%s', ((txt or '') + '%'))
return query
@wrapt.decorator
def validate_and_sanitize_search_inputs(fn, instance, args, kwargs):
kwargs.update(dict(zip(fn.__code__.co_varnames, args)))
sanitize_searchfield(kwargs['searchfield'])
kwargs['start'] = cint(kwargs['start'])
kwargs['page_len'] = cint(kwargs['page_len'])
if kwargs['doctype'] and not frappe.db.exists('DocType', kwargs['doctype']):
return []
return fn(**kwargs)
| xss | {
"code": [
"from frappe.handler import is_whitelisted",
"from frappe import _",
"\treturn fn(**kwargs)"
],
"line_no": [
9,
10,
224
]
} | {
"code": [
"from frappe import _, is_whitelisted"
],
"line_no": [
9
]
} |
from __future__ import unicode_literals
import .frappe, json
from VAR_20.utils import cstr, unique, cint
from VAR_20.permissions import has_permission
from VAR_20.handler import is_whitelisted
from VAR_20 import _
from six import string_types
import re
import wrapt
VAR_0 = ["DocType", "Role"]
def FUNC_0(VAR_1):
VAR_19 = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']
def FUNC_7(VAR_1):
VAR_20.throw(_('Invalid Search Field {0}').format(VAR_1), VAR_20.DataError)
if len(VAR_1) == 1:
VAR_24 = re.compile(r'^.*[=;*,\'"$\-+%#@()_].*')
if VAR_24.match(VAR_1):
FUNC_7(VAR_1)
if len(VAR_1) >= 3:
if '=' in VAR_1:
FUNC_7(VAR_1)
elif ' --' in VAR_1:
FUNC_7(VAR_1)
elif any(' {0} '.format(keyword) in VAR_1.split() for keyword in VAR_19):
FUNC_7(VAR_1)
elif any(keyword in VAR_1.split() for keyword in VAR_19):
FUNC_7(VAR_1)
else:
VAR_24 = re.compile(r'^.*[=;*,\'"$\-+%#@()].*')
if any(VAR_24.match(f) for f in VAR_1.split()):
FUNC_7(VAR_1)
@VAR_20.whitelist()
def FUNC_1(VAR_2, VAR_3, VAR_4=None, VAR_5=None, VAR_6=20, VAR_1=None, VAR_7=None, VAR_8=False):
FUNC_2(VAR_2, VAR_3.strip(), VAR_4, VAR_1=searchfield, VAR_6=page_length, VAR_5=filters, VAR_7=reference_doctype, VAR_8=ignore_user_permissions)
VAR_20.response['results'] = FUNC_4(VAR_20.response["values"])
del VAR_20.response["values"]
@VAR_20.whitelist()
def FUNC_2(VAR_2, VAR_3, VAR_4=None, VAR_1=None, VAR_9=0,
VAR_6=20, VAR_5=None, VAR_10=None, VAR_11=False, VAR_7=None, VAR_8=False):
VAR_9 = cint(VAR_9)
if isinstance(VAR_5, string_types):
VAR_5 = json.loads(VAR_5)
if VAR_1:
FUNC_0(VAR_1)
if not VAR_1:
searchfield = "name"
VAR_21 = VAR_20.get_hooks().standard_queries or {}
if VAR_4 and VAR_4.split()[0].lower()!="select":
try:
is_whitelisted(VAR_20.get_attr(VAR_4))
VAR_20.response["values"] = VAR_20.call(VAR_4, VAR_2, VAR_3,
VAR_1, VAR_9, VAR_6, VAR_5, VAR_11=as_dict)
except VAR_20.exceptions.PermissionError as e:
if VAR_20.local.conf.developer_mode:
raise e
else:
VAR_20.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
except Exception as e:
raise e
elif not VAR_4 and VAR_2 in VAR_21:
FUNC_2(VAR_2, VAR_3, VAR_21[VAR_2][0],
VAR_1, VAR_9, VAR_6, VAR_5)
else:
VAR_12 = VAR_20.get_meta(VAR_2)
if VAR_4:
VAR_20.throw(_("This VAR_4 style is discontinued"))
else:
if isinstance(VAR_5, dict):
VAR_34 = VAR_5.items()
VAR_5 = []
for f in VAR_34:
if isinstance(f[1], (list, tuple)):
VAR_5.append([VAR_2, f[0], f[1][0], f[1][1]])
else:
VAR_5.append([VAR_2, f[0], "=", f[1]])
if VAR_5==None:
VAR_5 = []
VAR_26 = []
if VAR_3:
VAR_35 = ["name"]
if VAR_12.title_field:
VAR_35.append(VAR_12.title_field)
if VAR_12.search_fields:
VAR_35.extend(VAR_12.get_search_fields())
for f in VAR_35:
VAR_36 = VAR_12.get_field(f.strip())
if (VAR_2 not in VAR_0) and (f == "name" or (VAR_36 and VAR_36.fieldtype in ["Data", "Text", "Small Text", "Long Text",
"Link", "Select", "Read Only", "Text Editor"])):
VAR_26.append([VAR_2, f.strip(), "like", "%{0}%".format(VAR_3)])
if VAR_12.get("fields", {"fieldname":"enabled", "fieldtype":"Check"}):
VAR_5.append([VAR_2, "enabled", "=", 1])
if VAR_12.get("fields", {"fieldname":"disabled", "fieldtype":"Check"}):
VAR_5.append([VAR_2, "disabled", "!=", 1])
fields = FUNC_3(VAR_12, VAR_1 or "name")
if VAR_10:
VAR_27 = list(set(VAR_27 + json.loads(VAR_10)))
VAR_28 = ['`tab%s`.`%s`' % (VAR_12.name, f.strip()) for f in VAR_27]
VAR_28.append("""locate({_txt}, `tab{VAR_2}`.`name`) as `_relevance`""".format(
_txt=VAR_20.db.escape((VAR_3 or "").replace("%", "").replace("@", "")), VAR_2=doctype))
from VAR_20.model.db_query import get_order_by
VAR_29 = get_order_by(VAR_2, VAR_12)
VAR_30 = "_relevance, {0}, `tab{1}`.idx desc".format(VAR_29, VAR_2)
VAR_31 = 'select' if VAR_20.only_has_select_perm(VAR_2) else 'read'
VAR_32 = True if VAR_2 == "DocType" else (cint(VAR_8) and has_permission(VAR_2, VAR_31=ptype))
if VAR_2 in VAR_0:
VAR_6 = None
VAR_33 = VAR_20.get_list(VAR_2,
VAR_5=filters,
VAR_27=VAR_28,
VAR_26=or_filters,
limit_start=VAR_9,
limit_page_length=VAR_6,
VAR_30=order_by,
VAR_32=ignore_permissions,
VAR_7=reference_doctype,
as_list=not VAR_11,
strict=False)
if VAR_2 in VAR_0:
VAR_33 = tuple([v for v in list(VAR_33) if re.search(re.escape(VAR_3)+".*", (_(v.name) if VAR_11 else _(v[0])), re.IGNORECASE)])
if VAR_11:
for r in VAR_33:
r.pop("_relevance")
VAR_20.response["values"] = VAR_33
else:
VAR_20.response["values"] = [r[:-1] for r in VAR_33]
def FUNC_3(VAR_12, VAR_13):
VAR_22 = ["name"]
if VAR_12.search_fields:
for d in VAR_12.search_fields.split(","):
if d.strip() not in VAR_22:
sflist.append(d.strip())
if VAR_12.title_field and VAR_12.title_field not in VAR_22:
sflist.append(VAR_12.title_field)
if VAR_13 not in VAR_22:
sflist.append(VAR_13)
return VAR_22
def FUNC_4(VAR_14):
VAR_23 = []
for r in VAR_14:
VAR_25 = {"value": r[0], "description": ", ".join(unique(cstr(d) for d in r if d)[1:])}
VAR_23.append(VAR_25)
return VAR_23
def FUNC_5(VAR_4, VAR_13, VAR_3):
if '%(VAR_13)s' in VAR_4:
query = VAR_4.replace('%(VAR_13)s', VAR_13)
if '%s' in VAR_4:
query = VAR_4.replace('%s', ((VAR_3 or '') + '%'))
return VAR_4
@wrapt.decorator
def FUNC_6(VAR_15, VAR_16, VAR_17, VAR_18):
kwargs.update(dict(zip(VAR_15.__code__.co_varnames, VAR_17)))
FUNC_0(VAR_18['searchfield'])
VAR_18['start'] = cint(VAR_18['start'])
VAR_18['page_len'] = cint(VAR_18['page_len'])
if VAR_18['doctype'] and not VAR_20.db.exists('DocType', VAR_18['doctype']):
return []
return VAR_15(**VAR_18) |
from __future__ import unicode_literals
import .frappe, json
from VAR_20.utils import cstr, unique, cint
from VAR_20.permissions import has_permission
from VAR_20 import _, is_whitelisted
from six import string_types
import re
import wrapt
VAR_0 = ["DocType", "Role"]
def FUNC_0(VAR_1):
VAR_19 = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']
def FUNC_7(VAR_1):
VAR_20.throw(_('Invalid Search Field {0}').format(VAR_1), VAR_20.DataError)
if len(VAR_1) == 1:
VAR_24 = re.compile(r'^.*[=;*,\'"$\-+%#@()_].*')
if VAR_24.match(VAR_1):
FUNC_7(VAR_1)
if len(VAR_1) >= 3:
if '=' in VAR_1:
FUNC_7(VAR_1)
elif ' --' in VAR_1:
FUNC_7(VAR_1)
elif any(' {0} '.format(keyword) in VAR_1.split() for keyword in VAR_19):
FUNC_7(VAR_1)
elif any(keyword in VAR_1.split() for keyword in VAR_19):
FUNC_7(VAR_1)
else:
VAR_24 = re.compile(r'^.*[=;*,\'"$\-+%#@()].*')
if any(VAR_24.match(f) for f in VAR_1.split()):
FUNC_7(VAR_1)
@VAR_20.whitelist()
def FUNC_1(VAR_2, VAR_3, VAR_4=None, VAR_5=None, VAR_6=20, VAR_1=None, VAR_7=None, VAR_8=False):
FUNC_2(VAR_2, VAR_3.strip(), VAR_4, VAR_1=searchfield, VAR_6=page_length, VAR_5=filters, VAR_7=reference_doctype, VAR_8=ignore_user_permissions)
VAR_20.response['results'] = FUNC_4(VAR_20.response["values"])
del VAR_20.response["values"]
@VAR_20.whitelist()
def FUNC_2(VAR_2, VAR_3, VAR_4=None, VAR_1=None, VAR_9=0,
VAR_6=20, VAR_5=None, VAR_10=None, VAR_11=False, VAR_7=None, VAR_8=False):
VAR_9 = cint(VAR_9)
if isinstance(VAR_5, string_types):
VAR_5 = json.loads(VAR_5)
if VAR_1:
FUNC_0(VAR_1)
if not VAR_1:
searchfield = "name"
VAR_21 = VAR_20.get_hooks().standard_queries or {}
if VAR_4 and VAR_4.split()[0].lower()!="select":
try:
is_whitelisted(VAR_20.get_attr(VAR_4))
VAR_20.response["values"] = VAR_20.call(VAR_4, VAR_2, VAR_3,
VAR_1, VAR_9, VAR_6, VAR_5, VAR_11=as_dict)
except VAR_20.exceptions.PermissionError as e:
if VAR_20.local.conf.developer_mode:
raise e
else:
VAR_20.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
except Exception as e:
raise e
elif not VAR_4 and VAR_2 in VAR_21:
FUNC_2(VAR_2, VAR_3, VAR_21[VAR_2][0],
VAR_1, VAR_9, VAR_6, VAR_5)
else:
VAR_12 = VAR_20.get_meta(VAR_2)
if VAR_4:
VAR_20.throw(_("This VAR_4 style is discontinued"))
else:
if isinstance(VAR_5, dict):
VAR_34 = VAR_5.items()
VAR_5 = []
for f in VAR_34:
if isinstance(f[1], (list, tuple)):
VAR_5.append([VAR_2, f[0], f[1][0], f[1][1]])
else:
VAR_5.append([VAR_2, f[0], "=", f[1]])
if VAR_5==None:
VAR_5 = []
VAR_26 = []
if VAR_3:
VAR_35 = ["name"]
if VAR_12.title_field:
VAR_35.append(VAR_12.title_field)
if VAR_12.search_fields:
VAR_35.extend(VAR_12.get_search_fields())
for f in VAR_35:
VAR_36 = VAR_12.get_field(f.strip())
if (VAR_2 not in VAR_0) and (f == "name" or (VAR_36 and VAR_36.fieldtype in ["Data", "Text", "Small Text", "Long Text",
"Link", "Select", "Read Only", "Text Editor"])):
VAR_26.append([VAR_2, f.strip(), "like", "%{0}%".format(VAR_3)])
if VAR_12.get("fields", {"fieldname":"enabled", "fieldtype":"Check"}):
VAR_5.append([VAR_2, "enabled", "=", 1])
if VAR_12.get("fields", {"fieldname":"disabled", "fieldtype":"Check"}):
VAR_5.append([VAR_2, "disabled", "!=", 1])
fields = FUNC_3(VAR_12, VAR_1 or "name")
if VAR_10:
VAR_27 = list(set(VAR_27 + json.loads(VAR_10)))
VAR_28 = ['`tab%s`.`%s`' % (VAR_12.name, f.strip()) for f in VAR_27]
VAR_28.append("""locate({_txt}, `tab{VAR_2}`.`name`) as `_relevance`""".format(
_txt=VAR_20.db.escape((VAR_3 or "").replace("%", "").replace("@", "")), VAR_2=doctype))
from VAR_20.model.db_query import get_order_by
VAR_29 = get_order_by(VAR_2, VAR_12)
VAR_30 = "_relevance, {0}, `tab{1}`.idx desc".format(VAR_29, VAR_2)
VAR_31 = 'select' if VAR_20.only_has_select_perm(VAR_2) else 'read'
VAR_32 = True if VAR_2 == "DocType" else (cint(VAR_8) and has_permission(VAR_2, VAR_31=ptype))
if VAR_2 in VAR_0:
VAR_6 = None
VAR_33 = VAR_20.get_list(VAR_2,
VAR_5=filters,
VAR_27=VAR_28,
VAR_26=or_filters,
limit_start=VAR_9,
limit_page_length=VAR_6,
VAR_30=order_by,
VAR_32=ignore_permissions,
VAR_7=reference_doctype,
as_list=not VAR_11,
strict=False)
if VAR_2 in VAR_0:
VAR_33 = tuple([v for v in list(VAR_33) if re.search(re.escape(VAR_3)+".*", (_(v.name) if VAR_11 else _(v[0])), re.IGNORECASE)])
if VAR_11:
for r in VAR_33:
r.pop("_relevance")
VAR_20.response["values"] = VAR_33
else:
VAR_20.response["values"] = [r[:-1] for r in VAR_33]
def FUNC_3(VAR_12, VAR_13):
VAR_22 = ["name"]
if VAR_12.search_fields:
for d in VAR_12.search_fields.split(","):
if d.strip() not in VAR_22:
sflist.append(d.strip())
if VAR_12.title_field and VAR_12.title_field not in VAR_22:
sflist.append(VAR_12.title_field)
if VAR_13 not in VAR_22:
sflist.append(VAR_13)
return VAR_22
def FUNC_4(VAR_14):
VAR_23 = []
for r in VAR_14:
VAR_25 = {"value": r[0], "description": ", ".join(unique(cstr(d) for d in r if d)[1:])}
VAR_23.append(VAR_25)
return VAR_23
def FUNC_5(VAR_4, VAR_13, VAR_3):
if '%(VAR_13)s' in VAR_4:
query = VAR_4.replace('%(VAR_13)s', VAR_13)
if '%s' in VAR_4:
query = VAR_4.replace('%s', ((VAR_3 or '') + '%'))
return VAR_4
@wrapt.decorator
def FUNC_6(VAR_15, VAR_16, VAR_17, VAR_18):
kwargs.update(dict(zip(VAR_15.__code__.co_varnames, VAR_17)))
FUNC_0(VAR_18['searchfield'])
VAR_18['start'] = cint(VAR_18['start'])
VAR_18['page_len'] = cint(VAR_18['page_len'])
if VAR_18['doctype'] and not VAR_20.db.exists('DocType', VAR_18['doctype']):
return []
return VAR_15(**VAR_18)
| [
1,
2,
3,
4,
14,
16,
19,
22,
24,
28,
30,
31,
34,
35,
38,
39,
42,
43,
46,
51,
52,
58,
59,
63,
65,
68,
71,
74,
76,
78,
93,
98,
101,
102,
112,
116,
117,
118,
123,
126,
132,
137,
138,
143,
144,
147,
148,
149,
152,
154,
157,
160,
172,
175,
176,
183,
185,
191,
194,
197,
199,
206,
213,
220,
223
] | [
1,
2,
3,
4,
13,
15,
18,
21,
23,
27,
29,
30,
33,
34,
37,
38,
41,
42,
45,
50,
51,
57,
58,
62,
64,
67,
70,
73,
75,
77,
92,
97,
100,
101,
111,
115,
116,
117,
122,
125,
131,
136,
137,
142,
143,
146,
147,
148,
151,
153,
156,
159,
171,
174,
175,
182,
184,
190,
193,
196,
198,
205,
212,
219,
222,
224
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import six
from django.core.exceptions import ValidationError
from django.http import HttpResponseRedirect, JsonResponse
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _
from shuup.apps.provides import get_provide_objects
from shuup.core.basket import commands
from shuup.core.basket.command_middleware import BaseBasketCommandMiddleware
from shuup.core.signals import get_basket_command_handler
from shuup.utils.django_compat import force_text
from shuup.utils.excs import Problem
class BasketCommandDispatcher(object):
"""
BasketCommandDispatcher handles (usually AJAX) requests that somehow update the basket.
You should never instantiate BasketCommandDispatcher yourself -- instead use
`get_basket_command_dispatcher()`.
All `handle_*` methods are expected to accept `**kwargs`.
"""
commands_module = commands
def __init__(self, request, basket=None):
"""
:type request: HttpRequest
"""
self.request = request
self.ajax = self.request.is_ajax()
# :type self.basket: BaseBasket
self.basket = basket or request.basket
def get_command_handler(self, command):
handler = getattr(self.commands_module, "handle_%s" % command.lower(), None)
if handler and callable(handler):
return handler
for receiver, handler in get_basket_command_handler.send(
BasketCommandDispatcher, command=command, instance=self
):
if handler and callable(handler):
return handler
def handle(self, command, kwargs=None):
"""
Dispatch and handle processing of the given command.
:param command: Name of command to run.
:type command: unicode
:param kwargs: Arguments to pass to the command handler. If empty, `request.POST` is used.
:type kwargs: dict
:return: response.
:rtype: HttpResponse
"""
kwargs = kwargs or dict(six.iteritems(self.request.POST))
try:
handler = self.get_command_handler(command)
if not handler or not callable(handler):
raise Problem(_("Error! Invalid command `%s`.") % command)
kwargs.pop("csrfmiddlewaretoken", None) # The CSRF token should never be passed as a kwarg
kwargs.pop("command", None) # Nor the command
kwargs.update(request=self.request, basket=self.basket)
kwargs = self.preprocess_kwargs(command, kwargs)
response = handler(**kwargs) or {}
except (Problem, ValidationError) as exc:
if not self.ajax:
raise
msg = exc.message if hasattr(exc, "message") else exc
response = {
"error": force_text(msg, errors="ignore"),
"code": force_text(getattr(exc, "code", None) or "", errors="ignore"),
}
response = self.postprocess_response(command, kwargs, response)
if self.ajax:
return JsonResponse(response)
return_url = response.get("return") or kwargs.get("return")
if return_url and return_url.startswith("/"):
return HttpResponseRedirect(return_url)
return redirect("shuup:basket")
def preprocess_kwargs(self, command, kwargs):
"""
Preprocess kwargs before they are passed to the given `command` handler.
Useful for subclassing. Must return the new `kwargs`, even if it wasn't
mutated.
:param command: The name of the command about to be run.
:param kwargs: dict of arguments.
:return: dict of arguments.
"""
for basket_command_middleware in get_provide_objects("basket_command_middleware"):
if not issubclass(basket_command_middleware, BaseBasketCommandMiddleware):
continue
# create a copy
kwargs = dict(
basket_command_middleware().preprocess_kwargs(
basket=self.basket, request=self.request, command=command, kwargs=kwargs
)
)
return kwargs
def postprocess_response(self, command, kwargs, response):
"""
Postprocess the response dictionary (not a HTTP response!) before it is
either turned into JSON or otherwise processed (in the case of non-AJAX requests).
:param command: The command that was run.
:param kwargs: The actual kwargs the command was run with.
:param response: The response the command returned.
:return: The response to be processed and sent to the client.
"""
for basket_command_middleware in get_provide_objects("basket_command_middleware"):
if not issubclass(basket_command_middleware, BaseBasketCommandMiddleware):
continue
response = dict(
basket_command_middleware().postprocess_response(
basket=self.basket, request=self.request, command=command, kwargs=kwargs, response=response
)
)
return response
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import six
from django.core.exceptions import ValidationError
from django.http import HttpResponseRedirect, JsonResponse
from django.shortcuts import redirect
from django.utils.html import escape
from django.utils.translation import ugettext_lazy as _
from shuup.apps.provides import get_provide_objects
from shuup.core.basket import commands
from shuup.core.basket.command_middleware import BaseBasketCommandMiddleware
from shuup.core.signals import get_basket_command_handler
from shuup.utils.django_compat import force_text
from shuup.utils.excs import Problem
class BasketCommandDispatcher(object):
"""
BasketCommandDispatcher handles (usually AJAX) requests that somehow update the basket.
You should never instantiate BasketCommandDispatcher yourself -- instead use
`get_basket_command_dispatcher()`.
All `handle_*` methods are expected to accept `**kwargs`.
"""
commands_module = commands
def __init__(self, request, basket=None):
"""
:type request: HttpRequest
"""
self.request = request
self.ajax = self.request.is_ajax()
# :type self.basket: BaseBasket
self.basket = basket or request.basket
def get_command_handler(self, command):
handler = getattr(self.commands_module, "handle_%s" % command.lower(), None)
if handler and callable(handler):
return handler
for receiver, handler in get_basket_command_handler.send(
BasketCommandDispatcher, command=command, instance=self
):
if handler and callable(handler):
return handler
def handle(self, command, kwargs=None):
"""
Dispatch and handle processing of the given command.
:param command: Name of command to run.
:type command: unicode
:param kwargs: Arguments to pass to the command handler. If empty, `request.POST` is used.
:type kwargs: dict
:return: response.
:rtype: HttpResponse
"""
kwargs = kwargs or dict(six.iteritems(self.request.POST))
try:
handler = self.get_command_handler(command)
if not handler or not callable(handler):
raise Problem(_("Error! Invalid command `%s`.") % escape(command))
kwargs.pop("csrfmiddlewaretoken", None) # The CSRF token should never be passed as a kwarg
kwargs.pop("command", None) # Nor the command
kwargs.update(request=self.request, basket=self.basket)
kwargs = self.preprocess_kwargs(command, kwargs)
response = handler(**kwargs) or {}
except (Problem, ValidationError) as exc:
if not self.ajax:
raise
msg = exc.message if hasattr(exc, "message") else exc
response = {
"error": force_text(msg, errors="ignore"),
"code": force_text(getattr(exc, "code", None) or "", errors="ignore"),
}
response = self.postprocess_response(command, kwargs, response)
if self.ajax:
return JsonResponse(response)
return_url = response.get("return") or kwargs.get("return")
if return_url and return_url.startswith("/"):
return HttpResponseRedirect(return_url)
return redirect("shuup:basket")
def preprocess_kwargs(self, command, kwargs):
"""
Preprocess kwargs before they are passed to the given `command` handler.
Useful for subclassing. Must return the new `kwargs`, even if it wasn't
mutated.
:param command: The name of the command about to be run.
:param kwargs: dict of arguments.
:return: dict of arguments.
"""
for basket_command_middleware in get_provide_objects("basket_command_middleware"):
if not issubclass(basket_command_middleware, BaseBasketCommandMiddleware):
continue
# create a copy
kwargs = dict(
basket_command_middleware().preprocess_kwargs(
basket=self.basket, request=self.request, command=command, kwargs=kwargs
)
)
return kwargs
def postprocess_response(self, command, kwargs, response):
"""
Postprocess the response dictionary (not a HTTP response!) before it is
either turned into JSON or otherwise processed (in the case of non-AJAX requests).
:param command: The command that was run.
:param kwargs: The actual kwargs the command was run with.
:param response: The response the command returned.
:return: The response to be processed and sent to the client.
"""
for basket_command_middleware in get_provide_objects("basket_command_middleware"):
if not issubclass(basket_command_middleware, BaseBasketCommandMiddleware):
continue
response = dict(
basket_command_middleware().postprocess_response(
basket=self.basket, request=self.request, command=command, kwargs=kwargs, response=response
)
)
return response
| xss | {
"code": [
" raise Problem(_(\"Error! Invalid command `%s`.\") % command)"
],
"line_no": [
71
]
} | {
"code": [
"from django.utils.html import escape",
" raise Problem(_(\"Error! Invalid command `%s`.\") % escape(command))"
],
"line_no": [
14,
72
]
} |
from __future__ import unicode_literals
import six
from django.core.exceptions import ValidationError
from django.http import HttpResponseRedirect, JsonResponse
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _
from shuup.apps.provides import get_provide_objects
from shuup.core.basket import .commands
from shuup.core.basket.command_middleware import BaseBasketCommandMiddleware
from shuup.core.signals import get_basket_command_handler
from shuup.utils.django_compat import force_text
from shuup.utils.excs import Problem
class CLASS_0(object):
VAR_0 = commands
def __init__(self, VAR_1, VAR_2=None):
self.request = VAR_1
self.ajax = self.request.is_ajax()
self.basket = VAR_2 or VAR_1.basket
def FUNC_0(self, VAR_3):
VAR_6 = getattr(self.commands_module, "handle_%s" % VAR_3.lower(), None)
if VAR_6 and callable(VAR_6):
return VAR_6
for receiver, VAR_6 in get_basket_command_handler.send(
CLASS_0, VAR_3=command, instance=self
):
if VAR_6 and callable(VAR_6):
return VAR_6
def FUNC_1(self, VAR_3, VAR_4=None):
VAR_4 = VAR_4 or dict(six.iteritems(self.request.POST))
try:
VAR_6 = self.get_command_handler(VAR_3)
if not VAR_6 or not callable(VAR_6):
raise Problem(_("Error! Invalid VAR_3 `%s`.") % VAR_3)
VAR_4.pop("csrfmiddlewaretoken", None) # The CSRF token should never be passed as a kwarg
VAR_4.pop("command", None) # Nor the VAR_3
VAR_4.update(VAR_1=self.request, VAR_2=self.basket)
VAR_4 = self.preprocess_kwargs(VAR_3, VAR_4)
VAR_5 = VAR_6(**VAR_4) or {}
except (Problem, ValidationError) as exc:
if not self.ajax:
raise
VAR_8 = exc.message if hasattr(exc, "message") else exc
VAR_5 = {
"error": force_text(VAR_8, errors="ignore"),
"code": force_text(getattr(exc, "code", None) or "", errors="ignore"),
}
VAR_5 = self.postprocess_response(VAR_3, VAR_4, VAR_5)
if self.ajax:
return JsonResponse(VAR_5)
VAR_7 = VAR_5.get("return") or VAR_4.get("return")
if VAR_7 and VAR_7.startswith("/"):
return HttpResponseRedirect(VAR_7)
return redirect("shuup:basket")
def FUNC_2(self, VAR_3, VAR_4):
for basket_command_middleware in get_provide_objects("basket_command_middleware"):
if not issubclass(basket_command_middleware, BaseBasketCommandMiddleware):
continue
VAR_4 = dict(
basket_command_middleware().preprocess_kwargs(
VAR_2=self.basket, VAR_1=self.request, VAR_3=command, VAR_4=kwargs
)
)
return VAR_4
def FUNC_3(self, VAR_3, VAR_4, VAR_5):
for basket_command_middleware in get_provide_objects("basket_command_middleware"):
if not issubclass(basket_command_middleware, BaseBasketCommandMiddleware):
continue
VAR_5 = dict(
basket_command_middleware().postprocess_response(
VAR_2=self.basket, VAR_1=self.request, VAR_3=command, VAR_4=kwargs, VAR_5=response
)
)
return VAR_5
|
from __future__ import unicode_literals
import six
from django.core.exceptions import ValidationError
from django.http import HttpResponseRedirect, JsonResponse
from django.shortcuts import redirect
from django.utils.html import escape
from django.utils.translation import ugettext_lazy as _
from shuup.apps.provides import get_provide_objects
from shuup.core.basket import .commands
from shuup.core.basket.command_middleware import BaseBasketCommandMiddleware
from shuup.core.signals import get_basket_command_handler
from shuup.utils.django_compat import force_text
from shuup.utils.excs import Problem
class CLASS_0(object):
VAR_0 = commands
def __init__(self, VAR_1, VAR_2=None):
self.request = VAR_1
self.ajax = self.request.is_ajax()
self.basket = VAR_2 or VAR_1.basket
def FUNC_0(self, VAR_3):
VAR_6 = getattr(self.commands_module, "handle_%s" % VAR_3.lower(), None)
if VAR_6 and callable(VAR_6):
return VAR_6
for receiver, VAR_6 in get_basket_command_handler.send(
CLASS_0, VAR_3=command, instance=self
):
if VAR_6 and callable(VAR_6):
return VAR_6
def FUNC_1(self, VAR_3, VAR_4=None):
VAR_4 = VAR_4 or dict(six.iteritems(self.request.POST))
try:
VAR_6 = self.get_command_handler(VAR_3)
if not VAR_6 or not callable(VAR_6):
raise Problem(_("Error! Invalid VAR_3 `%s`.") % escape(VAR_3))
VAR_4.pop("csrfmiddlewaretoken", None) # The CSRF token should never be passed as a kwarg
VAR_4.pop("command", None) # Nor the VAR_3
VAR_4.update(VAR_1=self.request, VAR_2=self.basket)
VAR_4 = self.preprocess_kwargs(VAR_3, VAR_4)
VAR_5 = VAR_6(**VAR_4) or {}
except (Problem, ValidationError) as exc:
if not self.ajax:
raise
VAR_8 = exc.message if hasattr(exc, "message") else exc
VAR_5 = {
"error": force_text(VAR_8, errors="ignore"),
"code": force_text(getattr(exc, "code", None) or "", errors="ignore"),
}
VAR_5 = self.postprocess_response(VAR_3, VAR_4, VAR_5)
if self.ajax:
return JsonResponse(VAR_5)
VAR_7 = VAR_5.get("return") or VAR_4.get("return")
if VAR_7 and VAR_7.startswith("/"):
return HttpResponseRedirect(VAR_7)
return redirect("shuup:basket")
def FUNC_2(self, VAR_3, VAR_4):
for basket_command_middleware in get_provide_objects("basket_command_middleware"):
if not issubclass(basket_command_middleware, BaseBasketCommandMiddleware):
continue
VAR_4 = dict(
basket_command_middleware().preprocess_kwargs(
VAR_2=self.basket, VAR_1=self.request, VAR_3=command, VAR_4=kwargs
)
)
return VAR_4
def FUNC_3(self, VAR_3, VAR_4, VAR_5):
for basket_command_middleware in get_provide_objects("basket_command_middleware"):
if not issubclass(basket_command_middleware, BaseBasketCommandMiddleware):
continue
VAR_5 = dict(
basket_command_middleware().postprocess_response(
VAR_2=self.basket, VAR_1=self.request, VAR_3=command, VAR_4=kwargs, VAR_5=response
)
)
return VAR_5
| [
1,
2,
3,
4,
5,
6,
7,
9,
15,
22,
23,
29,
32,
34,
41,
43,
48,
54,
58,
66,
76,
78,
87,
89,
92,
97,
103,
108,
112,
113,
119,
121,
126,
132,
136,
142,
144,
25,
26,
27,
28,
29,
30,
31,
36,
37,
38,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65,
99,
100,
101,
102,
103,
104,
105,
106,
107,
123,
124,
125,
126,
127,
128,
129,
130,
131
] | [
1,
2,
3,
4,
5,
6,
7,
9,
16,
23,
24,
30,
33,
35,
42,
44,
49,
55,
59,
67,
77,
79,
88,
90,
93,
98,
104,
109,
113,
114,
120,
122,
127,
133,
137,
143,
145,
26,
27,
28,
29,
30,
31,
32,
37,
38,
39,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66,
100,
101,
102,
103,
104,
105,
106,
107,
108,
124,
125,
126,
127,
128,
129,
130,
131,
132
] |
2CWE-601
| # -*- coding: utf-8 -*-
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth import get_user_model
from django.contrib import messages
from django.utils.translation import gettext as _
from djconfig import config
from ...core.utils.views import is_post, post_data
from ...core.utils.paginator import yt_paginate
from ...core.utils.decorators import administrator_required
from .forms import UserForm, UserProfileForm
User = get_user_model()
@administrator_required
def edit(request, user_id):
user = get_object_or_404(User, pk=user_id)
uform = UserForm(data=post_data(request), instance=user)
form = UserProfileForm(data=post_data(request), instance=user.st)
if is_post(request) and all([uform.is_valid(), form.is_valid()]):
uform.save()
form.save()
messages.info(request, _("This profile has been updated!"))
return redirect(request.GET.get("next", request.get_full_path()))
return render(
request=request,
template_name='spirit/user/admin/edit.html',
context={'form': form, 'uform': uform})
@administrator_required
def _index(request, queryset, template):
users = yt_paginate(
queryset.order_by('-date_joined', '-pk'),
per_page=config.topics_per_page,
page_number=request.GET.get('page', 1)
)
return render(request, template, context={'users': users})
def index(request):
return _index(
request,
queryset=User.objects.all(),
template='spirit/user/admin/index.html'
)
def index_admins(request):
return _index(
request,
queryset=User.objects.filter(st__is_administrator=True),
template='spirit/user/admin/admins.html'
)
def index_mods(request):
return _index(
request,
queryset=User.objects.filter(st__is_moderator=True, st__is_administrator=False),
template='spirit/user/admin/mods.html'
)
def index_unactive(request):
return _index(
request,
queryset=User.objects.filter(is_active=False),
template='spirit/user/admin/unactive.html'
)
| # -*- coding: utf-8 -*-
from django.shortcuts import render, get_object_or_404
from django.contrib.auth import get_user_model
from django.contrib import messages
from django.utils.translation import gettext as _
from djconfig import config
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.paginator import yt_paginate
from spirit.core.utils.decorators import administrator_required
from .forms import UserForm, UserProfileForm
User = get_user_model()
@administrator_required
def edit(request, user_id):
user = get_object_or_404(User, pk=user_id)
uform = UserForm(data=post_data(request), instance=user)
form = UserProfileForm(data=post_data(request), instance=user.st)
if is_post(request) and all([uform.is_valid(), form.is_valid()]):
uform.save()
form.save()
messages.info(request, _("This profile has been updated!"))
return safe_redirect(request, "next", request.get_full_path())
return render(
request=request,
template_name='spirit/user/admin/edit.html',
context={'form': form, 'uform': uform})
@administrator_required
def _index(request, queryset, template):
users = yt_paginate(
queryset.order_by('-date_joined', '-pk'),
per_page=config.topics_per_page,
page_number=request.GET.get('page', 1)
)
return render(request, template, context={'users': users})
def index(request):
return _index(
request,
queryset=User.objects.all(),
template='spirit/user/admin/index.html'
)
def index_admins(request):
return _index(
request,
queryset=User.objects.filter(st__is_administrator=True),
template='spirit/user/admin/admins.html'
)
def index_mods(request):
return _index(
request,
queryset=User.objects.filter(st__is_moderator=True, st__is_administrator=False),
template='spirit/user/admin/mods.html'
)
def index_unactive(request):
return _index(
request,
queryset=User.objects.filter(is_active=False),
template='spirit/user/admin/unactive.html'
)
| open_redirect | {
"code": [
"from django.shortcuts import render, redirect, get_object_or_404",
"from ...core.utils.views import is_post, post_data",
"from ...core.utils.paginator import yt_paginate",
"from ...core.utils.decorators import administrator_required",
" return redirect(request.GET.get(\"next\", request.get_full_path()))"
],
"line_no": [
3,
10,
11,
12,
27
]
} | {
"code": [
"from django.shortcuts import render, get_object_or_404",
"from spirit.core.utils.http import safe_redirect",
"from spirit.core.utils.views import is_post, post_data",
"from spirit.core.utils.paginator import yt_paginate",
"from spirit.core.utils.decorators import administrator_required",
" return safe_redirect(request, \"next\", request.get_full_path())"
],
"line_no": [
3,
10,
11,
12,
13,
28
]
} |
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth import get_user_model
from django.contrib import messages
from django.utils.translation import gettext as _
from djconfig import config
from ...core.utils.views import is_post, post_data
from ...core.utils.paginator import yt_paginate
from ...core.utils.decorators import administrator_required
from .forms import .UserForm, UserProfileForm
VAR_0 = get_user_model()
@administrator_required
def FUNC_0(VAR_1, VAR_2):
VAR_5 = get_object_or_404(VAR_0, pk=VAR_2)
VAR_6 = UserForm(data=post_data(VAR_1), instance=VAR_5)
VAR_7 = UserProfileForm(data=post_data(VAR_1), instance=VAR_5.st)
if is_post(VAR_1) and all([VAR_6.is_valid(), VAR_7.is_valid()]):
VAR_6.save()
VAR_7.save()
messages.info(VAR_1, _("This profile has been updated!"))
return redirect(VAR_1.GET.get("next", VAR_1.get_full_path()))
return render(
VAR_1=request,
template_name='spirit/VAR_5/admin/FUNC_0.html',
context={'form': VAR_7, 'uform': VAR_6})
@administrator_required
def FUNC_1(VAR_1, VAR_3, VAR_4):
VAR_8 = yt_paginate(
VAR_3.order_by('-date_joined', '-pk'),
per_page=config.topics_per_page,
page_number=VAR_1.GET.get('page', 1)
)
return render(VAR_1, VAR_4, context={'users': VAR_8})
def FUNC_2(VAR_1):
return FUNC_1(
VAR_1,
VAR_3=VAR_0.objects.all(),
VAR_4='spirit/VAR_5/admin/FUNC_2.html'
)
def FUNC_3(VAR_1):
return FUNC_1(
VAR_1,
VAR_3=VAR_0.objects.filter(st__is_administrator=True),
VAR_4='spirit/VAR_5/admin/admins.html'
)
def FUNC_4(VAR_1):
return FUNC_1(
VAR_1,
VAR_3=VAR_0.objects.filter(st__is_moderator=True, st__is_administrator=False),
VAR_4='spirit/VAR_5/admin/mods.html'
)
def FUNC_5(VAR_1):
return FUNC_1(
VAR_1,
VAR_3=VAR_0.objects.filter(is_active=False),
VAR_4='spirit/VAR_5/admin/unactive.html'
)
|
from django.shortcuts import render, get_object_or_404
from django.contrib.auth import get_user_model
from django.contrib import messages
from django.utils.translation import gettext as _
from djconfig import config
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.paginator import yt_paginate
from spirit.core.utils.decorators import administrator_required
from .forms import .UserForm, UserProfileForm
VAR_0 = get_user_model()
@administrator_required
def FUNC_0(VAR_1, VAR_2):
VAR_5 = get_object_or_404(VAR_0, pk=VAR_2)
VAR_6 = UserForm(data=post_data(VAR_1), instance=VAR_5)
VAR_7 = UserProfileForm(data=post_data(VAR_1), instance=VAR_5.st)
if is_post(VAR_1) and all([VAR_6.is_valid(), VAR_7.is_valid()]):
VAR_6.save()
VAR_7.save()
messages.info(VAR_1, _("This profile has been updated!"))
return safe_redirect(VAR_1, "next", VAR_1.get_full_path())
return render(
VAR_1=request,
template_name='spirit/VAR_5/admin/FUNC_0.html',
context={'form': VAR_7, 'uform': VAR_6})
@administrator_required
def FUNC_1(VAR_1, VAR_3, VAR_4):
VAR_8 = yt_paginate(
VAR_3.order_by('-date_joined', '-pk'),
per_page=config.topics_per_page,
page_number=VAR_1.GET.get('page', 1)
)
return render(VAR_1, VAR_4, context={'users': VAR_8})
def FUNC_2(VAR_1):
return FUNC_1(
VAR_1,
VAR_3=VAR_0.objects.all(),
VAR_4='spirit/VAR_5/admin/FUNC_2.html'
)
def FUNC_3(VAR_1):
return FUNC_1(
VAR_1,
VAR_3=VAR_0.objects.filter(st__is_administrator=True),
VAR_4='spirit/VAR_5/admin/admins.html'
)
def FUNC_4(VAR_1):
return FUNC_1(
VAR_1,
VAR_3=VAR_0.objects.filter(st__is_moderator=True, st__is_administrator=False),
VAR_4='spirit/VAR_5/admin/mods.html'
)
def FUNC_5(VAR_1):
return FUNC_1(
VAR_1,
VAR_3=VAR_0.objects.filter(is_active=False),
VAR_4='spirit/VAR_5/admin/unactive.html'
)
| [
1,
2,
7,
9,
14,
16,
17,
32,
33,
42,
43,
50,
51,
58,
59,
66,
67,
74
] | [
1,
2,
7,
9,
15,
17,
18,
33,
34,
43,
44,
51,
52,
59,
60,
67,
68,
75
] |
0CWE-22
| from openapi_python_client import utils
def test_snake_case_uppercase_str():
assert utils.snake_case("HTTP") == "http"
assert utils.snake_case("HTTP RESPONSE") == "http_response"
def test_snake_case_from_pascal_with_acronyms():
assert utils.snake_case("HTTPResponse") == "http_response"
assert utils.snake_case("APIClientHTTPResponse") == "api_client_http_response"
assert utils.snake_case("OAuthClientHTTPResponse") == "o_auth_client_http_response"
def test_snake_case_from_pascal():
assert utils.snake_case("HttpResponsePascalCase") == "http_response_pascal_case"
def test_snake_case_from_camel():
assert utils.snake_case("httpResponseLowerCamel") == "http_response_lower_camel"
def test_spinal_case():
assert utils.spinal_case("keep_alive") == "keep-alive"
| from openapi_python_client import utils
def test_snake_case_uppercase_str():
assert utils.snake_case("HTTP") == "http"
assert utils.snake_case("HTTP RESPONSE") == "http_response"
def test_snake_case_from_pascal_with_acronyms():
assert utils.snake_case("HTTPResponse") == "http_response"
assert utils.snake_case("APIClientHTTPResponse") == "api_client_http_response"
assert utils.snake_case("OAuthClientHTTPResponse") == "o_auth_client_http_response"
def test_snake_case_from_pascal():
assert utils.snake_case("HttpResponsePascalCase") == "http_response_pascal_case"
def test_snake_case_from_camel():
assert utils.snake_case("httpResponseLowerCamel") == "http_response_lower_camel"
def test_kebab_case():
assert utils.kebab_case("keep_alive") == "keep-alive"
| path_disclosure | {
"code": [
"def test_spinal_case():",
" assert utils.spinal_case(\"keep_alive\") == \"keep-alive\""
],
"line_no": [
23,
24
]
} | {
"code": [
"def test_kebab_case():",
" assert utils.kebab_case(\"keep_alive\") == \"keep-alive\""
],
"line_no": [
23,
24
]
} | from openapi_python_client import utils
def FUNC_0():
assert utils.snake_case("HTTP") == "http"
assert utils.snake_case("HTTP RESPONSE") == "http_response"
def FUNC_1():
assert utils.snake_case("HTTPResponse") == "http_response"
assert utils.snake_case("APIClientHTTPResponse") == "api_client_http_response"
assert utils.snake_case("OAuthClientHTTPResponse") == "o_auth_client_http_response"
def FUNC_2():
assert utils.snake_case("HttpResponsePascalCase") == "http_response_pascal_case"
def FUNC_3():
assert utils.snake_case("httpResponseLowerCamel") == "http_response_lower_camel"
def FUNC_4():
assert utils.spinal_case("keep_alive") == "keep-alive"
| from openapi_python_client import utils
def FUNC_0():
assert utils.snake_case("HTTP") == "http"
assert utils.snake_case("HTTP RESPONSE") == "http_response"
def FUNC_1():
assert utils.snake_case("HTTPResponse") == "http_response"
assert utils.snake_case("APIClientHTTPResponse") == "api_client_http_response"
assert utils.snake_case("OAuthClientHTTPResponse") == "o_auth_client_http_response"
def FUNC_2():
assert utils.snake_case("HttpResponsePascalCase") == "http_response_pascal_case"
def FUNC_3():
assert utils.snake_case("httpResponseLowerCamel") == "http_response_lower_camel"
def FUNC_4():
assert utils.kebab_case("keep_alive") == "keep-alive"
| [
2,
3,
7,
8,
13,
14,
17,
18,
21,
22,
25
] | [
2,
3,
7,
8,
13,
14,
17,
18,
21,
22,
25
] |
1CWE-79
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# CherryMusic - a standalone music server
# Copyright (c) 2012 - 2015 Tom Wallroth & Tilman Boerner
#
# Project page:
# http://fomori.org/cherrymusic/
# Sources on github:
# http://github.com/devsnd/cherrymusic/
#
# CherryMusic is based on
# jPlayer (GPL/MIT license) http://www.jplayer.org/
# CherryPy (BSD license) http://www.cherrypy.org/
#
# licensed under GNU GPL version 3 (or later)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
"""This class provides the api to talk to the client.
It will then call the cherrymodel, to get the
requested information"""
import os # shouldn't have to list any folder in the future!
import json
import cherrypy
import codecs
import sys
try:
from urllib.parse import unquote
except ImportError:
from backport.urllib.parse import unquote
try:
from urllib import parse
except ImportError:
from backport.urllib import parse
import audiotranscode
from tinytag import TinyTag
from cherrymusicserver import userdb
from cherrymusicserver import log
from cherrymusicserver import albumartfetcher
from cherrymusicserver import service
from cherrymusicserver.pathprovider import readRes
from cherrymusicserver.pathprovider import albumArtFilePath
import cherrymusicserver as cherry
import cherrymusicserver.metainfo as metainfo
from cherrymusicserver.util import Performance, MemoryZipFile
from cherrymusicserver.ext import zipstream
import time
debug = True
@service.user(model='cherrymodel', playlistdb='playlist',
useroptions='useroptions', userdb='users')
class HTTPHandler(object):
def __init__(self, config):
self.config = config
template_main = 'res/dist/main.html'
template_login = 'res/login.html'
template_firstrun = 'res/firstrun.html'
self.mainpage = readRes(template_main)
self.loginpage = readRes(template_login)
self.firstrunpage = readRes(template_firstrun)
self.handlers = {
'search': self.api_search,
'rememberplaylist': self.api_rememberplaylist,
'saveplaylist': self.api_saveplaylist,
'loadplaylist': self.api_loadplaylist,
'generaterandomplaylist': self.api_generaterandomplaylist,
'deleteplaylist': self.api_deleteplaylist,
'getmotd': self.api_getmotd,
'restoreplaylist': self.api_restoreplaylist,
'getplayables': self.api_getplayables,
'getuserlist': self.api_getuserlist,
'adduser': self.api_adduser,
'userdelete': self.api_userdelete,
'userchangepassword': self.api_userchangepassword,
'showplaylists': self.api_showplaylists,
'logout': self.api_logout,
'downloadpls': self.api_downloadpls,
'downloadm3u': self.api_downloadm3u,
'getsonginfo': self.api_getsonginfo,
'getencoders': self.api_getencoders,
'getdecoders': self.api_getdecoders,
'transcodingenabled': self.api_transcodingenabled,
'updatedb': self.api_updatedb,
'getconfiguration': self.api_getconfiguration,
'compactlistdir': self.api_compactlistdir,
'listdir': self.api_listdir,
'fetchalbumart': self.api_fetchalbumart,
'fetchalbumarturls': self.api_fetchalbumarturls,
'albumart_set': self.api_albumart_set,
'heartbeat': self.api_heartbeat,
'getuseroptions': self.api_getuseroptions,
'setuseroption': self.api_setuseroption,
'changeplaylist': self.api_changeplaylist,
'downloadcheck': self.api_downloadcheck,
'setuseroptionfor': self.api_setuseroptionfor,
}
def issecure(self, url):
return parse.urlparse(url).scheme == 'https'
def getBaseUrl(self, redirect_unencrypted=False):
ipAndPort = parse.urlparse(cherrypy.url()).netloc
is_secure_connection = self.issecure(cherrypy.url())
ssl_enabled = cherry.config['server.ssl_enabled']
if ssl_enabled and not is_secure_connection:
log.d(_('Not secure, redirecting...'))
ip = ipAndPort[:ipAndPort.rindex(':')]
url = 'https://' + ip + ':' + str(cherry.config['server.ssl_port'])
if redirect_unencrypted:
raise cherrypy.HTTPRedirect(url, 302)
else:
url = 'http://' + ipAndPort
return url
def index(self, *args, **kwargs):
self.getBaseUrl(redirect_unencrypted=True)
firstrun = 0 == self.userdb.getUserCount()
show_page = self.mainpage #generated main.html from devel.html
if 'devel' in kwargs:
#reload pages everytime in devel mode
show_page = readRes('res/devel.html')
self.loginpage = readRes('res/login.html')
self.firstrunpage = readRes('res/firstrun.html')
if 'login' in kwargs:
username = kwargs.get('username', '')
password = kwargs.get('password', '')
login_action = kwargs.get('login', '')
if login_action == 'login':
self.session_auth(username, password)
if cherrypy.session['username']:
username = cherrypy.session['username']
log.i(_('user {name} just logged in.').format(name=username))
elif login_action == 'create admin user':
if firstrun:
if username.strip() and password.strip():
self.userdb.addUser(username, password, True)
self.session_auth(username, password)
return show_page
else:
return "No, you can't."
if firstrun:
return self.firstrunpage
else:
if self.isAuthorized():
return show_page
else:
return self.loginpage
index.exposed = True
def isAuthorized(self):
try:
sessionUsername = cherrypy.session.get('username', None)
sessionUserId = cherrypy.session.get('userid', -1)
nameById = self.userdb.getNameById(sessionUserId)
except (UnicodeDecodeError, ValueError) as e:
# workaround for python2/python3 jump, filed bug in cherrypy
# https://bitbucket.org/cherrypy/cherrypy/issue/1216/sessions-python2-3-compability-unsupported
log.w(_('''
Dropping all sessions! Try not to change between python 2 and 3,
everybody has to relogin now.'''))
cherrypy.session.delete()
sessionUsername = None
if sessionUsername is None:
if self.autoLoginActive():
cherrypy.session['username'] = self.userdb.getNameById(1)
cherrypy.session['userid'] = 1
cherrypy.session['admin'] = True
return True
else:
return False
elif sessionUsername != nameById:
self.api_logout(value=None)
return False
return True
def autoLoginActive(self):
is_loopback = cherrypy.request.remote.ip in ('127.0.0.1', '::1')
if is_loopback and cherry.config['server.localhost_auto_login']:
return True
return False
def session_auth(self, username, password):
user = self.userdb.auth(username, password)
allow_remote = cherry.config['server.permit_remote_admin_login']
is_loopback = cherrypy.request.remote.ip in ('127.0.0.1', '::1')
if not is_loopback and user.isadmin and not allow_remote:
log.i(_('Rejected remote admin login from user: {name}').format(name=user.name))
user = userdb.User.nobody()
cherrypy.session['username'] = user.name
cherrypy.session['userid'] = user.uid
cherrypy.session['admin'] = user.isadmin
def getUserId(self):
try:
return cherrypy.session['userid']
except KeyError:
cherrypy.lib.sessions.expire()
cherrypy.HTTPRedirect(cherrypy.url(), 302)
return ''
def trans(self, newformat, *path, **params):
''' Transcodes the track given as ``path`` into ``newformat``.
Streams the response of the corresponding
``audiotranscode.AudioTranscode().transcodeStream()`` call.
params:
bitrate: int for kbps. None or < 1 for default
'''
if not self.isAuthorized():
raise cherrypy.HTTPRedirect(self.getBaseUrl(), 302)
cherrypy.session.release_lock()
if cherry.config['media.transcode'] and path:
# bitrate
bitrate = params.pop('bitrate', None) or None # catch empty strings
if bitrate:
try:
bitrate = max(0, int(bitrate)) or None # None if < 1
except (TypeError, ValueError):
raise cherrypy.HTTPError(400, "Bad query: "
"bitrate ({0!r}) must be an integer".format(str(bitrate)))
# path
path = os.path.sep.join(path)
if sys.version_info < (3, 0): # workaround for #327 (cherrypy issue)
path = path.decode('utf-8') # make it work with non-ascii
else:
path = codecs.decode(codecs.encode(path, 'latin1'), 'utf-8')
fullpath = os.path.join(cherry.config['media.basedir'], path)
starttime = int(params.pop('starttime', 0))
transcoder = audiotranscode.AudioTranscode()
mimetype = audiotranscode.mime_type(newformat)
cherrypy.response.headers["Content-Type"] = mimetype
try:
return transcoder.transcode_stream(fullpath, newformat,
bitrate=bitrate, starttime=starttime)
except (audiotranscode.TranscodeError, IOError) as e:
raise cherrypy.HTTPError(404, e.value)
trans.exposed = True
trans._cp_config = {'response.stream': True}
def api(self, *args, **kwargs):
"""calls the appropriate handler from the handlers
dict, if available. handlers having noauth set to
true do not need authentification to work.
"""
#check action
action = args[0] if args else ''
if not action in self.handlers:
return "Error: no such action. '%s'" % action
#authorize if not explicitly deactivated
handler = self.handlers[action]
needsAuth = not ('noauth' in dir(handler) and handler.noauth)
if needsAuth and not self.isAuthorized():
raise cherrypy.HTTPError(401, 'Unauthorized')
handler_args = {}
if 'data' in kwargs:
handler_args = json.loads(kwargs['data'])
is_binary = ('binary' in dir(handler) and handler.binary)
if is_binary:
return handler(**handler_args)
else:
return json.dumps({'data': handler(**handler_args)})
api.exposed = True
def download_check_files(self, filelist):
# only admins and allowed users may download
if not cherrypy.session['admin']:
uo = self.useroptions.forUser(self.getUserId())
if not uo.getOptionValue('media.may_download'):
return 'not_permitted'
# make sure nobody tries to escape from basedir
for f in filelist:
if '/../' in f:
return 'invalid_file'
# make sure all files are smaller than maximum download size
size_limit = cherry.config['media.maximum_download_size']
try:
if self.model.file_size_within_limit(filelist, size_limit):
return 'ok'
else:
return 'too_big'
except OSError as e: # use OSError for python2 compatibility
return str(e)
def api_downloadcheck(self, filelist):
status = self.download_check_files(filelist)
if status == 'not_permitted':
return """You are not allowed to download files."""
elif status == 'invalid_file':
return "Error: invalid filename found in {list}".format(list=filelist)
elif status == 'too_big':
size_limit = cherry.config['media.maximum_download_size']
return """Can't download: Playlist is bigger than {maxsize} mB.
The server administrator can change this configuration.
""".format(maxsize=size_limit/1024/1024)
elif status == 'ok':
return status
else:
message = "Error status check for download: {status!r}".format(status=status)
log.e(message)
return message
def download(self, value):
if not self.isAuthorized():
raise cherrypy.HTTPError(401, 'Unauthorized')
filelist = [filepath for filepath in json.loads(unquote(value))]
dlstatus = self.download_check_files(filelist)
if dlstatus == 'ok':
_save_and_release_session()
zipmime = 'application/x-zip-compressed'
cherrypy.response.headers["Content-Type"] = zipmime
zipname = 'attachment; filename="music.zip"'
cherrypy.response.headers['Content-Disposition'] = zipname
basedir = cherry.config['media.basedir']
fullpath_filelist = [os.path.join(basedir, f) for f in filelist]
return zipstream.ZipStream(fullpath_filelist)
else:
return dlstatus
download.exposed = True
download._cp_config = {'response.stream': True}
def api_getuseroptions(self):
uo = self.useroptions.forUser(self.getUserId())
uco = uo.getChangableOptions()
if cherrypy.session['admin']:
uco['media'].update({'may_download': True})
else:
uco['media'].update({'may_download': uo.getOptionValue('media.may_download')})
return uco
def api_heartbeat(self):
uo = self.useroptions.forUser(self.getUserId())
uo.setOption('last_time_online', int(time.time()))
def api_setuseroption(self, optionkey, optionval):
uo = self.useroptions.forUser(self.getUserId())
uo.setOption(optionkey, optionval)
return "success"
def api_setuseroptionfor(self, userid, optionkey, optionval):
if cherrypy.session['admin']:
uo = self.useroptions.forUser(userid)
uo.setOption(optionkey, optionval)
return "success"
else:
return "error: not permitted. Only admins can change other users options"
def api_fetchalbumarturls(self, searchterm):
if not cherrypy.session['admin']:
raise cherrypy.HTTPError(401, 'Unauthorized')
_save_and_release_session()
fetcher = albumartfetcher.AlbumArtFetcher()
imgurls = fetcher.fetchurls(searchterm)
# show no more than 10 images
return imgurls[:min(len(imgurls), 10)]
def api_albumart_set(self, directory, imageurl):
if not cherrypy.session['admin']:
raise cherrypy.HTTPError(401, 'Unauthorized')
b64imgpath = albumArtFilePath(directory)
fetcher = albumartfetcher.AlbumArtFetcher()
data, header = fetcher.retrieveData(imageurl)
self.albumartcache_save(b64imgpath, data)
def api_fetchalbumart(self, directory):
_save_and_release_session()
default_folder_image = "../res/img/folder.png"
log.i('Fetching album art for: %s' % directory)
filepath = os.path.join(cherry.config['media.basedir'], directory)
if os.path.isfile(filepath):
# if the given path is a file, try to get the image from ID3
tag = TinyTag.get(filepath, image=True)
image_data = tag.get_image()
if image_data:
log.d('Image found in tag.')
header = {'Content-Type': 'image/jpg', 'Content-Length': len(image_data)}
cherrypy.response.headers.update(header)
return image_data
else:
# if the file does not contain an image, display the image of the
# parent directory
directory = os.path.dirname(directory)
#try getting a cached album art image
b64imgpath = albumArtFilePath(directory)
img_data = self.albumartcache_load(b64imgpath)
if img_data:
cherrypy.response.headers["Content-Length"] = len(img_data)
return img_data
#try getting album art inside local folder
fetcher = albumartfetcher.AlbumArtFetcher()
localpath = os.path.join(cherry.config['media.basedir'], directory)
header, data, resized = fetcher.fetchLocal(localpath)
if header:
if resized:
#cache resized image for next time
self.albumartcache_save(b64imgpath, data)
cherrypy.response.headers.update(header)
return data
elif cherry.config['media.fetch_album_art']:
#fetch album art from online source
try:
foldername = os.path.basename(directory)
keywords = foldername
log.i(_("Fetching album art for keywords {keywords!r}").format(keywords=keywords))
header, data = fetcher.fetch(keywords)
if header:
cherrypy.response.headers.update(header)
self.albumartcache_save(b64imgpath, data)
return data
else:
# albumart fetcher failed, so we serve a standard image
raise cherrypy.HTTPRedirect(default_folder_image, 302)
except:
# albumart fetcher threw exception, so we serve a standard image
raise cherrypy.HTTPRedirect(default_folder_image, 302)
else:
# no local album art found, online fetching deactivated, show default
raise cherrypy.HTTPRedirect(default_folder_image, 302)
api_fetchalbumart.noauth = True
api_fetchalbumart.binary = True
def albumartcache_load(self, imgb64path):
if os.path.exists(imgb64path):
with open(imgb64path, 'rb') as f:
return f.read()
def albumartcache_save(self, path, data):
with open(path, 'wb') as f:
f.write(data)
def api_compactlistdir(self, directory, filterstr=None):
try:
files_to_list = self.model.listdir(directory, filterstr)
except ValueError:
raise cherrypy.HTTPError(400, 'Bad Request')
return [entry.to_dict() for entry in files_to_list]
def api_listdir(self, directory):
try:
return [entry.to_dict() for entry in self.model.listdir(directory)]
except ValueError:
raise cherrypy.HTTPError(400, 'Bad Request')
def api_search(self, searchstring):
if not searchstring.strip():
jsonresults = '[]'
else:
with Performance(_('processing whole search request')):
searchresults = self.model.search(searchstring.strip())
with Performance(_('rendering search results as json')):
jsonresults = [entry.to_dict() for entry in searchresults]
return jsonresults
def api_rememberplaylist(self, playlist):
cherrypy.session['playlist'] = playlist
def api_saveplaylist(self, playlist, public, playlistname, overwrite=False):
res = self.playlistdb.savePlaylist(
userid=self.getUserId(),
public=1 if public else 0,
playlist=playlist,
playlisttitle=playlistname,
overwrite=overwrite)
if res == "success":
return res
else:
raise cherrypy.HTTPError(400, res)
def api_deleteplaylist(self, playlistid):
res = self.playlistdb.deletePlaylist(playlistid,
self.getUserId(),
override_owner=False)
if res == "success":
return res
else:
# not the ideal status code but we don't know the actual
# cause without parsing res
raise cherrypy.HTTPError(400, res)
def api_loadplaylist(self, playlistid):
return [entry.to_dict() for entry in self.playlistdb.loadPlaylist(
playlistid=playlistid,
userid=self.getUserId()
)]
def api_generaterandomplaylist(self):
return [entry.to_dict() for entry in self.model.randomMusicEntries(50)]
def api_changeplaylist(self, plid, attribute, value):
if attribute == 'public':
is_valid = type(value) == bool and type(plid) == int
if is_valid:
return self.playlistdb.setPublic(userid=self.getUserId(),
plid=plid,
public=value)
def api_getmotd(self):
if cherrypy.session['admin'] and cherry.config['general.update_notification']:
_save_and_release_session()
new_versions = self.model.check_for_updates()
if new_versions:
newest_version = new_versions[0]['version']
features = []
fixes = []
for version in new_versions:
for update in version['features']:
if update.startswith('FEATURE:'):
features.append(update[len('FEATURE:'):])
elif update.startswith('FIX:'):
fixes.append(update[len('FIX:'):])
elif update.startswith('FIXED:'):
fixes.append(update[len('FIXED:'):])
retdata = {'type': 'update', 'data': {}}
retdata['data']['version'] = newest_version
retdata['data']['features'] = features
retdata['data']['fixes'] = fixes
return retdata
return {'type': 'wisdom', 'data': self.model.motd()}
def api_restoreplaylist(self):
session_playlist = cherrypy.session.get('playlist', [])
return session_playlist
def api_getplayables(self):
"""DEPRECATED"""
return json.dumps(cherry.config['media.playable'])
def api_getuserlist(self):
if cherrypy.session['admin']:
userlist = self.userdb.getUserList()
for user in userlist:
if user['id'] == cherrypy.session['userid']:
user['deletable'] = False
user_options = self.useroptions.forUser(user['id'])
t = user_options.getOptionValue('last_time_online')
may_download = user_options.getOptionValue('media.may_download')
user['last_time_online'] = t
user['may_download'] = may_download
sortfunc = lambda user: user['last_time_online']
userlist = sorted(userlist, key=sortfunc, reverse=True)
return json.dumps({'time': int(time.time()),
'userlist': userlist})
else:
return json.dumps({'time': 0, 'userlist': []})
def api_adduser(self, username, password, isadmin):
if cherrypy.session['admin']:
if self.userdb.addUser(username, password, isadmin):
return 'added new user: %s' % username
else:
return 'error, cannot add new user!' % username
else:
return "You didn't think that would work, did you?"
def api_userchangepassword(self, oldpassword, newpassword, username=''):
isself = username == ''
if isself:
username = cherrypy.session['username']
authed_user = self.userdb.auth(username, oldpassword)
is_authenticated = userdb.User.nobody() != authed_user
if not is_authenticated:
raise cherrypy.HTTPError(403, "Forbidden")
if isself or cherrypy.session['admin']:
return self.userdb.changePassword(username, newpassword)
else:
raise cherrypy.HTTPError(403, "Forbidden")
def api_userdelete(self, userid):
is_self = cherrypy.session['userid'] == userid
if cherrypy.session['admin'] and not is_self:
deleted = self.userdb.deleteUser(userid)
return 'success' if deleted else 'failed'
else:
return "You didn't think that would work, did you?"
def api_showplaylists(self, sortby="created", filterby=''):
playlists = self.playlistdb.showPlaylists(self.getUserId(), filterby)
curr_time = int(time.time())
is_reverse = False
#translate userids to usernames:
for pl in playlists:
pl['username'] = self.userdb.getNameById(pl['userid'])
pl['type'] = 'playlist'
pl['age'] = curr_time - pl['created']
if sortby[0] == '-':
is_reverse = True
sortby = sortby[1:]
if not sortby in ('username', 'age', 'title', 'default'):
sortby = 'created'
if sortby == 'default':
sortby = 'age'
is_reverse = False
playlists = sorted(playlists, key=lambda x: x[sortby], reverse = is_reverse)
return playlists
def api_logout(self):
cherrypy.lib.sessions.expire()
api_logout.no_auth = True
def api_downloadpls(self, plid, hostaddr):
userid = self.getUserId()
pls = self.playlistdb.createPLS(plid=plid, userid=userid, addrstr=hostaddr)
name = self.playlistdb.getName(plid, userid)
if pls and name:
return self.serve_string_as_file(pls, name+'.pls')
api_downloadpls.binary = True
def api_downloadm3u(self, plid, hostaddr):
userid = self.getUserId()
pls = self.playlistdb.createM3U(plid=plid, userid=userid, addrstr=hostaddr)
name = self.playlistdb.getName(plid, userid)
if pls and name:
return self.serve_string_as_file(pls, name+'.m3u')
api_downloadm3u.binary = True
def export_playlists(self, format, all=False, hostaddr=''):
userid = self.getUserId()
if not userid:
raise cherrypy.HTTPError(401, _("Please log in"))
hostaddr = (hostaddr.strip().rstrip('/') + cherry.config['server.rootpath']).rstrip('/')
format = format.lower()
if format == 'm3u':
filemaker = self.playlistdb.createM3U
elif format == 'pls':
filemaker = self.playlistdb.createPLS
else:
raise cherrypy.HTTPError(400,
_('Unknown playlist format: {format!r}').format(format=format))
playlists = self.playlistdb.showPlaylists(userid, include_public=all)
if not playlists:
raise cherrypy.HTTPError(404, _('No playlists found'))
with MemoryZipFile() as zip:
for pl in playlists:
plid = pl['plid']
plstr = filemaker(plid=plid, userid=userid, addrstr=hostaddr)
name = self.playlistdb.getName(plid, userid) + '.' + format
if not pl['owner']:
username = self.userdb.getNameById(pl['userid'])
name = username + '/' + name
zip.writestr(name, plstr)
zipmime = 'application/x-zip-compressed'
zipname = 'attachment; filename="playlists.zip"'
cherrypy.response.headers["Content-Type"] = zipmime
cherrypy.response.headers['Content-Disposition'] = zipname
return zip.getbytes()
export_playlists.exposed = True
def api_getsonginfo(self, path):
basedir = cherry.config['media.basedir']
abspath = os.path.join(basedir, path)
return json.dumps(metainfo.getSongInfo(abspath).dict())
def api_getencoders(self):
return json.dumps(audiotranscode.getEncoders())
def api_getdecoders(self):
return json.dumps(audiotranscode.getDecoders())
def api_transcodingenabled(self):
return json.dumps(cherry.config['media.transcode'])
def api_updatedb(self):
self.model.updateLibrary()
return 'success'
def api_getconfiguration(self):
clientconfigkeys = {
'transcodingenabled': cherry.config['media.transcode'],
'fetchalbumart': cherry.config['media.fetch_album_art'],
'isadmin': cherrypy.session['admin'],
'username': cherrypy.session['username'],
'servepath': 'serve/',
'transcodepath': 'trans/',
'auto_login': self.autoLoginActive(),
'version': cherry.REPO_VERSION or cherry.VERSION,
}
if cherry.config['media.transcode']:
decoders = list(self.model.transcoder.available_decoder_formats())
clientconfigkeys['getdecoders'] = decoders
encoders = list(self.model.transcoder.available_encoder_formats())
clientconfigkeys['getencoders'] = encoders
else:
clientconfigkeys['getdecoders'] = []
clientconfigkeys['getencoders'] = []
return clientconfigkeys
def serve_string_as_file(self, string, filename):
content_disposition = 'attachment; filename="'+filename+'"'
cherrypy.response.headers["Content-Type"] = "application/x-download"
cherrypy.response.headers["Content-Disposition"] = content_disposition
return codecs.encode(string, "UTF-8")
def _save_and_release_session():
    """Persist the current session, then stop CherryPy from saving
    (and thereby re-locking) it again when the request is finalized.

    Works around CherryPy >= 3.3 FileSession behaviour, where
    ``sessions.save`` is unconditionally hooked into ``before_finalize``
    and asserts that the session is still locked; see
    https://github.com/devsnd/cherrymusic/issues/483 for background.
    """
    cherrypy.session.save()
    blocked_callback = cherrypy.lib.sessions.save
    finalize_hooks = cherrypy.serving.request.hooks['before_finalize']
    # Drop every pending call to sessions.save. There is likely only one
    # (a second save would always fail), but be safe and remove them all.
    finalize_hooks[:] = [hook for hook in finalize_hooks
                         if hook.callback is not blocked_callback]
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# CherryMusic - a standalone music server
# Copyright (c) 2012 - 2015 Tom Wallroth & Tilman Boerner
#
# Project page:
# http://fomori.org/cherrymusic/
# Sources on github:
# http://github.com/devsnd/cherrymusic/
#
# CherryMusic is based on
# jPlayer (GPL/MIT license) http://www.jplayer.org/
# CherryPy (BSD license) http://www.cherrypy.org/
#
# licensed under GNU GPL version 3 (or later)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
"""This class provides the api to talk to the client.
It will then call the cherrymodel, to get the
requested information"""
import os # shouldn't have to list any folder in the future!
import json
import cherrypy
import codecs
import sys
try:
from urllib.parse import unquote
except ImportError:
from backport.urllib.parse import unquote
try:
from urllib import parse
except ImportError:
from backport.urllib import parse
import audiotranscode
from tinytag import TinyTag
from cherrymusicserver import userdb
from cherrymusicserver import log
from cherrymusicserver import albumartfetcher
from cherrymusicserver import service
from cherrymusicserver.pathprovider import readRes
from cherrymusicserver.pathprovider import albumArtFilePath
import cherrymusicserver as cherry
import cherrymusicserver.metainfo as metainfo
from cherrymusicserver.util import Performance, MemoryZipFile
from cherrymusicserver.ext import zipstream
import time
debug = True
@service.user(model='cherrymodel', playlistdb='playlist',
useroptions='useroptions', userdb='users')
class HTTPHandler(object):
def __init__(self, config):
self.config = config
template_main = 'res/dist/main.html'
template_login = 'res/login.html'
template_firstrun = 'res/firstrun.html'
self.mainpage = readRes(template_main)
self.loginpage = readRes(template_login)
self.firstrunpage = readRes(template_firstrun)
self.handlers = {
'search': self.api_search,
'rememberplaylist': self.api_rememberplaylist,
'saveplaylist': self.api_saveplaylist,
'loadplaylist': self.api_loadplaylist,
'generaterandomplaylist': self.api_generaterandomplaylist,
'deleteplaylist': self.api_deleteplaylist,
'getmotd': self.api_getmotd,
'restoreplaylist': self.api_restoreplaylist,
'getplayables': self.api_getplayables,
'getuserlist': self.api_getuserlist,
'adduser': self.api_adduser,
'userdelete': self.api_userdelete,
'userchangepassword': self.api_userchangepassword,
'showplaylists': self.api_showplaylists,
'logout': self.api_logout,
'downloadpls': self.api_downloadpls,
'downloadm3u': self.api_downloadm3u,
'getsonginfo': self.api_getsonginfo,
'getencoders': self.api_getencoders,
'getdecoders': self.api_getdecoders,
'transcodingenabled': self.api_transcodingenabled,
'updatedb': self.api_updatedb,
'getconfiguration': self.api_getconfiguration,
'compactlistdir': self.api_compactlistdir,
'listdir': self.api_listdir,
'fetchalbumart': self.api_fetchalbumart,
'fetchalbumarturls': self.api_fetchalbumarturls,
'albumart_set': self.api_albumart_set,
'heartbeat': self.api_heartbeat,
'getuseroptions': self.api_getuseroptions,
'setuseroption': self.api_setuseroption,
'changeplaylist': self.api_changeplaylist,
'downloadcheck': self.api_downloadcheck,
'setuseroptionfor': self.api_setuseroptionfor,
}
def issecure(self, url):
return parse.urlparse(url).scheme == 'https'
def getBaseUrl(self, redirect_unencrypted=False):
ipAndPort = parse.urlparse(cherrypy.url()).netloc
is_secure_connection = self.issecure(cherrypy.url())
ssl_enabled = cherry.config['server.ssl_enabled']
if ssl_enabled and not is_secure_connection:
log.d(_('Not secure, redirecting...'))
ip = ipAndPort[:ipAndPort.rindex(':')]
url = 'https://' + ip + ':' + str(cherry.config['server.ssl_port'])
if redirect_unencrypted:
raise cherrypy.HTTPRedirect(url, 302)
else:
url = 'http://' + ipAndPort
return url
    def index(self, *args, **kwargs):
        """Serve the main page, the login page or the first-run page.

        Also handles the 'login' and 'create admin user' form posts, and
        bounces plain-HTTP requests to https when SSL is enabled.
        """
        self.getBaseUrl(redirect_unencrypted=True)
        # first run: no user accounts exist yet
        firstrun = 0 == self.userdb.getUserCount()
        show_page = self.mainpage #generated main.html from devel.html
        if 'devel' in kwargs:
            #reload pages everytime in devel mode
            show_page = readRes('res/devel.html')
            self.loginpage = readRes('res/login.html')
            self.firstrunpage = readRes('res/firstrun.html')
        if 'login' in kwargs:
            username = kwargs.get('username', '')
            password = kwargs.get('password', '')
            login_action = kwargs.get('login', '')
            if login_action == 'login':
                self.session_auth(username, password)
                if cherrypy.session['username']:
                    username = cherrypy.session['username']
                    log.i(_('user {name} just logged in.').format(name=username))
            elif login_action == 'create admin user':
                # only allowed while no user exists at all (first run)
                if firstrun:
                    if username.strip() and password.strip():
                        self.userdb.addUser(username, password, True)
                        self.session_auth(username, password)
                        return show_page
                else:
                    return "No, you can't."
        if firstrun:
            return self.firstrunpage
        else:
            if self.isAuthorized():
                return show_page
            else:
                return self.loginpage
    index.exposed = True
    def isAuthorized(self):
        """Return True when the request belongs to a valid session.

        Falls back to auto-login (user id 1, admin) for loopback clients
        when enabled, and logs the session out when the session username
        no longer matches the user database.
        """
        try:
            sessionUsername = cherrypy.session.get('username', None)
            sessionUserId = cherrypy.session.get('userid', -1)
            nameById = self.userdb.getNameById(sessionUserId)
        except (UnicodeDecodeError, ValueError) as e:
            # workaround for python2/python3 jump, filed bug in cherrypy
            # https://bitbucket.org/cherrypy/cherrypy/issue/1216/sessions-python2-3-compability-unsupported
            log.w(_('''
                Dropping all sessions! Try not to change between python 2 and 3,
                everybody has to relogin now.'''))
            cherrypy.session.delete()
            sessionUsername = None
        if sessionUsername is None:
            if self.autoLoginActive():
                # loopback client with auto-login enabled becomes user 1
                cherrypy.session['username'] = self.userdb.getNameById(1)
                cherrypy.session['userid'] = 1
                cherrypy.session['admin'] = True
                return True
            else:
                return False
        elif sessionUsername != nameById:
            # stale session (user renamed or deleted): force logout
            self.api_logout(value=None)
            return False
        return True
def autoLoginActive(self):
is_loopback = cherrypy.request.remote.ip in ('127.0.0.1', '::1')
if is_loopback and cherry.config['server.localhost_auto_login']:
return True
return False
def session_auth(self, username, password):
user = self.userdb.auth(username, password)
allow_remote = cherry.config['server.permit_remote_admin_login']
is_loopback = cherrypy.request.remote.ip in ('127.0.0.1', '::1')
if not is_loopback and user.isadmin and not allow_remote:
log.i(_('Rejected remote admin login from user: {name}').format(name=user.name))
user = userdb.User.nobody()
cherrypy.session['username'] = user.name
cherrypy.session['userid'] = user.uid
cherrypy.session['admin'] = user.isadmin
    def getUserId(self):
        """Return the session's user id, or '' when there is no session."""
        try:
            return cherrypy.session['userid']
        except KeyError:
            cherrypy.lib.sessions.expire()
            # NOTE(review): this HTTPRedirect is constructed but never
            # raised, so no redirect actually happens and '' is returned
            # instead -- looks like a bug, but callers rely on the falsy
            # return value; confirm before changing.
            cherrypy.HTTPRedirect(cherrypy.url(), 302)
            return ''
    def trans(self, newformat, *path, **params):
        ''' Transcodes the track given as ``path`` into ``newformat``.

            Streams the response of the corresponding
            ``audiotranscode.AudioTranscode().transcodeStream()`` call.

            params:
            bitrate: int for kbps. None or < 1 for default
        '''
        if not self.isAuthorized():
            raise cherrypy.HTTPRedirect(self.getBaseUrl(), 302)
        # release the session lock early: streaming may take a long time
        cherrypy.session.release_lock()
        if cherry.config['media.transcode'] and path:
            # bitrate
            bitrate = params.pop('bitrate', None) or None  # catch empty strings
            if bitrate:
                try:
                    bitrate = max(0, int(bitrate)) or None  # None if < 1
                except (TypeError, ValueError):
                    raise cherrypy.HTTPError(400, "Bad query: "
                        "bitrate ({0!r}) must be an integer".format(str(bitrate)))
            # path: positional URL segments joined back into a filesystem path
            # NOTE(review): unlike download(), no traversal check happens
            # here -- presumably cherrypy's URL handling prevents '..'
            # segments; confirm.
            path = os.path.sep.join(path)
            if sys.version_info < (3, 0):  # workaround for #327 (cherrypy issue)
                path = path.decode('utf-8')  # make it work with non-ascii
            else:
                path = codecs.decode(codecs.encode(path, 'latin1'), 'utf-8')
            fullpath = os.path.join(cherry.config['media.basedir'], path)
            starttime = int(params.pop('starttime', 0))
            transcoder = audiotranscode.AudioTranscode()
            mimetype = audiotranscode.mime_type(newformat)
            cherrypy.response.headers["Content-Type"] = mimetype
            try:
                return transcoder.transcode_stream(fullpath, newformat,
                                                   bitrate=bitrate, starttime=starttime)
            except (audiotranscode.TranscodeError, IOError) as e:
                raise cherrypy.HTTPError(404, e.value)
    trans.exposed = True
    trans._cp_config = {'response.stream': True}
def api(self, *args, **kwargs):
"""calls the appropriate handler from the handlers
dict, if available. handlers having noauth set to
true do not need authentification to work.
"""
#check action
action = args[0] if args else ''
if not action in self.handlers:
return "Error: no such action. '%s'" % action
#authorize if not explicitly deactivated
handler = self.handlers[action]
needsAuth = not ('noauth' in dir(handler) and handler.noauth)
if needsAuth and not self.isAuthorized():
raise cherrypy.HTTPError(401, 'Unauthorized')
handler_args = {}
if 'data' in kwargs:
handler_args = json.loads(kwargs['data'])
is_binary = ('binary' in dir(handler) and handler.binary)
if is_binary:
return handler(**handler_args)
else:
return json.dumps({'data': handler(**handler_args)})
api.exposed = True
def download_check_files(self, filelist):
# only admins and allowed users may download
if not cherrypy.session['admin']:
uo = self.useroptions.forUser(self.getUserId())
if not uo.getOptionValue('media.may_download'):
return 'not_permitted'
# make sure nobody tries to escape from basedir
for f in filelist:
# don't allow to traverse up in the file system
if '/../' in f or f.startswith('../'):
return 'invalid_file'
# CVE-2015-8309: do not allow absolute file paths
if os.path.isabs(f):
return 'invalid_file'
# make sure all files are smaller than maximum download size
size_limit = cherry.config['media.maximum_download_size']
try:
if self.model.file_size_within_limit(filelist, size_limit):
return 'ok'
else:
return 'too_big'
except OSError as e: # use OSError for python2 compatibility
return str(e)
    def api_downloadcheck(self, filelist):
        """Translate a download_check_files() status into a user message.

        Returns 'ok' when the download may proceed, otherwise a human
        readable error string for the client.
        """
        status = self.download_check_files(filelist)
        if status == 'not_permitted':
            return """You are not allowed to download files."""
        elif status == 'invalid_file':
            return "Error: invalid filename found in {list}".format(list=filelist)
        elif status == 'too_big':
            size_limit = cherry.config['media.maximum_download_size']
            return """Can't download: Playlist is bigger than {maxsize} mB.
            The server administrator can change this configuration.
            """.format(maxsize=size_limit/1024/1024)
        elif status == 'ok':
            return status
        else:
            # unknown status value: log it and surface it for debugging
            message = "Error status check for download: {status!r}".format(status=status)
            log.e(message)
            return message
def download(self, value):
if not self.isAuthorized():
raise cherrypy.HTTPError(401, 'Unauthorized')
filelist = [filepath for filepath in json.loads(unquote(value))]
dlstatus = self.download_check_files(filelist)
if dlstatus == 'ok':
_save_and_release_session()
zipmime = 'application/x-zip-compressed'
cherrypy.response.headers["Content-Type"] = zipmime
zipname = 'attachment; filename="music.zip"'
cherrypy.response.headers['Content-Disposition'] = zipname
basedir = cherry.config['media.basedir']
fullpath_filelist = [os.path.join(basedir, f) for f in filelist]
return zipstream.ZipStream(fullpath_filelist)
else:
return dlstatus
download.exposed = True
download._cp_config = {'response.stream': True}
def api_getuseroptions(self):
uo = self.useroptions.forUser(self.getUserId())
uco = uo.getChangableOptions()
if cherrypy.session['admin']:
uco['media'].update({'may_download': True})
else:
uco['media'].update({'may_download': uo.getOptionValue('media.may_download')})
return uco
def api_heartbeat(self):
uo = self.useroptions.forUser(self.getUserId())
uo.setOption('last_time_online', int(time.time()))
def api_setuseroption(self, optionkey, optionval):
uo = self.useroptions.forUser(self.getUserId())
uo.setOption(optionkey, optionval)
return "success"
def api_setuseroptionfor(self, userid, optionkey, optionval):
if cherrypy.session['admin']:
uo = self.useroptions.forUser(userid)
uo.setOption(optionkey, optionval)
return "success"
else:
return "error: not permitted. Only admins can change other users options"
def api_fetchalbumarturls(self, searchterm):
if not cherrypy.session['admin']:
raise cherrypy.HTTPError(401, 'Unauthorized')
_save_and_release_session()
fetcher = albumartfetcher.AlbumArtFetcher()
imgurls = fetcher.fetchurls(searchterm)
# show no more than 10 images
return imgurls[:min(len(imgurls), 10)]
def api_albumart_set(self, directory, imageurl):
if not cherrypy.session['admin']:
raise cherrypy.HTTPError(401, 'Unauthorized')
b64imgpath = albumArtFilePath(directory)
fetcher = albumartfetcher.AlbumArtFetcher()
data, header = fetcher.retrieveData(imageurl)
self.albumartcache_save(b64imgpath, data)
    def api_fetchalbumart(self, directory):
        """Serve album art for *directory* (or for a file's parent dir).

        Resolution order: image embedded in the file's tag (when the
        path is a file), cached image, image found inside the folder,
        online fetch (when enabled). Falls back to a 302 redirect to a
        generic folder icon.
        """
        _save_and_release_session()
        default_folder_image = "../res/img/folder.png"
        log.i('Fetching album art for: %s' % directory)
        filepath = os.path.join(cherry.config['media.basedir'], directory)
        if os.path.isfile(filepath):
            # if the given path is a file, try to get the image from ID3
            tag = TinyTag.get(filepath, image=True)
            image_data = tag.get_image()
            if image_data:
                log.d('Image found in tag.')
                header = {'Content-Type': 'image/jpg', 'Content-Length': len(image_data)}
                cherrypy.response.headers.update(header)
                return image_data
            else:
                # if the file does not contain an image, display the image of the
                # parent directory
                directory = os.path.dirname(directory)
        #try getting a cached album art image
        b64imgpath = albumArtFilePath(directory)
        img_data = self.albumartcache_load(b64imgpath)
        if img_data:
            cherrypy.response.headers["Content-Length"] = len(img_data)
            return img_data
        #try getting album art inside local folder
        fetcher = albumartfetcher.AlbumArtFetcher()
        localpath = os.path.join(cherry.config['media.basedir'], directory)
        header, data, resized = fetcher.fetchLocal(localpath)
        if header:
            if resized:
                #cache resized image for next time
                self.albumartcache_save(b64imgpath, data)
            cherrypy.response.headers.update(header)
            return data
        elif cherry.config['media.fetch_album_art']:
            #fetch album art from online source
            try:
                foldername = os.path.basename(directory)
                keywords = foldername
                log.i(_("Fetching album art for keywords {keywords!r}").format(keywords=keywords))
                header, data = fetcher.fetch(keywords)
                if header:
                    cherrypy.response.headers.update(header)
                    self.albumartcache_save(b64imgpath, data)
                    return data
                else:
                    # albumart fetcher failed, so we serve a standard image
                    raise cherrypy.HTTPRedirect(default_folder_image, 302)
            except:
                # NOTE(review): deliberate bare except -- any fetch error
                # degrades to the default image (the redirect raised in
                # the try block is caught here too and re-raised anew)
                # albumart fetcher threw exception, so we serve a standard image
                raise cherrypy.HTTPRedirect(default_folder_image, 302)
        else:
            # no local album art found, online fetching deactivated, show default
            raise cherrypy.HTTPRedirect(default_folder_image, 302)
    api_fetchalbumart.noauth = True
    api_fetchalbumart.binary = True
def albumartcache_load(self, imgb64path):
if os.path.exists(imgb64path):
with open(imgb64path, 'rb') as f:
return f.read()
def albumartcache_save(self, path, data):
with open(path, 'wb') as f:
f.write(data)
def api_compactlistdir(self, directory, filterstr=None):
try:
files_to_list = self.model.listdir(directory, filterstr)
except ValueError:
raise cherrypy.HTTPError(400, 'Bad Request')
return [entry.to_dict() for entry in files_to_list]
def api_listdir(self, directory):
try:
return [entry.to_dict() for entry in self.model.listdir(directory)]
except ValueError:
raise cherrypy.HTTPError(400, 'Bad Request')
def api_search(self, searchstring):
if not searchstring.strip():
jsonresults = '[]'
else:
with Performance(_('processing whole search request')):
searchresults = self.model.search(searchstring.strip())
with Performance(_('rendering search results as json')):
jsonresults = [entry.to_dict() for entry in searchresults]
return jsonresults
def api_rememberplaylist(self, playlist):
cherrypy.session['playlist'] = playlist
def api_saveplaylist(self, playlist, public, playlistname, overwrite=False):
res = self.playlistdb.savePlaylist(
userid=self.getUserId(),
public=1 if public else 0,
playlist=playlist,
playlisttitle=playlistname,
overwrite=overwrite)
if res == "success":
return res
else:
raise cherrypy.HTTPError(400, res)
def api_deleteplaylist(self, playlistid):
res = self.playlistdb.deletePlaylist(playlistid,
self.getUserId(),
override_owner=False)
if res == "success":
return res
else:
# not the ideal status code but we don't know the actual
# cause without parsing res
raise cherrypy.HTTPError(400, res)
def api_loadplaylist(self, playlistid):
return [entry.to_dict() for entry in self.playlistdb.loadPlaylist(
playlistid=playlistid,
userid=self.getUserId()
)]
def api_generaterandomplaylist(self):
return [entry.to_dict() for entry in self.model.randomMusicEntries(50)]
def api_changeplaylist(self, plid, attribute, value):
if attribute == 'public':
is_valid = type(value) == bool and type(plid) == int
if is_valid:
return self.playlistdb.setPublic(userid=self.getUserId(),
plid=plid,
public=value)
    def api_getmotd(self):
        """Return a 'message of the day' payload for the client.

        For admins with update notification enabled: pending update info
        (newest version, features, fixes) when newer releases exist.
        Otherwise: a random piece of wisdom from the model.
        """
        if cherrypy.session['admin'] and cherry.config['general.update_notification']:
            _save_and_release_session()
            new_versions = self.model.check_for_updates()
            if new_versions:
                newest_version = new_versions[0]['version']
                features = []
                fixes = []
                # changelog entries are tagged with a textual prefix
                for version in new_versions:
                    for update in version['features']:
                        if update.startswith('FEATURE:'):
                            features.append(update[len('FEATURE:'):])
                        elif update.startswith('FIX:'):
                            fixes.append(update[len('FIX:'):])
                        elif update.startswith('FIXED:'):
                            fixes.append(update[len('FIXED:'):])
                retdata = {'type': 'update', 'data': {}}
                retdata['data']['version'] = newest_version
                retdata['data']['features'] = features
                retdata['data']['fixes'] = fixes
                return retdata
        return {'type': 'wisdom', 'data': self.model.motd()}
def api_restoreplaylist(self):
session_playlist = cherrypy.session.get('playlist', [])
return session_playlist
def api_getplayables(self):
"""DEPRECATED"""
return json.dumps(cherry.config['media.playable'])
def api_getuserlist(self):
if cherrypy.session['admin']:
userlist = self.userdb.getUserList()
for user in userlist:
if user['id'] == cherrypy.session['userid']:
user['deletable'] = False
user_options = self.useroptions.forUser(user['id'])
t = user_options.getOptionValue('last_time_online')
may_download = user_options.getOptionValue('media.may_download')
user['last_time_online'] = t
user['may_download'] = may_download
sortfunc = lambda user: user['last_time_online']
userlist = sorted(userlist, key=sortfunc, reverse=True)
return json.dumps({'time': int(time.time()),
'userlist': userlist})
else:
return json.dumps({'time': 0, 'userlist': []})
def api_adduser(self, username, password, isadmin):
if cherrypy.session['admin']:
if self.userdb.addUser(username, password, isadmin):
return 'added new user: %s' % username
else:
return 'error, cannot add new user!' % username
else:
return "You didn't think that would work, did you?"
    def api_userchangepassword(self, oldpassword, newpassword, username=''):
        """Change a password after re-authenticating with the old one.

        With the default empty *username* users change their own
        password; admins may name another user, but must still supply
        that user's current password. Raises 403 when re-authentication
        fails or the caller is neither the owner nor an admin.
        """
        isself = username == ''
        if isself:
            username = cherrypy.session['username']
        # re-authenticate before any change, regardless of admin status
        authed_user = self.userdb.auth(username, oldpassword)
        is_authenticated = userdb.User.nobody() != authed_user
        if not is_authenticated:
            raise cherrypy.HTTPError(403, "Forbidden")
        if isself or cherrypy.session['admin']:
            return self.userdb.changePassword(username, newpassword)
        else:
            raise cherrypy.HTTPError(403, "Forbidden")
def api_userdelete(self, userid):
is_self = cherrypy.session['userid'] == userid
if cherrypy.session['admin'] and not is_self:
deleted = self.userdb.deleteUser(userid)
return 'success' if deleted else 'failed'
else:
return "You didn't think that would work, did you?"
def api_showplaylists(self, sortby="created", filterby=''):
playlists = self.playlistdb.showPlaylists(self.getUserId(), filterby)
curr_time = int(time.time())
is_reverse = False
#translate userids to usernames:
for pl in playlists:
pl['username'] = self.userdb.getNameById(pl['userid'])
pl['type'] = 'playlist'
pl['age'] = curr_time - pl['created']
if sortby[0] == '-':
is_reverse = True
sortby = sortby[1:]
if not sortby in ('username', 'age', 'title', 'default'):
sortby = 'created'
if sortby == 'default':
sortby = 'age'
is_reverse = False
playlists = sorted(playlists, key=lambda x: x[sortby], reverse = is_reverse)
return playlists
def api_logout(self):
cherrypy.lib.sessions.expire()
api_logout.no_auth = True
def api_downloadpls(self, plid, hostaddr):
userid = self.getUserId()
pls = self.playlistdb.createPLS(plid=plid, userid=userid, addrstr=hostaddr)
name = self.playlistdb.getName(plid, userid)
if pls and name:
return self.serve_string_as_file(pls, name+'.pls')
api_downloadpls.binary = True
def api_downloadm3u(self, plid, hostaddr):
userid = self.getUserId()
pls = self.playlistdb.createM3U(plid=plid, userid=userid, addrstr=hostaddr)
name = self.playlistdb.getName(plid, userid)
if pls and name:
return self.serve_string_as_file(pls, name+'.m3u')
api_downloadm3u.binary = True
    def export_playlists(self, format, all=False, hostaddr=''):
        """Serve the user's playlists as a zip of .m3u or .pls files.

        format: 'm3u' or 'pls' (case-insensitive); anything else is 400.
        all: also include other users' public playlists, placed in a
        subfolder named after their owner.
        hostaddr: host prefix used for the track urls in the files.

        Raises 401 without a session and 404 when no playlists exist.
        """
        userid = self.getUserId()
        if not userid:
            raise cherrypy.HTTPError(401, _("Please log in"))
        # normalize the host prefix and append the configured root path
        hostaddr = (hostaddr.strip().rstrip('/') + cherry.config['server.rootpath']).rstrip('/')
        format = format.lower()
        if format == 'm3u':
            filemaker = self.playlistdb.createM3U
        elif format == 'pls':
            filemaker = self.playlistdb.createPLS
        else:
            raise cherrypy.HTTPError(400,
                _('Unknown playlist format: {format!r}').format(format=format))
        playlists = self.playlistdb.showPlaylists(userid, include_public=all)
        if not playlists:
            raise cherrypy.HTTPError(404, _('No playlists found'))
        with MemoryZipFile() as zip:
            for pl in playlists:
                plid = pl['plid']
                plstr = filemaker(plid=plid, userid=userid, addrstr=hostaddr)
                name = self.playlistdb.getName(plid, userid) + '.' + format
                if not pl['owner']:
                    # foreign public playlists go into a per-owner
                    # subfolder to avoid name clashes
                    username = self.userdb.getNameById(pl['userid'])
                    name = username + '/' + name
                zip.writestr(name, plstr)
        zipmime = 'application/x-zip-compressed'
        zipname = 'attachment; filename="playlists.zip"'
        cherrypy.response.headers["Content-Type"] = zipmime
        cherrypy.response.headers['Content-Disposition'] = zipname
        return zip.getbytes()
    export_playlists.exposed = True
def api_getsonginfo(self, path):
basedir = cherry.config['media.basedir']
abspath = os.path.join(basedir, path)
return json.dumps(metainfo.getSongInfo(abspath).dict())
def api_getencoders(self):
return json.dumps(audiotranscode.getEncoders())
def api_getdecoders(self):
return json.dumps(audiotranscode.getDecoders())
def api_transcodingenabled(self):
return json.dumps(cherry.config['media.transcode'])
def api_updatedb(self):
self.model.updateLibrary()
return 'success'
def api_getconfiguration(self):
clientconfigkeys = {
'transcodingenabled': cherry.config['media.transcode'],
'fetchalbumart': cherry.config['media.fetch_album_art'],
'isadmin': cherrypy.session['admin'],
'username': cherrypy.session['username'],
'servepath': 'serve/',
'transcodepath': 'trans/',
'auto_login': self.autoLoginActive(),
'version': cherry.REPO_VERSION or cherry.VERSION,
}
if cherry.config['media.transcode']:
decoders = list(self.model.transcoder.available_decoder_formats())
clientconfigkeys['getdecoders'] = decoders
encoders = list(self.model.transcoder.available_encoder_formats())
clientconfigkeys['getencoders'] = encoders
else:
clientconfigkeys['getdecoders'] = []
clientconfigkeys['getencoders'] = []
return clientconfigkeys
def serve_string_as_file(self, string, filename):
content_disposition = 'attachment; filename="'+filename+'"'
cherrypy.response.headers["Content-Type"] = "application/x-download"
cherrypy.response.headers["Content-Disposition"] = content_disposition
return codecs.encode(string, "UTF-8")
def _save_and_release_session():
    """Save the session, then release its lock for the rest of the request.

    Workaround for FileSessions in CherryPy >= 3.3, where session locks
    cannot be released explicitly because (1) FileSession.save() asserts
    that the session is locked and (2) SessionTool always hooks
    sessions.save before the response is finalized. Removing that hook
    lets a controller release the session early.
    See https://github.com/devsnd/cherrymusic/issues/483
    """
    cherrypy.session.save()
    hook_list = cherrypy.serving.request.hooks['before_finalize']
    save_hook = cherrypy.lib.sessions.save
    # there's likely only one hook, since a 2nd call to save would always
    # fail; but let's be safe, and block all calls to save :)
    hook_list[:] = [hook for hook in hook_list if hook.callback is not save_hook]
| xss | {
"code": [
" if '/../' in f:"
],
"line_no": [
303
]
} | {
"code": [
" if '/../' in f or f.startswith('../'):",
" if os.path.isabs(f):"
],
"line_no": [
304,
307
]
} |
import os # shouldn't have to list any folder in the future!
import json
import .cherrypy
import codecs
import sys
try:
from urllib.parse import unquote
except ImportError:
from backport.urllib.parse import unquote
try:
from urllib import parse
except ImportError:
from backport.urllib import parse
import audiotranscode
from tinytag import TinyTag
from cherrymusicserver import .userdb
from cherrymusicserver import log
from cherrymusicserver import albumartfetcher
from cherrymusicserver import service
from cherrymusicserver.pathprovider import readRes
from cherrymusicserver.pathprovider import albumArtFilePath
import cherrymusicserver as cherry
import cherrymusicserver.metainfo as metainfo
from cherrymusicserver.util import Performance, MemoryZipFile
from cherrymusicserver.ext import zipstream
import .time
VAR_0 = True
@service.user(model='cherrymodel', playlistdb='playlist',
useroptions='useroptions', userdb='users')
class CLASS_0(object):
def __init__(self, VAR_1):
self.config = VAR_1
VAR_42 = 'res/dist/main.html'
VAR_43 = 'res/login.html'
VAR_44 = 'res/VAR_48.html'
self.mainpage = readRes(VAR_42)
self.loginpage = readRes(VAR_43)
self.firstrunpage = readRes(VAR_44)
self.handlers = {
'search': self.api_search,
'rememberplaylist': self.api_rememberplaylist,
'saveplaylist': self.api_saveplaylist,
'loadplaylist': self.api_loadplaylist,
'generaterandomplaylist': self.api_generaterandomplaylist,
'deleteplaylist': self.api_deleteplaylist,
'getmotd': self.api_getmotd,
'restoreplaylist': self.api_restoreplaylist,
'getplayables': self.api_getplayables,
'getuserlist': self.api_getuserlist,
'adduser': self.api_adduser,
'userdelete': self.api_userdelete,
'userchangepassword': self.api_userchangepassword,
'showplaylists': self.api_showplaylists,
'logout': self.api_logout,
'downloadpls': self.api_downloadpls,
'downloadm3u': self.api_downloadm3u,
'getsonginfo': self.api_getsonginfo,
'getencoders': self.api_getencoders,
'getdecoders': self.api_getdecoders,
'transcodingenabled': self.api_transcodingenabled,
'updatedb': self.api_updatedb,
'getconfiguration': self.api_getconfiguration,
'compactlistdir': self.api_compactlistdir,
'listdir': self.api_listdir,
'fetchalbumart': self.api_fetchalbumart,
'fetchalbumarturls': self.api_fetchalbumarturls,
'albumart_set': self.api_albumart_set,
'heartbeat': self.api_heartbeat,
'getuseroptions': self.api_getuseroptions,
'setuseroption': self.api_setuseroption,
'changeplaylist': self.api_changeplaylist,
'downloadcheck': self.api_downloadcheck,
'setuseroptionfor': self.api_setuseroptionfor,
}
def FUNC_1(self, VAR_2):
return parse.urlparse(VAR_2).scheme == 'https'
def FUNC_2(self, VAR_3=False):
VAR_45 = parse.urlparse(VAR_53.url()).netloc
VAR_46 = self.issecure(VAR_53.url())
VAR_47 = cherry.config['server.ssl_enabled']
if VAR_47 and not VAR_46:
log.d(_('Not secure, redirecting...'))
VAR_88 = VAR_45[:ipAndPort.rindex(':')]
VAR_2 = 'https://' + VAR_88 + ':' + str(cherry.config['server.ssl_port'])
if VAR_3:
raise VAR_53.HTTPRedirect(VAR_2, 302)
else:
VAR_2 = 'http://' + VAR_45
return VAR_2
def FUNC_3(self, *VAR_4, **VAR_5):
self.getBaseUrl(VAR_3=True)
VAR_48 = 0 == self.userdb.getUserCount()
VAR_49 = self.mainpage #generated main.html from devel.html
if 'devel' in VAR_5:
VAR_49 = readRes('res/devel.html')
self.loginpage = readRes('res/login.html')
self.firstrunpage = readRes('res/VAR_48.html')
if 'login' in VAR_5:
VAR_6 = VAR_5.get('username', '')
VAR_7 = VAR_5.get('password', '')
VAR_89 = VAR_5.get('login', '')
if VAR_89 == 'login':
self.session_auth(VAR_6, VAR_7)
if VAR_53.session['username']:
VAR_6 = VAR_53.session['username']
log.i(_('user {VAR_81} just logged in.').format(VAR_81=VAR_6))
elif VAR_89 == 'create admin user':
if VAR_48:
if VAR_6.strip() and VAR_7.strip():
self.userdb.addUser(VAR_6, VAR_7, True)
self.session_auth(VAR_6, VAR_7)
return VAR_49
else:
return "No, you can't."
if VAR_48:
return self.firstrunpage
else:
if self.isAuthorized():
return VAR_49
else:
return self.loginpage
FUNC_3.exposed = True
def FUNC_4(self):
try:
VAR_90 = VAR_53.session.get('username', None)
VAR_91 = VAR_53.session.get('userid', -1)
VAR_92 = self.userdb.getNameById(VAR_91)
except (UnicodeDecodeError, ValueError) as e:
log.w(_('''
Dropping VAR_37 sessions! Try not to change between python 2 and 3,
everybody has to relogin now.'''))
VAR_53.session.delete()
VAR_90 = None
if VAR_90 is None:
if self.autoLoginActive():
VAR_53.session['username'] = self.userdb.getNameById(1)
VAR_53.session['userid'] = 1
VAR_53.session['admin'] = True
return True
else:
return False
elif VAR_90 != VAR_92:
self.api_logout(VAR_12=None)
return False
return True
def FUNC_5(self):
VAR_50 = VAR_53.request.remote.ip in ('127.0.0.1', '::1')
if VAR_50 and cherry.config['server.localhost_auto_login']:
return True
return False
def FUNC_6(self, VAR_6, VAR_7):
VAR_51 = self.userdb.auth(VAR_6, VAR_7)
VAR_52 = cherry.config['server.permit_remote_admin_login']
VAR_50 = VAR_53.request.remote.ip in ('127.0.0.1', '::1')
if not VAR_50 and VAR_51.isadmin and not VAR_52:
log.i(_('Rejected remote admin login from VAR_51: {VAR_81}').format(VAR_81=VAR_51.name))
VAR_51 = userdb.User.nobody()
VAR_53.session['username'] = VAR_51.name
VAR_53.session['userid'] = VAR_51.uid
VAR_53.session['admin'] = VAR_51.isadmin
def FUNC_7(self):
try:
return VAR_53.session['userid']
except KeyError:
VAR_53.lib.sessions.expire()
VAR_53.HTTPRedirect(VAR_53.url(), 302)
return ''
def FUNC_8(self, VAR_8, *VAR_9, **VAR_10):
if not self.isAuthorized():
raise VAR_53.HTTPRedirect(self.getBaseUrl(), 302)
VAR_53.session.release_lock()
if cherry.config['media.transcode'] and VAR_9:
bitrate = VAR_10.pop('bitrate', None) or None # catch empty strings
if VAR_93:
try:
VAR_93 = max(0, int(VAR_93)) or None # None if < 1
except (TypeError, ValueError):
raise VAR_53.HTTPError(400, "Bad query: "
"bitrate ({0!r}) must be an integer".format(str(VAR_93)))
VAR_9 = os.path.sep.join(VAR_9)
if sys.version_info < (3, 0): # workaround for #327 (VAR_53 issue)
VAR_9 = path.decode('utf-8') # make it work with non-ascii
else:
VAR_9 = codecs.decode(codecs.encode(VAR_9, 'latin1'), 'utf-8')
VAR_94 = os.path.join(cherry.config['media.basedir'], VAR_9)
VAR_95 = int(VAR_10.pop('starttime', 0))
VAR_96 = audiotranscode.AudioTranscode()
VAR_97 = audiotranscode.mime_type(VAR_8)
VAR_53.response.headers["Content-Type"] = VAR_97
try:
return VAR_96.transcode_stream(VAR_94, VAR_8,
VAR_93=bitrate, VAR_95=starttime)
except (audiotranscode.TranscodeError, IOError) as e:
raise VAR_53.HTTPError(404, e.value)
FUNC_8.exposed = True
FUNC_8._cp_config = {'response.stream': True}
def FUNC_9(self, *VAR_4, **VAR_5):
VAR_54 = VAR_4[0] if VAR_4 else ''
if not VAR_54 in self.handlers:
return "Error: no such VAR_54. '%s'" % VAR_54
VAR_55 = self.handlers[VAR_54]
VAR_56 = not ('noauth' in dir(VAR_55) and VAR_55.noauth)
if VAR_56 and not self.isAuthorized():
raise VAR_53.HTTPError(401, 'Unauthorized')
VAR_57 = {}
if 'data' in VAR_5:
VAR_57 = json.loads(VAR_5['data'])
VAR_58 = ('binary' in dir(VAR_55) and VAR_55.binary)
if VAR_58:
return VAR_55(**VAR_57)
else:
return json.dumps({'data': VAR_55(**VAR_57)})
FUNC_9.exposed = True
def FUNC_10(self, VAR_11):
if not VAR_53.session['admin']:
VAR_62 = self.useroptions.forUser(self.getUserId())
if not VAR_62.getOptionValue('media.may_download'):
return 'not_permitted'
for f in VAR_11:
if '/../' in f:
return 'invalid_file'
VAR_59 = cherry.config['media.maximum_download_size']
try:
if self.model.file_size_within_limit(VAR_11, VAR_59):
return 'ok'
else:
return 'too_big'
except OSError as e: # use OSError for python2 compatibility
return str(e)
def FUNC_11(self, VAR_11):
VAR_60 = self.download_check_files(VAR_11)
if VAR_60 == 'not_permitted':
return """You are not allowed to FUNC_12 files."""
elif VAR_60 == 'invalid_file':
return "Error: invalid VAR_39 found in {list}".format(list=VAR_11)
elif VAR_60 == 'too_big':
VAR_59 = cherry.config['media.maximum_download_size']
return """Can't FUNC_12: Playlist is bigger than {maxsize} mB.
The server administrator can change this configuration.
""".format(maxsize=VAR_59/1024/1024)
elif VAR_60 == 'ok':
return VAR_60
else:
VAR_125 = "Error VAR_60 check for FUNC_12: {VAR_60!r}".format(VAR_60=status)
log.e(VAR_125)
return VAR_125
def FUNC_12(self, VAR_12):
if not self.isAuthorized():
raise VAR_53.HTTPError(401, 'Unauthorized')
VAR_11 = [VAR_69 for VAR_69 in json.loads(unquote(VAR_12))]
VAR_61 = self.download_check_files(VAR_11)
if VAR_61 == 'ok':
FUNC_0()
VAR_82 = 'application/x-zip-compressed'
VAR_53.response.headers["Content-Type"] = VAR_82
VAR_83 = 'attachment; VAR_39="music.zip"'
VAR_53.response.headers['Content-Disposition'] = VAR_83
VAR_84 = cherry.config['media.basedir']
VAR_98 = [os.path.join(VAR_84, f) for f in VAR_11]
return zipstream.ZipStream(VAR_98)
else:
return VAR_61
FUNC_12.exposed = True
FUNC_12._cp_config = {'response.stream': True}
def FUNC_13(self):
VAR_62 = self.useroptions.forUser(self.getUserId())
VAR_63 = VAR_62.getChangableOptions()
if VAR_53.session['admin']:
VAR_63['media'].update({'may_download': True})
else:
VAR_63['media'].update({'may_download': VAR_62.getOptionValue('media.may_download')})
return VAR_63
def FUNC_14(self):
VAR_62 = self.useroptions.forUser(self.getUserId())
VAR_62.setOption('last_time_online', int(time.time()))
def FUNC_15(self, VAR_13, VAR_14):
VAR_62 = self.useroptions.forUser(self.getUserId())
VAR_62.setOption(VAR_13, VAR_14)
return "success"
def FUNC_16(self, VAR_15, VAR_13, VAR_14):
if VAR_53.session['admin']:
VAR_62 = self.useroptions.forUser(VAR_15)
VAR_62.setOption(VAR_13, VAR_14)
return "success"
else:
return "error: not permitted. Only admins can change other users options"
def FUNC_17(self, VAR_16):
if not VAR_53.session['admin']:
raise VAR_53.HTTPError(401, 'Unauthorized')
FUNC_0()
VAR_64 = albumartfetcher.AlbumArtFetcher()
VAR_65 = VAR_64.fetchurls(VAR_16)
return VAR_65[:min(len(VAR_65), 10)]
def FUNC_18(self, VAR_17, VAR_18):
if not VAR_53.session['admin']:
raise VAR_53.HTTPError(401, 'Unauthorized')
VAR_66 = albumArtFilePath(VAR_17)
VAR_64 = albumartfetcher.AlbumArtFetcher()
VAR_20, VAR_67 = VAR_64.retrieveData(VAR_18)
self.albumartcache_save(VAR_66, VAR_20)
def FUNC_19(self, VAR_17):
FUNC_0()
VAR_68 = "../VAR_73/img/folder.png"
log.i('Fetching album art for: %s' % VAR_17)
VAR_69 = os.path.join(cherry.config['media.basedir'], VAR_17)
if os.path.isfile(VAR_69):
VAR_99 = TinyTag.get(VAR_69, image=True)
VAR_100 = VAR_99.get_image()
if VAR_100:
log.d('Image found in VAR_99.')
VAR_67 = {'Content-Type': 'image/jpg', 'Content-Length': len(VAR_100)}
VAR_53.response.headers.update(VAR_67)
return VAR_100
else:
directory = os.path.dirname(VAR_17)
VAR_66 = albumArtFilePath(VAR_17)
VAR_70 = self.albumartcache_load(VAR_66)
if VAR_70:
VAR_53.response.headers["Content-Length"] = len(VAR_70)
return VAR_70
VAR_64 = albumartfetcher.AlbumArtFetcher()
VAR_71 = os.path.join(cherry.config['media.basedir'], VAR_17)
VAR_67, VAR_20, VAR_72 = VAR_64.fetchLocal(VAR_71)
if VAR_67:
if VAR_72:
self.albumartcache_save(VAR_66, VAR_20)
VAR_53.response.headers.update(VAR_67)
return VAR_20
elif cherry.config['media.fetch_album_art']:
try:
VAR_123 = os.path.basename(VAR_17)
VAR_124 = VAR_123
log.i(_("Fetching album art for VAR_124 {keywords!r}").format(VAR_124=keywords))
VAR_67, VAR_20 = VAR_64.fetch(VAR_124)
if VAR_67:
VAR_53.response.headers.update(VAR_67)
self.albumartcache_save(VAR_66, VAR_20)
return VAR_20
else:
raise VAR_53.HTTPRedirect(VAR_68, 302)
except:
raise VAR_53.HTTPRedirect(VAR_68, 302)
else:
raise VAR_53.HTTPRedirect(VAR_68, 302)
FUNC_19.noauth = True
FUNC_19.binary = True
def FUNC_20(self, VAR_19):
if os.path.exists(VAR_19):
with open(VAR_19, 'rb') as f:
return f.read()
def FUNC_21(self, VAR_9, VAR_20):
with open(VAR_9, 'wb') as f:
f.write(VAR_20)
def FUNC_22(self, VAR_17, VAR_21=None):
try:
VAR_101 = self.model.listdir(VAR_17, VAR_21)
except ValueError:
raise VAR_53.HTTPError(400, 'Bad Request')
return [entry.to_dict() for entry in VAR_101]
def FUNC_23(self, VAR_17):
try:
return [entry.to_dict() for entry in self.model.listdir(VAR_17)]
except ValueError:
raise VAR_53.HTTPError(400, 'Bad Request')
def FUNC_24(self, VAR_22):
if not VAR_22.strip():
VAR_102 = '[]'
else:
with Performance(_('processing whole search request')):
VAR_114 = self.model.search(VAR_22.strip())
with Performance(_('rendering search results as json')):
VAR_102 = [entry.to_dict() for entry in VAR_114]
return VAR_102
def FUNC_25(self, VAR_23):
VAR_53.session['playlist'] = VAR_23
def FUNC_26(self, VAR_23, VAR_24, VAR_25, VAR_26=False):
VAR_73 = self.playlistdb.savePlaylist(
VAR_15=self.getUserId(),
VAR_24=1 if VAR_24 else 0,
VAR_23=playlist,
playlisttitle=VAR_25,
VAR_26=overwrite)
if VAR_73 == "success":
return VAR_73
else:
raise VAR_53.HTTPError(400, VAR_73)
def FUNC_27(self, VAR_27):
VAR_73 = self.playlistdb.deletePlaylist(VAR_27,
self.getUserId(),
override_owner=False)
if VAR_73 == "success":
return VAR_73
else:
raise VAR_53.HTTPError(400, VAR_73)
def FUNC_28(self, VAR_27):
return [entry.to_dict() for entry in self.playlistdb.loadPlaylist(
VAR_27=playlistid,
VAR_15=self.getUserId()
)]
def FUNC_29(self):
return [entry.to_dict() for entry in self.model.randomMusicEntries(50)]
def FUNC_30(self, VAR_28, VAR_29, VAR_12):
if VAR_29 == 'public':
VAR_103 = type(VAR_12) == bool and type(VAR_28) == int
if VAR_103:
return self.playlistdb.setPublic(VAR_15=self.getUserId(),
VAR_28=plid,
VAR_24=VAR_12)
def FUNC_31(self):
if VAR_53.session['admin'] and cherry.config['general.update_notification']:
FUNC_0()
VAR_104 = self.model.check_for_updates()
if VAR_104:
VAR_115 = VAR_104[0]['version']
VAR_116 = []
VAR_117 = []
for version in VAR_104:
for update in version['features']:
if update.startswith('FEATURE:'):
VAR_116.append(update[len('FEATURE:'):])
elif update.startswith('FIX:'):
VAR_117.append(update[len('FIX:'):])
elif update.startswith('FIXED:'):
VAR_117.append(update[len('FIXED:'):])
VAR_118 = {'type': 'update', 'data': {}}
VAR_118['data']['version'] = VAR_115
VAR_118['data']['features'] = VAR_116
VAR_118['data']['fixes'] = VAR_117
return VAR_118
return {'type': 'wisdom', 'data': self.model.motd()}
def FUNC_32(self):
VAR_74 = VAR_53.session.get('playlist', [])
return VAR_74
def FUNC_33(self):
return json.dumps(cherry.config['media.playable'])
def FUNC_34(self):
if VAR_53.session['admin']:
VAR_105 = self.userdb.getUserList()
for VAR_51 in VAR_105:
if VAR_51['id'] == VAR_53.session['userid']:
VAR_51['deletable'] = False
VAR_119 = self.useroptions.forUser(VAR_51['id'])
VAR_120 = VAR_119.getOptionValue('last_time_online')
VAR_121 = VAR_119.getOptionValue('media.may_download')
VAR_51['last_time_online'] = VAR_120
VAR_51['may_download'] = VAR_121
VAR_106 = lambda VAR_51: VAR_51['last_time_online']
VAR_105 = sorted(VAR_105, key=VAR_106, reverse=True)
return json.dumps({'time': int(time.time()),
'userlist': VAR_105})
else:
return json.dumps({'time': 0, 'userlist': []})
def FUNC_35(self, VAR_6, VAR_7, VAR_30):
if VAR_53.session['admin']:
if self.userdb.addUser(VAR_6, VAR_7, VAR_30):
return 'added new VAR_51: %s' % VAR_6
else:
return 'error, cannot add new VAR_51!' % VAR_6
else:
return "You didn't think that would work, did you?"
def FUNC_36(self, VAR_31, VAR_32, VAR_6=''):
VAR_75 = VAR_6 == ''
if VAR_75:
VAR_6 = VAR_53.session['username']
VAR_107 = self.userdb.auth(VAR_6, VAR_31)
VAR_108 = userdb.User.nobody() != VAR_107
if not VAR_108:
raise VAR_53.HTTPError(403, "Forbidden")
if VAR_75 or VAR_53.session['admin']:
return self.userdb.changePassword(VAR_6, VAR_32)
else:
raise VAR_53.HTTPError(403, "Forbidden")
def FUNC_37(self, VAR_15):
VAR_76 = VAR_53.session['userid'] == VAR_15
if VAR_53.session['admin'] and not VAR_76:
VAR_109 = self.userdb.deleteUser(VAR_15)
return 'success' if VAR_109 else 'failed'
else:
return "You didn't think that would work, did you?"
def FUNC_38(self, VAR_33="created", VAR_34=''):
VAR_77 = self.playlistdb.showPlaylists(self.getUserId(), VAR_34)
VAR_78 = int(time.time())
VAR_79 = False
for VAR_110 in VAR_77:
VAR_110['username'] = self.userdb.getNameById(VAR_110['userid'])
VAR_110['type'] = 'playlist'
VAR_110['age'] = VAR_78 - VAR_110['created']
if VAR_33[0] == '-':
VAR_79 = True
VAR_33 = VAR_33[1:]
if not VAR_33 in ('username', 'age', 'title', 'default'):
VAR_33 = 'created'
if VAR_33 == 'default':
VAR_33 = 'age'
VAR_79 = False
VAR_77 = sorted(VAR_77, key=lambda x: x[VAR_33], reverse = VAR_79)
return VAR_77
def FUNC_39(self):
VAR_53.lib.sessions.expire()
FUNC_39.no_auth = True
def FUNC_40(self, VAR_28, VAR_35):
VAR_15 = self.getUserId()
VAR_80 = self.playlistdb.createPLS(VAR_28=plid, VAR_15=userid, addrstr=VAR_35)
VAR_81 = self.playlistdb.getName(VAR_28, VAR_15)
if VAR_80 and VAR_81:
return self.serve_string_as_file(VAR_80, VAR_81+'.pls')
FUNC_40.binary = True
def FUNC_41(self, VAR_28, VAR_35):
VAR_15 = self.getUserId()
VAR_80 = self.playlistdb.createM3U(VAR_28=plid, VAR_15=userid, addrstr=VAR_35)
VAR_81 = self.playlistdb.getName(VAR_28, VAR_15)
if VAR_80 and VAR_81:
return self.serve_string_as_file(VAR_80, VAR_81+'.m3u')
FUNC_41.binary = True
def FUNC_42(self, VAR_36, VAR_37=False, VAR_35=''):
VAR_15 = self.getUserId()
if not VAR_15:
raise VAR_53.HTTPError(401, _("Please log in"))
VAR_35 = (hostaddr.strip().rstrip('/') + cherry.config['server.rootpath']).rstrip('/')
VAR_36 = format.lower()
if VAR_36 == 'm3u':
VAR_111 = self.playlistdb.createM3U
elif VAR_36 == 'pls':
VAR_111 = self.playlistdb.createPLS
else:
raise VAR_53.HTTPError(400,
_('Unknown VAR_23 VAR_36: {format!r}').format(VAR_36=format))
VAR_77 = self.playlistdb.showPlaylists(VAR_15, include_public=VAR_37)
if not VAR_77:
raise VAR_53.HTTPError(404, _('No VAR_77 found'))
with MemoryZipFile() as zip:
for VAR_110 in VAR_77:
VAR_28 = VAR_110['plid']
VAR_122 = VAR_111(VAR_28=plid, VAR_15=userid, addrstr=VAR_35)
VAR_81 = self.playlistdb.getName(VAR_28, VAR_15) + '.' + VAR_36
if not VAR_110['owner']:
VAR_6 = self.userdb.getNameById(VAR_110['userid'])
VAR_81 = VAR_6 + '/' + VAR_81
zip.writestr(VAR_81, VAR_122)
VAR_82 = 'application/x-zip-compressed'
VAR_83 = 'attachment; VAR_39="playlists.zip"'
VAR_53.response.headers["Content-Type"] = VAR_82
VAR_53.response.headers['Content-Disposition'] = VAR_83
return zip.getbytes()
FUNC_42.exposed = True
def FUNC_43(self, VAR_9):
VAR_84 = cherry.config['media.basedir']
VAR_85 = os.path.join(VAR_84, VAR_9)
return json.dumps(metainfo.getSongInfo(VAR_85).dict())
def FUNC_44(self):
return json.dumps(audiotranscode.getEncoders())
def FUNC_45(self):
return json.dumps(audiotranscode.getDecoders())
def FUNC_46(self):
return json.dumps(cherry.config['media.transcode'])
def FUNC_47(self):
self.model.updateLibrary()
return 'success'
def FUNC_48(self):
VAR_86 = {
'transcodingenabled': cherry.config['media.transcode'],
'fetchalbumart': cherry.config['media.fetch_album_art'],
'isadmin': VAR_53.session['admin'],
'username': VAR_53.session['username'],
'servepath': 'serve/',
'transcodepath': 'trans/',
'auto_login': self.autoLoginActive(),
'version': cherry.REPO_VERSION or cherry.VERSION,
}
if cherry.config['media.transcode']:
VAR_112 = list(self.model.transcoder.available_decoder_formats())
VAR_86['getdecoders'] = VAR_112
VAR_113 = list(self.model.transcoder.available_encoder_formats())
VAR_86['getencoders'] = VAR_113
else:
VAR_86['getdecoders'] = []
VAR_86['getencoders'] = []
return VAR_86
def FUNC_49(self, VAR_38, VAR_39):
VAR_87 = 'attachment; VAR_39="'+VAR_39+'"'
VAR_53.response.headers["Content-Type"] = "application/x-download"
VAR_53.response.headers["Content-Disposition"] = VAR_87
return codecs.encode(VAR_38, "UTF-8")
def FUNC_0():
VAR_53.session.save()
VAR_40 = VAR_53.serving.request.hooks['before_finalize']
VAR_41 = VAR_53.lib.sessions.save
VAR_40[:] = [h for h in VAR_40 if h.callback is not VAR_41]
|
import os # shouldn't have to list any folder in the future!
import json
import .cherrypy
import codecs
import sys
try:
from urllib.parse import unquote
except ImportError:
from backport.urllib.parse import unquote
try:
from urllib import parse
except ImportError:
from backport.urllib import parse
import audiotranscode
from tinytag import TinyTag
from cherrymusicserver import .userdb
from cherrymusicserver import log
from cherrymusicserver import albumartfetcher
from cherrymusicserver import service
from cherrymusicserver.pathprovider import readRes
from cherrymusicserver.pathprovider import albumArtFilePath
import cherrymusicserver as cherry
import cherrymusicserver.metainfo as metainfo
from cherrymusicserver.util import Performance, MemoryZipFile
from cherrymusicserver.ext import zipstream
import .time
VAR_0 = True
@service.user(model='cherrymodel', playlistdb='playlist',
useroptions='useroptions', userdb='users')
class CLASS_0(object):
def __init__(self, VAR_1):
self.config = VAR_1
VAR_42 = 'res/dist/main.html'
VAR_43 = 'res/login.html'
VAR_44 = 'res/VAR_48.html'
self.mainpage = readRes(VAR_42)
self.loginpage = readRes(VAR_43)
self.firstrunpage = readRes(VAR_44)
self.handlers = {
'search': self.api_search,
'rememberplaylist': self.api_rememberplaylist,
'saveplaylist': self.api_saveplaylist,
'loadplaylist': self.api_loadplaylist,
'generaterandomplaylist': self.api_generaterandomplaylist,
'deleteplaylist': self.api_deleteplaylist,
'getmotd': self.api_getmotd,
'restoreplaylist': self.api_restoreplaylist,
'getplayables': self.api_getplayables,
'getuserlist': self.api_getuserlist,
'adduser': self.api_adduser,
'userdelete': self.api_userdelete,
'userchangepassword': self.api_userchangepassword,
'showplaylists': self.api_showplaylists,
'logout': self.api_logout,
'downloadpls': self.api_downloadpls,
'downloadm3u': self.api_downloadm3u,
'getsonginfo': self.api_getsonginfo,
'getencoders': self.api_getencoders,
'getdecoders': self.api_getdecoders,
'transcodingenabled': self.api_transcodingenabled,
'updatedb': self.api_updatedb,
'getconfiguration': self.api_getconfiguration,
'compactlistdir': self.api_compactlistdir,
'listdir': self.api_listdir,
'fetchalbumart': self.api_fetchalbumart,
'fetchalbumarturls': self.api_fetchalbumarturls,
'albumart_set': self.api_albumart_set,
'heartbeat': self.api_heartbeat,
'getuseroptions': self.api_getuseroptions,
'setuseroption': self.api_setuseroption,
'changeplaylist': self.api_changeplaylist,
'downloadcheck': self.api_downloadcheck,
'setuseroptionfor': self.api_setuseroptionfor,
}
def FUNC_1(self, VAR_2):
return parse.urlparse(VAR_2).scheme == 'https'
def FUNC_2(self, VAR_3=False):
VAR_45 = parse.urlparse(VAR_53.url()).netloc
VAR_46 = self.issecure(VAR_53.url())
VAR_47 = cherry.config['server.ssl_enabled']
if VAR_47 and not VAR_46:
log.d(_('Not secure, redirecting...'))
VAR_88 = VAR_45[:ipAndPort.rindex(':')]
VAR_2 = 'https://' + VAR_88 + ':' + str(cherry.config['server.ssl_port'])
if VAR_3:
raise VAR_53.HTTPRedirect(VAR_2, 302)
else:
VAR_2 = 'http://' + VAR_45
return VAR_2
def FUNC_3(self, *VAR_4, **VAR_5):
self.getBaseUrl(VAR_3=True)
VAR_48 = 0 == self.userdb.getUserCount()
VAR_49 = self.mainpage #generated main.html from devel.html
if 'devel' in VAR_5:
VAR_49 = readRes('res/devel.html')
self.loginpage = readRes('res/login.html')
self.firstrunpage = readRes('res/VAR_48.html')
if 'login' in VAR_5:
VAR_6 = VAR_5.get('username', '')
VAR_7 = VAR_5.get('password', '')
VAR_89 = VAR_5.get('login', '')
if VAR_89 == 'login':
self.session_auth(VAR_6, VAR_7)
if VAR_53.session['username']:
VAR_6 = VAR_53.session['username']
log.i(_('user {VAR_81} just logged in.').format(VAR_81=VAR_6))
elif VAR_89 == 'create admin user':
if VAR_48:
if VAR_6.strip() and VAR_7.strip():
self.userdb.addUser(VAR_6, VAR_7, True)
self.session_auth(VAR_6, VAR_7)
return VAR_49
else:
return "No, you can't."
if VAR_48:
return self.firstrunpage
else:
if self.isAuthorized():
return VAR_49
else:
return self.loginpage
FUNC_3.exposed = True
def FUNC_4(self):
try:
VAR_90 = VAR_53.session.get('username', None)
VAR_91 = VAR_53.session.get('userid', -1)
VAR_92 = self.userdb.getNameById(VAR_91)
except (UnicodeDecodeError, ValueError) as e:
log.w(_('''
Dropping VAR_37 sessions! Try not to change between python 2 and 3,
everybody has to relogin now.'''))
VAR_53.session.delete()
VAR_90 = None
if VAR_90 is None:
if self.autoLoginActive():
VAR_53.session['username'] = self.userdb.getNameById(1)
VAR_53.session['userid'] = 1
VAR_53.session['admin'] = True
return True
else:
return False
elif VAR_90 != VAR_92:
self.api_logout(VAR_12=None)
return False
return True
def FUNC_5(self):
VAR_50 = VAR_53.request.remote.ip in ('127.0.0.1', '::1')
if VAR_50 and cherry.config['server.localhost_auto_login']:
return True
return False
def FUNC_6(self, VAR_6, VAR_7):
VAR_51 = self.userdb.auth(VAR_6, VAR_7)
VAR_52 = cherry.config['server.permit_remote_admin_login']
VAR_50 = VAR_53.request.remote.ip in ('127.0.0.1', '::1')
if not VAR_50 and VAR_51.isadmin and not VAR_52:
log.i(_('Rejected remote admin login from VAR_51: {VAR_81}').format(VAR_81=VAR_51.name))
VAR_51 = userdb.User.nobody()
VAR_53.session['username'] = VAR_51.name
VAR_53.session['userid'] = VAR_51.uid
VAR_53.session['admin'] = VAR_51.isadmin
def FUNC_7(self):
try:
return VAR_53.session['userid']
except KeyError:
VAR_53.lib.sessions.expire()
VAR_53.HTTPRedirect(VAR_53.url(), 302)
return ''
def FUNC_8(self, VAR_8, *VAR_9, **VAR_10):
if not self.isAuthorized():
raise VAR_53.HTTPRedirect(self.getBaseUrl(), 302)
VAR_53.session.release_lock()
if cherry.config['media.transcode'] and VAR_9:
bitrate = VAR_10.pop('bitrate', None) or None # catch empty strings
if VAR_93:
try:
VAR_93 = max(0, int(VAR_93)) or None # None if < 1
except (TypeError, ValueError):
raise VAR_53.HTTPError(400, "Bad query: "
"bitrate ({0!r}) must be an integer".format(str(VAR_93)))
VAR_9 = os.path.sep.join(VAR_9)
if sys.version_info < (3, 0): # workaround for #327 (VAR_53 issue)
VAR_9 = path.decode('utf-8') # make it work with non-ascii
else:
VAR_9 = codecs.decode(codecs.encode(VAR_9, 'latin1'), 'utf-8')
VAR_94 = os.path.join(cherry.config['media.basedir'], VAR_9)
VAR_95 = int(VAR_10.pop('starttime', 0))
VAR_96 = audiotranscode.AudioTranscode()
VAR_97 = audiotranscode.mime_type(VAR_8)
VAR_53.response.headers["Content-Type"] = VAR_97
try:
return VAR_96.transcode_stream(VAR_94, VAR_8,
VAR_93=bitrate, VAR_95=starttime)
except (audiotranscode.TranscodeError, IOError) as e:
raise VAR_53.HTTPError(404, e.value)
FUNC_8.exposed = True
FUNC_8._cp_config = {'response.stream': True}
def FUNC_9(self, *VAR_4, **VAR_5):
VAR_54 = VAR_4[0] if VAR_4 else ''
if not VAR_54 in self.handlers:
return "Error: no such VAR_54. '%s'" % VAR_54
VAR_55 = self.handlers[VAR_54]
VAR_56 = not ('noauth' in dir(VAR_55) and VAR_55.noauth)
if VAR_56 and not self.isAuthorized():
raise VAR_53.HTTPError(401, 'Unauthorized')
VAR_57 = {}
if 'data' in VAR_5:
VAR_57 = json.loads(VAR_5['data'])
VAR_58 = ('binary' in dir(VAR_55) and VAR_55.binary)
if VAR_58:
return VAR_55(**VAR_57)
else:
return json.dumps({'data': VAR_55(**VAR_57)})
FUNC_9.exposed = True
def FUNC_10(self, VAR_11):
if not VAR_53.session['admin']:
VAR_62 = self.useroptions.forUser(self.getUserId())
if not VAR_62.getOptionValue('media.may_download'):
return 'not_permitted'
for f in VAR_11:
if '/../' in f or f.startswith('../'):
return 'invalid_file'
if os.path.isabs(f):
return 'invalid_file'
VAR_59 = cherry.config['media.maximum_download_size']
try:
if self.model.file_size_within_limit(VAR_11, VAR_59):
return 'ok'
else:
return 'too_big'
except OSError as e: # use OSError for python2 compatibility
return str(e)
def FUNC_11(self, VAR_11):
VAR_60 = self.download_check_files(VAR_11)
if VAR_60 == 'not_permitted':
return """You are not allowed to FUNC_12 files."""
elif VAR_60 == 'invalid_file':
return "Error: invalid VAR_39 found in {list}".format(list=VAR_11)
elif VAR_60 == 'too_big':
VAR_59 = cherry.config['media.maximum_download_size']
return """Can't FUNC_12: Playlist is bigger than {maxsize} mB.
The server administrator can change this configuration.
""".format(maxsize=VAR_59/1024/1024)
elif VAR_60 == 'ok':
return VAR_60
else:
VAR_125 = "Error VAR_60 check for FUNC_12: {VAR_60!r}".format(VAR_60=status)
log.e(VAR_125)
return VAR_125
def FUNC_12(self, VAR_12):
if not self.isAuthorized():
raise VAR_53.HTTPError(401, 'Unauthorized')
VAR_11 = [VAR_69 for VAR_69 in json.loads(unquote(VAR_12))]
VAR_61 = self.download_check_files(VAR_11)
if VAR_61 == 'ok':
FUNC_0()
VAR_82 = 'application/x-zip-compressed'
VAR_53.response.headers["Content-Type"] = VAR_82
VAR_83 = 'attachment; VAR_39="music.zip"'
VAR_53.response.headers['Content-Disposition'] = VAR_83
VAR_84 = cherry.config['media.basedir']
VAR_98 = [os.path.join(VAR_84, f) for f in VAR_11]
return zipstream.ZipStream(VAR_98)
else:
return VAR_61
FUNC_12.exposed = True
FUNC_12._cp_config = {'response.stream': True}
def FUNC_13(self):
VAR_62 = self.useroptions.forUser(self.getUserId())
VAR_63 = VAR_62.getChangableOptions()
if VAR_53.session['admin']:
VAR_63['media'].update({'may_download': True})
else:
VAR_63['media'].update({'may_download': VAR_62.getOptionValue('media.may_download')})
return VAR_63
def FUNC_14(self):
VAR_62 = self.useroptions.forUser(self.getUserId())
VAR_62.setOption('last_time_online', int(time.time()))
def FUNC_15(self, VAR_13, VAR_14):
VAR_62 = self.useroptions.forUser(self.getUserId())
VAR_62.setOption(VAR_13, VAR_14)
return "success"
def FUNC_16(self, VAR_15, VAR_13, VAR_14):
if VAR_53.session['admin']:
VAR_62 = self.useroptions.forUser(VAR_15)
VAR_62.setOption(VAR_13, VAR_14)
return "success"
else:
return "error: not permitted. Only admins can change other users options"
def FUNC_17(self, VAR_16):
if not VAR_53.session['admin']:
raise VAR_53.HTTPError(401, 'Unauthorized')
FUNC_0()
VAR_64 = albumartfetcher.AlbumArtFetcher()
VAR_65 = VAR_64.fetchurls(VAR_16)
return VAR_65[:min(len(VAR_65), 10)]
def FUNC_18(self, VAR_17, VAR_18):
if not VAR_53.session['admin']:
raise VAR_53.HTTPError(401, 'Unauthorized')
VAR_66 = albumArtFilePath(VAR_17)
VAR_64 = albumartfetcher.AlbumArtFetcher()
VAR_20, VAR_67 = VAR_64.retrieveData(VAR_18)
self.albumartcache_save(VAR_66, VAR_20)
def FUNC_19(self, VAR_17):
FUNC_0()
VAR_68 = "../VAR_73/img/folder.png"
log.i('Fetching album art for: %s' % VAR_17)
VAR_69 = os.path.join(cherry.config['media.basedir'], VAR_17)
if os.path.isfile(VAR_69):
VAR_99 = TinyTag.get(VAR_69, image=True)
VAR_100 = VAR_99.get_image()
if VAR_100:
log.d('Image found in VAR_99.')
VAR_67 = {'Content-Type': 'image/jpg', 'Content-Length': len(VAR_100)}
VAR_53.response.headers.update(VAR_67)
return VAR_100
else:
directory = os.path.dirname(VAR_17)
VAR_66 = albumArtFilePath(VAR_17)
VAR_70 = self.albumartcache_load(VAR_66)
if VAR_70:
VAR_53.response.headers["Content-Length"] = len(VAR_70)
return VAR_70
VAR_64 = albumartfetcher.AlbumArtFetcher()
VAR_71 = os.path.join(cherry.config['media.basedir'], VAR_17)
VAR_67, VAR_20, VAR_72 = VAR_64.fetchLocal(VAR_71)
if VAR_67:
if VAR_72:
self.albumartcache_save(VAR_66, VAR_20)
VAR_53.response.headers.update(VAR_67)
return VAR_20
elif cherry.config['media.fetch_album_art']:
try:
VAR_123 = os.path.basename(VAR_17)
VAR_124 = VAR_123
log.i(_("Fetching album art for VAR_124 {keywords!r}").format(VAR_124=keywords))
VAR_67, VAR_20 = VAR_64.fetch(VAR_124)
if VAR_67:
VAR_53.response.headers.update(VAR_67)
self.albumartcache_save(VAR_66, VAR_20)
return VAR_20
else:
raise VAR_53.HTTPRedirect(VAR_68, 302)
except:
raise VAR_53.HTTPRedirect(VAR_68, 302)
else:
raise VAR_53.HTTPRedirect(VAR_68, 302)
FUNC_19.noauth = True
FUNC_19.binary = True
def FUNC_20(self, VAR_19):
if os.path.exists(VAR_19):
with open(VAR_19, 'rb') as f:
return f.read()
def FUNC_21(self, VAR_9, VAR_20):
with open(VAR_9, 'wb') as f:
f.write(VAR_20)
def FUNC_22(self, VAR_17, VAR_21=None):
try:
VAR_101 = self.model.listdir(VAR_17, VAR_21)
except ValueError:
raise VAR_53.HTTPError(400, 'Bad Request')
return [entry.to_dict() for entry in VAR_101]
def FUNC_23(self, VAR_17):
try:
return [entry.to_dict() for entry in self.model.listdir(VAR_17)]
except ValueError:
raise VAR_53.HTTPError(400, 'Bad Request')
def FUNC_24(self, VAR_22):
if not VAR_22.strip():
VAR_102 = '[]'
else:
with Performance(_('processing whole search request')):
VAR_114 = self.model.search(VAR_22.strip())
with Performance(_('rendering search results as json')):
VAR_102 = [entry.to_dict() for entry in VAR_114]
return VAR_102
def FUNC_25(self, VAR_23):
VAR_53.session['playlist'] = VAR_23
def FUNC_26(self, VAR_23, VAR_24, VAR_25, VAR_26=False):
VAR_73 = self.playlistdb.savePlaylist(
VAR_15=self.getUserId(),
VAR_24=1 if VAR_24 else 0,
VAR_23=playlist,
playlisttitle=VAR_25,
VAR_26=overwrite)
if VAR_73 == "success":
return VAR_73
else:
raise VAR_53.HTTPError(400, VAR_73)
def FUNC_27(self, VAR_27):
VAR_73 = self.playlistdb.deletePlaylist(VAR_27,
self.getUserId(),
override_owner=False)
if VAR_73 == "success":
return VAR_73
else:
raise VAR_53.HTTPError(400, VAR_73)
def FUNC_28(self, VAR_27):
return [entry.to_dict() for entry in self.playlistdb.loadPlaylist(
VAR_27=playlistid,
VAR_15=self.getUserId()
)]
def FUNC_29(self):
return [entry.to_dict() for entry in self.model.randomMusicEntries(50)]
def FUNC_30(self, VAR_28, VAR_29, VAR_12):
if VAR_29 == 'public':
VAR_103 = type(VAR_12) == bool and type(VAR_28) == int
if VAR_103:
return self.playlistdb.setPublic(VAR_15=self.getUserId(),
VAR_28=plid,
VAR_24=VAR_12)
def FUNC_31(self):
if VAR_53.session['admin'] and cherry.config['general.update_notification']:
FUNC_0()
VAR_104 = self.model.check_for_updates()
if VAR_104:
VAR_115 = VAR_104[0]['version']
VAR_116 = []
VAR_117 = []
for version in VAR_104:
for update in version['features']:
if update.startswith('FEATURE:'):
VAR_116.append(update[len('FEATURE:'):])
elif update.startswith('FIX:'):
VAR_117.append(update[len('FIX:'):])
elif update.startswith('FIXED:'):
VAR_117.append(update[len('FIXED:'):])
VAR_118 = {'type': 'update', 'data': {}}
VAR_118['data']['version'] = VAR_115
VAR_118['data']['features'] = VAR_116
VAR_118['data']['fixes'] = VAR_117
return VAR_118
return {'type': 'wisdom', 'data': self.model.motd()}
def FUNC_32(self):
VAR_74 = VAR_53.session.get('playlist', [])
return VAR_74
def FUNC_33(self):
return json.dumps(cherry.config['media.playable'])
def FUNC_34(self):
if VAR_53.session['admin']:
VAR_105 = self.userdb.getUserList()
for VAR_51 in VAR_105:
if VAR_51['id'] == VAR_53.session['userid']:
VAR_51['deletable'] = False
VAR_119 = self.useroptions.forUser(VAR_51['id'])
VAR_120 = VAR_119.getOptionValue('last_time_online')
VAR_121 = VAR_119.getOptionValue('media.may_download')
VAR_51['last_time_online'] = VAR_120
VAR_51['may_download'] = VAR_121
VAR_106 = lambda VAR_51: VAR_51['last_time_online']
VAR_105 = sorted(VAR_105, key=VAR_106, reverse=True)
return json.dumps({'time': int(time.time()),
'userlist': VAR_105})
else:
return json.dumps({'time': 0, 'userlist': []})
def FUNC_35(self, VAR_6, VAR_7, VAR_30):
if VAR_53.session['admin']:
if self.userdb.addUser(VAR_6, VAR_7, VAR_30):
return 'added new VAR_51: %s' % VAR_6
else:
return 'error, cannot add new VAR_51!' % VAR_6
else:
return "You didn't think that would work, did you?"
def FUNC_36(self, VAR_31, VAR_32, VAR_6=''):
VAR_75 = VAR_6 == ''
if VAR_75:
VAR_6 = VAR_53.session['username']
VAR_107 = self.userdb.auth(VAR_6, VAR_31)
VAR_108 = userdb.User.nobody() != VAR_107
if not VAR_108:
raise VAR_53.HTTPError(403, "Forbidden")
if VAR_75 or VAR_53.session['admin']:
return self.userdb.changePassword(VAR_6, VAR_32)
else:
raise VAR_53.HTTPError(403, "Forbidden")
def FUNC_37(self, VAR_15):
VAR_76 = VAR_53.session['userid'] == VAR_15
if VAR_53.session['admin'] and not VAR_76:
VAR_109 = self.userdb.deleteUser(VAR_15)
return 'success' if VAR_109 else 'failed'
else:
return "You didn't think that would work, did you?"
def FUNC_38(self, VAR_33="created", VAR_34=''):
VAR_77 = self.playlistdb.showPlaylists(self.getUserId(), VAR_34)
VAR_78 = int(time.time())
VAR_79 = False
for VAR_110 in VAR_77:
VAR_110['username'] = self.userdb.getNameById(VAR_110['userid'])
VAR_110['type'] = 'playlist'
VAR_110['age'] = VAR_78 - VAR_110['created']
if VAR_33[0] == '-':
VAR_79 = True
VAR_33 = VAR_33[1:]
if not VAR_33 in ('username', 'age', 'title', 'default'):
VAR_33 = 'created'
if VAR_33 == 'default':
VAR_33 = 'age'
VAR_79 = False
VAR_77 = sorted(VAR_77, key=lambda x: x[VAR_33], reverse = VAR_79)
return VAR_77
def FUNC_39(self):
VAR_53.lib.sessions.expire()
FUNC_39.no_auth = True
def FUNC_40(self, VAR_28, VAR_35):
VAR_15 = self.getUserId()
VAR_80 = self.playlistdb.createPLS(VAR_28=plid, VAR_15=userid, addrstr=VAR_35)
VAR_81 = self.playlistdb.getName(VAR_28, VAR_15)
if VAR_80 and VAR_81:
return self.serve_string_as_file(VAR_80, VAR_81+'.pls')
FUNC_40.binary = True
def FUNC_41(self, VAR_28, VAR_35):
VAR_15 = self.getUserId()
VAR_80 = self.playlistdb.createM3U(VAR_28=plid, VAR_15=userid, addrstr=VAR_35)
VAR_81 = self.playlistdb.getName(VAR_28, VAR_15)
if VAR_80 and VAR_81:
return self.serve_string_as_file(VAR_80, VAR_81+'.m3u')
FUNC_41.binary = True
def FUNC_42(self, VAR_36, VAR_37=False, VAR_35=''):
VAR_15 = self.getUserId()
if not VAR_15:
raise VAR_53.HTTPError(401, _("Please log in"))
VAR_35 = (hostaddr.strip().rstrip('/') + cherry.config['server.rootpath']).rstrip('/')
VAR_36 = format.lower()
if VAR_36 == 'm3u':
VAR_111 = self.playlistdb.createM3U
elif VAR_36 == 'pls':
VAR_111 = self.playlistdb.createPLS
else:
raise VAR_53.HTTPError(400,
_('Unknown VAR_23 VAR_36: {format!r}').format(VAR_36=format))
VAR_77 = self.playlistdb.showPlaylists(VAR_15, include_public=VAR_37)
if not VAR_77:
raise VAR_53.HTTPError(404, _('No VAR_77 found'))
with MemoryZipFile() as zip:
for VAR_110 in VAR_77:
VAR_28 = VAR_110['plid']
VAR_122 = VAR_111(VAR_28=plid, VAR_15=userid, addrstr=VAR_35)
VAR_81 = self.playlistdb.getName(VAR_28, VAR_15) + '.' + VAR_36
if not VAR_110['owner']:
VAR_6 = self.userdb.getNameById(VAR_110['userid'])
VAR_81 = VAR_6 + '/' + VAR_81
zip.writestr(VAR_81, VAR_122)
VAR_82 = 'application/x-zip-compressed'
VAR_83 = 'attachment; VAR_39="playlists.zip"'
VAR_53.response.headers["Content-Type"] = VAR_82
VAR_53.response.headers['Content-Disposition'] = VAR_83
return zip.getbytes()
FUNC_42.exposed = True
def FUNC_43(self, VAR_9):
VAR_84 = cherry.config['media.basedir']
VAR_85 = os.path.join(VAR_84, VAR_9)
return json.dumps(metainfo.getSongInfo(VAR_85).dict())
def FUNC_44(self):
return json.dumps(audiotranscode.getEncoders())
def FUNC_45(self):
return json.dumps(audiotranscode.getDecoders())
def FUNC_46(self):
return json.dumps(cherry.config['media.transcode'])
def FUNC_47(self):
self.model.updateLibrary()
return 'success'
def FUNC_48(self):
VAR_86 = {
'transcodingenabled': cherry.config['media.transcode'],
'fetchalbumart': cherry.config['media.fetch_album_art'],
'isadmin': VAR_53.session['admin'],
'username': VAR_53.session['username'],
'servepath': 'serve/',
'transcodepath': 'trans/',
'auto_login': self.autoLoginActive(),
'version': cherry.REPO_VERSION or cherry.VERSION,
}
if cherry.config['media.transcode']:
VAR_112 = list(self.model.transcoder.available_decoder_formats())
VAR_86['getdecoders'] = VAR_112
VAR_113 = list(self.model.transcoder.available_encoder_formats())
VAR_86['getencoders'] = VAR_113
else:
VAR_86['getdecoders'] = []
VAR_86['getencoders'] = []
return VAR_86
def FUNC_49(self, VAR_38, VAR_39):
VAR_87 = 'attachment; VAR_39="'+VAR_39+'"'
VAR_53.response.headers["Content-Type"] = "application/x-download"
VAR_53.response.headers["Content-Disposition"] = VAR_87
return codecs.encode(VAR_38, "UTF-8")
def FUNC_0():
VAR_53.session.save()
VAR_40 = VAR_53.serving.request.hooks['before_finalize']
VAR_41 = VAR_53.lib.sessions.save
VAR_40[:] = [h for h in VAR_40 if h.callback is not VAR_41]
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
35,
41,
50,
51,
54,
64,
67,
69,
70,
76,
80,
84,
121,
124,
138,
144,
173,
180,
181,
199,
205,
216,
224,
227,
230,
238,
239,
247,
248,
255,
257,
268,
269,
275,
279,
292,
294,
296,
301,
305,
314,
332,
351,
360,
364,
369,
377,
384,
386,
394,
398,
401,
403,
412,
413,
415,
416,
422,
423,
427,
430,
435,
446,
449,
452,
456,
461,
465,
472,
478,
488,
491,
503,
511,
512,
514,
520,
523,
531,
554,
558,
562,
580,
589,
602,
610,
615,
630,
634,
642,
650,
656,
665,
669,
679,
686,
691,
694,
697,
700,
704,
725,
731,
732,
735,
737,
740,
742,
745,
754,
755,
756,
32,
33,
34,
734,
735,
736,
737,
738,
739,
740,
741,
742,
743,
744,
745,
746,
747,
748,
749,
226,
227,
228,
229,
230,
231,
232,
233,
271,
272,
273,
274,
560
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
35,
41,
50,
51,
54,
64,
67,
69,
70,
76,
80,
84,
121,
124,
138,
144,
173,
180,
181,
199,
205,
216,
224,
227,
230,
238,
239,
247,
248,
255,
257,
268,
269,
275,
279,
292,
294,
296,
301,
303,
306,
309,
318,
336,
355,
364,
368,
373,
381,
388,
390,
398,
402,
405,
407,
416,
417,
419,
420,
426,
427,
431,
434,
439,
450,
453,
456,
460,
465,
469,
476,
482,
492,
495,
507,
515,
516,
518,
524,
527,
535,
558,
562,
566,
584,
593,
606,
614,
619,
634,
638,
646,
654,
660,
669,
673,
683,
690,
695,
698,
701,
704,
708,
729,
735,
736,
739,
741,
744,
746,
749,
758,
759,
760,
32,
33,
34,
738,
739,
740,
741,
742,
743,
744,
745,
746,
747,
748,
749,
750,
751,
752,
753,
226,
227,
228,
229,
230,
231,
232,
233,
271,
272,
273,
274,
564
] |
2CWE-601
| import hashlib
import hmac
import logging
import time
from datetime import timedelta
from urllib.parse import urlsplit, urlunsplit
from flask import jsonify, redirect, request, url_for, session
from flask_login import LoginManager, login_user, logout_user, user_logged_in
from redash import models, settings
from redash.authentication import jwt_auth
from redash.authentication.org_resolving import current_org
from redash.settings.organization import settings as org_settings
from redash.tasks import record_event
from sqlalchemy.orm.exc import NoResultFound
from werkzeug.exceptions import Unauthorized
login_manager = LoginManager()
logger = logging.getLogger("authentication")
def get_login_url(external=False, next="/"):
if settings.MULTI_ORG and current_org == None:
login_url = "/"
elif settings.MULTI_ORG:
login_url = url_for(
"redash.login", org_slug=current_org.slug, next=next, _external=external
)
else:
login_url = url_for("redash.login", next=next, _external=external)
return login_url
def sign(key, path, expires):
if not key:
return None
h = hmac.new(key.encode(), msg=path.encode(), digestmod=hashlib.sha1)
h.update(str(expires).encode())
return h.hexdigest()
@login_manager.user_loader
def load_user(user_id_with_identity):
user = api_key_load_user_from_request(request)
if user:
return user
org = current_org._get_current_object()
try:
user_id, _ = user_id_with_identity.split("-")
user = models.User.get_by_id_and_org(user_id, org)
if user.is_disabled or user.get_id() != user_id_with_identity:
return None
return user
except (models.NoResultFound, ValueError, AttributeError):
return None
def request_loader(request):
user = None
if settings.AUTH_TYPE == "hmac":
user = hmac_load_user_from_request(request)
elif settings.AUTH_TYPE == "api_key":
user = api_key_load_user_from_request(request)
else:
logger.warning(
"Unknown authentication type ({}). Using default (HMAC).".format(
settings.AUTH_TYPE
)
)
user = hmac_load_user_from_request(request)
if org_settings["auth_jwt_login_enabled"] and user is None:
user = jwt_token_load_user_from_request(request)
return user
def hmac_load_user_from_request(request):
signature = request.args.get("signature")
expires = float(request.args.get("expires") or 0)
query_id = request.view_args.get("query_id", None)
user_id = request.args.get("user_id", None)
# TODO: 3600 should be a setting
if signature and time.time() < expires <= time.time() + 3600:
if user_id:
user = models.User.query.get(user_id)
calculated_signature = sign(user.api_key, request.path, expires)
if user.api_key and signature == calculated_signature:
return user
if query_id:
query = models.Query.query.filter(models.Query.id == query_id).one()
calculated_signature = sign(query.api_key, request.path, expires)
if query.api_key and signature == calculated_signature:
return models.ApiUser(
query.api_key,
query.org,
list(query.groups.keys()),
name="ApiKey: Query {}".format(query.id),
)
return None
def get_user_from_api_key(api_key, query_id):
if not api_key:
return None
user = None
# TODO: once we switch all api key storage into the ApiKey model, this code will be much simplified
org = current_org._get_current_object()
try:
user = models.User.get_by_api_key_and_org(api_key, org)
if user.is_disabled:
user = None
except models.NoResultFound:
try:
api_key = models.ApiKey.get_by_api_key(api_key)
user = models.ApiUser(api_key, api_key.org, [])
except models.NoResultFound:
if query_id:
query = models.Query.get_by_id_and_org(query_id, org)
if query and query.api_key == api_key:
user = models.ApiUser(
api_key,
query.org,
list(query.groups.keys()),
name="ApiKey: Query {}".format(query.id),
)
return user
def get_api_key_from_request(request):
api_key = request.args.get("api_key", None)
if api_key is not None:
return api_key
if request.headers.get("Authorization"):
auth_header = request.headers.get("Authorization")
api_key = auth_header.replace("Key ", "", 1)
elif request.view_args is not None and request.view_args.get("token"):
api_key = request.view_args["token"]
return api_key
def api_key_load_user_from_request(request):
api_key = get_api_key_from_request(request)
if request.view_args is not None:
query_id = request.view_args.get("query_id", None)
user = get_user_from_api_key(api_key, query_id)
else:
user = None
return user
def jwt_token_load_user_from_request(request):
org = current_org._get_current_object()
payload = None
if org_settings["auth_jwt_auth_cookie_name"]:
jwt_token = request.cookies.get(org_settings["auth_jwt_auth_cookie_name"], None)
elif org_settings["auth_jwt_auth_header_name"]:
jwt_token = request.headers.get(org_settings["auth_jwt_auth_header_name"], None)
else:
return None
if jwt_token:
payload, token_is_valid = jwt_auth.verify_jwt_token(
jwt_token,
expected_issuer=org_settings["auth_jwt_auth_issuer"],
expected_audience=org_settings["auth_jwt_auth_audience"],
algorithms=org_settings["auth_jwt_auth_algorithms"],
public_certs_url=org_settings["auth_jwt_auth_public_certs_url"],
)
if not token_is_valid:
raise Unauthorized("Invalid JWT token")
if not payload:
return
try:
user = models.User.get_by_email_and_org(payload["email"], org)
except models.NoResultFound:
user = create_and_login_user(current_org, payload["email"], payload["email"])
return user
def log_user_logged_in(app, user):
event = {
"org_id": user.org_id,
"user_id": user.id,
"action": "login",
"object_type": "redash",
"timestamp": int(time.time()),
"user_agent": request.user_agent.string,
"ip": request.remote_addr,
}
record_event.delay(event)
@login_manager.unauthorized_handler
def redirect_to_login():
if request.is_xhr or "/api/" in request.path:
response = jsonify(
{"message": "Couldn't find resource. Please login and try again."}
)
response.status_code = 404
return response
login_url = get_login_url(next=request.url, external=False)
return redirect(login_url)
def logout_and_redirect_to_index():
logout_user()
if settings.MULTI_ORG and current_org == None:
index_url = "/"
elif settings.MULTI_ORG:
index_url = url_for("redash.index", org_slug=current_org.slug, _external=False)
else:
index_url = url_for("redash.index", _external=False)
return redirect(index_url)
def init_app(app):
from redash.authentication import (
google_oauth,
saml_auth,
remote_user_auth,
ldap_auth,
)
login_manager.init_app(app)
login_manager.anonymous_user = models.AnonymousUser
login_manager.REMEMBER_COOKIE_DURATION = settings.REMEMBER_COOKIE_DURATION
@app.before_request
def extend_session():
session.permanent = True
app.permanent_session_lifetime = timedelta(seconds=settings.SESSION_EXPIRY_TIME)
from redash.security import csrf
for auth in [google_oauth, saml_auth, remote_user_auth, ldap_auth]:
blueprint = auth.blueprint
csrf.exempt(blueprint)
app.register_blueprint(blueprint)
user_logged_in.connect(log_user_logged_in)
login_manager.request_loader(request_loader)
def create_and_login_user(org, name, email, picture=None):
try:
user_object = models.User.get_by_email_and_org(email, org)
if user_object.is_disabled:
return None
if user_object.is_invitation_pending:
user_object.is_invitation_pending = False
models.db.session.commit()
if user_object.name != name:
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
user_object.name = name
models.db.session.commit()
except NoResultFound:
logger.debug("Creating user object (%r)", name)
user_object = models.User(
org=org,
name=name,
email=email,
is_invitation_pending=False,
_profile_image_url=picture,
group_ids=[org.default_group.id],
)
models.db.session.add(user_object)
models.db.session.commit()
login_user(user_object, remember=True)
return user_object
def get_next_path(unsafe_next_path):
if not unsafe_next_path:
return ""
# Preventing open redirection attacks
parts = list(urlsplit(unsafe_next_path))
parts[0] = "" # clear scheme
parts[1] = "" # clear netloc
safe_next_path = urlunsplit(parts)
# If the original path was a URL, we might end up with an empty
# safe url, which will redirect to the login page. Changing to
# relative root to redirect to the app root after login.
if not safe_next_path:
safe_next_path = "./"
return safe_next_path
| import hashlib
import hmac
import logging
import time
from datetime import timedelta
from urllib.parse import urlsplit, urlunsplit
from flask import jsonify, redirect, request, url_for, session
from flask_login import LoginManager, login_user, logout_user, user_logged_in
from redash import models, settings
from redash.authentication import jwt_auth
from redash.authentication.org_resolving import current_org
from redash.settings.organization import settings as org_settings
from redash.tasks import record_event
from sqlalchemy.orm.exc import NoResultFound
from werkzeug.exceptions import Unauthorized
login_manager = LoginManager()
logger = logging.getLogger("authentication")
def get_login_url(external=False, next="/"):
if settings.MULTI_ORG and current_org == None:
login_url = "/"
elif settings.MULTI_ORG:
login_url = url_for(
"redash.login", org_slug=current_org.slug, next=next, _external=external
)
else:
login_url = url_for("redash.login", next=next, _external=external)
return login_url
def sign(key, path, expires):
if not key:
return None
h = hmac.new(key.encode(), msg=path.encode(), digestmod=hashlib.sha1)
h.update(str(expires).encode())
return h.hexdigest()
@login_manager.user_loader
def load_user(user_id_with_identity):
user = api_key_load_user_from_request(request)
if user:
return user
org = current_org._get_current_object()
try:
user_id, _ = user_id_with_identity.split("-")
user = models.User.get_by_id_and_org(user_id, org)
if user.is_disabled or user.get_id() != user_id_with_identity:
return None
return user
except (models.NoResultFound, ValueError, AttributeError):
return None
def request_loader(request):
user = None
if settings.AUTH_TYPE == "hmac":
user = hmac_load_user_from_request(request)
elif settings.AUTH_TYPE == "api_key":
user = api_key_load_user_from_request(request)
else:
logger.warning(
"Unknown authentication type ({}). Using default (HMAC).".format(
settings.AUTH_TYPE
)
)
user = hmac_load_user_from_request(request)
if org_settings["auth_jwt_login_enabled"] and user is None:
user = jwt_token_load_user_from_request(request)
return user
def hmac_load_user_from_request(request):
signature = request.args.get("signature")
expires = float(request.args.get("expires") or 0)
query_id = request.view_args.get("query_id", None)
user_id = request.args.get("user_id", None)
# TODO: 3600 should be a setting
if signature and time.time() < expires <= time.time() + 3600:
if user_id:
user = models.User.query.get(user_id)
calculated_signature = sign(user.api_key, request.path, expires)
if user.api_key and signature == calculated_signature:
return user
if query_id:
query = models.Query.query.filter(models.Query.id == query_id).one()
calculated_signature = sign(query.api_key, request.path, expires)
if query.api_key and signature == calculated_signature:
return models.ApiUser(
query.api_key,
query.org,
list(query.groups.keys()),
name="ApiKey: Query {}".format(query.id),
)
return None
def get_user_from_api_key(api_key, query_id):
if not api_key:
return None
user = None
# TODO: once we switch all api key storage into the ApiKey model, this code will be much simplified
org = current_org._get_current_object()
try:
user = models.User.get_by_api_key_and_org(api_key, org)
if user.is_disabled:
user = None
except models.NoResultFound:
try:
api_key = models.ApiKey.get_by_api_key(api_key)
user = models.ApiUser(api_key, api_key.org, [])
except models.NoResultFound:
if query_id:
query = models.Query.get_by_id_and_org(query_id, org)
if query and query.api_key == api_key:
user = models.ApiUser(
api_key,
query.org,
list(query.groups.keys()),
name="ApiKey: Query {}".format(query.id),
)
return user
def get_api_key_from_request(request):
api_key = request.args.get("api_key", None)
if api_key is not None:
return api_key
if request.headers.get("Authorization"):
auth_header = request.headers.get("Authorization")
api_key = auth_header.replace("Key ", "", 1)
elif request.view_args is not None and request.view_args.get("token"):
api_key = request.view_args["token"]
return api_key
def api_key_load_user_from_request(request):
api_key = get_api_key_from_request(request)
if request.view_args is not None:
query_id = request.view_args.get("query_id", None)
user = get_user_from_api_key(api_key, query_id)
else:
user = None
return user
def jwt_token_load_user_from_request(request):
org = current_org._get_current_object()
payload = None
if org_settings["auth_jwt_auth_cookie_name"]:
jwt_token = request.cookies.get(org_settings["auth_jwt_auth_cookie_name"], None)
elif org_settings["auth_jwt_auth_header_name"]:
jwt_token = request.headers.get(org_settings["auth_jwt_auth_header_name"], None)
else:
return None
if jwt_token:
payload, token_is_valid = jwt_auth.verify_jwt_token(
jwt_token,
expected_issuer=org_settings["auth_jwt_auth_issuer"],
expected_audience=org_settings["auth_jwt_auth_audience"],
algorithms=org_settings["auth_jwt_auth_algorithms"],
public_certs_url=org_settings["auth_jwt_auth_public_certs_url"],
)
if not token_is_valid:
raise Unauthorized("Invalid JWT token")
if not payload:
return
try:
user = models.User.get_by_email_and_org(payload["email"], org)
except models.NoResultFound:
user = create_and_login_user(current_org, payload["email"], payload["email"])
return user
def log_user_logged_in(app, user):
event = {
"org_id": user.org_id,
"user_id": user.id,
"action": "login",
"object_type": "redash",
"timestamp": int(time.time()),
"user_agent": request.user_agent.string,
"ip": request.remote_addr,
}
record_event.delay(event)
@login_manager.unauthorized_handler
def redirect_to_login():
if request.is_xhr or "/api/" in request.path:
response = jsonify(
{"message": "Couldn't find resource. Please login and try again."}
)
response.status_code = 404
return response
login_url = get_login_url(next=request.url, external=False)
return redirect(login_url)
def logout_and_redirect_to_index():
logout_user()
if settings.MULTI_ORG and current_org == None:
index_url = "/"
elif settings.MULTI_ORG:
index_url = url_for("redash.index", org_slug=current_org.slug, _external=False)
else:
index_url = url_for("redash.index", _external=False)
return redirect(index_url)
def init_app(app):
from redash.authentication import (
saml_auth,
remote_user_auth,
ldap_auth,
)
from redash.authentication.google_oauth import create_google_oauth_blueprint
login_manager.init_app(app)
login_manager.anonymous_user = models.AnonymousUser
login_manager.REMEMBER_COOKIE_DURATION = settings.REMEMBER_COOKIE_DURATION
@app.before_request
def extend_session():
session.permanent = True
app.permanent_session_lifetime = timedelta(seconds=settings.SESSION_EXPIRY_TIME)
from redash.security import csrf
# Authlib's flask oauth client requires a Flask app to initialize
for blueprint in [create_google_oauth_blueprint(app), saml_auth.blueprint, remote_user_auth.blueprint, ldap_auth.blueprint, ]:
csrf.exempt(blueprint)
app.register_blueprint(blueprint)
user_logged_in.connect(log_user_logged_in)
login_manager.request_loader(request_loader)
def create_and_login_user(org, name, email, picture=None):
try:
user_object = models.User.get_by_email_and_org(email, org)
if user_object.is_disabled:
return None
if user_object.is_invitation_pending:
user_object.is_invitation_pending = False
models.db.session.commit()
if user_object.name != name:
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
user_object.name = name
models.db.session.commit()
except NoResultFound:
logger.debug("Creating user object (%r)", name)
user_object = models.User(
org=org,
name=name,
email=email,
is_invitation_pending=False,
_profile_image_url=picture,
group_ids=[org.default_group.id],
)
models.db.session.add(user_object)
models.db.session.commit()
login_user(user_object, remember=True)
return user_object
def get_next_path(unsafe_next_path):
if not unsafe_next_path:
return ""
# Preventing open redirection attacks
parts = list(urlsplit(unsafe_next_path))
parts[0] = "" # clear scheme
parts[1] = "" # clear netloc
safe_next_path = urlunsplit(parts)
# If the original path was a URL, we might end up with an empty
# safe url, which will redirect to the login page. Changing to
# relative root to redirect to the app root after login.
if not safe_next_path:
safe_next_path = "./"
return safe_next_path
| open_redirect | {
"code": [
" google_oauth,",
" for auth in [google_oauth, saml_auth, remote_user_auth, ldap_auth]:",
" blueprint = auth.blueprint"
],
"line_no": [
246,
262,
263
]
} | {
"code": [
" for blueprint in [create_google_oauth_blueprint(app), saml_auth.blueprint, remote_user_auth.blueprint, ldap_auth.blueprint, ]:"
],
"line_no": [
265
]
} | import .hashlib
import .hmac
import logging
import time
from datetime import timedelta
from urllib.parse import urlsplit, urlunsplit
from flask import jsonify, redirect, VAR_8, url_for, session
from flask_login import LoginManager, login_user, logout_user, user_logged_in
from redash import models, settings
from redash.authentication import jwt_auth
from redash.authentication.org_resolving import current_org
from redash.settings.organization import settings as org_settings
from redash.tasks import record_event
from sqlalchemy.orm.exc import NoResultFound
from werkzeug.exceptions import Unauthorized
VAR_0 = LoginManager()
VAR_1 = logging.getLogger("authentication")
def FUNC_0(VAR_2=False, VAR_3="/"):
if settings.MULTI_ORG and current_org == None:
VAR_23 = "/"
elif settings.MULTI_ORG:
VAR_23 = url_for(
"redash.login", org_slug=current_org.slug, VAR_3=next, _external=VAR_2
)
else:
VAR_23 = url_for("redash.login", VAR_3=next, _external=VAR_2)
return VAR_23
def FUNC_1(VAR_4, VAR_5, VAR_6):
if not VAR_4:
return None
VAR_18 = hmac.new(VAR_4.encode(), msg=VAR_5.encode(), digestmod=hashlib.sha1)
VAR_18.update(str(VAR_6).encode())
return VAR_18.hexdigest()
@VAR_0.user_loader
def FUNC_2(VAR_7):
VAR_12 = FUNC_7(VAR_8)
if VAR_12:
return VAR_12
VAR_13 = current_org._get_current_object()
try:
VAR_20, VAR_26 = VAR_7.split("-")
VAR_12 = models.User.get_by_id_and_org(VAR_20, VAR_13)
if VAR_12.is_disabled or VAR_12.get_id() != VAR_7:
return None
return VAR_12
except (models.NoResultFound, ValueError, AttributeError):
return None
def FUNC_3(VAR_8):
VAR_12 = None
if settings.AUTH_TYPE == "hmac":
VAR_12 = FUNC_4(VAR_8)
elif settings.AUTH_TYPE == "api_key":
VAR_12 = FUNC_7(VAR_8)
else:
VAR_1.warning(
"Unknown authentication type ({}). Using default (HMAC).".format(
settings.AUTH_TYPE
)
)
VAR_12 = FUNC_4(VAR_8)
if org_settings["auth_jwt_login_enabled"] and VAR_12 is None:
VAR_12 = FUNC_8(VAR_8)
return VAR_12
def FUNC_4(VAR_8):
VAR_19 = VAR_8.args.get("signature")
VAR_6 = float(VAR_8.args.get("expires") or 0)
VAR_10 = VAR_8.view_args.get("query_id", None)
VAR_20 = VAR_8.args.get("user_id", None)
if VAR_19 and time.time() < VAR_6 <= time.time() + 3600:
if VAR_20:
VAR_12 = models.User.query.get(VAR_20)
VAR_34 = FUNC_1(VAR_12.api_key, VAR_8.path, VAR_6)
if VAR_12.api_key and VAR_19 == VAR_34:
return VAR_12
if VAR_10:
VAR_35 = models.Query.query.filter(models.Query.id == VAR_10).one()
VAR_34 = FUNC_1(VAR_35.api_key, VAR_8.path, VAR_6)
if VAR_35.api_key and VAR_19 == VAR_34:
return models.ApiUser(
VAR_35.api_key,
VAR_35.org,
list(VAR_35.groups.keys()),
VAR_14="ApiKey: Query {}".format(VAR_35.id),
)
return None
def FUNC_5(VAR_9, VAR_10):
if not VAR_9:
return None
VAR_12 = None
VAR_13 = current_org._get_current_object()
try:
VAR_12 = models.User.get_by_api_key_and_org(VAR_9, VAR_13)
if VAR_12.is_disabled:
VAR_12 = None
except models.NoResultFound:
try:
VAR_9 = models.ApiKey.get_by_api_key(VAR_9)
VAR_12 = models.ApiUser(VAR_9, api_key.org, [])
except models.NoResultFound:
if VAR_10:
VAR_35 = models.Query.get_by_id_and_org(VAR_10, VAR_13)
if VAR_35 and VAR_35.api_key == VAR_9:
VAR_12 = models.ApiUser(
VAR_9,
VAR_35.org,
list(VAR_35.groups.keys()),
VAR_14="ApiKey: Query {}".format(VAR_35.id),
)
return VAR_12
def FUNC_6(VAR_8):
VAR_9 = VAR_8.args.get("api_key", None)
if VAR_9 is not None:
return VAR_9
if VAR_8.headers.get("Authorization"):
VAR_27 = VAR_8.headers.get("Authorization")
VAR_9 = VAR_27.replace("Key ", "", 1)
elif VAR_8.view_args is not None and VAR_8.view_args.get("token"):
VAR_9 = VAR_8.view_args["token"]
return VAR_9
def FUNC_7(VAR_8):
VAR_9 = FUNC_6(VAR_8)
if VAR_8.view_args is not None:
VAR_10 = VAR_8.view_args.get("query_id", None)
VAR_12 = FUNC_5(VAR_9, VAR_10)
else:
VAR_12 = None
return VAR_12
def FUNC_8(VAR_8):
VAR_13 = current_org._get_current_object()
VAR_21 = None
if org_settings["auth_jwt_auth_cookie_name"]:
VAR_28 = VAR_8.cookies.get(org_settings["auth_jwt_auth_cookie_name"], None)
elif org_settings["auth_jwt_auth_header_name"]:
VAR_28 = VAR_8.headers.get(org_settings["auth_jwt_auth_header_name"], None)
else:
return None
if VAR_28:
VAR_21, VAR_29 = jwt_auth.verify_jwt_token(
VAR_28,
expected_issuer=org_settings["auth_jwt_auth_issuer"],
expected_audience=org_settings["auth_jwt_auth_audience"],
algorithms=org_settings["auth_jwt_auth_algorithms"],
public_certs_url=org_settings["auth_jwt_auth_public_certs_url"],
)
if not VAR_29:
raise Unauthorized("Invalid JWT token")
if not VAR_21:
return
try:
VAR_12 = models.User.get_by_email_and_org(VAR_21["email"], VAR_13)
except models.NoResultFound:
VAR_12 = FUNC_13(current_org, VAR_21["email"], VAR_21["email"])
return VAR_12
def FUNC_9(VAR_11, VAR_12):
VAR_22 = {
"org_id": VAR_12.org_id,
"user_id": VAR_12.id,
"action": "login",
"object_type": "redash",
"timestamp": int(time.time()),
"user_agent": VAR_8.user_agent.string,
"ip": VAR_8.remote_addr,
}
record_event.delay(VAR_22)
@VAR_0.unauthorized_handler
def FUNC_10():
if VAR_8.is_xhr or "/api/" in VAR_8.path:
VAR_30 = jsonify(
{"message": "Couldn't find resource. Please login and try again."}
)
VAR_30.status_code = 404
return VAR_30
VAR_23 = FUNC_0(VAR_3=VAR_8.url, VAR_2=False)
return redirect(VAR_23)
def FUNC_11():
logout_user()
if settings.MULTI_ORG and current_org == None:
VAR_31 = "/"
elif settings.MULTI_ORG:
VAR_31 = url_for("redash.index", org_slug=current_org.slug, _external=False)
else:
VAR_31 = url_for("redash.index", _external=False)
return redirect(VAR_31)
def FUNC_12(VAR_11):
from redash.authentication import (
google_oauth,
saml_auth,
remote_user_auth,
ldap_auth,
)
VAR_0.init_app(VAR_11)
VAR_0.anonymous_user = models.AnonymousUser
VAR_0.REMEMBER_COOKIE_DURATION = settings.REMEMBER_COOKIE_DURATION
@VAR_11.before_request
def FUNC_15():
session.permanent = True
VAR_11.permanent_session_lifetime = timedelta(seconds=settings.SESSION_EXPIRY_TIME)
from redash.security import csrf
for auth in [google_oauth, saml_auth, remote_user_auth, ldap_auth]:
VAR_32 = auth.blueprint
csrf.exempt(VAR_32)
VAR_11.register_blueprint(VAR_32)
user_logged_in.connect(FUNC_9)
VAR_0.request_loader(FUNC_3)
def FUNC_13(VAR_13, VAR_14, VAR_15, VAR_16=None):
try:
VAR_33 = models.User.get_by_email_and_org(VAR_15, VAR_13)
if VAR_33.is_disabled:
return None
if VAR_33.is_invitation_pending:
VAR_33.is_invitation_pending = False
models.db.session.commit()
if VAR_33.name != VAR_14:
VAR_1.debug("Updating VAR_12 VAR_14 (%r -> %r)", VAR_33.name, VAR_14)
VAR_33.name = VAR_14
models.db.session.commit()
except NoResultFound:
VAR_1.debug("Creating VAR_12 object (%r)", VAR_14)
VAR_33 = models.User(
VAR_13=org,
VAR_14=name,
VAR_15=email,
is_invitation_pending=False,
_profile_image_url=VAR_16,
group_ids=[VAR_13.default_group.id],
)
models.db.session.add(VAR_33)
models.db.session.commit()
login_user(VAR_33, remember=True)
return VAR_33
def FUNC_14(VAR_17):
if not VAR_17:
return ""
VAR_24 = list(urlsplit(VAR_17))
VAR_24[0] = "" # clear scheme
VAR_24[1] = "" # clear netloc
VAR_25 = urlunsplit(VAR_24)
if not VAR_25:
safe_next_path = "./"
return VAR_25
| import .hashlib
import .hmac
import logging
import time
from datetime import timedelta
from urllib.parse import urlsplit, urlunsplit
from flask import jsonify, redirect, VAR_8, url_for, session
from flask_login import LoginManager, login_user, logout_user, user_logged_in
from redash import models, settings
from redash.authentication import jwt_auth
from redash.authentication.org_resolving import current_org
from redash.settings.organization import settings as org_settings
from redash.tasks import record_event
from sqlalchemy.orm.exc import NoResultFound
from werkzeug.exceptions import Unauthorized
VAR_0 = LoginManager()
VAR_1 = logging.getLogger("authentication")
def FUNC_0(VAR_2=False, VAR_3="/"):
if settings.MULTI_ORG and current_org == None:
VAR_23 = "/"
elif settings.MULTI_ORG:
VAR_23 = url_for(
"redash.login", org_slug=current_org.slug, VAR_3=next, _external=VAR_2
)
else:
VAR_23 = url_for("redash.login", VAR_3=next, _external=VAR_2)
return VAR_23
def FUNC_1(VAR_4, VAR_5, VAR_6):
if not VAR_4:
return None
VAR_18 = hmac.new(VAR_4.encode(), msg=VAR_5.encode(), digestmod=hashlib.sha1)
VAR_18.update(str(VAR_6).encode())
return VAR_18.hexdigest()
@VAR_0.user_loader
def FUNC_2(VAR_7):
VAR_12 = FUNC_7(VAR_8)
if VAR_12:
return VAR_12
VAR_13 = current_org._get_current_object()
try:
VAR_20, VAR_26 = VAR_7.split("-")
VAR_12 = models.User.get_by_id_and_org(VAR_20, VAR_13)
if VAR_12.is_disabled or VAR_12.get_id() != VAR_7:
return None
return VAR_12
except (models.NoResultFound, ValueError, AttributeError):
return None
def FUNC_3(VAR_8):
VAR_12 = None
if settings.AUTH_TYPE == "hmac":
VAR_12 = FUNC_4(VAR_8)
elif settings.AUTH_TYPE == "api_key":
VAR_12 = FUNC_7(VAR_8)
else:
VAR_1.warning(
"Unknown authentication type ({}). Using default (HMAC).".format(
settings.AUTH_TYPE
)
)
VAR_12 = FUNC_4(VAR_8)
if org_settings["auth_jwt_login_enabled"] and VAR_12 is None:
VAR_12 = FUNC_8(VAR_8)
return VAR_12
def FUNC_4(VAR_8):
VAR_19 = VAR_8.args.get("signature")
VAR_6 = float(VAR_8.args.get("expires") or 0)
VAR_10 = VAR_8.view_args.get("query_id", None)
VAR_20 = VAR_8.args.get("user_id", None)
if VAR_19 and time.time() < VAR_6 <= time.time() + 3600:
if VAR_20:
VAR_12 = models.User.query.get(VAR_20)
VAR_33 = FUNC_1(VAR_12.api_key, VAR_8.path, VAR_6)
if VAR_12.api_key and VAR_19 == VAR_33:
return VAR_12
if VAR_10:
VAR_34 = models.Query.query.filter(models.Query.id == VAR_10).one()
VAR_33 = FUNC_1(VAR_34.api_key, VAR_8.path, VAR_6)
if VAR_34.api_key and VAR_19 == VAR_33:
return models.ApiUser(
VAR_34.api_key,
VAR_34.org,
list(VAR_34.groups.keys()),
VAR_14="ApiKey: Query {}".format(VAR_34.id),
)
return None
def FUNC_5(VAR_9, VAR_10):
if not VAR_9:
return None
VAR_12 = None
VAR_13 = current_org._get_current_object()
try:
VAR_12 = models.User.get_by_api_key_and_org(VAR_9, VAR_13)
if VAR_12.is_disabled:
VAR_12 = None
except models.NoResultFound:
try:
VAR_9 = models.ApiKey.get_by_api_key(VAR_9)
VAR_12 = models.ApiUser(VAR_9, api_key.org, [])
except models.NoResultFound:
if VAR_10:
VAR_34 = models.Query.get_by_id_and_org(VAR_10, VAR_13)
if VAR_34 and VAR_34.api_key == VAR_9:
VAR_12 = models.ApiUser(
VAR_9,
VAR_34.org,
list(VAR_34.groups.keys()),
VAR_14="ApiKey: Query {}".format(VAR_34.id),
)
return VAR_12
def FUNC_6(VAR_8):
VAR_9 = VAR_8.args.get("api_key", None)
if VAR_9 is not None:
return VAR_9
if VAR_8.headers.get("Authorization"):
VAR_27 = VAR_8.headers.get("Authorization")
VAR_9 = VAR_27.replace("Key ", "", 1)
elif VAR_8.view_args is not None and VAR_8.view_args.get("token"):
VAR_9 = VAR_8.view_args["token"]
return VAR_9
def FUNC_7(VAR_8):
VAR_9 = FUNC_6(VAR_8)
if VAR_8.view_args is not None:
VAR_10 = VAR_8.view_args.get("query_id", None)
VAR_12 = FUNC_5(VAR_9, VAR_10)
else:
VAR_12 = None
return VAR_12
def FUNC_8(VAR_8):
VAR_13 = current_org._get_current_object()
VAR_21 = None
if org_settings["auth_jwt_auth_cookie_name"]:
VAR_28 = VAR_8.cookies.get(org_settings["auth_jwt_auth_cookie_name"], None)
elif org_settings["auth_jwt_auth_header_name"]:
VAR_28 = VAR_8.headers.get(org_settings["auth_jwt_auth_header_name"], None)
else:
return None
if VAR_28:
VAR_21, VAR_29 = jwt_auth.verify_jwt_token(
VAR_28,
expected_issuer=org_settings["auth_jwt_auth_issuer"],
expected_audience=org_settings["auth_jwt_auth_audience"],
algorithms=org_settings["auth_jwt_auth_algorithms"],
public_certs_url=org_settings["auth_jwt_auth_public_certs_url"],
)
if not VAR_29:
raise Unauthorized("Invalid JWT token")
if not VAR_21:
return
try:
VAR_12 = models.User.get_by_email_and_org(VAR_21["email"], VAR_13)
except models.NoResultFound:
VAR_12 = FUNC_13(current_org, VAR_21["email"], VAR_21["email"])
return VAR_12
def FUNC_9(VAR_11, VAR_12):
VAR_22 = {
"org_id": VAR_12.org_id,
"user_id": VAR_12.id,
"action": "login",
"object_type": "redash",
"timestamp": int(time.time()),
"user_agent": VAR_8.user_agent.string,
"ip": VAR_8.remote_addr,
}
record_event.delay(VAR_22)
@VAR_0.unauthorized_handler
def FUNC_10():
if VAR_8.is_xhr or "/api/" in VAR_8.path:
VAR_30 = jsonify(
{"message": "Couldn't find resource. Please login and try again."}
)
VAR_30.status_code = 404
return VAR_30
VAR_23 = FUNC_0(VAR_3=VAR_8.url, VAR_2=False)
return redirect(VAR_23)
def FUNC_11():
logout_user()
if settings.MULTI_ORG and current_org == None:
VAR_31 = "/"
elif settings.MULTI_ORG:
VAR_31 = url_for("redash.index", org_slug=current_org.slug, _external=False)
else:
VAR_31 = url_for("redash.index", _external=False)
return redirect(VAR_31)
def FUNC_12(VAR_11):
from redash.authentication import (
saml_auth,
remote_user_auth,
ldap_auth,
)
from redash.authentication.google_oauth import create_google_oauth_blueprint
VAR_0.init_app(VAR_11)
VAR_0.anonymous_user = models.AnonymousUser
VAR_0.REMEMBER_COOKIE_DURATION = settings.REMEMBER_COOKIE_DURATION
@VAR_11.before_request
def FUNC_15():
session.permanent = True
VAR_11.permanent_session_lifetime = timedelta(seconds=settings.SESSION_EXPIRY_TIME)
from redash.security import csrf
for blueprint in [create_google_oauth_blueprint(VAR_11), saml_auth.blueprint, remote_user_auth.blueprint, ldap_auth.blueprint, ]:
csrf.exempt(blueprint)
VAR_11.register_blueprint(blueprint)
user_logged_in.connect(FUNC_9)
VAR_0.request_loader(FUNC_3)
def FUNC_13(VAR_13, VAR_14, VAR_15, VAR_16=None):
try:
VAR_32 = models.User.get_by_email_and_org(VAR_15, VAR_13)
if VAR_32.is_disabled:
return None
if VAR_32.is_invitation_pending:
VAR_32.is_invitation_pending = False
models.db.session.commit()
if VAR_32.name != VAR_14:
VAR_1.debug("Updating VAR_12 VAR_14 (%r -> %r)", VAR_32.name, VAR_14)
VAR_32.name = VAR_14
models.db.session.commit()
except NoResultFound:
VAR_1.debug("Creating VAR_12 object (%r)", VAR_14)
VAR_32 = models.User(
VAR_13=org,
VAR_14=name,
VAR_15=email,
is_invitation_pending=False,
_profile_image_url=VAR_16,
group_ids=[VAR_13.default_group.id],
)
models.db.session.add(VAR_32)
models.db.session.commit()
login_user(VAR_32, remember=True)
return VAR_32
def FUNC_14(VAR_17):
if not VAR_17:
return ""
VAR_24 = list(urlsplit(VAR_17))
VAR_24[0] = "" # clear scheme
VAR_24[1] = "" # clear netloc
VAR_25 = urlunsplit(VAR_24)
if not VAR_25:
safe_next_path = "./"
return VAR_25
| [
7,
17,
20,
21,
31,
33,
34,
38,
41,
43,
44,
50,
52,
58,
62,
63,
77,
81,
82,
88,
89,
94,
97,
101,
109,
111,
112,
116,
118,
119,
139,
141,
142,
145,
148,
154,
156,
157,
165,
167,
168,
171,
173,
180,
191,
194,
199,
201,
202,
213,
215,
216,
225,
227,
229,
230,
233,
240,
242,
243,
251,
255,
260,
266,
269,
270,
295,
297,
299,
300,
304,
305,
310,
311,
312,
313,
316,
318
] | [
7,
17,
20,
21,
31,
33,
34,
38,
41,
43,
44,
50,
52,
58,
62,
63,
77,
81,
82,
88,
89,
94,
97,
101,
109,
111,
112,
116,
118,
119,
139,
141,
142,
145,
148,
154,
156,
157,
165,
167,
168,
171,
173,
180,
191,
194,
199,
201,
202,
213,
215,
216,
225,
227,
229,
230,
233,
240,
242,
243,
250,
252,
256,
261,
263,
264,
268,
271,
272,
297,
299,
301,
302,
306,
307,
312,
313,
314,
315,
318,
320
] |
1CWE-79
| import logging
from django.template.response import TemplateResponse
from django.utils.safestring import mark_safe
import orjson
from bs4 import BeautifulSoup
from bs4.element import Tag
from bs4.formatter import HTMLFormatter
from django_unicorn.utils import sanitize_html
from ..decorators import timed
from ..utils import generate_checksum
logger = logging.getLogger(__name__)
class UnsortedAttributes(HTMLFormatter):
"""
Prevent beautifulsoup from re-ordering attributes.
"""
def attributes(self, tag: Tag):
for k, v in tag.attrs.items():
yield k, v
class UnicornTemplateResponse(TemplateResponse):
def __init__(
self,
template,
request,
context=None,
content_type=None,
status=None,
charset=None,
using=None,
component=None,
init_js=False,
**kwargs,
):
super().__init__(
template=template,
request=request,
context=context,
content_type=content_type,
status=status,
charset=charset,
using=using,
)
self.component = component
self.init_js = init_js
@timed
def render(self):
response = super().render()
if not self.component or not self.component.component_id:
return response
content = response.content.decode("utf-8")
frontend_context_variables = self.component.get_frontend_context_variables()
frontend_context_variables_dict = orjson.loads(frontend_context_variables)
checksum = generate_checksum(orjson.dumps(frontend_context_variables_dict))
soup = BeautifulSoup(content, features="html.parser")
root_element = get_root_element(soup)
root_element["unicorn:id"] = self.component.component_id
root_element["unicorn:name"] = self.component.component_name
root_element["unicorn:key"] = self.component.component_key
root_element["unicorn:checksum"] = checksum
# Generate the hash based on the rendered content (without script tag)
hash = generate_checksum(UnicornTemplateResponse._desoupify(soup))
if self.init_js:
init = {
"id": self.component.component_id,
"name": self.component.component_name,
"key": self.component.component_key,
"data": orjson.loads(frontend_context_variables),
"calls": self.component.calls,
"hash": hash,
}
init = orjson.dumps(init).decode("utf-8")
json_element_id = f"unicorn:data:{self.component.component_id}"
init_script = f"Unicorn.componentInit(JSON.parse(document.getElementById('{json_element_id}').textContent));"
json_tag = soup.new_tag("script")
json_tag["type"] = "application/json"
json_tag["id"] = json_element_id
json_tag.string = sanitize_html(init)
if self.component.parent:
self.component._init_script = init_script
self.component._json_tag = json_tag
else:
json_tags = []
json_tags.append(json_tag)
for child in self.component.children:
init_script = f"{init_script} {child._init_script}"
json_tags.append(child._json_tag)
script_tag = soup.new_tag("script")
script_tag["type"] = "module"
script_tag.string = f"if (typeof Unicorn === 'undefined') {{ console.error('Unicorn is missing. Do you need {{% load unicorn %}} or {{% unicorn_scripts %}}?') }} else {{ {init_script} }}"
root_element.insert_after(script_tag)
for t in json_tags:
root_element.insert_after(t)
rendered_template = UnicornTemplateResponse._desoupify(soup)
rendered_template = mark_safe(rendered_template)
self.component.rendered(rendered_template)
response.content = rendered_template
return response
@staticmethod
def _desoupify(soup):
soup.smooth()
return soup.encode(formatter=UnsortedAttributes()).decode("utf-8")
def get_root_element(soup: BeautifulSoup) -> Tag:
"""
Gets the first tag element.
Returns:
BeautifulSoup tag element.
Raises an Exception if an element cannot be found.
"""
for element in soup.contents:
if isinstance(element, Tag) and element.name:
return element
raise Exception("No root element found")
| import logging
from django.template.response import TemplateResponse
import orjson
from bs4 import BeautifulSoup
from bs4.dammit import EntitySubstitution
from bs4.element import Tag
from bs4.formatter import HTMLFormatter
from django_unicorn.utils import sanitize_html
from ..decorators import timed
from ..utils import generate_checksum
logger = logging.getLogger(__name__)
class UnsortedAttributes(HTMLFormatter):
"""
Prevent beautifulsoup from re-ordering attributes.
"""
def __init__(self):
super().__init__(entity_substitution=EntitySubstitution.substitute_html)
def attributes(self, tag: Tag):
for k, v in tag.attrs.items():
yield k, v
class UnicornTemplateResponse(TemplateResponse):
def __init__(
self,
template,
request,
context=None,
content_type=None,
status=None,
charset=None,
using=None,
component=None,
init_js=False,
**kwargs,
):
super().__init__(
template=template,
request=request,
context=context,
content_type=content_type,
status=status,
charset=charset,
using=using,
)
self.component = component
self.init_js = init_js
@timed
def render(self):
response = super().render()
if not self.component or not self.component.component_id:
return response
content = response.content.decode("utf-8")
frontend_context_variables = self.component.get_frontend_context_variables()
frontend_context_variables_dict = orjson.loads(frontend_context_variables)
checksum = generate_checksum(orjson.dumps(frontend_context_variables_dict))
soup = BeautifulSoup(content, features="html.parser")
root_element = get_root_element(soup)
root_element["unicorn:id"] = self.component.component_id
root_element["unicorn:name"] = self.component.component_name
root_element["unicorn:key"] = self.component.component_key
root_element["unicorn:checksum"] = checksum
# Generate the hash based on the rendered content (without script tag)
hash = generate_checksum(UnicornTemplateResponse._desoupify(soup))
if self.init_js:
init = {
"id": self.component.component_id,
"name": self.component.component_name,
"key": self.component.component_key,
"data": orjson.loads(frontend_context_variables),
"calls": self.component.calls,
"hash": hash,
}
init = orjson.dumps(init).decode("utf-8")
json_element_id = f"unicorn:data:{self.component.component_id}"
init_script = f"Unicorn.componentInit(JSON.parse(document.getElementById('{json_element_id}').textContent));"
json_tag = soup.new_tag("script")
json_tag["type"] = "application/json"
json_tag["id"] = json_element_id
json_tag.string = sanitize_html(init)
if self.component.parent:
self.component._init_script = init_script
self.component._json_tag = json_tag
else:
json_tags = []
json_tags.append(json_tag)
for child in self.component.children:
init_script = f"{init_script} {child._init_script}"
json_tags.append(child._json_tag)
script_tag = soup.new_tag("script")
script_tag["type"] = "module"
script_tag.string = f"if (typeof Unicorn === 'undefined') {{ console.error('Unicorn is missing. Do you need {{% load unicorn %}} or {{% unicorn_scripts %}}?') }} else {{ {init_script} }}"
root_element.insert_after(script_tag)
for t in json_tags:
root_element.insert_after(t)
rendered_template = UnicornTemplateResponse._desoupify(soup)
self.component.rendered(rendered_template)
response.content = rendered_template
return response
@staticmethod
def _desoupify(soup):
soup.smooth()
return soup.encode(formatter=UnsortedAttributes()).decode("utf-8")
def get_root_element(soup: BeautifulSoup) -> Tag:
"""
Gets the first tag element.
Returns:
BeautifulSoup tag element.
Raises an Exception if an element cannot be found.
"""
for element in soup.contents:
if isinstance(element, Tag) and element.name:
return element
raise Exception("No root element found")
| xss | {
"code": [
"from django.utils.safestring import mark_safe",
" rendered_template = mark_safe(rendered_template)"
],
"line_no": [
4,
118
]
} | {
"code": [
"from bs4.dammit import EntitySubstitution",
" def __init__(self):",
" super().__init__(entity_substitution=EntitySubstitution.substitute_html)"
],
"line_no": [
7,
25,
26
]
} | import logging
from django.template.response import TemplateResponse
from django.utils.safestring import mark_safe
import orjson
from bs4 import BeautifulSoup
from bs4.element import Tag
from bs4.formatter import HTMLFormatter
from django_unicorn.utils import sanitize_html
from ..decorators import timed
from ..utils import generate_checksum
VAR_0 = logging.getLogger(__name__)
class CLASS_0(HTMLFormatter):
def FUNC_1(self, VAR_2: Tag):
for k, v in VAR_2.attrs.items():
yield k, v
class CLASS_1(TemplateResponse):
def __init__(
self,
VAR_3,
VAR_4,
VAR_5=None,
VAR_6=None,
VAR_7=None,
VAR_8=None,
VAR_9=None,
VAR_10=None,
VAR_11=False,
**VAR_12,
):
super().__init__(
VAR_3=template,
VAR_4=request,
VAR_5=context,
VAR_6=content_type,
VAR_7=status,
VAR_8=charset,
VAR_9=using,
)
self.component = VAR_10
self.init_js = VAR_11
@timed
def FUNC_2(self):
VAR_13 = super().render()
if not self.component or not self.component.component_id:
return VAR_13
VAR_14 = VAR_13.content.decode("utf-8")
VAR_15 = self.component.get_frontend_context_variables()
VAR_16 = orjson.loads(VAR_15)
VAR_17 = generate_checksum(orjson.dumps(VAR_16))
VAR_1 = BeautifulSoup(VAR_14, features="html.parser")
VAR_18 = FUNC_0(VAR_1)
VAR_18["unicorn:id"] = self.component.component_id
VAR_18["unicorn:name"] = self.component.component_name
VAR_18["unicorn:key"] = self.component.component_key
VAR_18["unicorn:checksum"] = VAR_17
VAR_19 = generate_checksum(CLASS_1._desoupify(VAR_1))
if self.init_js:
VAR_21 = {
"id": self.component.component_id,
"name": self.component.component_name,
"key": self.component.component_key,
"data": orjson.loads(VAR_15),
"calls": self.component.calls,
"hash": VAR_19,
}
VAR_21 = orjson.dumps(VAR_21).decode("utf-8")
VAR_22 = f"unicorn:data:{self.component.component_id}"
VAR_23 = f"Unicorn.componentInit(JSON.parse(document.getElementById('{VAR_22}').textContent));"
VAR_24 = VAR_1.new_tag("script")
VAR_24["type"] = "application/json"
VAR_24["id"] = VAR_22
VAR_24.string = sanitize_html(VAR_21)
if self.component.parent:
self.component._init_script = VAR_23
self.component._json_tag = VAR_24
else:
VAR_25 = []
json_tags.append(VAR_24)
for child in self.component.children:
VAR_23 = f"{VAR_23} {child._init_script}"
VAR_25.append(child._json_tag)
VAR_26 = VAR_1.new_tag("script")
VAR_26["type"] = "module"
VAR_26.string = f"if (typeof Unicorn === 'undefined') {{ console.error('Unicorn is missing. Do you need {{% load unicorn %}} or {{% unicorn_scripts %}}?') }} else {{ {VAR_23} }}"
VAR_18.insert_after(VAR_26)
for t in VAR_25:
VAR_18.insert_after(t)
VAR_20 = CLASS_1._desoupify(VAR_1)
VAR_20 = mark_safe(VAR_20)
self.component.rendered(VAR_20)
VAR_13.content = VAR_20
return VAR_13
@staticmethod
def FUNC_3(VAR_1):
VAR_1.smooth()
return VAR_1.encode(formatter=CLASS_0()).decode("utf-8")
def FUNC_0(VAR_1: BeautifulSoup) -> Tag:
for element in VAR_1.contents:
if isinstance(element, Tag) and element.name:
return element
raise Exception("No root element found")
| import logging
from django.template.response import TemplateResponse
import orjson
from bs4 import BeautifulSoup
from bs4.dammit import EntitySubstitution
from bs4.element import Tag
from bs4.formatter import HTMLFormatter
from django_unicorn.utils import sanitize_html
from ..decorators import timed
from ..utils import generate_checksum
VAR_0 = logging.getLogger(__name__)
class CLASS_0(HTMLFormatter):
def __init__(self):
super().__init__(entity_substitution=EntitySubstitution.substitute_html)
def FUNC_1(self, VAR_2: Tag):
for k, v in VAR_2.attrs.items():
yield k, v
class CLASS_1(TemplateResponse):
def __init__(
self,
VAR_3,
VAR_4,
VAR_5=None,
VAR_6=None,
VAR_7=None,
VAR_8=None,
VAR_9=None,
VAR_10=None,
VAR_11=False,
**VAR_12,
):
super().__init__(
VAR_3=template,
VAR_4=request,
VAR_5=context,
VAR_6=content_type,
VAR_7=status,
VAR_8=charset,
VAR_9=using,
)
self.component = VAR_10
self.init_js = VAR_11
@timed
def FUNC_2(self):
VAR_13 = super().render()
if not self.component or not self.component.component_id:
return VAR_13
VAR_14 = VAR_13.content.decode("utf-8")
VAR_15 = self.component.get_frontend_context_variables()
VAR_16 = orjson.loads(VAR_15)
VAR_17 = generate_checksum(orjson.dumps(VAR_16))
VAR_1 = BeautifulSoup(VAR_14, features="html.parser")
VAR_18 = FUNC_0(VAR_1)
VAR_18["unicorn:id"] = self.component.component_id
VAR_18["unicorn:name"] = self.component.component_name
VAR_18["unicorn:key"] = self.component.component_key
VAR_18["unicorn:checksum"] = VAR_17
VAR_19 = generate_checksum(CLASS_1._desoupify(VAR_1))
if self.init_js:
VAR_21 = {
"id": self.component.component_id,
"name": self.component.component_name,
"key": self.component.component_key,
"data": orjson.loads(VAR_15),
"calls": self.component.calls,
"hash": VAR_19,
}
VAR_21 = orjson.dumps(VAR_21).decode("utf-8")
VAR_22 = f"unicorn:data:{self.component.component_id}"
VAR_23 = f"Unicorn.componentInit(JSON.parse(document.getElementById('{VAR_22}').textContent));"
VAR_24 = VAR_1.new_tag("script")
VAR_24["type"] = "application/json"
VAR_24["id"] = VAR_22
VAR_24.string = sanitize_html(VAR_21)
if self.component.parent:
self.component._init_script = VAR_23
self.component._json_tag = VAR_24
else:
VAR_25 = []
json_tags.append(VAR_24)
for child in self.component.children:
VAR_23 = f"{VAR_23} {child._init_script}"
VAR_25.append(child._json_tag)
VAR_26 = VAR_1.new_tag("script")
VAR_26["type"] = "module"
VAR_26.string = f"if (typeof Unicorn === 'undefined') {{ console.error('Unicorn is missing. Do you need {{% load unicorn %}} or {{% unicorn_scripts %}}?') }} else {{ {VAR_23} }}"
VAR_18.insert_after(VAR_26)
for t in VAR_25:
VAR_18.insert_after(t)
VAR_20 = CLASS_1._desoupify(VAR_1)
self.component.rendered(VAR_20)
VAR_13.content = VAR_20
return VAR_13
@staticmethod
def FUNC_3(VAR_1):
VAR_1.smooth()
return VAR_1.encode(formatter=CLASS_0()).decode("utf-8")
def FUNC_0(VAR_1: BeautifulSoup) -> Tag:
for element in VAR_1.contents:
if isinstance(element, Tag) and element.name:
return element
raise Exception("No root element found")
| [
2,
5,
10,
12,
15,
16,
18,
19,
24,
28,
29,
53,
56,
60,
63,
65,
69,
76,
77,
79,
92,
97,
104,
108,
113,
116,
120,
122,
124,
129,
130,
134,
137,
143,
145,
21,
22,
23,
132,
133,
134,
135,
136,
137,
138,
139
] | [
2,
4,
10,
12,
15,
16,
18,
19,
24,
27,
31,
32,
56,
59,
63,
66,
68,
72,
79,
80,
82,
95,
100,
107,
111,
116,
119,
122,
124,
126,
131,
132,
136,
139,
145,
147,
21,
22,
23,
134,
135,
136,
137,
138,
139,
140,
141
] |
0CWE-22
| import glob
import os
import sys
from collections import defaultdict
from functools import partial as curry
from . import (
biblio,
boilerplate,
caniuse,
conditional,
config,
constants,
datablocks,
dfns,
extensions,
fingerprinting,
h,
headings,
highlight,
idl,
includes,
inlineTags,
lint,
markdown,
mdnspeclinks,
metadata,
shorthands,
wpt,
)
from .func import Functor
from .h import *
from .InputSource import FileInputSource, InputSource
from .messages import *
from .refs import ReferenceManager
from .unsortedJunk import *
class Spec:
def __init__(
self,
inputFilename,
debug=False,
token=None,
lineNumbers=False,
fileRequester=None,
testing=False,
):
self.valid = False
self.lineNumbers = lineNumbers
if lineNumbers:
# line-numbers are too hacky, so force this to be a dry run
constants.dryRun = True
if inputFilename is None:
inputFilename = findImplicitInputFile()
if inputFilename is None: # still
die(
"No input file specified, and no *.bs or *.src.html files found in current directory.\nPlease specify an input file, or use - to pipe from STDIN."
)
return
self.inputSource = InputSource(inputFilename)
self.transitiveDependencies = set()
self.debug = debug
self.token = token
self.testing = testing
if fileRequester is None:
self.dataFile = config.defaultRequester
else:
self.dataFile = fileRequester
self.md = None
self.mdBaseline = None
self.mdDocument = None
self.mdCommandLine = None
self.mdDefaults = None
self.mdOverridingDefaults = None
self.lines = []
self.document = None
self.html = None
self.head = None
self.body = None
self.fillContainers = None
self.valid = self.initializeState()
def initializeState(self):
self.normativeRefs = {}
self.informativeRefs = {}
self.refs = ReferenceManager(fileRequester=self.dataFile, testing=self.testing)
self.externalRefsUsed = defaultdict(lambda: defaultdict(dict))
self.md = None
self.mdBaseline = metadata.MetadataManager()
self.mdDocument = None
self.mdCommandLine = metadata.MetadataManager()
self.mdDefaults = None
self.mdOverridingDefaults = None
self.biblios = {}
self.typeExpansions = {}
self.macros = defaultdict(lambda x: "???")
self.canIUse = {}
self.mdnSpecLinks = {}
self.widl = idl.getParser()
self.testSuites = json.loads(self.dataFile.fetch("test-suites.json", str=True))
self.languages = json.loads(self.dataFile.fetch("languages.json", str=True))
self.extraStyles = defaultdict(str)
self.extraStyles["style-colors"] = styleColors
self.extraStyles["style-darkmode"] = styleDarkMode
self.extraStyles["style-md-lists"] = styleMdLists
self.extraStyles["style-autolinks"] = styleAutolinks
self.extraStyles["style-selflinks"] = styleSelflinks
self.extraStyles["style-counters"] = styleCounters
self.extraScripts = defaultdict(str)
try:
inputContent = self.inputSource.read()
self.lines = inputContent.lines
if inputContent.date is not None:
self.mdBaseline.addParsedData("Date", inputContent.date)
except FileNotFoundError:
die(
"Couldn't find the input file at the specified location '{0}'.",
self.inputSource,
)
return False
except OSError:
die("Couldn't open the input file '{0}'.", self.inputSource)
return False
return True
def recordDependencies(self, *inputSources):
self.transitiveDependencies.update(inputSources)
def preprocess(self):
self.transitiveDependencies.clear()
self.assembleDocument()
self.processDocument()
def assembleDocument(self):
# Textual hacks
stripBOM(self)
if self.lineNumbers:
self.lines = hackyLineNumbers(self.lines)
self.lines = markdown.stripComments(self.lines)
self.recordDependencies(self.inputSource)
# Extract and process metadata
self.lines, self.mdDocument = metadata.parse(lines=self.lines)
# First load the metadata sources from 'local' data
self.md = metadata.join(self.mdBaseline, self.mdDocument, self.mdCommandLine)
# Using that to determine the Group and Status, load the correct defaults.include boilerplate
self.mdDefaults = metadata.fromJson(
data=config.retrieveBoilerplateFile(self, "defaults", error=True),
source="defaults",
)
self.md = metadata.join(
self.mdBaseline, self.mdDefaults, self.mdDocument, self.mdCommandLine
)
# Using all of that, load up the text macros so I can sub them into the computed-metadata file.
self.md.fillTextMacros(self.macros, doc=self)
jsonEscapedMacros = {k: json.dumps(v)[1:-1] for k, v in self.macros.items()}
computedMdText = replaceMacros(
config.retrieveBoilerplateFile(self, "computed-metadata", error=True),
macros=jsonEscapedMacros,
)
self.mdOverridingDefaults = metadata.fromJson(
data=computedMdText, source="computed-metadata"
)
self.md = metadata.join(
self.mdBaseline,
self.mdDefaults,
self.mdOverridingDefaults,
self.mdDocument,
self.mdCommandLine,
)
# Finally, compute the "implicit" things.
self.md.computeImplicitMetadata(doc=self)
# And compute macros again, in case the preceding steps changed them.
self.md.fillTextMacros(self.macros, doc=self)
self.md.validate()
extensions.load(self)
# Initialize things
self.refs.initializeRefs(self)
self.refs.initializeBiblio()
# Deal with further <pre> blocks, and markdown
self.lines = datablocks.transformDataBlocks(self, self.lines)
self.lines = markdown.parse(
self.lines,
self.md.indent,
opaqueElements=self.md.opaqueElements,
blockElements=self.md.blockElements,
)
# Note that, currently, markdown.parse returns an array of strings, not of Line objects.
self.refs.setSpecData(self.md)
# Convert to a single string of html now, for convenience.
self.html = "".join(line.text for line in self.lines)
boilerplate.addHeaderFooter(self)
self.html = self.fixText(self.html)
# Build the document
self.document = parseDocument(self.html)
self.head = find("head", self)
self.body = find("body", self)
correctH1(self)
includes.processInclusions(self)
metadata.parseDoc(self)
def processDocument(self):
# Fill in and clean up a bunch of data
conditional.processConditionals(self)
self.fillContainers = locateFillContainers(self)
lint.exampleIDs(self)
boilerplate.addBikeshedVersion(self)
boilerplate.addCanonicalURL(self)
boilerplate.addFavicon(self)
boilerplate.addSpecVersion(self)
boilerplate.addStatusSection(self)
boilerplate.addLogo(self)
boilerplate.addCopyright(self)
boilerplate.addSpecMetadataSection(self)
boilerplate.addAbstract(self)
boilerplate.addExpiryNotice(self)
boilerplate.addObsoletionNotice(self)
boilerplate.addAtRisk(self)
addNoteHeaders(self)
boilerplate.removeUnwantedBoilerplate(self)
wpt.processWptElements(self)
shorthands.run(self)
inlineTags.processTags(self)
canonicalizeShortcuts(self)
addImplicitAlgorithms(self)
fixManualDefTables(self)
headings.processHeadings(self)
checkVarHygiene(self)
processIssuesAndExamples(self)
idl.markupIDL(self)
inlineRemoteIssues(self)
addImageSize(self)
# Handle all the links
processBiblioLinks(self)
processDfns(self)
idl.processIDL(self)
dfns.annotateDfns(self)
formatArgumentdefTables(self)
formatElementdefTables(self)
processAutolinks(self)
biblio.dedupBiblioReferences(self)
verifyUsageOfAllLocalBiblios(self)
caniuse.addCanIUsePanels(self)
boilerplate.addIndexSection(self)
boilerplate.addExplicitIndexes(self)
boilerplate.addStyles(self)
boilerplate.addReferencesSection(self)
boilerplate.addPropertyIndex(self)
boilerplate.addIDLSection(self)
boilerplate.addIssuesSection(self)
boilerplate.addCustomBoilerplate(self)
headings.processHeadings(self, "all") # again
boilerplate.removeUnwantedBoilerplate(self)
boilerplate.addTOCSection(self)
addSelfLinks(self)
processAutolinks(self)
boilerplate.addAnnotations(self)
boilerplate.removeUnwantedBoilerplate(self)
# Add MDN panels after all IDs/anchors have been added
mdnspeclinks.addMdnPanels(self)
highlight.addSyntaxHighlighting(self)
boilerplate.addBikeshedBoilerplate(self)
fingerprinting.addTrackingVector(self)
fixIntraDocumentReferences(self)
fixInterDocumentReferences(self)
removeMultipleLinks(self)
forceCrossorigin(self)
lint.brokenLinks(self)
lint.accidental2119(self)
lint.missingExposed(self)
lint.requiredIDs(self)
lint.unusedInternalDfns(self)
# Any final HTML cleanups
cleanupHTML(self)
if self.md.prepTR:
# Don't try and override the W3C's icon.
for el in findAll("[rel ~= 'icon']", self):
removeNode(el)
# Make sure the W3C stylesheet is after all other styles.
for el in findAll("link", self):
if el.get("href").startswith("https://www.w3.org/StyleSheets/TR"):
appendChild(find("head", self), el)
# Ensure that all W3C links are https.
for el in findAll("a", self):
href = el.get("href", "")
if href.startswith("http://www.w3.org") or href.startswith(
"http://lists.w3.org"
):
el.set("href", "https" + href[4:])
text = el.text or ""
if text.startswith("http://www.w3.org") or text.startswith(
"http://lists.w3.org"
):
el.text = "https" + text[4:]
# Loaded from .include files
extensions.BSPrepTR(self) # pylint: disable=no-member
return self
def serialize(self):
try:
rendered = h.Serializer(
self.md.opaqueElements, self.md.blockElements
).serialize(self.document)
except Exception as e:
die("{0}", e)
return
rendered = finalHackyCleanup(rendered)
return rendered
def fixMissingOutputFilename(self, outputFilename):
if outputFilename is None:
# More sensible defaults!
if not isinstance(self.inputSource, FileInputSource):
outputFilename = "-"
elif self.inputSource.sourceName.endswith(".bs"):
outputFilename = self.inputSource.sourceName[0:-3] + ".html"
elif self.inputSource.sourceName.endswith(".src.html"):
outputFilename = self.inputSource.sourceName[0:-9] + ".html"
else:
outputFilename = "-"
return outputFilename
def finish(self, outputFilename=None, newline=None):
self.printResultMessage()
outputFilename = self.fixMissingOutputFilename(outputFilename)
rendered = self.serialize()
if not constants.dryRun:
try:
if outputFilename == "-":
sys.stdout.write(rendered)
else:
with open(
outputFilename, "w", encoding="utf-8", newline=newline
) as f:
f.write(rendered)
except Exception as e:
die(
"Something prevented me from saving the output document to {0}:\n{1}",
outputFilename,
e,
)
def printResultMessage(self):
# If I reach this point, I've succeeded, but maybe with reservations.
fatals = messageCounts["fatal"]
links = messageCounts["linkerror"]
warnings = messageCounts["warning"]
if self.lineNumbers:
warn("Because --line-numbers was used, no output was saved.")
if fatals:
success("Successfully generated, but fatal errors were suppressed")
return
if links:
success("Successfully generated, with {0} linking errors", links)
return
if warnings:
success("Successfully generated, with warnings")
return
def watch(self, outputFilename, port=None, localhost=False):
import time
outputFilename = self.fixMissingOutputFilename(outputFilename)
if self.inputSource.mtime() is None:
die(f"Watch mode doesn't support {self.inputSource}")
if outputFilename == "-":
die("Watch mode doesn't support streaming to STDOUT.")
return
if port:
# Serve the folder on an HTTP server
import http.server
import socketserver
import threading
class SilentServer(http.server.SimpleHTTPRequestHandler):
def log_message(self, format, *args):
pass
socketserver.TCPServer.allow_reuse_address = True
server = socketserver.TCPServer(
("localhost" if localhost else "", port), SilentServer
)
print(f"Serving at port {port}")
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
else:
server = None
mdCommandLine = self.mdCommandLine
try:
self.preprocess()
self.finish(outputFilename)
lastInputModified = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
try:
while True:
# Comparing mtimes with "!=" handles when a file starts or
# stops existing, and it's fine to rebuild if an mtime
# somehow gets older.
if any(
input.mtime() != lastModified
for input, lastModified in lastInputModified.items()
):
resetSeenMessages()
p("Source file modified. Rebuilding...")
self.initializeState()
self.mdCommandLine = mdCommandLine
self.preprocess()
self.finish(outputFilename)
lastInputModified = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
time.sleep(1)
except KeyboardInterrupt:
p("Exiting~")
if server:
server.shutdown()
thread.join()
sys.exit(0)
except Exception as e:
die("Something went wrong while watching the file:\n{0}", e)
def fixText(self, text, moreMacros={}):
# Do several textual replacements that need to happen *before* the document is parsed as h.
# If markdown shorthands are on, remove all `foo`s while processing,
# so their contents don't accidentally trigger other stuff.
# Also handle markdown escapes.
if "markdown" in self.md.markupShorthands:
textFunctor = MarkdownCodeSpans(text)
else:
textFunctor = Functor(text)
macros = dict(self.macros, **moreMacros)
textFunctor = textFunctor.map(curry(replaceMacros, macros=macros))
textFunctor = textFunctor.map(fixTypography)
if "css" in self.md.markupShorthands:
textFunctor = textFunctor.map(replaceAwkwardCSSShorthands)
return textFunctor.extract()
def printTargets(self):
p("Exported terms:")
for el in findAll("[data-export]", self):
for term in config.linkTextsFromElement(el):
p(" " + term)
p("Unexported terms:")
for el in findAll("[data-noexport]", self):
for term in config.linkTextsFromElement(el):
p(" " + term)
def isOpaqueElement(self, el):
if el.tag in self.md.opaqueElements:
return True
if el.get("data-opaque") is not None:
return True
return False
def findImplicitInputFile():
"""
Find what input file the user *probably* wants to use,
by scanning the current folder.
In preference order:
1. index.bs
2. Overview.bs
3. the first file with a .bs extension
4. the first file with a .src.html extension
"""
if os.path.isfile("index.bs"):
return "index.bs"
if os.path.isfile("Overview.bs"):
return "Overview.bs"
allBs = glob.glob("*.bs")
if allBs:
return allBs[0]
allHtml = glob.glob("*.src.html")
if allHtml:
return allHtml[0]
return None
constants.specClass = Spec
styleColors = """
/* Any --*-text not paired with a --*-bg is assumed to have a transparent bg */
:root {
color-scheme: light dark;
--text: black;
--bg: white;
--unofficial-watermark: url(https://www.w3.org/StyleSheets/TR/2016/logos/UD-watermark);
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-text: white;
--tocnav-normal-text: #707070;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-text: var(--tocnav-normal-text);
--tocnav-hover-bg: #f8f8f8;
--tocnav-active-text: #c00;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-text: var(--text);
--tocsidebar-bg: #f7f8f9;
--tocsidebar-shadow: rgba(0,0,0,.1);
--tocsidebar-heading-text: hsla(203,20%,40%,.7);
--toclink-text: var(--text);
--toclink-underline: #3980b5;
--toclink-visited-text: var(--toclink-text);
--toclink-visited-underline: #054572;
--heading-text: #005a9c;
--hr-text: var(--text);
--algo-border: #def;
--del-text: red;
--del-bg: transparent;
--ins-text: #080;
--ins-bg: transparent;
--a-normal-text: #034575;
--a-normal-underline: #bbb;
--a-visited-text: var(--a-normal-text);
--a-visited-underline: #707070;
--a-hover-bg: rgba(75%, 75%, 75%, .25);
--a-active-text: #c00;
--a-active-underline: #c00;
--blockquote-border: silver;
--blockquote-bg: transparent;
--blockquote-text: currentcolor;
--issue-border: #e05252;
--issue-bg: #fbe9e9;
--issue-text: var(--text);
--issueheading-text: #831616;
--example-border: #e0cb52;
--example-bg: #fcfaee;
--example-text: var(--text);
--exampleheading-text: #574b0f;
--note-border: #52e052;
--note-bg: #e9fbe9;
--note-text: var(--text);
--noteheading-text: hsl(120, 70%, 30%);
--notesummary-underline: silver;
--assertion-border: #aaa;
--assertion-bg: #eee;
--assertion-text: black;
--advisement-border: orange;
--advisement-bg: #fec;
--advisement-text: var(--text);
--advisementheading-text: #b35f00;
--warning-border: red;
--warning-bg: hsla(40,100%,50%,0.95);
--warning-text: var(--text);
--amendment-border: #330099;
--amendment-bg: #F5F0FF;
--amendment-text: var(--text);
--amendmentheading-text: #220066;
--def-border: #8ccbf2;
--def-bg: #def;
--def-text: var(--text);
--defrow-border: #bbd7e9;
--datacell-border: silver;
--indexinfo-text: #707070;
--indextable-hover-text: black;
--indextable-hover-bg: #f7f8f9;
--outdatedspec-bg: rgba(0, 0, 0, .5);
--outdatedspec-text: black;
--outdated-bg: maroon;
--outdated-text: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}"""
styleDarkMode = """
@media (prefers-color-scheme: dark) {
:root {
--text: #ddd;
--bg: black;
--unofficial-watermark: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='400' height='400'%3E%3Cg fill='%23100808' transform='translate(200 200) rotate(-45) translate(-200 -200)' stroke='%23100808' stroke-width='3'%3E%3Ctext x='50%25' y='220' style='font: bold 70px sans-serif; text-anchor: middle; letter-spacing: 6px;'%3EUNOFFICIAL%3C/text%3E%3Ctext x='50%25' y='305' style='font: bold 70px sans-serif; text-anchor: middle; letter-spacing: 6px;'%3EDRAFT%3C/text%3E%3C/g%3E%3C/svg%3E");
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-text: white;
--tocnav-normal-text: #999;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-text: var(--tocnav-normal-text);
--tocnav-hover-bg: #080808;
--tocnav-active-text: #f44;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-text: var(--text);
--tocsidebar-bg: #080808;
--tocsidebar-shadow: rgba(255,255,255,.1);
--tocsidebar-heading-text: hsla(203,20%,40%,.7);
--toclink-text: var(--text);
--toclink-underline: #6af;
--toclink-visited-text: var(--toclink-text);
--toclink-visited-underline: #054572;
--heading-text: #8af;
--hr-text: var(--text);
--algo-border: #456;
--del-text: #f44;
--del-bg: transparent;
--ins-text: #4a4;
--ins-bg: transparent;
--a-normal-text: #6af;
--a-normal-underline: #555;
--a-visited-text: var(--a-normal-text);
--a-visited-underline: var(--a-normal-underline);
--a-hover-bg: rgba(25%, 25%, 25%, .2);
--a-active-text: #f44;
--a-active-underline: var(--a-active-text);
--borderedblock-bg: rgba(255, 255, 255, .05);
--blockquote-border: silver;
--blockquote-bg: var(--borderedblock-bg);
--blockquote-text: currentcolor;
--issue-border: #e05252;
--issue-bg: var(--borderedblock-bg);
--issue-text: var(--text);
--issueheading-text: hsl(0deg, 70%, 70%);
--example-border: hsl(50deg, 90%, 60%);
--example-bg: var(--borderedblock-bg);
--example-text: var(--text);
--exampleheading-text: hsl(50deg, 70%, 70%);
--note-border: hsl(120deg, 100%, 35%);
--note-bg: var(--borderedblock-bg);
--note-text: var(--text);
--noteheading-text: hsl(120, 70%, 70%);
--notesummary-underline: silver;
--assertion-border: #444;
--assertion-bg: var(--borderedblock-bg);
--assertion-text: var(--text);
--advisement-border: orange;
--advisement-bg: #222218;
--advisement-text: var(--text);
--advisementheading-text: #f84;
--warning-border: red;
--warning-bg: hsla(40,100%,20%,0.95);
--warning-text: var(--text);
--amendment-border: #330099;
--amendment-bg: #080010;
--amendment-text: var(--text);
--amendmentheading-text: #cc00ff;
--def-border: #8ccbf2;
--def-bg: #080818;
--def-text: var(--text);
--defrow-border: #136;
--datacell-border: silver;
--indexinfo-text: #aaa;
--indextable-hover-text: var(--text);
--indextable-hover-bg: #181818;
--outdatedspec-bg: rgba(255, 255, 255, .5);
--outdatedspec-text: black;
--outdated-bg: maroon;
--outdated-text: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}
/* In case a transparent-bg image doesn't expect to be on a dark bg,
which is quite common in practice... */
img { background: white; }
}"""
styleMdLists = """
/* This is a weird hack for me not yet following the commonmark spec
regarding paragraph and lists. */
[data-md] > :first-child {
margin-top: 0;
}
[data-md] > :last-child {
margin-bottom: 0;
}"""
styleAutolinks = """
.css.css, .property.property, .descriptor.descriptor {
color: var(--a-normal-text);
font-size: inherit;
font-family: inherit;
}
.css::before, .property::before, .descriptor::before {
content: "‘";
}
.css::after, .property::after, .descriptor::after {
content: "’";
}
.property, .descriptor {
/* Don't wrap property and descriptor names */
white-space: nowrap;
}
.type { /* CSS value <type> */
font-style: italic;
}
pre .property::before, pre .property::after {
content: "";
}
[data-link-type="property"]::before,
[data-link-type="propdesc"]::before,
[data-link-type="descriptor"]::before,
[data-link-type="value"]::before,
[data-link-type="function"]::before,
[data-link-type="at-rule"]::before,
[data-link-type="selector"]::before,
[data-link-type="maybe"]::before {
content: "‘";
}
[data-link-type="property"]::after,
[data-link-type="propdesc"]::after,
[data-link-type="descriptor"]::after,
[data-link-type="value"]::after,
[data-link-type="function"]::after,
[data-link-type="at-rule"]::after,
[data-link-type="selector"]::after,
[data-link-type="maybe"]::after {
content: "’";
}
[data-link-type].production::before,
[data-link-type].production::after,
.prod [data-link-type]::before,
.prod [data-link-type]::after {
content: "";
}
[data-link-type=element],
[data-link-type=element-attr] {
font-family: Menlo, Consolas, "DejaVu Sans Mono", monospace;
font-size: .9em;
}
[data-link-type=element]::before { content: "<" }
[data-link-type=element]::after { content: ">" }
[data-link-type=biblio] {
white-space: pre;
}"""
styleSelflinks = """
:root {
--selflink-text: white;
--selflink-bg: gray;
--selflink-hover-text: black;
}
.heading, .issue, .note, .example, li, dt {
position: relative;
}
a.self-link {
position: absolute;
top: 0;
left: calc(-1 * (3.5rem - 26px));
width: calc(3.5rem - 26px);
height: 2em;
text-align: center;
border: none;
transition: opacity .2s;
opacity: .5;
}
a.self-link:hover {
opacity: 1;
}
.heading > a.self-link {
font-size: 83%;
}
li > a.self-link {
left: calc(-1 * (3.5rem - 26px) - 2em);
}
dfn > a.self-link {
top: auto;
left: auto;
opacity: 0;
width: 1.5em;
height: 1.5em;
background: var(--selflink-bg);
color: var(--selflink-text);
font-style: normal;
transition: opacity .2s, background-color .2s, color .2s;
}
dfn:hover > a.self-link {
opacity: 1;
}
dfn > a.self-link:hover {
color: var(--selflink-hover-text);
}
a.self-link::before { content: "¶"; }
.heading > a.self-link::before { content: "§"; }
dfn > a.self-link::before { content: "#"; }
"""
styleDarkMode += """
@media (prefers-color-scheme: dark) {
:root {
--selflink-text: black;
--selflink-bg: silver;
--selflink-hover-text: white;
}
}
"""
styleCounters = """
body {
counter-reset: example figure issue;
}
.issue {
counter-increment: issue;
}
.issue:not(.no-marker)::before {
content: "Issue " counter(issue);
}
.example {
counter-increment: example;
}
.example:not(.no-marker)::before {
content: "Example " counter(example);
}
.invalid.example:not(.no-marker)::before,
.illegal.example:not(.no-marker)::before {
content: "Invalid Example" counter(example);
}
figcaption {
counter-increment: figure;
}
figcaption:not(.no-marker)::before {
content: "Figure " counter(figure) " ";
}"""
| import glob
import os
import sys
from collections import defaultdict
from functools import partial as curry
from . import (
biblio,
boilerplate,
caniuse,
conditional,
config,
constants,
datablocks,
dfns,
extensions,
fingerprinting,
h,
headings,
highlight,
idl,
includes,
inlineTags,
lint,
markdown,
mdnspeclinks,
metadata,
shorthands,
wpt,
)
from .func import Functor
from .h import *
from .InputSource import FileInputSource, InputSource
from .messages import *
from .refs import ReferenceManager
from .unsortedJunk import *
class Spec:
def __init__(
self,
inputFilename,
debug=False,
token=None,
lineNumbers=False,
fileRequester=None,
testing=False,
):
self.valid = False
self.lineNumbers = lineNumbers
if lineNumbers:
# line-numbers are too hacky, so force this to be a dry run
constants.dryRun = True
if inputFilename is None:
inputFilename = findImplicitInputFile()
if inputFilename is None: # still
die(
"No input file specified, and no *.bs or *.src.html files found in current directory.\nPlease specify an input file, or use - to pipe from STDIN."
)
return
self.inputSource = InputSource(inputFilename, chroot=constants.chroot)
self.transitiveDependencies = set()
self.debug = debug
self.token = token
self.testing = testing
if fileRequester is None:
self.dataFile = config.defaultRequester
else:
self.dataFile = fileRequester
self.md = None
self.mdBaseline = None
self.mdDocument = None
self.mdCommandLine = None
self.mdDefaults = None
self.mdOverridingDefaults = None
self.lines = []
self.document = None
self.html = None
self.head = None
self.body = None
self.fillContainers = None
self.valid = self.initializeState()
def initializeState(self):
self.normativeRefs = {}
self.informativeRefs = {}
self.refs = ReferenceManager(fileRequester=self.dataFile, testing=self.testing)
self.externalRefsUsed = defaultdict(lambda: defaultdict(dict))
self.md = None
self.mdBaseline = metadata.MetadataManager()
self.mdDocument = None
self.mdCommandLine = metadata.MetadataManager()
self.mdDefaults = None
self.mdOverridingDefaults = None
self.biblios = {}
self.typeExpansions = {}
self.macros = defaultdict(lambda x: "???")
self.canIUse = {}
self.mdnSpecLinks = {}
self.widl = idl.getParser()
self.testSuites = json.loads(self.dataFile.fetch("test-suites.json", str=True))
self.languages = json.loads(self.dataFile.fetch("languages.json", str=True))
self.extraStyles = defaultdict(str)
self.extraStyles["style-colors"] = styleColors
self.extraStyles["style-darkmode"] = styleDarkMode
self.extraStyles["style-md-lists"] = styleMdLists
self.extraStyles["style-autolinks"] = styleAutolinks
self.extraStyles["style-selflinks"] = styleSelflinks
self.extraStyles["style-counters"] = styleCounters
self.extraScripts = defaultdict(str)
try:
inputContent = self.inputSource.read()
self.lines = inputContent.lines
if inputContent.date is not None:
self.mdBaseline.addParsedData("Date", inputContent.date)
except FileNotFoundError:
die(
"Couldn't find the input file at the specified location '{0}'.",
self.inputSource,
)
return False
except OSError:
die("Couldn't open the input file '{0}'.", self.inputSource)
return False
return True
def recordDependencies(self, *inputSources):
self.transitiveDependencies.update(inputSources)
def preprocess(self):
self.transitiveDependencies.clear()
self.assembleDocument()
self.processDocument()
def assembleDocument(self):
# Textual hacks
stripBOM(self)
if self.lineNumbers:
self.lines = hackyLineNumbers(self.lines)
self.lines = markdown.stripComments(self.lines)
self.recordDependencies(self.inputSource)
# Extract and process metadata
self.lines, self.mdDocument = metadata.parse(lines=self.lines)
# First load the metadata sources from 'local' data
self.md = metadata.join(self.mdBaseline, self.mdDocument, self.mdCommandLine)
# Using that to determine the Group and Status, load the correct defaults.include boilerplate
self.mdDefaults = metadata.fromJson(
data=config.retrieveBoilerplateFile(self, "defaults", error=True),
source="defaults",
)
self.md = metadata.join(
self.mdBaseline, self.mdDefaults, self.mdDocument, self.mdCommandLine
)
# Using all of that, load up the text macros so I can sub them into the computed-metadata file.
self.md.fillTextMacros(self.macros, doc=self)
jsonEscapedMacros = {k: json.dumps(v)[1:-1] for k, v in self.macros.items()}
computedMdText = replaceMacros(
config.retrieveBoilerplateFile(self, "computed-metadata", error=True),
macros=jsonEscapedMacros,
)
self.mdOverridingDefaults = metadata.fromJson(
data=computedMdText, source="computed-metadata"
)
self.md = metadata.join(
self.mdBaseline,
self.mdDefaults,
self.mdOverridingDefaults,
self.mdDocument,
self.mdCommandLine,
)
# Finally, compute the "implicit" things.
self.md.computeImplicitMetadata(doc=self)
# And compute macros again, in case the preceding steps changed them.
self.md.fillTextMacros(self.macros, doc=self)
self.md.validate()
extensions.load(self)
# Initialize things
self.refs.initializeRefs(self)
self.refs.initializeBiblio()
# Deal with further <pre> blocks, and markdown
self.lines = datablocks.transformDataBlocks(self, self.lines)
self.lines = markdown.parse(
self.lines,
self.md.indent,
opaqueElements=self.md.opaqueElements,
blockElements=self.md.blockElements,
)
# Note that, currently, markdown.parse returns an array of strings, not of Line objects.
self.refs.setSpecData(self.md)
# Convert to a single string of html now, for convenience.
self.html = "".join(line.text for line in self.lines)
boilerplate.addHeaderFooter(self)
self.html = self.fixText(self.html)
# Build the document
self.document = parseDocument(self.html)
self.head = find("head", self)
self.body = find("body", self)
correctH1(self)
includes.processInclusions(self)
metadata.parseDoc(self)
def processDocument(self):
# Fill in and clean up a bunch of data
conditional.processConditionals(self)
self.fillContainers = locateFillContainers(self)
lint.exampleIDs(self)
boilerplate.addBikeshedVersion(self)
boilerplate.addCanonicalURL(self)
boilerplate.addFavicon(self)
boilerplate.addSpecVersion(self)
boilerplate.addStatusSection(self)
boilerplate.addLogo(self)
boilerplate.addCopyright(self)
boilerplate.addSpecMetadataSection(self)
boilerplate.addAbstract(self)
boilerplate.addExpiryNotice(self)
boilerplate.addObsoletionNotice(self)
boilerplate.addAtRisk(self)
addNoteHeaders(self)
boilerplate.removeUnwantedBoilerplate(self)
wpt.processWptElements(self)
shorthands.run(self)
inlineTags.processTags(self)
canonicalizeShortcuts(self)
addImplicitAlgorithms(self)
fixManualDefTables(self)
headings.processHeadings(self)
checkVarHygiene(self)
processIssuesAndExamples(self)
idl.markupIDL(self)
inlineRemoteIssues(self)
addImageSize(self)
# Handle all the links
processBiblioLinks(self)
processDfns(self)
idl.processIDL(self)
dfns.annotateDfns(self)
formatArgumentdefTables(self)
formatElementdefTables(self)
processAutolinks(self)
biblio.dedupBiblioReferences(self)
verifyUsageOfAllLocalBiblios(self)
caniuse.addCanIUsePanels(self)
boilerplate.addIndexSection(self)
boilerplate.addExplicitIndexes(self)
boilerplate.addStyles(self)
boilerplate.addReferencesSection(self)
boilerplate.addPropertyIndex(self)
boilerplate.addIDLSection(self)
boilerplate.addIssuesSection(self)
boilerplate.addCustomBoilerplate(self)
headings.processHeadings(self, "all") # again
boilerplate.removeUnwantedBoilerplate(self)
boilerplate.addTOCSection(self)
addSelfLinks(self)
processAutolinks(self)
boilerplate.addAnnotations(self)
boilerplate.removeUnwantedBoilerplate(self)
# Add MDN panels after all IDs/anchors have been added
mdnspeclinks.addMdnPanels(self)
highlight.addSyntaxHighlighting(self)
boilerplate.addBikeshedBoilerplate(self)
fingerprinting.addTrackingVector(self)
fixIntraDocumentReferences(self)
fixInterDocumentReferences(self)
removeMultipleLinks(self)
forceCrossorigin(self)
lint.brokenLinks(self)
lint.accidental2119(self)
lint.missingExposed(self)
lint.requiredIDs(self)
lint.unusedInternalDfns(self)
# Any final HTML cleanups
cleanupHTML(self)
if self.md.prepTR:
# Don't try and override the W3C's icon.
for el in findAll("[rel ~= 'icon']", self):
removeNode(el)
# Make sure the W3C stylesheet is after all other styles.
for el in findAll("link", self):
if el.get("href").startswith("https://www.w3.org/StyleSheets/TR"):
appendChild(find("head", self), el)
# Ensure that all W3C links are https.
for el in findAll("a", self):
href = el.get("href", "")
if href.startswith("http://www.w3.org") or href.startswith(
"http://lists.w3.org"
):
el.set("href", "https" + href[4:])
text = el.text or ""
if text.startswith("http://www.w3.org") or text.startswith(
"http://lists.w3.org"
):
el.text = "https" + text[4:]
# Loaded from .include files
extensions.BSPrepTR(self) # pylint: disable=no-member
return self
def serialize(self):
try:
rendered = h.Serializer(
self.md.opaqueElements, self.md.blockElements
).serialize(self.document)
except Exception as e:
die("{0}", e)
return
rendered = finalHackyCleanup(rendered)
return rendered
def fixMissingOutputFilename(self, outputFilename):
if outputFilename is None:
# More sensible defaults!
if not isinstance(self.inputSource, FileInputSource):
outputFilename = "-"
elif self.inputSource.sourceName.endswith(".bs"):
outputFilename = self.inputSource.sourceName[0:-3] + ".html"
elif self.inputSource.sourceName.endswith(".src.html"):
outputFilename = self.inputSource.sourceName[0:-9] + ".html"
else:
outputFilename = "-"
return outputFilename
def finish(self, outputFilename=None, newline=None):
self.printResultMessage()
outputFilename = self.fixMissingOutputFilename(outputFilename)
rendered = self.serialize()
if not constants.dryRun:
try:
if outputFilename == "-":
sys.stdout.write(rendered)
else:
with open(
outputFilename, "w", encoding="utf-8", newline=newline
) as f:
f.write(rendered)
except Exception as e:
die(
"Something prevented me from saving the output document to {0}:\n{1}",
outputFilename,
e,
)
def printResultMessage(self):
# If I reach this point, I've succeeded, but maybe with reservations.
fatals = messageCounts["fatal"]
links = messageCounts["linkerror"]
warnings = messageCounts["warning"]
if self.lineNumbers:
warn("Because --line-numbers was used, no output was saved.")
if fatals:
success("Successfully generated, but fatal errors were suppressed")
return
if links:
success("Successfully generated, with {0} linking errors", links)
return
if warnings:
success("Successfully generated, with warnings")
return
def watch(self, outputFilename, port=None, localhost=False):
import time
outputFilename = self.fixMissingOutputFilename(outputFilename)
if self.inputSource.mtime() is None:
die(f"Watch mode doesn't support {self.inputSource}")
if outputFilename == "-":
die("Watch mode doesn't support streaming to STDOUT.")
return
if port:
# Serve the folder on an HTTP server
import http.server
import socketserver
import threading
class SilentServer(http.server.SimpleHTTPRequestHandler):
def log_message(self, format, *args):
pass
socketserver.TCPServer.allow_reuse_address = True
server = socketserver.TCPServer(
("localhost" if localhost else "", port), SilentServer
)
print(f"Serving at port {port}")
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
else:
server = None
mdCommandLine = self.mdCommandLine
try:
self.preprocess()
self.finish(outputFilename)
lastInputModified = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
try:
while True:
# Comparing mtimes with "!=" handles when a file starts or
# stops existing, and it's fine to rebuild if an mtime
# somehow gets older.
if any(
input.mtime() != lastModified
for input, lastModified in lastInputModified.items()
):
resetSeenMessages()
p("Source file modified. Rebuilding...")
self.initializeState()
self.mdCommandLine = mdCommandLine
self.preprocess()
self.finish(outputFilename)
lastInputModified = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
time.sleep(1)
except KeyboardInterrupt:
p("Exiting~")
if server:
server.shutdown()
thread.join()
sys.exit(0)
except Exception as e:
die("Something went wrong while watching the file:\n{0}", e)
def fixText(self, text, moreMacros={}):
# Do several textual replacements that need to happen *before* the document is parsed as h.
# If markdown shorthands are on, remove all `foo`s while processing,
# so their contents don't accidentally trigger other stuff.
# Also handle markdown escapes.
if "markdown" in self.md.markupShorthands:
textFunctor = MarkdownCodeSpans(text)
else:
textFunctor = Functor(text)
macros = dict(self.macros, **moreMacros)
textFunctor = textFunctor.map(curry(replaceMacros, macros=macros))
textFunctor = textFunctor.map(fixTypography)
if "css" in self.md.markupShorthands:
textFunctor = textFunctor.map(replaceAwkwardCSSShorthands)
return textFunctor.extract()
def printTargets(self):
p("Exported terms:")
for el in findAll("[data-export]", self):
for term in config.linkTextsFromElement(el):
p(" " + term)
p("Unexported terms:")
for el in findAll("[data-noexport]", self):
for term in config.linkTextsFromElement(el):
p(" " + term)
def isOpaqueElement(self, el):
if el.tag in self.md.opaqueElements:
return True
if el.get("data-opaque") is not None:
return True
return False
def findImplicitInputFile():
"""
Find what input file the user *probably* wants to use,
by scanning the current folder.
In preference order:
1. index.bs
2. Overview.bs
3. the first file with a .bs extension
4. the first file with a .src.html extension
"""
if os.path.isfile("index.bs"):
return "index.bs"
if os.path.isfile("Overview.bs"):
return "Overview.bs"
allBs = glob.glob("*.bs")
if allBs:
return allBs[0]
allHtml = glob.glob("*.src.html")
if allHtml:
return allHtml[0]
return None
constants.specClass = Spec
styleColors = """
/* Any --*-text not paired with a --*-bg is assumed to have a transparent bg */
:root {
color-scheme: light dark;
--text: black;
--bg: white;
--unofficial-watermark: url(https://www.w3.org/StyleSheets/TR/2016/logos/UD-watermark);
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-text: white;
--tocnav-normal-text: #707070;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-text: var(--tocnav-normal-text);
--tocnav-hover-bg: #f8f8f8;
--tocnav-active-text: #c00;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-text: var(--text);
--tocsidebar-bg: #f7f8f9;
--tocsidebar-shadow: rgba(0,0,0,.1);
--tocsidebar-heading-text: hsla(203,20%,40%,.7);
--toclink-text: var(--text);
--toclink-underline: #3980b5;
--toclink-visited-text: var(--toclink-text);
--toclink-visited-underline: #054572;
--heading-text: #005a9c;
--hr-text: var(--text);
--algo-border: #def;
--del-text: red;
--del-bg: transparent;
--ins-text: #080;
--ins-bg: transparent;
--a-normal-text: #034575;
--a-normal-underline: #bbb;
--a-visited-text: var(--a-normal-text);
--a-visited-underline: #707070;
--a-hover-bg: rgba(75%, 75%, 75%, .25);
--a-active-text: #c00;
--a-active-underline: #c00;
--blockquote-border: silver;
--blockquote-bg: transparent;
--blockquote-text: currentcolor;
--issue-border: #e05252;
--issue-bg: #fbe9e9;
--issue-text: var(--text);
--issueheading-text: #831616;
--example-border: #e0cb52;
--example-bg: #fcfaee;
--example-text: var(--text);
--exampleheading-text: #574b0f;
--note-border: #52e052;
--note-bg: #e9fbe9;
--note-text: var(--text);
--noteheading-text: hsl(120, 70%, 30%);
--notesummary-underline: silver;
--assertion-border: #aaa;
--assertion-bg: #eee;
--assertion-text: black;
--advisement-border: orange;
--advisement-bg: #fec;
--advisement-text: var(--text);
--advisementheading-text: #b35f00;
--warning-border: red;
--warning-bg: hsla(40,100%,50%,0.95);
--warning-text: var(--text);
--amendment-border: #330099;
--amendment-bg: #F5F0FF;
--amendment-text: var(--text);
--amendmentheading-text: #220066;
--def-border: #8ccbf2;
--def-bg: #def;
--def-text: var(--text);
--defrow-border: #bbd7e9;
--datacell-border: silver;
--indexinfo-text: #707070;
--indextable-hover-text: black;
--indextable-hover-bg: #f7f8f9;
--outdatedspec-bg: rgba(0, 0, 0, .5);
--outdatedspec-text: black;
--outdated-bg: maroon;
--outdated-text: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}"""
styleDarkMode = """
@media (prefers-color-scheme: dark) {
:root {
--text: #ddd;
--bg: black;
--unofficial-watermark: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='400' height='400'%3E%3Cg fill='%23100808' transform='translate(200 200) rotate(-45) translate(-200 -200)' stroke='%23100808' stroke-width='3'%3E%3Ctext x='50%25' y='220' style='font: bold 70px sans-serif; text-anchor: middle; letter-spacing: 6px;'%3EUNOFFICIAL%3C/text%3E%3Ctext x='50%25' y='305' style='font: bold 70px sans-serif; text-anchor: middle; letter-spacing: 6px;'%3EDRAFT%3C/text%3E%3C/g%3E%3C/svg%3E");
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-text: white;
--tocnav-normal-text: #999;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-text: var(--tocnav-normal-text);
--tocnav-hover-bg: #080808;
--tocnav-active-text: #f44;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-text: var(--text);
--tocsidebar-bg: #080808;
--tocsidebar-shadow: rgba(255,255,255,.1);
--tocsidebar-heading-text: hsla(203,20%,40%,.7);
--toclink-text: var(--text);
--toclink-underline: #6af;
--toclink-visited-text: var(--toclink-text);
--toclink-visited-underline: #054572;
--heading-text: #8af;
--hr-text: var(--text);
--algo-border: #456;
--del-text: #f44;
--del-bg: transparent;
--ins-text: #4a4;
--ins-bg: transparent;
--a-normal-text: #6af;
--a-normal-underline: #555;
--a-visited-text: var(--a-normal-text);
--a-visited-underline: var(--a-normal-underline);
--a-hover-bg: rgba(25%, 25%, 25%, .2);
--a-active-text: #f44;
--a-active-underline: var(--a-active-text);
--borderedblock-bg: rgba(255, 255, 255, .05);
--blockquote-border: silver;
--blockquote-bg: var(--borderedblock-bg);
--blockquote-text: currentcolor;
--issue-border: #e05252;
--issue-bg: var(--borderedblock-bg);
--issue-text: var(--text);
--issueheading-text: hsl(0deg, 70%, 70%);
--example-border: hsl(50deg, 90%, 60%);
--example-bg: var(--borderedblock-bg);
--example-text: var(--text);
--exampleheading-text: hsl(50deg, 70%, 70%);
--note-border: hsl(120deg, 100%, 35%);
--note-bg: var(--borderedblock-bg);
--note-text: var(--text);
--noteheading-text: hsl(120, 70%, 70%);
--notesummary-underline: silver;
--assertion-border: #444;
--assertion-bg: var(--borderedblock-bg);
--assertion-text: var(--text);
--advisement-border: orange;
--advisement-bg: #222218;
--advisement-text: var(--text);
--advisementheading-text: #f84;
--warning-border: red;
--warning-bg: hsla(40,100%,20%,0.95);
--warning-text: var(--text);
--amendment-border: #330099;
--amendment-bg: #080010;
--amendment-text: var(--text);
--amendmentheading-text: #cc00ff;
--def-border: #8ccbf2;
--def-bg: #080818;
--def-text: var(--text);
--defrow-border: #136;
--datacell-border: silver;
--indexinfo-text: #aaa;
--indextable-hover-text: var(--text);
--indextable-hover-bg: #181818;
--outdatedspec-bg: rgba(255, 255, 255, .5);
--outdatedspec-text: black;
--outdated-bg: maroon;
--outdated-text: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}
/* In case a transparent-bg image doesn't expect to be on a dark bg,
which is quite common in practice... */
img { background: white; }
}"""
styleMdLists = """
/* This is a weird hack for me not yet following the commonmark spec
regarding paragraph and lists. */
[data-md] > :first-child {
margin-top: 0;
}
[data-md] > :last-child {
margin-bottom: 0;
}"""
styleAutolinks = """
.css.css, .property.property, .descriptor.descriptor {
color: var(--a-normal-text);
font-size: inherit;
font-family: inherit;
}
.css::before, .property::before, .descriptor::before {
content: "‘";
}
.css::after, .property::after, .descriptor::after {
content: "’";
}
.property, .descriptor {
/* Don't wrap property and descriptor names */
white-space: nowrap;
}
.type { /* CSS value <type> */
font-style: italic;
}
pre .property::before, pre .property::after {
content: "";
}
[data-link-type="property"]::before,
[data-link-type="propdesc"]::before,
[data-link-type="descriptor"]::before,
[data-link-type="value"]::before,
[data-link-type="function"]::before,
[data-link-type="at-rule"]::before,
[data-link-type="selector"]::before,
[data-link-type="maybe"]::before {
content: "‘";
}
[data-link-type="property"]::after,
[data-link-type="propdesc"]::after,
[data-link-type="descriptor"]::after,
[data-link-type="value"]::after,
[data-link-type="function"]::after,
[data-link-type="at-rule"]::after,
[data-link-type="selector"]::after,
[data-link-type="maybe"]::after {
content: "’";
}
[data-link-type].production::before,
[data-link-type].production::after,
.prod [data-link-type]::before,
.prod [data-link-type]::after {
content: "";
}
[data-link-type=element],
[data-link-type=element-attr] {
font-family: Menlo, Consolas, "DejaVu Sans Mono", monospace;
font-size: .9em;
}
[data-link-type=element]::before { content: "<" }
[data-link-type=element]::after { content: ">" }
[data-link-type=biblio] {
white-space: pre;
}"""
styleSelflinks = """
:root {
--selflink-text: white;
--selflink-bg: gray;
--selflink-hover-text: black;
}
.heading, .issue, .note, .example, li, dt {
position: relative;
}
a.self-link {
position: absolute;
top: 0;
left: calc(-1 * (3.5rem - 26px));
width: calc(3.5rem - 26px);
height: 2em;
text-align: center;
border: none;
transition: opacity .2s;
opacity: .5;
}
a.self-link:hover {
opacity: 1;
}
.heading > a.self-link {
font-size: 83%;
}
li > a.self-link {
left: calc(-1 * (3.5rem - 26px) - 2em);
}
dfn > a.self-link {
top: auto;
left: auto;
opacity: 0;
width: 1.5em;
height: 1.5em;
background: var(--selflink-bg);
color: var(--selflink-text);
font-style: normal;
transition: opacity .2s, background-color .2s, color .2s;
}
dfn:hover > a.self-link {
opacity: 1;
}
dfn > a.self-link:hover {
color: var(--selflink-hover-text);
}
a.self-link::before { content: "¶"; }
.heading > a.self-link::before { content: "§"; }
dfn > a.self-link::before { content: "#"; }
"""
styleDarkMode += """
@media (prefers-color-scheme: dark) {
:root {
--selflink-text: black;
--selflink-bg: silver;
--selflink-hover-text: white;
}
}
"""
styleCounters = """
body {
counter-reset: example figure issue;
}
.issue {
counter-increment: issue;
}
.issue:not(.no-marker)::before {
content: "Issue " counter(issue);
}
.example {
counter-increment: example;
}
.example:not(.no-marker)::before {
content: "Example " counter(example);
}
.invalid.example:not(.no-marker)::before,
.illegal.example:not(.no-marker)::before {
content: "Invalid Example" counter(example);
}
figcaption {
counter-increment: figure;
}
figcaption:not(.no-marker)::before {
content: "Figure " counter(figure) " ";
}"""
| path_disclosure | {
"code": [
" self.inputSource = InputSource(inputFilename)"
],
"line_no": [
61
]
} | {
"code": [
" self.inputSource = InputSource(inputFilename, chroot=constants.chroot)"
],
"line_no": [
61
]
} | import glob
import os
import sys
from collections import defaultdict
from functools import partial as curry
from . import (
biblio,
boilerplate,
caniuse,
conditional,
config,
constants,
datablocks,
dfns,
extensions,
fingerprinting,
h,
headings,
highlight,
idl,
includes,
inlineTags,
lint,
markdown,
mdnspeclinks,
metadata,
shorthands,
wpt,
)
from .func import Functor
from .h import *
from .InputSource import FileInputSource, InputSource
from .messages import *
from .refs import ReferenceManager
from .unsortedJunk import *
class CLASS_0:
def __init__(
self,
VAR_6,
VAR_7=False,
VAR_8=None,
VAR_9=False,
VAR_10=None,
VAR_11=False,
):
self.valid = False
self.lineNumbers = VAR_9
if VAR_9:
constants.dryRun = True
if VAR_6 is None:
VAR_6 = FUNC_0()
if VAR_6 is None: # still
die(
"No input file specified, and no *.bs or *.src.html files found in current directory.\nPlease specify an input file, or use - to pipe from STDIN."
)
return
self.inputSource = InputSource(VAR_6)
self.transitiveDependencies = set()
self.debug = VAR_7
self.token = VAR_8
self.testing = VAR_11
if VAR_10 is None:
self.dataFile = config.defaultRequester
else:
self.dataFile = VAR_10
self.md = None
self.mdBaseline = None
self.mdDocument = None
self.mdCommandLine = None
self.mdDefaults = None
self.mdOverridingDefaults = None
self.lines = []
self.document = None
self.html = None
self.head = None
self.body = None
self.fillContainers = None
self.valid = self.initializeState()
def FUNC_1(self):
self.normativeRefs = {}
self.informativeRefs = {}
self.refs = ReferenceManager(VAR_10=self.dataFile, VAR_11=self.testing)
self.externalRefsUsed = defaultdict(lambda: defaultdict(dict))
self.md = None
self.mdBaseline = metadata.MetadataManager()
self.mdDocument = None
self.mdCommandLine = metadata.MetadataManager()
self.mdDefaults = None
self.mdOverridingDefaults = None
self.biblios = {}
self.typeExpansions = {}
self.macros = defaultdict(lambda x: "???")
self.canIUse = {}
self.mdnSpecLinks = {}
self.widl = idl.getParser()
self.testSuites = json.loads(self.dataFile.fetch("test-suites.json", str=True))
self.languages = json.loads(self.dataFile.fetch("languages.json", str=True))
self.extraStyles = defaultdict(str)
self.extraStyles["style-colors"] = VAR_0
self.extraStyles["style-darkmode"] = VAR_1
self.extraStyles["style-md-lists"] = VAR_2
self.extraStyles["style-autolinks"] = VAR_3
self.extraStyles["style-selflinks"] = VAR_4
self.extraStyles["style-counters"] = VAR_5
self.extraScripts = defaultdict(str)
try:
VAR_31 = self.inputSource.read()
self.lines = VAR_31.lines
if VAR_31.date is not None:
self.mdBaseline.addParsedData("Date", VAR_31.date)
except FileNotFoundError:
die(
"Couldn't find the input file at the specified location '{0}'.",
self.inputSource,
)
return False
except OSError:
die("Couldn't open the input file '{0}'.", self.inputSource)
return False
return True
def FUNC_2(self, *VAR_12):
self.transitiveDependencies.update(VAR_12)
def FUNC_3(self):
self.transitiveDependencies.clear()
self.assembleDocument()
self.processDocument()
def FUNC_4(self):
stripBOM(self)
if self.lineNumbers:
self.lines = hackyLineNumbers(self.lines)
self.lines = markdown.stripComments(self.lines)
self.recordDependencies(self.inputSource)
self.lines, self.mdDocument = metadata.parse(lines=self.lines)
self.md = metadata.join(self.mdBaseline, self.mdDocument, self.mdCommandLine)
self.mdDefaults = metadata.fromJson(
data=config.retrieveBoilerplateFile(self, "defaults", error=True),
source="defaults",
)
self.md = metadata.join(
self.mdBaseline, self.mdDefaults, self.mdDocument, self.mdCommandLine
)
self.md.fillTextMacros(self.macros, doc=self)
VAR_22 = {k: json.dumps(v)[1:-1] for k, v in self.macros.items()}
VAR_23 = replaceMacros(
config.retrieveBoilerplateFile(self, "computed-metadata", error=True),
VAR_29=VAR_22,
)
self.mdOverridingDefaults = metadata.fromJson(
data=VAR_23, source="computed-metadata"
)
self.md = metadata.join(
self.mdBaseline,
self.mdDefaults,
self.mdOverridingDefaults,
self.mdDocument,
self.mdCommandLine,
)
self.md.computeImplicitMetadata(doc=self)
self.md.fillTextMacros(self.macros, doc=self)
self.md.validate()
extensions.load(self)
self.refs.initializeRefs(self)
self.refs.initializeBiblio()
self.lines = datablocks.transformDataBlocks(self, self.lines)
self.lines = markdown.parse(
self.lines,
self.md.indent,
opaqueElements=self.md.opaqueElements,
blockElements=self.md.blockElements,
)
self.refs.setSpecData(self.md)
self.html = "".join(line.text for line in self.lines)
boilerplate.addHeaderFooter(self)
self.html = self.fixText(self.html)
self.document = parseDocument(self.html)
self.head = find("head", self)
self.body = find("body", self)
correctH1(self)
includes.processInclusions(self)
metadata.parseDoc(self)
def FUNC_5(self):
conditional.processConditionals(self)
self.fillContainers = locateFillContainers(self)
lint.exampleIDs(self)
boilerplate.addBikeshedVersion(self)
boilerplate.addCanonicalURL(self)
boilerplate.addFavicon(self)
boilerplate.addSpecVersion(self)
boilerplate.addStatusSection(self)
boilerplate.addLogo(self)
boilerplate.addCopyright(self)
boilerplate.addSpecMetadataSection(self)
boilerplate.addAbstract(self)
boilerplate.addExpiryNotice(self)
boilerplate.addObsoletionNotice(self)
boilerplate.addAtRisk(self)
addNoteHeaders(self)
boilerplate.removeUnwantedBoilerplate(self)
wpt.processWptElements(self)
shorthands.run(self)
inlineTags.processTags(self)
canonicalizeShortcuts(self)
addImplicitAlgorithms(self)
fixManualDefTables(self)
headings.processHeadings(self)
checkVarHygiene(self)
processIssuesAndExamples(self)
idl.markupIDL(self)
inlineRemoteIssues(self)
addImageSize(self)
processBiblioLinks(self)
processDfns(self)
idl.processIDL(self)
dfns.annotateDfns(self)
formatArgumentdefTables(self)
formatElementdefTables(self)
processAutolinks(self)
biblio.dedupBiblioReferences(self)
verifyUsageOfAllLocalBiblios(self)
caniuse.addCanIUsePanels(self)
boilerplate.addIndexSection(self)
boilerplate.addExplicitIndexes(self)
boilerplate.addStyles(self)
boilerplate.addReferencesSection(self)
boilerplate.addPropertyIndex(self)
boilerplate.addIDLSection(self)
boilerplate.addIssuesSection(self)
boilerplate.addCustomBoilerplate(self)
headings.processHeadings(self, "all") # again
boilerplate.removeUnwantedBoilerplate(self)
boilerplate.addTOCSection(self)
addSelfLinks(self)
processAutolinks(self)
boilerplate.addAnnotations(self)
boilerplate.removeUnwantedBoilerplate(self)
mdnspeclinks.addMdnPanels(self)
highlight.addSyntaxHighlighting(self)
boilerplate.addBikeshedBoilerplate(self)
fingerprinting.addTrackingVector(self)
fixIntraDocumentReferences(self)
fixInterDocumentReferences(self)
removeMultipleLinks(self)
forceCrossorigin(self)
lint.brokenLinks(self)
lint.accidental2119(self)
lint.missingExposed(self)
lint.requiredIDs(self)
lint.unusedInternalDfns(self)
cleanupHTML(self)
if self.md.prepTR:
for VAR_19 in findAll("[rel ~= 'icon']", self):
removeNode(VAR_19)
for VAR_19 in findAll("link", self):
if VAR_19.get("href").startswith("https://www.w3.org/StyleSheets/TR"):
appendChild(find("head", self), VAR_19)
for VAR_19 in findAll("a", self):
VAR_35 = VAR_19.get("href", "")
if VAR_35.startswith("http://www.w3.org") or VAR_35.startswith(
"http://lists.w3.org"
):
VAR_19.set("href", "https" + VAR_35[4:])
VAR_17 = VAR_19.text or ""
if VAR_17.startswith("http://www.w3.org") or VAR_17.startswith(
"http://lists.w3.org"
):
VAR_19.text = "https" + VAR_17[4:]
extensions.BSPrepTR(self) # pylint: disable=no-member
return self
def FUNC_6(self):
try:
VAR_24 = h.Serializer(
self.md.opaqueElements, self.md.blockElements
).serialize(self.document)
except Exception as e:
die("{0}", e)
return
VAR_24 = finalHackyCleanup(VAR_24)
return VAR_24
def FUNC_7(self, VAR_13):
if VAR_13 is None:
if not isinstance(self.inputSource, FileInputSource):
VAR_13 = "-"
elif self.inputSource.sourceName.endswith(".bs"):
VAR_13 = self.inputSource.sourceName[0:-3] + ".html"
elif self.inputSource.sourceName.endswith(".src.html"):
VAR_13 = self.inputSource.sourceName[0:-9] + ".html"
else:
VAR_13 = "-"
return VAR_13
def FUNC_8(self, VAR_13=None, VAR_14=None):
self.printResultMessage()
VAR_13 = self.fixMissingOutputFilename(VAR_13)
VAR_24 = self.serialize()
if not constants.dryRun:
try:
if VAR_13 == "-":
sys.stdout.write(VAR_24)
else:
with open(
VAR_13, "w", encoding="utf-8", VAR_14=newline
) as f:
f.write(VAR_24)
except Exception as e:
die(
"Something prevented me from saving the output document to {0}:\n{1}",
VAR_13,
e,
)
def FUNC_9(self):
VAR_25 = messageCounts["fatal"]
VAR_26 = messageCounts["linkerror"]
VAR_27 = messageCounts["warning"]
if self.lineNumbers:
warn("Because --line-numbers was used, no output was saved.")
if VAR_25:
success("Successfully generated, but fatal errors were suppressed")
return
if VAR_26:
success("Successfully generated, with {0} linking errors", VAR_26)
return
if VAR_27:
success("Successfully generated, with warnings")
return
def FUNC_10(self, VAR_13, VAR_15=None, VAR_16=False):
import time
VAR_13 = self.fixMissingOutputFilename(VAR_13)
if self.inputSource.mtime() is None:
die(f"Watch mode doesn't support {self.inputSource}")
if VAR_13 == "-":
die("Watch mode doesn't support streaming to STDOUT.")
return
if VAR_15:
import http.server
import socketserver
import .threading
class CLASS_1(http.server.SimpleHTTPRequestHandler):
def FUNC_14(self, VAR_36, *VAR_37):
pass
socketserver.TCPServer.allow_reuse_address = True
VAR_32 = socketserver.TCPServer(
("localhost" if VAR_16 else "", VAR_15), CLASS_1
)
print(f"Serving at VAR_15 {port}")
VAR_33 = threading.Thread(target=VAR_32.serve_forever)
VAR_33.daemon = True
VAR_33.start()
else:
VAR_32 = None
VAR_28 = self.mdCommandLine
try:
self.preprocess()
self.finish(VAR_13)
VAR_34 = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
try:
while True:
if any(
input.mtime() != lastModified
for input, lastModified in VAR_34.items()
):
resetSeenMessages()
p("Source file modified. Rebuilding...")
self.initializeState()
self.mdCommandLine = VAR_28
self.preprocess()
self.finish(VAR_13)
VAR_34 = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
time.sleep(1)
except KeyboardInterrupt:
p("Exiting~")
if VAR_32:
server.shutdown()
VAR_33.join()
sys.exit(0)
except Exception as e:
die("Something went wrong while watching the file:\n{0}", e)
def FUNC_11(self, VAR_17, VAR_18={}):
if "markdown" in self.md.markupShorthands:
VAR_30 = MarkdownCodeSpans(VAR_17)
else:
VAR_30 = Functor(VAR_17)
VAR_29 = dict(self.macros, **VAR_18)
VAR_30 = VAR_30.map(curry(replaceMacros, VAR_29=macros))
VAR_30 = VAR_30.map(fixTypography)
if "css" in self.md.markupShorthands:
VAR_30 = VAR_30.map(replaceAwkwardCSSShorthands)
return VAR_30.extract()
def FUNC_12(self):
p("Exported terms:")
for VAR_19 in findAll("[data-export]", self):
for term in config.linkTextsFromElement(VAR_19):
p(" " + term)
p("Unexported terms:")
for VAR_19 in findAll("[data-noexport]", self):
for term in config.linkTextsFromElement(VAR_19):
p(" " + term)
def FUNC_13(self, VAR_19):
if VAR_19.tag in self.md.opaqueElements:
return True
if VAR_19.get("data-opaque") is not None:
return True
return False
def FUNC_0():
if os.path.isfile("index.bs"):
return "index.bs"
if os.path.isfile("Overview.bs"):
return "Overview.bs"
VAR_20 = glob.glob("*.bs")
if VAR_20:
return VAR_20[0]
VAR_21 = glob.glob("*.src.html")
if VAR_21:
return VAR_21[0]
return None
constants.specClass = CLASS_0
VAR_0 = """
/* Any --*-VAR_17 not paired with a --*-bg is assumed to have a transparent bg */
:root {
color-scheme: light dark;
--VAR_17: black;
--bg: white;
--unofficial-watermark: url(https://www.w3.org/StyleSheets/TR/2016/logos/UD-watermark);
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-VAR_17: white;
--tocnav-normal-VAR_17: #707070;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-VAR_17: var(--tocnav-normal-VAR_17);
--tocnav-hover-bg: #f8f8f8;
--tocnav-active-VAR_17: #c00;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-VAR_17: var(--VAR_17);
--tocsidebar-bg: #f7f8f9;
--tocsidebar-shadow: rgba(0,0,0,.1);
--tocsidebar-heading-VAR_17: hsla(203,20%,40%,.7);
--toclink-VAR_17: var(--VAR_17);
--toclink-underline: #3980b5;
--toclink-visited-VAR_17: var(--toclink-VAR_17);
--toclink-visited-underline: #054572;
--heading-VAR_17: #005a9c;
--hr-VAR_17: var(--VAR_17);
--algo-border: #def;
--del-VAR_17: red;
--del-bg: transparent;
--ins-VAR_17: #080;
--ins-bg: transparent;
--a-normal-VAR_17: #034575;
--a-normal-underline: #bbb;
--a-visited-VAR_17: var(--a-normal-VAR_17);
--a-visited-underline: #707070;
--a-hover-bg: rgba(75%, 75%, 75%, .25);
--a-active-VAR_17: #c00;
--a-active-underline: #c00;
--blockquote-border: silver;
--blockquote-bg: transparent;
--blockquote-VAR_17: currentcolor;
--issue-border: #e05252;
--issue-bg: #fbe9e9;
--issue-VAR_17: var(--VAR_17);
--issueheading-VAR_17: #831616;
--example-border: #e0cb52;
--example-bg: #fcfaee;
--example-VAR_17: var(--VAR_17);
--exampleheading-VAR_17: #574b0f;
--note-border: #52e052;
--note-bg: #e9fbe9;
--note-VAR_17: var(--VAR_17);
--noteheading-VAR_17: hsl(120, 70%, 30%);
--notesummary-underline: silver;
--assertion-border: #aaa;
--assertion-bg: #eee;
--assertion-VAR_17: black;
--advisement-border: orange;
--advisement-bg: #fec;
--advisement-VAR_17: var(--VAR_17);
--advisementheading-VAR_17: #b35f00;
--warning-border: red;
--warning-bg: hsla(40,100%,50%,0.95);
--warning-VAR_17: var(--VAR_17);
--amendment-border: #330099;
--amendment-bg: #F5F0FF;
--amendment-VAR_17: var(--VAR_17);
--amendmentheading-VAR_17: #220066;
--def-border: #8ccbf2;
--def-bg: #def;
--def-VAR_17: var(--VAR_17);
--defrow-border: #bbd7e9;
--datacell-border: silver;
--indexinfo-VAR_17: #707070;
--indextable-hover-VAR_17: black;
--indextable-hover-bg: #f7f8f9;
--outdatedspec-bg: rgba(0, 0, 0, .5);
--outdatedspec-VAR_17: black;
--outdated-bg: maroon;
--outdated-VAR_17: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}"""
VAR_1 = """
@media (prefers-color-scheme: dark) {
:root {
--VAR_17: #ddd;
--bg: black;
--unofficial-watermark: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='400' height='400'%3E%3Cg fill='%23100808' transform='translate(200 200) rotate(-45) translate(-200 -200)' stroke='%23100808' stroke-width='3'%3E%3Ctext x='50%25' y='220' style='font: bold 70px sans-serif; VAR_17-anchor: middle; letter-spacing: 6px;'%3EUNOFFICIAL%3C/VAR_17%3E%3Ctext x='50%25' y='305' style='font: bold 70px sans-serif; VAR_17-anchor: middle; letter-spacing: 6px;'%3EDRAFT%3C/VAR_17%3E%3C/g%3E%3C/svg%3E");
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-VAR_17: white;
--tocnav-normal-VAR_17: #999;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-VAR_17: var(--tocnav-normal-VAR_17);
--tocnav-hover-bg: #080808;
--tocnav-active-VAR_17: #f44;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-VAR_17: var(--VAR_17);
--tocsidebar-bg: #080808;
--tocsidebar-shadow: rgba(255,255,255,.1);
--tocsidebar-heading-VAR_17: hsla(203,20%,40%,.7);
--toclink-VAR_17: var(--VAR_17);
--toclink-underline: #6af;
--toclink-visited-VAR_17: var(--toclink-VAR_17);
--toclink-visited-underline: #054572;
--heading-VAR_17: #8af;
--hr-VAR_17: var(--VAR_17);
--algo-border: #456;
--del-VAR_17: #f44;
--del-bg: transparent;
--ins-VAR_17: #4a4;
--ins-bg: transparent;
--a-normal-VAR_17: #6af;
--a-normal-underline: #555;
--a-visited-VAR_17: var(--a-normal-VAR_17);
--a-visited-underline: var(--a-normal-underline);
--a-hover-bg: rgba(25%, 25%, 25%, .2);
--a-active-VAR_17: #f44;
--a-active-underline: var(--a-active-VAR_17);
--borderedblock-bg: rgba(255, 255, 255, .05);
--blockquote-border: silver;
--blockquote-bg: var(--borderedblock-bg);
--blockquote-VAR_17: currentcolor;
--issue-border: #e05252;
--issue-bg: var(--borderedblock-bg);
--issue-VAR_17: var(--VAR_17);
--issueheading-VAR_17: hsl(0deg, 70%, 70%);
--example-border: hsl(50deg, 90%, 60%);
--example-bg: var(--borderedblock-bg);
--example-VAR_17: var(--VAR_17);
--exampleheading-VAR_17: hsl(50deg, 70%, 70%);
--note-border: hsl(120deg, 100%, 35%);
--note-bg: var(--borderedblock-bg);
--note-VAR_17: var(--VAR_17);
--noteheading-VAR_17: hsl(120, 70%, 70%);
--notesummary-underline: silver;
--assertion-border: #444;
--assertion-bg: var(--borderedblock-bg);
--assertion-VAR_17: var(--VAR_17);
--advisement-border: orange;
--advisement-bg: #222218;
--advisement-VAR_17: var(--VAR_17);
--advisementheading-VAR_17: #f84;
--warning-border: red;
--warning-bg: hsla(40,100%,20%,0.95);
--warning-VAR_17: var(--VAR_17);
--amendment-border: #330099;
--amendment-bg: #080010;
--amendment-VAR_17: var(--VAR_17);
--amendmentheading-VAR_17: #cc00ff;
--def-border: #8ccbf2;
--def-bg: #080818;
--def-VAR_17: var(--VAR_17);
--defrow-border: #136;
--datacell-border: silver;
--indexinfo-VAR_17: #aaa;
--indextable-hover-VAR_17: var(--VAR_17);
--indextable-hover-bg: #181818;
--outdatedspec-bg: rgba(255, 255, 255, .5);
--outdatedspec-VAR_17: black;
--outdated-bg: maroon;
--outdated-VAR_17: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}
/* In case a transparent-bg image doesn't expect to be on a dark bg,
which is quite common in practice... */
img { background: white; }
}"""
VAR_2 = """
/* This is a weird hack for me not yet following the commonmark spec
regarding paragraph and lists. */
[data-md] > :first-child {
margin-top: 0;
}
[data-md] > :last-child {
margin-bottom: 0;
}"""
VAR_3 = """
.css.css, .property.property, .descriptor.descriptor {
color: var(--a-normal-VAR_17);
font-size: inherit;
font-family: inherit;
}
.css::before, .property::before, .descriptor::before {
content: "‘";
}
.css::after, .property::after, .descriptor::after {
content: "’";
}
.property, .descriptor {
/* Don't wrap property and descriptor names */
white-space: nowrap;
}
.type { /* CSS value <type> */
font-style: italic;
}
pre .property::before, pre .property::after {
content: "";
}
[data-link-type="property"]::before,
[data-link-type="propdesc"]::before,
[data-link-type="descriptor"]::before,
[data-link-type="value"]::before,
[data-link-type="function"]::before,
[data-link-type="at-rule"]::before,
[data-link-type="selector"]::before,
[data-link-type="maybe"]::before {
content: "‘";
}
[data-link-type="property"]::after,
[data-link-type="propdesc"]::after,
[data-link-type="descriptor"]::after,
[data-link-type="value"]::after,
[data-link-type="function"]::after,
[data-link-type="at-rule"]::after,
[data-link-type="selector"]::after,
[data-link-type="maybe"]::after {
content: "’";
}
[data-link-type].production::before,
[data-link-type].production::after,
.prod [data-link-type]::before,
.prod [data-link-type]::after {
content: "";
}
[data-link-type=element],
[data-link-type=element-attr] {
font-family: Menlo, Consolas, "DejaVu Sans Mono", monospace;
font-size: .9em;
}
[data-link-type=element]::before { content: "<" }
[data-link-type=element]::after { content: ">" }
[data-link-type=biblio] {
white-space: pre;
}"""
VAR_4 = """
:root {
--selflink-VAR_17: white;
--selflink-bg: gray;
--selflink-hover-VAR_17: black;
}
.heading, .issue, .note, .example, li, dt {
position: relative;
}
a.self-link {
position: absolute;
top: 0;
left: calc(-1 * (3.5rem - 26px));
width: calc(3.5rem - 26px);
height: 2em;
VAR_17-align: center;
border: none;
transition: opacity .2s;
opacity: .5;
}
a.self-link:hover {
opacity: 1;
}
.heading > a.self-link {
font-size: 83%;
}
li > a.self-link {
left: calc(-1 * (3.5rem - 26px) - 2em);
}
dfn > a.self-link {
top: auto;
left: auto;
opacity: 0;
width: 1.5em;
height: 1.5em;
background: var(--selflink-bg);
color: var(--selflink-VAR_17);
font-style: normal;
transition: opacity .2s, background-color .2s, color .2s;
}
dfn:hover > a.self-link {
opacity: 1;
}
dfn > a.self-link:hover {
color: var(--selflink-hover-VAR_17);
}
a.self-link::before { content: "¶"; }
.heading > a.self-link::before { content: "§"; }
dfn > a.self-link::before { content: "#"; }
"""
VAR_1 += """
@media (prefers-color-scheme: dark) {
:root {
--selflink-VAR_17: black;
--selflink-bg: silver;
--selflink-hover-VAR_17: white;
}
}
"""
VAR_5 = """
body {
counter-reset: example figure issue;
}
.issue {
counter-increment: issue;
}
.issue:not(.no-marker)::before {
content: "Issue " counter(issue);
}
.example {
counter-increment: example;
}
.example:not(.no-marker)::before {
content: "Example " counter(example);
}
.invalid.example:not(.no-marker)::before,
.illegal.example:not(.no-marker)::before {
content: "Invalid Example" counter(example);
}
figcaption {
counter-increment: figure;
}
figcaption:not(.no-marker)::before {
content: "Figure " counter(figure) " ";
}"""
| import glob
import os
import sys
from collections import defaultdict
from functools import partial as curry
from . import (
biblio,
boilerplate,
caniuse,
conditional,
config,
constants,
datablocks,
dfns,
extensions,
fingerprinting,
h,
headings,
highlight,
idl,
includes,
inlineTags,
lint,
markdown,
mdnspeclinks,
metadata,
shorthands,
wpt,
)
from .func import Functor
from .h import *
from .InputSource import FileInputSource, InputSource
from .messages import *
from .refs import ReferenceManager
from .unsortedJunk import *
class CLASS_0:
def __init__(
self,
VAR_6,
VAR_7=False,
VAR_8=None,
VAR_9=False,
VAR_10=None,
VAR_11=False,
):
self.valid = False
self.lineNumbers = VAR_9
if VAR_9:
constants.dryRun = True
if VAR_6 is None:
VAR_6 = FUNC_0()
if VAR_6 is None: # still
die(
"No input file specified, and no *.bs or *.src.html files found in current directory.\nPlease specify an input file, or use - to pipe from STDIN."
)
return
self.inputSource = InputSource(VAR_6, chroot=constants.chroot)
self.transitiveDependencies = set()
self.debug = VAR_7
self.token = VAR_8
self.testing = VAR_11
if VAR_10 is None:
self.dataFile = config.defaultRequester
else:
self.dataFile = VAR_10
self.md = None
self.mdBaseline = None
self.mdDocument = None
self.mdCommandLine = None
self.mdDefaults = None
self.mdOverridingDefaults = None
self.lines = []
self.document = None
self.html = None
self.head = None
self.body = None
self.fillContainers = None
self.valid = self.initializeState()
def FUNC_1(self):
self.normativeRefs = {}
self.informativeRefs = {}
self.refs = ReferenceManager(VAR_10=self.dataFile, VAR_11=self.testing)
self.externalRefsUsed = defaultdict(lambda: defaultdict(dict))
self.md = None
self.mdBaseline = metadata.MetadataManager()
self.mdDocument = None
self.mdCommandLine = metadata.MetadataManager()
self.mdDefaults = None
self.mdOverridingDefaults = None
self.biblios = {}
self.typeExpansions = {}
self.macros = defaultdict(lambda x: "???")
self.canIUse = {}
self.mdnSpecLinks = {}
self.widl = idl.getParser()
self.testSuites = json.loads(self.dataFile.fetch("test-suites.json", str=True))
self.languages = json.loads(self.dataFile.fetch("languages.json", str=True))
self.extraStyles = defaultdict(str)
self.extraStyles["style-colors"] = VAR_0
self.extraStyles["style-darkmode"] = VAR_1
self.extraStyles["style-md-lists"] = VAR_2
self.extraStyles["style-autolinks"] = VAR_3
self.extraStyles["style-selflinks"] = VAR_4
self.extraStyles["style-counters"] = VAR_5
self.extraScripts = defaultdict(str)
try:
VAR_31 = self.inputSource.read()
self.lines = VAR_31.lines
if VAR_31.date is not None:
self.mdBaseline.addParsedData("Date", VAR_31.date)
except FileNotFoundError:
die(
"Couldn't find the input file at the specified location '{0}'.",
self.inputSource,
)
return False
except OSError:
die("Couldn't open the input file '{0}'.", self.inputSource)
return False
return True
def FUNC_2(self, *VAR_12):
self.transitiveDependencies.update(VAR_12)
def FUNC_3(self):
self.transitiveDependencies.clear()
self.assembleDocument()
self.processDocument()
def FUNC_4(self):
stripBOM(self)
if self.lineNumbers:
self.lines = hackyLineNumbers(self.lines)
self.lines = markdown.stripComments(self.lines)
self.recordDependencies(self.inputSource)
self.lines, self.mdDocument = metadata.parse(lines=self.lines)
self.md = metadata.join(self.mdBaseline, self.mdDocument, self.mdCommandLine)
self.mdDefaults = metadata.fromJson(
data=config.retrieveBoilerplateFile(self, "defaults", error=True),
source="defaults",
)
self.md = metadata.join(
self.mdBaseline, self.mdDefaults, self.mdDocument, self.mdCommandLine
)
self.md.fillTextMacros(self.macros, doc=self)
VAR_22 = {k: json.dumps(v)[1:-1] for k, v in self.macros.items()}
VAR_23 = replaceMacros(
config.retrieveBoilerplateFile(self, "computed-metadata", error=True),
VAR_29=VAR_22,
)
self.mdOverridingDefaults = metadata.fromJson(
data=VAR_23, source="computed-metadata"
)
self.md = metadata.join(
self.mdBaseline,
self.mdDefaults,
self.mdOverridingDefaults,
self.mdDocument,
self.mdCommandLine,
)
self.md.computeImplicitMetadata(doc=self)
self.md.fillTextMacros(self.macros, doc=self)
self.md.validate()
extensions.load(self)
self.refs.initializeRefs(self)
self.refs.initializeBiblio()
self.lines = datablocks.transformDataBlocks(self, self.lines)
self.lines = markdown.parse(
self.lines,
self.md.indent,
opaqueElements=self.md.opaqueElements,
blockElements=self.md.blockElements,
)
self.refs.setSpecData(self.md)
self.html = "".join(line.text for line in self.lines)
boilerplate.addHeaderFooter(self)
self.html = self.fixText(self.html)
self.document = parseDocument(self.html)
self.head = find("head", self)
self.body = find("body", self)
correctH1(self)
includes.processInclusions(self)
metadata.parseDoc(self)
def FUNC_5(self):
conditional.processConditionals(self)
self.fillContainers = locateFillContainers(self)
lint.exampleIDs(self)
boilerplate.addBikeshedVersion(self)
boilerplate.addCanonicalURL(self)
boilerplate.addFavicon(self)
boilerplate.addSpecVersion(self)
boilerplate.addStatusSection(self)
boilerplate.addLogo(self)
boilerplate.addCopyright(self)
boilerplate.addSpecMetadataSection(self)
boilerplate.addAbstract(self)
boilerplate.addExpiryNotice(self)
boilerplate.addObsoletionNotice(self)
boilerplate.addAtRisk(self)
addNoteHeaders(self)
boilerplate.removeUnwantedBoilerplate(self)
wpt.processWptElements(self)
shorthands.run(self)
inlineTags.processTags(self)
canonicalizeShortcuts(self)
addImplicitAlgorithms(self)
fixManualDefTables(self)
headings.processHeadings(self)
checkVarHygiene(self)
processIssuesAndExamples(self)
idl.markupIDL(self)
inlineRemoteIssues(self)
addImageSize(self)
processBiblioLinks(self)
processDfns(self)
idl.processIDL(self)
dfns.annotateDfns(self)
formatArgumentdefTables(self)
formatElementdefTables(self)
processAutolinks(self)
biblio.dedupBiblioReferences(self)
verifyUsageOfAllLocalBiblios(self)
caniuse.addCanIUsePanels(self)
boilerplate.addIndexSection(self)
boilerplate.addExplicitIndexes(self)
boilerplate.addStyles(self)
boilerplate.addReferencesSection(self)
boilerplate.addPropertyIndex(self)
boilerplate.addIDLSection(self)
boilerplate.addIssuesSection(self)
boilerplate.addCustomBoilerplate(self)
headings.processHeadings(self, "all") # again
boilerplate.removeUnwantedBoilerplate(self)
boilerplate.addTOCSection(self)
addSelfLinks(self)
processAutolinks(self)
boilerplate.addAnnotations(self)
boilerplate.removeUnwantedBoilerplate(self)
mdnspeclinks.addMdnPanels(self)
highlight.addSyntaxHighlighting(self)
boilerplate.addBikeshedBoilerplate(self)
fingerprinting.addTrackingVector(self)
fixIntraDocumentReferences(self)
fixInterDocumentReferences(self)
removeMultipleLinks(self)
forceCrossorigin(self)
lint.brokenLinks(self)
lint.accidental2119(self)
lint.missingExposed(self)
lint.requiredIDs(self)
lint.unusedInternalDfns(self)
cleanupHTML(self)
if self.md.prepTR:
for VAR_19 in findAll("[rel ~= 'icon']", self):
removeNode(VAR_19)
for VAR_19 in findAll("link", self):
if VAR_19.get("href").startswith("https://www.w3.org/StyleSheets/TR"):
appendChild(find("head", self), VAR_19)
for VAR_19 in findAll("a", self):
VAR_35 = VAR_19.get("href", "")
if VAR_35.startswith("http://www.w3.org") or VAR_35.startswith(
"http://lists.w3.org"
):
VAR_19.set("href", "https" + VAR_35[4:])
VAR_17 = VAR_19.text or ""
if VAR_17.startswith("http://www.w3.org") or VAR_17.startswith(
"http://lists.w3.org"
):
VAR_19.text = "https" + VAR_17[4:]
extensions.BSPrepTR(self) # pylint: disable=no-member
return self
def FUNC_6(self):
try:
VAR_24 = h.Serializer(
self.md.opaqueElements, self.md.blockElements
).serialize(self.document)
except Exception as e:
die("{0}", e)
return
VAR_24 = finalHackyCleanup(VAR_24)
return VAR_24
def FUNC_7(self, VAR_13):
if VAR_13 is None:
if not isinstance(self.inputSource, FileInputSource):
VAR_13 = "-"
elif self.inputSource.sourceName.endswith(".bs"):
VAR_13 = self.inputSource.sourceName[0:-3] + ".html"
elif self.inputSource.sourceName.endswith(".src.html"):
VAR_13 = self.inputSource.sourceName[0:-9] + ".html"
else:
VAR_13 = "-"
return VAR_13
def FUNC_8(self, VAR_13=None, VAR_14=None):
self.printResultMessage()
VAR_13 = self.fixMissingOutputFilename(VAR_13)
VAR_24 = self.serialize()
if not constants.dryRun:
try:
if VAR_13 == "-":
sys.stdout.write(VAR_24)
else:
with open(
VAR_13, "w", encoding="utf-8", VAR_14=newline
) as f:
f.write(VAR_24)
except Exception as e:
die(
"Something prevented me from saving the output document to {0}:\n{1}",
VAR_13,
e,
)
def FUNC_9(self):
VAR_25 = messageCounts["fatal"]
VAR_26 = messageCounts["linkerror"]
VAR_27 = messageCounts["warning"]
if self.lineNumbers:
warn("Because --line-numbers was used, no output was saved.")
if VAR_25:
success("Successfully generated, but fatal errors were suppressed")
return
if VAR_26:
success("Successfully generated, with {0} linking errors", VAR_26)
return
if VAR_27:
success("Successfully generated, with warnings")
return
def FUNC_10(self, VAR_13, VAR_15=None, VAR_16=False):
import time
VAR_13 = self.fixMissingOutputFilename(VAR_13)
if self.inputSource.mtime() is None:
die(f"Watch mode doesn't support {self.inputSource}")
if VAR_13 == "-":
die("Watch mode doesn't support streaming to STDOUT.")
return
if VAR_15:
import http.server
import socketserver
import .threading
class CLASS_1(http.server.SimpleHTTPRequestHandler):
def FUNC_14(self, VAR_36, *VAR_37):
pass
socketserver.TCPServer.allow_reuse_address = True
VAR_32 = socketserver.TCPServer(
("localhost" if VAR_16 else "", VAR_15), CLASS_1
)
print(f"Serving at VAR_15 {port}")
VAR_33 = threading.Thread(target=VAR_32.serve_forever)
VAR_33.daemon = True
VAR_33.start()
else:
VAR_32 = None
VAR_28 = self.mdCommandLine
try:
self.preprocess()
self.finish(VAR_13)
VAR_34 = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
try:
while True:
if any(
input.mtime() != lastModified
for input, lastModified in VAR_34.items()
):
resetSeenMessages()
p("Source file modified. Rebuilding...")
self.initializeState()
self.mdCommandLine = VAR_28
self.preprocess()
self.finish(VAR_13)
VAR_34 = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
time.sleep(1)
except KeyboardInterrupt:
p("Exiting~")
if VAR_32:
server.shutdown()
VAR_33.join()
sys.exit(0)
except Exception as e:
die("Something went wrong while watching the file:\n{0}", e)
def FUNC_11(self, VAR_17, VAR_18={}):
if "markdown" in self.md.markupShorthands:
VAR_30 = MarkdownCodeSpans(VAR_17)
else:
VAR_30 = Functor(VAR_17)
VAR_29 = dict(self.macros, **VAR_18)
VAR_30 = VAR_30.map(curry(replaceMacros, VAR_29=macros))
VAR_30 = VAR_30.map(fixTypography)
if "css" in self.md.markupShorthands:
VAR_30 = VAR_30.map(replaceAwkwardCSSShorthands)
return VAR_30.extract()
def FUNC_12(self):
p("Exported terms:")
for VAR_19 in findAll("[data-export]", self):
for term in config.linkTextsFromElement(VAR_19):
p(" " + term)
p("Unexported terms:")
for VAR_19 in findAll("[data-noexport]", self):
for term in config.linkTextsFromElement(VAR_19):
p(" " + term)
def FUNC_13(self, VAR_19):
if VAR_19.tag in self.md.opaqueElements:
return True
if VAR_19.get("data-opaque") is not None:
return True
return False
def FUNC_0():
if os.path.isfile("index.bs"):
return "index.bs"
if os.path.isfile("Overview.bs"):
return "Overview.bs"
VAR_20 = glob.glob("*.bs")
if VAR_20:
return VAR_20[0]
VAR_21 = glob.glob("*.src.html")
if VAR_21:
return VAR_21[0]
return None
constants.specClass = CLASS_0
VAR_0 = """
/* Any --*-VAR_17 not paired with a --*-bg is assumed to have a transparent bg */
:root {
color-scheme: light dark;
--VAR_17: black;
--bg: white;
--unofficial-watermark: url(https://www.w3.org/StyleSheets/TR/2016/logos/UD-watermark);
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-VAR_17: white;
--tocnav-normal-VAR_17: #707070;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-VAR_17: var(--tocnav-normal-VAR_17);
--tocnav-hover-bg: #f8f8f8;
--tocnav-active-VAR_17: #c00;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-VAR_17: var(--VAR_17);
--tocsidebar-bg: #f7f8f9;
--tocsidebar-shadow: rgba(0,0,0,.1);
--tocsidebar-heading-VAR_17: hsla(203,20%,40%,.7);
--toclink-VAR_17: var(--VAR_17);
--toclink-underline: #3980b5;
--toclink-visited-VAR_17: var(--toclink-VAR_17);
--toclink-visited-underline: #054572;
--heading-VAR_17: #005a9c;
--hr-VAR_17: var(--VAR_17);
--algo-border: #def;
--del-VAR_17: red;
--del-bg: transparent;
--ins-VAR_17: #080;
--ins-bg: transparent;
--a-normal-VAR_17: #034575;
--a-normal-underline: #bbb;
--a-visited-VAR_17: var(--a-normal-VAR_17);
--a-visited-underline: #707070;
--a-hover-bg: rgba(75%, 75%, 75%, .25);
--a-active-VAR_17: #c00;
--a-active-underline: #c00;
--blockquote-border: silver;
--blockquote-bg: transparent;
--blockquote-VAR_17: currentcolor;
--issue-border: #e05252;
--issue-bg: #fbe9e9;
--issue-VAR_17: var(--VAR_17);
--issueheading-VAR_17: #831616;
--example-border: #e0cb52;
--example-bg: #fcfaee;
--example-VAR_17: var(--VAR_17);
--exampleheading-VAR_17: #574b0f;
--note-border: #52e052;
--note-bg: #e9fbe9;
--note-VAR_17: var(--VAR_17);
--noteheading-VAR_17: hsl(120, 70%, 30%);
--notesummary-underline: silver;
--assertion-border: #aaa;
--assertion-bg: #eee;
--assertion-VAR_17: black;
--advisement-border: orange;
--advisement-bg: #fec;
--advisement-VAR_17: var(--VAR_17);
--advisementheading-VAR_17: #b35f00;
--warning-border: red;
--warning-bg: hsla(40,100%,50%,0.95);
--warning-VAR_17: var(--VAR_17);
--amendment-border: #330099;
--amendment-bg: #F5F0FF;
--amendment-VAR_17: var(--VAR_17);
--amendmentheading-VAR_17: #220066;
--def-border: #8ccbf2;
--def-bg: #def;
--def-VAR_17: var(--VAR_17);
--defrow-border: #bbd7e9;
--datacell-border: silver;
--indexinfo-VAR_17: #707070;
--indextable-hover-VAR_17: black;
--indextable-hover-bg: #f7f8f9;
--outdatedspec-bg: rgba(0, 0, 0, .5);
--outdatedspec-VAR_17: black;
--outdated-bg: maroon;
--outdated-VAR_17: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}"""
VAR_1 = """
@media (prefers-color-scheme: dark) {
:root {
--VAR_17: #ddd;
--bg: black;
--unofficial-watermark: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='400' height='400'%3E%3Cg fill='%23100808' transform='translate(200 200) rotate(-45) translate(-200 -200)' stroke='%23100808' stroke-width='3'%3E%3Ctext x='50%25' y='220' style='font: bold 70px sans-serif; VAR_17-anchor: middle; letter-spacing: 6px;'%3EUNOFFICIAL%3C/VAR_17%3E%3Ctext x='50%25' y='305' style='font: bold 70px sans-serif; VAR_17-anchor: middle; letter-spacing: 6px;'%3EDRAFT%3C/VAR_17%3E%3C/g%3E%3C/svg%3E");
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-VAR_17: white;
--tocnav-normal-VAR_17: #999;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-VAR_17: var(--tocnav-normal-VAR_17);
--tocnav-hover-bg: #080808;
--tocnav-active-VAR_17: #f44;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-VAR_17: var(--VAR_17);
--tocsidebar-bg: #080808;
--tocsidebar-shadow: rgba(255,255,255,.1);
--tocsidebar-heading-VAR_17: hsla(203,20%,40%,.7);
--toclink-VAR_17: var(--VAR_17);
--toclink-underline: #6af;
--toclink-visited-VAR_17: var(--toclink-VAR_17);
--toclink-visited-underline: #054572;
--heading-VAR_17: #8af;
--hr-VAR_17: var(--VAR_17);
--algo-border: #456;
--del-VAR_17: #f44;
--del-bg: transparent;
--ins-VAR_17: #4a4;
--ins-bg: transparent;
--a-normal-VAR_17: #6af;
--a-normal-underline: #555;
--a-visited-VAR_17: var(--a-normal-VAR_17);
--a-visited-underline: var(--a-normal-underline);
--a-hover-bg: rgba(25%, 25%, 25%, .2);
--a-active-VAR_17: #f44;
--a-active-underline: var(--a-active-VAR_17);
--borderedblock-bg: rgba(255, 255, 255, .05);
--blockquote-border: silver;
--blockquote-bg: var(--borderedblock-bg);
--blockquote-VAR_17: currentcolor;
--issue-border: #e05252;
--issue-bg: var(--borderedblock-bg);
--issue-VAR_17: var(--VAR_17);
--issueheading-VAR_17: hsl(0deg, 70%, 70%);
--example-border: hsl(50deg, 90%, 60%);
--example-bg: var(--borderedblock-bg);
--example-VAR_17: var(--VAR_17);
--exampleheading-VAR_17: hsl(50deg, 70%, 70%);
--note-border: hsl(120deg, 100%, 35%);
--note-bg: var(--borderedblock-bg);
--note-VAR_17: var(--VAR_17);
--noteheading-VAR_17: hsl(120, 70%, 70%);
--notesummary-underline: silver;
--assertion-border: #444;
--assertion-bg: var(--borderedblock-bg);
--assertion-VAR_17: var(--VAR_17);
--advisement-border: orange;
--advisement-bg: #222218;
--advisement-VAR_17: var(--VAR_17);
--advisementheading-VAR_17: #f84;
--warning-border: red;
--warning-bg: hsla(40,100%,20%,0.95);
--warning-VAR_17: var(--VAR_17);
--amendment-border: #330099;
--amendment-bg: #080010;
--amendment-VAR_17: var(--VAR_17);
--amendmentheading-VAR_17: #cc00ff;
--def-border: #8ccbf2;
--def-bg: #080818;
--def-VAR_17: var(--VAR_17);
--defrow-border: #136;
--datacell-border: silver;
--indexinfo-VAR_17: #aaa;
--indextable-hover-VAR_17: var(--VAR_17);
--indextable-hover-bg: #181818;
--outdatedspec-bg: rgba(255, 255, 255, .5);
--outdatedspec-VAR_17: black;
--outdated-bg: maroon;
--outdated-VAR_17: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}
/* In case a transparent-bg image doesn't expect to be on a dark bg,
which is quite common in practice... */
img { background: white; }
}"""
VAR_2 = """
/* This is a weird hack for me not yet following the commonmark spec
regarding paragraph and lists. */
[data-md] > :first-child {
margin-top: 0;
}
[data-md] > :last-child {
margin-bottom: 0;
}"""
VAR_3 = """
.css.css, .property.property, .descriptor.descriptor {
color: var(--a-normal-VAR_17);
font-size: inherit;
font-family: inherit;
}
.css::before, .property::before, .descriptor::before {
content: "‘";
}
.css::after, .property::after, .descriptor::after {
content: "’";
}
.property, .descriptor {
/* Don't wrap property and descriptor names */
white-space: nowrap;
}
.type { /* CSS value <type> */
font-style: italic;
}
pre .property::before, pre .property::after {
content: "";
}
[data-link-type="property"]::before,
[data-link-type="propdesc"]::before,
[data-link-type="descriptor"]::before,
[data-link-type="value"]::before,
[data-link-type="function"]::before,
[data-link-type="at-rule"]::before,
[data-link-type="selector"]::before,
[data-link-type="maybe"]::before {
content: "‘";
}
[data-link-type="property"]::after,
[data-link-type="propdesc"]::after,
[data-link-type="descriptor"]::after,
[data-link-type="value"]::after,
[data-link-type="function"]::after,
[data-link-type="at-rule"]::after,
[data-link-type="selector"]::after,
[data-link-type="maybe"]::after {
content: "’";
}
[data-link-type].production::before,
[data-link-type].production::after,
.prod [data-link-type]::before,
.prod [data-link-type]::after {
content: "";
}
[data-link-type=element],
[data-link-type=element-attr] {
font-family: Menlo, Consolas, "DejaVu Sans Mono", monospace;
font-size: .9em;
}
[data-link-type=element]::before { content: "<" }
[data-link-type=element]::after { content: ">" }
[data-link-type=biblio] {
white-space: pre;
}"""
VAR_4 = """
:root {
--selflink-VAR_17: white;
--selflink-bg: gray;
--selflink-hover-VAR_17: black;
}
.heading, .issue, .note, .example, li, dt {
position: relative;
}
a.self-link {
position: absolute;
top: 0;
left: calc(-1 * (3.5rem - 26px));
width: calc(3.5rem - 26px);
height: 2em;
VAR_17-align: center;
border: none;
transition: opacity .2s;
opacity: .5;
}
a.self-link:hover {
opacity: 1;
}
.heading > a.self-link {
font-size: 83%;
}
li > a.self-link {
left: calc(-1 * (3.5rem - 26px) - 2em);
}
dfn > a.self-link {
top: auto;
left: auto;
opacity: 0;
width: 1.5em;
height: 1.5em;
background: var(--selflink-bg);
color: var(--selflink-VAR_17);
font-style: normal;
transition: opacity .2s, background-color .2s, color .2s;
}
dfn:hover > a.self-link {
opacity: 1;
}
dfn > a.self-link:hover {
color: var(--selflink-hover-VAR_17);
}
a.self-link::before { content: "¶"; }
.heading > a.self-link::before { content: "§"; }
dfn > a.self-link::before { content: "#"; }
"""
VAR_1 += """
@media (prefers-color-scheme: dark) {
:root {
--selflink-VAR_17: black;
--selflink-bg: silver;
--selflink-hover-VAR_17: white;
}
}
"""
VAR_5 = """
body {
counter-reset: example figure issue;
}
.issue {
counter-increment: issue;
}
.issue:not(.no-marker)::before {
content: "Issue " counter(issue);
}
.example {
counter-increment: example;
}
.example:not(.no-marker)::before {
content: "Example " counter(example);
}
.invalid.example:not(.no-marker)::before,
.illegal.example:not(.no-marker)::before {
content: "Invalid Example" counter(example);
}
figcaption {
counter-increment: figure;
}
figcaption:not(.no-marker)::before {
content: "Figure " counter(figure) " ";
}"""
| [
6,
37,
38,
52,
70,
84,
112,
127,
129,
132,
137,
139,
145,
147,
149,
157,
174,
176,
180,
181,
184,
185,
193,
194,
196,
197,
201,
202,
209,
211,
241,
242,
268,
282,
283,
286,
289,
293,
305,
307,
309,
320,
323,
333,
353,
355,
370,
373,
380,
382,
386,
390,
395,
402,
404,
414,
415,
416,
440,
442,
443,
444,
445,
446,
451,
457,
459,
469,
476,
477,
488,
493,
497,
501,
503,
504,
506,
511,
514,
516,
520,
527,
532,
537,
539,
541,
543,
548,
556,
560,
565,
570,
576,
580,
585,
589,
594,
599,
601,
603,
606,
612,
615,
621,
623,
627,
634,
639,
644,
646,
648,
650,
655,
663,
665,
669,
674,
679,
685,
689,
694,
698,
703,
708,
710,
712,
715,
721,
728,
738,
781,
788,
796,
800,
847,
861,
862,
873,
884,
891,
479,
480,
481,
482,
483,
484,
485,
486,
487
] | [
6,
37,
38,
52,
70,
84,
112,
127,
129,
132,
137,
139,
145,
147,
149,
157,
174,
176,
180,
181,
184,
185,
193,
194,
196,
197,
201,
202,
209,
211,
241,
242,
268,
282,
283,
286,
289,
293,
305,
307,
309,
320,
323,
333,
353,
355,
370,
373,
380,
382,
386,
390,
395,
402,
404,
414,
415,
416,
440,
442,
443,
444,
445,
446,
451,
457,
459,
469,
476,
477,
488,
493,
497,
501,
503,
504,
506,
511,
514,
516,
520,
527,
532,
537,
539,
541,
543,
548,
556,
560,
565,
570,
576,
580,
585,
589,
594,
599,
601,
603,
606,
612,
615,
621,
623,
627,
634,
639,
644,
646,
648,
650,
655,
663,
665,
669,
674,
679,
685,
689,
694,
698,
703,
708,
710,
712,
715,
721,
728,
738,
781,
788,
796,
800,
847,
861,
862,
873,
884,
891,
479,
480,
481,
482,
483,
484,
485,
486,
487
] |
1CWE-79
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
import frappe
import time
from frappe import _, msgprint
from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, table_fields
from frappe.model.workflow import validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
# once_only validation
# methods
def get_doc(*args, **kwargs):
"""returns a frappe.model.Document object.
:param arg1: Document dict or DocType name.
:param arg2: [optional] document name.
:param for_update: [optional] select document for update.
There are multiple ways to call `get_doc`
# will fetch the latest user object (with child table) from the database
user = get_doc("User", "test@example.com")
# create a new object
user = get_doc({
"doctype":"User"
"email_id": "test@example.com",
"roles: [
{"role": "System Manager"}
]
})
# create new object with keyword arguments
user = get_doc(doctype='User', email_id='test@example.com')
# select a document for update
user = get_doc("User", "test@example.com", for_update=True)
"""
if args:
if isinstance(args[0], BaseDocument):
# already a document
return args[0]
elif isinstance(args[0], string_types):
doctype = args[0]
elif isinstance(args[0], dict):
# passed a dict
kwargs = args[0]
else:
raise ValueError('First non keyword argument must be a string or dict')
if len(args) < 2 and kwargs:
if 'doctype' in kwargs:
doctype = kwargs['doctype']
else:
raise ValueError('"doctype" is a required key')
controller = get_controller(doctype)
if controller:
return controller(*args, **kwargs)
raise ImportError(doctype)
class Document(BaseDocument):
"""All controllers inherit from `Document`."""
def __init__(self, *args, **kwargs):
"""Constructor.
:param arg1: DocType name as string or document **dict**
:param arg2: Document name, if `arg1` is DocType name.
If DocType name and document name are passed, the object will load
all values (including child documents) from the database.
"""
self.doctype = self.name = None
self._default_new_docs = {}
self.flags = frappe._dict()
if args and args[0] and isinstance(args[0], string_types):
# first arugment is doctype
if len(args)==1:
# single
self.doctype = self.name = args[0]
else:
self.doctype = args[0]
if isinstance(args[1], dict):
# filter
self.name = frappe.db.get_value(args[0], args[1], "name")
if self.name is None:
frappe.throw(_("{0} {1} not found").format(_(args[0]), args[1]),
frappe.DoesNotExistError)
else:
self.name = args[1]
if 'for_update' in kwargs:
self.flags.for_update = kwargs.get('for_update')
self.load_from_db()
return
if args and args[0] and isinstance(args[0], dict):
# first argument is a dict
kwargs = args[0]
if kwargs:
# init base document
super(Document, self).__init__(kwargs)
self.init_valid_columns()
else:
# incorrect arguments. let's not proceed.
raise ValueError('Illegal arguments')
@staticmethod
def whitelist(f):
"""Decorator: Whitelist method to be called remotely via REST API."""
f.whitelisted = True
return f
def reload(self):
"""Reload document from database"""
self.load_from_db()
def load_from_db(self):
"""Load document and children from database and create properties
from fields"""
if not getattr(self, "_metaclass", False) and self.meta.issingle:
single_doc = frappe.db.get_singles_dict(self.doctype)
if not single_doc:
single_doc = frappe.new_doc(self.doctype).as_dict()
single_doc["name"] = self.doctype
del single_doc["__islocal"]
super(Document, self).__init__(single_doc)
self.init_valid_columns()
self._fix_numeric_types()
else:
d = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
if not d:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(Document, self).__init__(d)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import DOCTYPE_TABLE_FIELDS
table_fields = DOCTYPE_TABLE_FIELDS
else:
table_fields = self.meta.get_table_fields()
for df in table_fields:
children = frappe.db.get_values(df.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": df.fieldname},
"*", as_dict=True, order_by="idx asc")
if children:
self.set(df.fieldname, children)
else:
self.set(df.fieldname, [])
# sometimes __setup__ can depend on child values, hence calling again at the end
if hasattr(self, "__setup__"):
self.__setup__()
def get_latest(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def check_permission(self, permtype='read', permlevel=None):
"""Raise `frappe.PermissionError` if not permitted"""
if not self.has_permission(permtype):
self.raise_no_permission_to(permlevel or permtype)
def has_permission(self, permtype="read", verbose=False):
"""Call `frappe.has_permission` if `self.flags.ignore_permissions`
is not set.
:param permtype: one of `read`, `write`, `submit`, `cancel`, `delete`"""
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, permtype, self, verbose=verbose)
def raise_no_permission_to(self, perm_type):
"""Raise `frappe.PermissionError`."""
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
def insert(self, ignore_permissions=None, ignore_links=None, ignore_if_duplicate=False,
ignore_mandatory=None, set_name=None, set_child_names=True):
"""Insert the document in the database (as a new document).
This will check for user permissions and execute `before_insert`,
`validate`, `on_update`, `after_insert` methods if they are written.
:param ignore_permissions: Do not check permissions if True."""
if self.flags.in_print:
return
self.flags.notifications_executed = []
if ignore_permissions!=None:
self.flags.ignore_permissions = ignore_permissions
if ignore_links!=None:
self.flags.ignore_links = ignore_links
if ignore_mandatory!=None:
self.flags.ignore_mandatory = ignore_mandatory
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self._validate_links()
self.set_new_name(set_name=set_name, set_child_names=set_child_names)
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
# run validate, on update etc.
# parent
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
try:
self.db_insert()
except frappe.DuplicateEntryError as e:
if not ignore_if_duplicate:
raise e
# children
for d in self.get_all_children():
d.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
# flag to prevent creation of event update log for create and update both
# during document creation
self.flags.update_log_for_doc_creation = True
self.run_post_save_methods()
self.flags.in_insert = False
# delete __islocal
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
# clear unsaved flag
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
follow_document(self.doctype, self.name, frappe.session.user)
return self
def save(self, *args, **kwargs):
"""Wrapper for _save"""
return self._save(*args, **kwargs)
def _save(self, ignore_permissions=None, ignore_version=None):
"""Save the current document in the database in the **DocType**'s table or
`tabSingles` (for single types).
This will check for user permissions and execute
`validate` before updating, `on_update` after updating triggers.
:param ignore_permissions: Do not check permissions if True.
:param ignore_version: Do not save version if True."""
if self.flags.in_print:
return
self.flags.notifications_executed = []
if ignore_permissions!=None:
self.flags.ignore_permissions = ignore_permissions
self.flags.ignore_version = frappe.flags.in_test if ignore_version is None else ignore_version
if self.get("__islocal") or not self.get("name"):
self.insert()
return
self.check_permission("write", "save")
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.set_parent_in_children()
self.set_name_in_children()
self.validate_higher_perm_levels()
self._validate_links()
self.run_before_save_methods()
if self._action != "cancel":
self._validate()
if self._action == "update_after_submit":
self.validate_update_after_submit()
self.set_docstatus()
# parent
if self.meta.issingle:
self.update_single(self.get_valid_dict())
else:
self.db_update()
self.update_children()
self.run_post_save_methods()
# clear unsaved flag
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
return self
def copy_attachments_from_amended_from(self):
"""Copy attachments from `amended_from`"""
from frappe.desk.form.load import get_attachments
#loop through attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
#save attachments to new doc
_file = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
_file.save()
def update_children(self):
"""update child tables"""
for df in self.meta.get_table_fields():
self.update_child_table(df.fieldname, df)
def update_child_table(self, fieldname, df=None):
"""sync child table for given fieldname"""
rows = []
if not df:
df = self.meta.get_field(fieldname)
for d in self.get(df.fieldname):
d.db_update()
rows.append(d.name)
if df.options in (self.flags.ignore_children_type or []):
# do not delete rows for this because of flags
# hack for docperm :(
return
if rows:
# select rows that do not match the ones in the document
deleted_rows = frappe.db.sql("""select name from `tab{0}` where parent=%s
and parenttype=%s and parentfield=%s
and name not in ({1})""".format(df.options, ','.join(['%s'] * len(rows))),
[self.name, self.doctype, fieldname] + rows)
if len(deleted_rows) > 0:
# delete rows that do not match the ones in the document
frappe.db.sql("""delete from `tab{0}` where name in ({1})""".format(df.options,
','.join(['%s'] * len(deleted_rows))), tuple(row[0] for row in deleted_rows))
else:
# no rows found, delete all rows
frappe.db.sql("""delete from `tab{0}` where parent=%s
and parenttype=%s and parentfield=%s""".format(df.options),
(self.name, self.doctype, fieldname))
def get_doc_before_save(self):
return getattr(self, '_doc_before_save', None)
def has_value_changed(self, fieldname):
'''Returns true if value is changed before and after saving'''
previous = self.get_doc_before_save()
return previous.get(fieldname)!=self.get(fieldname) if previous else True
def set_new_name(self, force=False, set_name=None, set_child_names=True):
"""Calls `frappe.naming.set_new_name` for parent and child docs."""
if self.flags.name_set and not force:
return
# If autoname has set as Prompt (name)
if self.get("__newname"):
self.name = self.get("__newname")
self.flags.name_set = True
return
if set_name:
self.name = set_name
else:
set_new_name(self)
if set_child_names:
# set name for children
for d in self.get_all_children():
set_new_name(d)
self.flags.name_set = True
def get_title(self):
"""Get the document title based on title_field or `title` or `name`"""
return self.get(self.meta.get_title_field())
def set_title_field(self):
"""Set title field based on template"""
def get_values():
values = self.as_dict()
# format values
for key, value in iteritems(values):
if value==None:
values[key] = ""
return values
if self.meta.get("title_field")=="title":
df = self.meta.get_field(self.meta.title_field)
if df.options:
self.set(df.fieldname, df.options.format(**get_values()))
elif self.is_new() and not self.get(df.fieldname) and df.default:
# set default title for new transactions (if default)
self.set(df.fieldname, df.default.format(**get_values()))
def update_single(self, d):
"""Updates values for Single type Document in `tabSingles`."""
frappe.db.sql("""delete from `tabSingles` where doctype=%s""", self.doctype)
for field, value in iteritems(d):
if field != "doctype":
frappe.db.sql("""insert into `tabSingles` (doctype, field, value)
values (%s, %s, %s)""", (self.doctype, field, value))
if self.doctype in frappe.db.value_cache:
del frappe.db.value_cache[self.doctype]
def set_user_and_timestamp(self):
self._original_modified = self.modified
self.modified = now()
self.modified_by = frappe.session.user
if not self.creation:
self.creation = self.modified
if not self.owner:
self.owner = self.modified_by
for d in self.get_all_children():
d.modified = self.modified
d.modified_by = self.modified_by
if not d.owner:
d.owner = self.owner
if not d.creation:
d.creation = self.creation
frappe.flags.currently_saving.append((self.doctype, self.name))
def set_docstatus(self):
if self.docstatus==None:
self.docstatus=0
for d in self.get_all_children():
d.docstatus = self.docstatus
def _validate(self):
self._validate_mandatory()
self._validate_data_fields()
self._validate_selects()
self._validate_non_negative()
self._validate_length()
self._extract_images_from_text_editor()
self._sanitize_content()
self._save_passwords()
self.validate_workflow()
children = self.get_all_children()
for d in children:
d._validate_data_fields()
d._validate_selects()
d._validate_non_negative()
d._validate_length()
d._extract_images_from_text_editor()
d._sanitize_content()
d._save_passwords()
if self.is_new():
# don't set fields like _assign, _comments for new doc
for fieldname in optional_fields:
self.set(fieldname, None)
else:
self.validate_set_only_once()
def _validate_non_negative(self):
def get_msg(df):
if self.parentfield:
return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(df.label)))
else:
return _("Value cannot be negative for {0}: {1}").format(_(df.parent), frappe.bold(_(df.label)))
for df in self.meta.get('fields', {'non_negative': ('=', 1),
'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
if flt(self.get(df.fieldname)) < 0:
msg = get_msg(df)
frappe.throw(msg, frappe.NonNegativeError, title=_("Negative Value"))
def validate_workflow(self):
"""Validate if the workflow transition is valid"""
if frappe.flags.in_install == 'frappe': return
workflow = self.meta.get_workflow()
if workflow:
validate_workflow(self)
if not self._action == 'save':
set_workflow_state_on_action(self, workflow, self._action)
def validate_set_only_once(self):
"""Validate that fields are not changed if not in insert"""
set_only_once_fields = self.meta.get_set_only_once_fields()
if set_only_once_fields and self._doc_before_save:
# document exists before saving
for field in set_only_once_fields:
fail = False
value = self.get(field.fieldname)
original_value = self._doc_before_save.get(field.fieldname)
if field.fieldtype in table_fields:
fail = not self.is_child_table_same(field.fieldname)
elif field.fieldtype in ('Date', 'Datetime', 'Time'):
fail = str(value) != str(original_value)
else:
fail = value != original_value
if fail:
frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
frappe.CannotChangeConstantError)
return False
def is_child_table_same(self, fieldname):
"""Validate child table is same as original table before saving"""
value = self.get(fieldname)
original_value = self._doc_before_save.get(fieldname)
same = True
if len(original_value) != len(value):
same = False
else:
# check all child entries
for i, d in enumerate(original_value):
new_child = value[i].as_dict(convert_dates_to_str = True)
original_child = d.as_dict(convert_dates_to_str = True)
# all fields must be same other than modified and modified_by
for key in ('modified', 'modified_by', 'creation'):
del new_child[key]
del original_child[key]
if original_child != new_child:
same = False
break
return same
def apply_fieldlevel_read_permissions(self):
"""Remove values the user is not allowed to read (called when loading in desk)"""
if frappe.session.user == "Administrator":
return
has_higher_permlevel = False
all_fields = self.meta.fields.copy()
for table_field in self.meta.get_table_fields():
all_fields += frappe.get_meta(table_field.options).fields or []
for df in all_fields:
if df.permlevel > 0:
has_higher_permlevel = True
break
if not has_higher_permlevel:
return
has_access_to = self.get_permlevel_access('read')
for df in self.meta.fields:
if df.permlevel and not df.permlevel in has_access_to:
self.set(df.fieldname, None)
for table_field in self.meta.get_table_fields():
for df in frappe.get_meta(table_field.options).fields or []:
if df.permlevel and not df.permlevel in has_access_to:
for child in self.get(table_field.fieldname) or []:
child.set(df.fieldname, None)
def validate_higher_perm_levels(self):
"""If the user does not have permissions at permlevel > 0, then reset the values to original / default"""
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
has_access_to = self.get_permlevel_access()
high_permlevel_fields = self.meta.get_high_permlevel_fields()
if high_permlevel_fields:
self.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
# If new record then don't reset the values for child table
if self.is_new(): return
# check for child tables
for df in self.meta.get_table_fields():
high_permlevel_fields = frappe.get_meta(df.options).get_high_permlevel_fields()
if high_permlevel_fields:
for d in self.get(df.fieldname):
d.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
def get_permlevel_access(self, permission_type='write'):
if not hasattr(self, "_has_access_to"):
self._has_access_to = {}
self._has_access_to[permission_type] = []
roles = frappe.get_roles()
for perm in self.get_permissions():
if perm.role in roles and perm.get(permission_type):
if perm.permlevel not in self._has_access_to[permission_type]:
self._has_access_to[permission_type].append(perm.permlevel)
return self._has_access_to[permission_type]
def has_permlevel_access_to(self, fieldname, df=None, permission_type='read'):
if not df:
df = self.meta.get_field(fieldname)
return df.permlevel in self.get_permlevel_access(permission_type)
def get_permissions(self):
if self.meta.istable:
# use parent permissions
permissions = frappe.get_meta(self.parenttype).permissions
else:
permissions = self.meta.permissions
return permissions
def _set_defaults(self):
if frappe.flags.in_import:
return
new_doc = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(new_doc)
# children
for df in self.meta.get_table_fields():
new_doc = frappe.new_doc(df.options, as_dict=True)
value = self.get(df.fieldname)
if isinstance(value, list):
for d in value:
d.update_if_missing(new_doc)
def check_if_latest(self):
"""Checks if `modified` timestamp provided by document being updated is same as the
`modified` timestamp in the database. If there is a different, the document has been
updated in the database after the current copy was read. Will throw an error if
timestamps don't match.
Will also validate document transitions (Save > Submit > Cancel) calling
`self.check_docstatus_transition`."""
conflict = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
modified = frappe.db.sql("""select value from tabSingles
where doctype=%s and field='modified' for update""", self.doctype)
modified = modified and modified[0][0]
if modified and modified != cstr(self._original_modified):
conflict = True
else:
tmp = frappe.db.sql("""select modified, docstatus from `tab{0}`
where name = %s for update""".format(self.doctype), self.name, as_dict=True)
if not tmp:
frappe.throw(_("Record does not exist"))
else:
tmp = tmp[0]
modified = cstr(tmp.modified)
if modified and modified != cstr(self._original_modified):
conflict = True
self.check_docstatus_transition(tmp.docstatus)
if conflict:
frappe.msgprint(_("Error: Document has been modified after you have opened it") \
+ (" (%s, %s). " % (modified, self.modified)) \
+ _("Please refresh to get the latest document."),
raise_exception=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
def check_docstatus_transition(self, docstatus):
"""Ensures valid `docstatus` transition.
Valid transitions are (number in brackets is `docstatus`):
- Save (0) > Save (0)
- Save (0) > Submit (1)
- Submit (1) > Submit (1)
- Submit (1) > Cancel (2)
"""
if not self.docstatus:
self.docstatus = 0
if docstatus==0:
if self.docstatus==0:
self._action = "save"
elif self.docstatus==1:
self._action = "submit"
self.check_permission("submit")
else:
raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 0 to 2"))
elif docstatus==1:
if self.docstatus==1:
self._action = "update_after_submit"
self.check_permission("submit")
elif self.docstatus==2:
self._action = "cancel"
self.check_permission("cancel")
else:
raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 1 to 0"))
elif docstatus==2:
raise frappe.ValidationError(_("Cannot edit cancelled document"))
def set_parent_in_children(self):
"""Updates `parent` and `parenttype` property in all children."""
for d in self.get_all_children():
d.parent = self.name
d.parenttype = self.doctype
def set_name_in_children(self):
# Set name for any new children
for d in self.get_all_children():
if not d.name:
set_new_name(d)
def validate_update_after_submit(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for d in self.get_all_children():
if d.is_new() and self.meta.get_field(d.parentfield).allow_on_submit:
# in case of a new row, don't validate allow on submit, if table is allow on submit
continue
d._validate_update_after_submit()
# TODO check only allowed values are updated
def _validate_mandatory(self):
if self.flags.ignore_mandatory:
return
missing = self._get_missing_mandatory_fields()
for d in self.get_all_children():
missing.extend(d._get_missing_mandatory_fields())
if not missing:
return
for fieldname, msg in missing:
msgprint(msg)
if frappe.flags.print_messages:
print(self.as_json().encode("utf-8"))
raise frappe.MandatoryError('[{doctype}, {name}]: {fields}'.format(
fields=", ".join((each[0] for each in missing)),
doctype=self.doctype,
name=self.name))
def _validate_links(self):
if self.flags.ignore_links or self._action == "cancel":
return
invalid_links, cancelled_links = self.get_invalid_links()
for d in self.get_all_children():
result = d.get_invalid_links(is_submittable=self.meta.is_submittable)
invalid_links.extend(result[0])
cancelled_links.extend(result[1])
if invalid_links:
msg = ", ".join((each[2] for each in invalid_links))
frappe.throw(_("Could not find {0}").format(msg),
frappe.LinkValidationError)
if cancelled_links:
msg = ", ".join((each[2] for each in cancelled_links))
frappe.throw(_("Cannot link cancelled document: {0}").format(msg),
frappe.CancelledLinkError)
def get_all_children(self, parenttype=None):
"""Returns all children documents from **Table** type field in a list."""
ret = []
for df in self.meta.get("fields", {"fieldtype": ['in', table_fields]}):
if parenttype:
if df.options==parenttype:
return self.get(df.fieldname)
value = self.get(df.fieldname)
if isinstance(value, list):
ret.extend(value)
return ret
def run_method(self, method, *args, **kwargs):
"""run standard triggers, plus those in hooks"""
if "flags" in kwargs:
del kwargs["flags"]
if hasattr(self, method) and hasattr(getattr(self, method), "__call__"):
fn = lambda self, *args, **kwargs: getattr(self, method)(*args, **kwargs)
else:
# hack! to run hooks even if method does not exist
fn = lambda self, *args, **kwargs: None
fn.__name__ = str(method)
out = Document.hook(fn)(self, *args, **kwargs)
self.run_notifications(method)
run_webhooks(self, method)
run_server_script_for_doc_event(self, method)
return out
def run_trigger(self, method, *args, **kwargs):
return self.run_method(method, *args, **kwargs)
def run_notifications(self, method):
"""Run notifications for this method"""
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
alerts = frappe.cache().hget('notifications', self.doctype)
if alerts==None:
alerts = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, alerts)
self.flags.notifications = alerts
if not self.flags.notifications:
return
def _evaluate_alert(alert):
if not alert.name in self.flags.notifications_executed:
evaluate_alert(self, alert.name, alert.event)
self.flags.notifications_executed.append(alert.name)
event_map = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
# value change is not applicable in insert
event_map['on_change'] = 'Value Change'
for alert in self.flags.notifications:
event = event_map.get(method, None)
if event and alert.event == event:
_evaluate_alert(alert)
elif alert.event=='Method' and method == alert.method:
_evaluate_alert(alert)
@whitelist.__func__
def _submit(self):
"""Submit the document. Sets `docstatus` = 1, then saves."""
self.docstatus = 1
self.save()
@whitelist.__func__
def _cancel(self):
"""Cancel the document. Sets `docstatus` = 2, then saves."""
self.docstatus = 2
self.save()
@whitelist.__func__
def submit(self):
"""Submit the document. Sets `docstatus` = 1, then saves."""
self._submit()
@whitelist.__func__
def cancel(self):
"""Cancel the document. Sets `docstatus` = 2, then saves."""
self._cancel()
def delete(self, ignore_permissions=False):
"""Delete document."""
frappe.delete_doc(self.doctype, self.name, ignore_permissions = ignore_permissions, flags=self.flags)
def run_before_save_methods(self):
"""Run standard methods before `INSERT` or `UPDATE`. Standard Methods are:
- `validate`, `before_save` for **Save**.
- `validate`, `before_submit` for **Submit**.
- `before_cancel` for **Cancel**
- `before_update_after_submit` for **Update after Submit**
Will also update title_field if set"""
self.load_doc_before_save()
self.reset_seen()
# before_validate method should be executed before ignoring validations
if self._action in ("save", "submit"):
self.run_method("before_validate")
if self.flags.ignore_validate:
return
if self._action=="save":
self.run_method("validate")
self.run_method("before_save")
elif self._action=="submit":
self.run_method("validate")
self.run_method("before_submit")
elif self._action=="cancel":
self.run_method("before_cancel")
elif self._action=="update_after_submit":
self.run_method("before_update_after_submit")
self.set_title_field()
def load_doc_before_save(self):
"""Save load document from db before saving"""
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
def run_post_save_methods(self):
"""Run standard methods after `INSERT` or `UPDATE`. Standard Methods are:
- `on_update` for **Save**.
- `on_update`, `on_submit` for **Submit**.
- `on_cancel` for **Cancel**
- `update_after_submit` for **Update after Submit**"""
doc_before_save = self.get_doc_before_save()
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
update_global_search(self)
self.save_version()
self.run_method('on_change')
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
def clear_cache(self):
frappe.clear_document_cache(self.doctype, self.name)
def reset_seen(self):
"""Clear _seen property and set current user as seen"""
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), update_modified=False)
def notify_update(self):
"""Publish realtime that the current document is modified"""
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
doctype=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
data = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", data, after_commit=True)
	def db_set(self, fieldname, value=None, update_modified=True, notify=False, commit=False):
		"""Set a value in the document object, update the timestamp and update the database.

		WARNING: This method does not trigger controller validations and should
		be used very carefully.

		:param fieldname: fieldname of the property to be updated, or a {"field":"value"} dictionary
		:param value: value of the property to be updated
		:param update_modified: default True. updates the `modified` and `modified_by` properties
		:param notify: default False. run doc.notify_update() to send updates via socketio
		:param commit: default False. run frappe.db.commit()
		"""
		if isinstance(fieldname, dict):
			self.update(fieldname)
		else:
			self.set(fieldname, value)
		if update_modified and (self.doctype, self.name) not in frappe.flags.currently_saving:
			# don't update modified timestamp if called from post save methods
			# like on_update or on_submit
			self.set("modified", now())
			self.set("modified_by", frappe.session.user)
		self.load_doc_before_save()
		# to trigger notification on value change
		self.run_method('before_change')
		frappe.db.set_value(self.doctype, self.name, fieldname, value,
			self.modified, self.modified_by, update_modified=update_modified)
		self.run_method('on_change')
		if notify:
			self.notify_update()
		self.clear_cache()
		if commit:
			frappe.db.commit()
def db_get(self, fieldname):
"""get database value for this fieldname"""
return frappe.db.get_value(self.doctype, self.name, fieldname)
def check_no_back_links_exist(self):
"""Check if document links to any active document before Cancel."""
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, method="Cancel")
check_if_doc_is_dynamically_linked(self, method="Cancel")
	def save_version(self):
		"""Save version info"""
		# don't track version under following conditions
		if (not getattr(self.meta, 'track_changes', False)
			or self.doctype == 'Version'
			or self.flags.ignore_version
			or frappe.flags.in_install
			or (not self._doc_before_save and frappe.flags.in_patch)):
			return
		version = frappe.new_doc('Version')
		if not self._doc_before_save:
			# brand-new document: store a full "insert" snapshot
			version.for_insert(self)
			version.insert(ignore_permissions=True)
		elif version.set_diff(self._doc_before_save, self):
			# existing document: store only the field-level diff (if any)
			version.insert(ignore_permissions=True)
			if not frappe.flags.in_migrate:
				# follow since you made a change?
				follow_document(self.doctype, self.name, frappe.session.user)
	@staticmethod
	def hook(f):
		"""Decorator: Make method `hookable` (i.e. extensible by another app).

		Note: If each hooked method returns a value (dict), then all returns are
		collated in one dict and returned. Ideally, don't return values in hookable
		methods, set properties in the document."""
		def add_to_return_value(self, new_return_value):
			# dict returns are merged into _return_value; anything else replaces it
			if isinstance(new_return_value, dict):
				if not self.get("_return_value"):
					self._return_value = {}
				self._return_value.update(new_return_value)
			else:
				self._return_value = new_return_value or self.get("_return_value")
		def compose(fn, *hooks):
			# chain the original method with every registered hook,
			# accumulating return values via add_to_return_value
			def runner(self, method, *args, **kwargs):
				add_to_return_value(self, fn(self, *args, **kwargs))
				for f in hooks:
					add_to_return_value(self, f(self, method, *args, **kwargs))
				return self._return_value
			return runner
		def composer(self, *args, **kwargs):
			hooks = []
			method = f.__name__
			doc_events = frappe.get_doc_hooks()
			# hooks registered for this doctype plus wildcard ("*") hooks
			for handler in doc_events.get(self.doctype, {}).get(method, []) \
				+ doc_events.get("*", {}).get(method, []):
				hooks.append(frappe.get_attr(handler))
			composed = compose(f, *hooks)
			return composed(self, method, *args, **kwargs)
		return composer
def is_whitelisted(self, method):
fn = getattr(self, method, None)
if not fn:
raise NotFound("Method {0} not found".format(method))
elif not getattr(fn, "whitelisted", False):
raise Forbidden("Method {0} not whitelisted".format(method))
	def validate_value(self, fieldname, condition, val2, doc=None, raise_exception=None):
		"""Check that value of fieldname should be 'condition' val2
		else throw Exception.

		:param fieldname: field to check
		:param condition: comparison operator understood by frappe.compare (e.g. "=", "in", "^")
		:param val2: value to compare against (cast to the field's type first)
		:param doc: document to read the value from; defaults to self
		:param raise_exception: exception class to raise on failure (True = generic)"""
		error_condition_map = {
			"in": _("one of"),
			"not in": _("none of"),
			"^": _("beginning with"),
		}
		if not doc:
			doc = self
		val1 = doc.get_value(fieldname)
		df = doc.meta.get_field(fieldname)
		val2 = doc.cast(val2, df)
		if not frappe.compare(val1, condition, val2):
			label = doc.meta.get_label(fieldname)
			condition_str = error_condition_map.get(condition, condition)
			if doc.parentfield:
				# child row: include the row number in the message
				msg = _("Incorrect value in row {0}: {1} must be {2} {3}").format(doc.idx, label, condition_str, val2)
			else:
				msg = _("Incorrect value: {0} must be {1} {2}").format(label, condition_str, val2)
			# raise passed exception or True
			msgprint(msg, raise_exception=raise_exception or True)
def validate_table_has_rows(self, parentfield, raise_exception=None):
"""Raise exception if Table field is empty."""
if not (isinstance(self.get(parentfield), list) and len(self.get(parentfield)) > 0):
label = self.meta.get_label(parentfield)
frappe.throw(_("Table {0} cannot be empty").format(label), raise_exception or frappe.EmptyTableError)
	def round_floats_in(self, doc, fieldnames=None):
		"""Round floats for all `Currency`, `Float`, `Percent` fields for the given doc.

		:param doc: Document whose numeric properties are to be rounded.
		:param fieldnames: [Optional] List of fields to be rounded."""
		if not fieldnames:
			# default to every numeric field defined on the doctype
			fieldnames = (df.fieldname for df in
				doc.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
		for fieldname in fieldnames:
			# precision is resolved per field (and per parentfield for child rows)
			doc.set(fieldname, flt(doc.get(fieldname), self.precision(fieldname, doc.parentfield)))
def get_url(self):
"""Returns Desk URL for this document. `/app/Form/{doctype}/{name}`"""
return "/app/Form/{doctype}/{name}".format(doctype=self.doctype, name=self.name)
	def add_comment(self, comment_type='Comment', text=None, comment_email=None, link_doctype=None, link_name=None, comment_by=None):
		"""Add a comment to this document.

		:param comment_type: e.g. `Comment`. See Communication for more info.
		:param text: comment body; falls back to the comment_type string
		:param comment_email: author email; defaults to the session user
		:param link_doctype: optional doctype to link the comment to
		:param link_name: optional docname to link the comment to
		:param comment_by: optional display name of the author
		:return: the inserted Comment document"""
		out = frappe.get_doc({
			"doctype":"Comment",
			'comment_type': comment_type,
			"comment_email": comment_email or frappe.session.user,
			"comment_by": comment_by,
			"reference_doctype": self.doctype,
			"reference_name": self.name,
			"content": text or comment_type,
			"link_doctype": link_doctype,
			"link_name": link_name
		}).insert(ignore_permissions=True)
		return out
	def add_seen(self, user=None):
		"""add the given/current user to list of users who have seen this document (_seen)"""
		if not user:
			user = frappe.session.user
		if self.meta.track_seen:
			# _seen is stored as a JSON list; parse whatever form it is in
			_seen = self.get('_seen') or []
			_seen = frappe.parse_json(_seen)
			if user not in _seen:
				_seen.append(user)
				# write directly without touching `modified`
				frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(_seen), update_modified=False)
				frappe.local.flags.commit = True
def add_viewed(self, user=None):
"""add log to communication when a user views a document"""
if not user:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(ignore_permissions=True)
frappe.local.flags.commit = True
def get_signature(self):
"""Returns signature (hash) for private URL."""
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def get_liked_by(self):
liked_by = getattr(self, "_liked_by", None)
if liked_by:
return json.loads(liked_by)
else:
return []
def set_onload(self, key, value):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[key] = value
def get_onload(self, key=None):
if not key:
return self.get("__onload", frappe._dict())
return self.get('__onload')[key]
	def queue_action(self, action, **kwargs):
		"""Run an action in background. If the action has an inner function,
		like _submit for submit, it will call that instead.

		:param action: method name to run in the background
		:param kwargs: forwarded to the enqueued method"""
		# call _submit instead of submit, so you can override submit to call
		# run_delayed based on some action
		# See: Stock Reconciliation
		from frappe.utils.background_jobs import enqueue
		if hasattr(self, '_' + action):
			action = '_' + action
		# refuse to queue twice while a previous run holds the lock file
		if file_lock.lock_exists(self.get_signature()):
			frappe.throw(_('This document is currently queued for execution. Please try again'),
				title=_('Document Queued'))
		self.lock()
		enqueue('frappe.model.document.execute_action', doctype=self.doctype, name=self.name,
			action=action, **kwargs)
	def lock(self, timeout=None):
		"""Creates a lock file for the given document. If timeout is set,
		it will retry every 1 second for acquiring the lock again

		:param timeout: seconds to keep retrying; if falsy (default None), fail
			immediately when the document is already locked"""
		signature = self.get_signature()
		if file_lock.lock_exists(signature):
			lock_exists = True
			if timeout:
				# poll once per second until the lock disappears or we run out of time
				for i in range(timeout):
					time.sleep(1)
					if not file_lock.lock_exists(signature):
						lock_exists = False
						break
			if lock_exists:
				raise frappe.DocumentLockedError
		file_lock.create_lock(signature)
def unlock(self):
"""Delete the lock file for this document"""
file_lock.delete_lock(self.get_signature())
# validation helpers
def validate_from_to_dates(self, from_date_field, to_date_field):
"""
Generic validation to verify date sequence
"""
if date_diff(self.get(to_date_field), self.get(from_date_field)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(to_date_field)),
frappe.bold(self.meta.get_label(from_date_field)),
), frappe.exceptions.InvalidDates)
def get_assigned_users(self):
assignments = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
users = set([assignment.owner for assignment in assignments])
return users
def add_tag(self, tag):
"""Add a Tag to this document"""
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, tag)
def get_tags(self):
"""Return a list of Tags attached to this document"""
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def execute_action(doctype, name, action, **kwargs):
	"""Execute an action on a document (called by background worker).

	Unlocks the document first, then runs `doc.<action>(**kwargs)`. On failure the
	transaction is rolled back and the error is recorded as a comment on the document."""
	doc = frappe.get_doc(doctype, name)
	doc.unlock()
	try:
		getattr(doc, action)(**kwargs)
	except Exception:
		frappe.db.rollback()
		# add a comment (?)
		if frappe.local.message_log:
			msg = json.loads(frappe.local.message_log[-1]).get('message')
		else:
			# fix: close tags in the correct (nested) order — was '</pre></code>'
			msg = '<pre><code>' + frappe.get_traceback() + '</code></pre>'
		doc.add_comment('Comment', _('Action Failed') + '<br><br>' + msg)
		doc.notify_update()
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
import frappe
import time
from frappe import _, msgprint, is_whitelisted
from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, table_fields
from frappe.model.workflow import validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
# once_only validation
# methods
def get_doc(*args, **kwargs):
	"""returns a frappe.model.Document object.

	:param arg1: Document dict or DocType name.
	:param arg2: [optional] document name.
	:param for_update: [optional] select document for update.

	There are multiple ways to call `get_doc`

		# will fetch the latest user object (with child table) from the database
		user = get_doc("User", "test@example.com")

		# create a new object
		user = get_doc({
			"doctype": "User",
			"email_id": "test@example.com",
			"roles": [
				{"role": "System Manager"}
			]
		})

		# create new object with keyword arguments
		user = get_doc(doctype='User', email_id='test@example.com')

		# select a document for update
		user = get_doc("User", "test@example.com", for_update=True)
	"""
	if args:
		if isinstance(args[0], BaseDocument):
			# already a document
			return args[0]
		elif isinstance(args[0], string_types):
			doctype = args[0]
		elif isinstance(args[0], dict):
			# passed a dict
			kwargs = args[0]
		else:
			raise ValueError('First non keyword argument must be a string or dict')
	if len(args) < 2 and kwargs:
		if 'doctype' in kwargs:
			doctype = kwargs['doctype']
		else:
			raise ValueError('"doctype" is a required key')
	# NOTE(review): if neither args nor kwargs set a doctype, `doctype` is unbound
	# here and this line raises NameError rather than a friendly error
	controller = get_controller(doctype)
	if controller:
		return controller(*args, **kwargs)
	raise ImportError(doctype)
class Document(BaseDocument):
"""All controllers inherit from `Document`."""
	def __init__(self, *args, **kwargs):
		"""Constructor.

		:param arg1: DocType name as string or document **dict**
		:param arg2: Document name, if `arg1` is DocType name.

		If DocType name and document name are passed, the object will load
		all values (including child documents) from the database.
		"""
		self.doctype = self.name = None
		self._default_new_docs = {}
		self.flags = frappe._dict()
		if args and args[0] and isinstance(args[0], string_types):
			# first argument is doctype
			if len(args)==1:
				# single
				self.doctype = self.name = args[0]
			else:
				self.doctype = args[0]
				if isinstance(args[1], dict):
					# filter
					self.name = frappe.db.get_value(args[0], args[1], "name")
					if self.name is None:
						frappe.throw(_("{0} {1} not found").format(_(args[0]), args[1]),
							frappe.DoesNotExistError)
				else:
					self.name = args[1]
				if 'for_update' in kwargs:
					self.flags.for_update = kwargs.get('for_update')
			self.load_from_db()
			return
		if args and args[0] and isinstance(args[0], dict):
			# first argument is a dict
			kwargs = args[0]
		if kwargs:
			# init base document
			super(Document, self).__init__(kwargs)
			self.init_valid_columns()
		else:
			# incorrect arguments. let's not proceed.
			raise ValueError('Illegal arguments')
	@staticmethod
	def whitelist(fn):
		"""Decorator: Whitelist method to be called remotely via REST API."""
		# register with frappe's whitelist and return fn unchanged so it stacks cleanly
		frappe.whitelist()(fn)
		return fn
	def reload(self):
		"""Reload document and child rows from the database, discarding unsaved values."""
		self.load_from_db()
	def load_from_db(self):
		"""Load document and children from database and create properties
		from fields"""
		if not getattr(self, "_metaclass", False) and self.meta.issingle:
			# Single doctypes live in `tabSingles`; synthesize defaults when empty
			single_doc = frappe.db.get_singles_dict(self.doctype)
			if not single_doc:
				single_doc = frappe.new_doc(self.doctype).as_dict()
				single_doc["name"] = self.doctype
				del single_doc["__islocal"]
			super(Document, self).__init__(single_doc)
			self.init_valid_columns()
			self._fix_numeric_types()
		else:
			d = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
			if not d:
				frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
			super(Document, self).__init__(d)
		if self.name=="DocType" and self.doctype=="DocType":
			# bootstrapping: DocType's own table fields are hard-coded
			from frappe.model.meta import DOCTYPE_TABLE_FIELDS
			table_fields = DOCTYPE_TABLE_FIELDS
		else:
			table_fields = self.meta.get_table_fields()
		# load each child table ordered by row index
		for df in table_fields:
			children = frappe.db.get_values(df.options,
				{"parent": self.name, "parenttype": self.doctype, "parentfield": df.fieldname},
				"*", as_dict=True, order_by="idx asc")
			if children:
				self.set(df.fieldname, children)
			else:
				self.set(df.fieldname, [])
		# sometimes __setup__ can depend on child values, hence calling again at the end
		if hasattr(self, "__setup__"):
			self.__setup__()
def get_latest(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def check_permission(self, permtype='read', permlevel=None):
"""Raise `frappe.PermissionError` if not permitted"""
if not self.has_permission(permtype):
self.raise_no_permission_to(permlevel or permtype)
def has_permission(self, permtype="read", verbose=False):
"""Call `frappe.has_permission` if `self.flags.ignore_permissions`
is not set.
:param permtype: one of `read`, `write`, `submit`, `cancel`, `delete`"""
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, permtype, self, verbose=verbose)
def raise_no_permission_to(self, perm_type):
"""Raise `frappe.PermissionError`."""
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
	def insert(self, ignore_permissions=None, ignore_links=None, ignore_if_duplicate=False,
			ignore_mandatory=None, set_name=None, set_child_names=True):
		"""Insert the document in the database (as a new document).
		This will check for user permissions and execute `before_insert`,
		`validate`, `on_update`, `after_insert` methods if they are written.

		:param ignore_permissions: Do not check permissions if True.
		:param ignore_links: Do not validate Link field values if True.
		:param ignore_if_duplicate: Swallow DuplicateEntryError on the parent insert if True.
		:param ignore_mandatory: Do not validate mandatory fields if True.
		:param set_name: Explicit name to insert under, bypassing autoname.
		:param set_child_names: Also name child rows (default True)."""
		if self.flags.in_print:
			return
		self.flags.notifications_executed = []
		# None means "leave the flag as-is"; only explicit True/False override
		if ignore_permissions!=None:
			self.flags.ignore_permissions = ignore_permissions
		if ignore_links!=None:
			self.flags.ignore_links = ignore_links
		if ignore_mandatory!=None:
			self.flags.ignore_mandatory = ignore_mandatory
		self.set("__islocal", True)
		self.check_permission("create")
		self._set_defaults()
		self.set_user_and_timestamp()
		self.set_docstatus()
		self.check_if_latest()
		self.run_method("before_insert")
		self._validate_links()
		self.set_new_name(set_name=set_name, set_child_names=set_child_names)
		self.set_parent_in_children()
		self.validate_higher_perm_levels()
		self.flags.in_insert = True
		self.run_before_save_methods()
		self._validate()
		self.set_docstatus()
		self.flags.in_insert = False
		# run validate, on update etc.
		# parent
		if getattr(self.meta, "issingle", 0):
			self.update_single(self.get_valid_dict())
		else:
			try:
				self.db_insert()
			except frappe.DuplicateEntryError as e:
				if not ignore_if_duplicate:
					raise e
		# children
		for d in self.get_all_children():
			d.db_insert()
		self.run_method("after_insert")
		self.flags.in_insert = True
		if self.get("amended_from"):
			self.copy_attachments_from_amended_from()
		# flag to prevent creation of event update log for create and update both
		# during document creation
		self.flags.update_log_for_doc_creation = True
		self.run_post_save_methods()
		self.flags.in_insert = False
		# delete __islocal
		if hasattr(self, "__islocal"):
			delattr(self, "__islocal")
		# clear unsaved flag
		if hasattr(self, "__unsaved"):
			delattr(self, "__unsaved")
		if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
			follow_document(self.doctype, self.name, frappe.session.user)
		return self
	def save(self, *args, **kwargs):
		"""Save the document in the database. Thin wrapper for `_save`; see `_save` for parameters."""
		return self._save(*args, **kwargs)
	def _save(self, ignore_permissions=None, ignore_version=None):
		"""Save the current document in the database in the **DocType**'s table or
		`tabSingles` (for single types).

		This will check for user permissions and execute
		`validate` before updating, `on_update` after updating triggers.

		:param ignore_permissions: Do not check permissions if True.
		:param ignore_version: Do not save version if True."""
		if self.flags.in_print:
			return
		self.flags.notifications_executed = []
		if ignore_permissions!=None:
			self.flags.ignore_permissions = ignore_permissions
		# versions are skipped by default inside tests
		self.flags.ignore_version = frappe.flags.in_test if ignore_version is None else ignore_version
		if self.get("__islocal") or not self.get("name"):
			# new (or unnamed) document: delegate to insert()
			self.insert()
			return
		self.check_permission("write", "save")
		self.set_user_and_timestamp()
		self.set_docstatus()
		self.check_if_latest()
		self.set_parent_in_children()
		self.set_name_in_children()
		self.validate_higher_perm_levels()
		self._validate_links()
		self.run_before_save_methods()
		if self._action != "cancel":
			self._validate()
		if self._action == "update_after_submit":
			self.validate_update_after_submit()
		self.set_docstatus()
		# parent
		if self.meta.issingle:
			self.update_single(self.get_valid_dict())
		else:
			self.db_update()
		self.update_children()
		self.run_post_save_methods()
		# clear unsaved flag
		if hasattr(self, "__unsaved"):
			delattr(self, "__unsaved")
		return self
def copy_attachments_from_amended_from(self):
"""Copy attachments from `amended_from`"""
from frappe.desk.form.load import get_attachments
#loop through attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
#save attachments to new doc
_file = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
_file.save()
	def update_children(self):
		"""update child tables"""
		# sync each child-table field to the database (insert/update/delete rows)
		for df in self.meta.get_table_fields():
			self.update_child_table(df.fieldname, df)
	def update_child_table(self, fieldname, df=None):
		"""sync child table for given fieldname

		Rows present in the document are updated in place; database rows that are
		no longer in the document are deleted.
		:param fieldname: child-table fieldname to sync
		:param df: [optional] field meta; looked up from meta when omitted"""
		rows = []
		if not df:
			df = self.meta.get_field(fieldname)
		for d in self.get(df.fieldname):
			d.db_update()
			rows.append(d.name)
		if df.options in (self.flags.ignore_children_type or []):
			# do not delete rows for this because of flags
			# hack for docperm :(
			return
		# note: df.options (the child doctype name) comes from trusted meta,
		# not user input; row values are bound as SQL parameters
		if rows:
			# select rows that do not match the ones in the document
			deleted_rows = frappe.db.sql("""select name from `tab{0}` where parent=%s
				and parenttype=%s and parentfield=%s
				and name not in ({1})""".format(df.options, ','.join(['%s'] * len(rows))),
					[self.name, self.doctype, fieldname] + rows)
			if len(deleted_rows) > 0:
				# delete rows that do not match the ones in the document
				frappe.db.sql("""delete from `tab{0}` where name in ({1})""".format(df.options,
					','.join(['%s'] * len(deleted_rows))), tuple(row[0] for row in deleted_rows))
		else:
			# no rows found, delete all rows
			frappe.db.sql("""delete from `tab{0}` where parent=%s
				and parenttype=%s and parentfield=%s""".format(df.options),
				(self.name, self.doctype, fieldname))
	def get_doc_before_save(self):
		"""Return the pre-save snapshot loaded by `load_doc_before_save`, or None."""
		return getattr(self, '_doc_before_save', None)
def has_value_changed(self, fieldname):
'''Returns true if value is changed before and after saving'''
previous = self.get_doc_before_save()
return previous.get(fieldname)!=self.get(fieldname) if previous else True
	def set_new_name(self, force=False, set_name=None, set_child_names=True):
		"""Calls `frappe.naming.set_new_name` for parent and child docs.

		:param force: re-run naming even if a name was already set
		:param set_name: explicit name to use instead of the autoname rules
		:param set_child_names: also name child rows (default True)"""
		if self.flags.name_set and not force:
			return
		# If autoname has set as Prompt (name)
		if self.get("__newname"):
			self.name = self.get("__newname")
			self.flags.name_set = True
			return
		if set_name:
			self.name = set_name
		else:
			set_new_name(self)
		if set_child_names:
			# set name for children
			for d in self.get_all_children():
				set_new_name(d)
		self.flags.name_set = True
def get_title(self):
"""Get the document title based on title_field or `title` or `name`"""
return self.get(self.meta.get_title_field())
	def set_title_field(self):
		"""Set title field based on template"""
		def get_values():
			# dict of field values with None replaced by "" so str.format is safe
			values = self.as_dict()
			# format values
			for key, value in iteritems(values):
				if value==None:
					values[key] = ""
			return values
		if self.meta.get("title_field")=="title":
			df = self.meta.get_field(self.meta.title_field)
			if df.options:
				# options holds a format template, e.g. "{customer} - {date}"
				self.set(df.fieldname, df.options.format(**get_values()))
			elif self.is_new() and not self.get(df.fieldname) and df.default:
				# set default title for new transactions (if default)
				self.set(df.fieldname, df.default.format(**get_values()))
	def update_single(self, d):
		"""Updates values for Single type Document in `tabSingles`.

		:param d: dict of field/value pairs to store"""
		# Singles are stored as (doctype, field, value) rows: wipe and re-insert
		frappe.db.sql("""delete from `tabSingles` where doctype=%s""", self.doctype)
		for field, value in iteritems(d):
			if field != "doctype":
				frappe.db.sql("""insert into `tabSingles` (doctype, field, value)
					values (%s, %s, %s)""", (self.doctype, field, value))
		# invalidate the cached single values
		if self.doctype in frappe.db.value_cache:
			del frappe.db.value_cache[self.doctype]
	def set_user_and_timestamp(self):
		"""Stamp `modified`/`modified_by` (and `creation`/`owner` if unset) on self and children."""
		# remember the incoming timestamp for the optimistic-concurrency check
		self._original_modified = self.modified
		self.modified = now()
		self.modified_by = frappe.session.user
		if not self.creation:
			self.creation = self.modified
		if not self.owner:
			self.owner = self.modified_by
		for d in self.get_all_children():
			d.modified = self.modified
			d.modified_by = self.modified_by
			if not d.owner:
				d.owner = self.owner
			if not d.creation:
				d.creation = self.creation
		# mark as in-flight so db_set won't bump `modified` during post-save hooks
		frappe.flags.currently_saving.append((self.doctype, self.name))
def set_docstatus(self):
if self.docstatus==None:
self.docstatus=0
for d in self.get_all_children():
d.docstatus = self.docstatus
	def _validate(self):
		"""Run all built-in field validations on self and every child row."""
		self._validate_mandatory()
		self._validate_data_fields()
		self._validate_selects()
		self._validate_non_negative()
		self._validate_length()
		self._extract_images_from_text_editor()
		self._sanitize_content()
		self._save_passwords()
		self.validate_workflow()
		children = self.get_all_children()
		for d in children:
			d._validate_data_fields()
			d._validate_selects()
			d._validate_non_negative()
			d._validate_length()
			d._extract_images_from_text_editor()
			d._sanitize_content()
			d._save_passwords()
		if self.is_new():
			# don't set fields like _assign, _comments for new doc
			for fieldname in optional_fields:
				self.set(fieldname, None)
		else:
			self.validate_set_only_once()
	def _validate_non_negative(self):
		"""Throw NonNegativeError if any field flagged `non_negative` holds a negative value."""
		def get_msg(df):
			if self.parentfield:
				# child row: include the row number in the message
				return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
					_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(df.label)))
			else:
				return _("Value cannot be negative for {0}: {1}").format(_(df.parent), frappe.bold(_(df.label)))
		for df in self.meta.get('fields', {'non_negative': ('=', 1),
			'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
			if flt(self.get(df.fieldname)) < 0:
				msg = get_msg(df)
				frappe.throw(msg, frappe.NonNegativeError, title=_("Negative Value"))
	def validate_workflow(self):
		"""Validate if the workflow transition is valid"""
		if frappe.flags.in_install == 'frappe': return
		workflow = self.meta.get_workflow()
		if workflow:
			# calls the module-level frappe.model.workflow.validate_workflow
			# (this method shadows that imported name inside the class)
			validate_workflow(self)
			if not self._action == 'save':
				set_workflow_state_on_action(self, workflow, self._action)
	def validate_set_only_once(self):
		"""Validate that fields are not changed if not in insert"""
		set_only_once_fields = self.meta.get_set_only_once_fields()
		if set_only_once_fields and self._doc_before_save:
			# document exists before saving
			for field in set_only_once_fields:
				fail = False
				value = self.get(field.fieldname)
				original_value = self._doc_before_save.get(field.fieldname)
				if field.fieldtype in table_fields:
					fail = not self.is_child_table_same(field.fieldname)
				elif field.fieldtype in ('Date', 'Datetime', 'Time'):
					# compare as strings to sidestep date vs str representation differences
					fail = str(value) != str(original_value)
				else:
					fail = value != original_value
				if fail:
					frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
						frappe.CannotChangeConstantError)
		return False
	def is_child_table_same(self, fieldname):
		"""Validate child table is same as original table before saving

		:param fieldname: child-table fieldname to compare
		:return: True when row count, order and all values (except timestamps) match"""
		value = self.get(fieldname)
		original_value = self._doc_before_save.get(fieldname)
		same = True
		if len(original_value) != len(value):
			same = False
		else:
			# check all child entries
			for i, d in enumerate(original_value):
				new_child = value[i].as_dict(convert_dates_to_str = True)
				original_child = d.as_dict(convert_dates_to_str = True)
				# all fields must be same other than modified and modified_by
				for key in ('modified', 'modified_by', 'creation'):
					del new_child[key]
					del original_child[key]
				if original_child != new_child:
					same = False
					break
		return same
	def apply_fieldlevel_read_permissions(self):
		"""Remove values the user is not allowed to read (called when loading in desk)"""
		if frappe.session.user == "Administrator":
			return
		# cheap pre-check: skip entirely when no field has a permlevel above 0
		has_higher_permlevel = False
		all_fields = self.meta.fields.copy()
		for table_field in self.meta.get_table_fields():
			all_fields += frappe.get_meta(table_field.options).fields or []
		for df in all_fields:
			if df.permlevel > 0:
				has_higher_permlevel = True
				break
		if not has_higher_permlevel:
			return
		has_access_to = self.get_permlevel_access('read')
		# blank out parent fields the user's permlevels don't cover
		for df in self.meta.fields:
			if df.permlevel and not df.permlevel in has_access_to:
				self.set(df.fieldname, None)
		# same for every row of every child table
		for table_field in self.meta.get_table_fields():
			for df in frappe.get_meta(table_field.options).fields or []:
				if df.permlevel and not df.permlevel in has_access_to:
					for child in self.get(table_field.fieldname) or []:
						child.set(df.fieldname, None)
	def validate_higher_perm_levels(self):
		"""If the user does not have permissions at permlevel > 0, then reset the values to original / default"""
		if self.flags.ignore_permissions or frappe.flags.in_install:
			return
		if frappe.session.user == "Administrator":
			return
		has_access_to = self.get_permlevel_access()
		high_permlevel_fields = self.meta.get_high_permlevel_fields()
		if high_permlevel_fields:
			self.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
		# If new record then don't reset the values for child table
		if self.is_new(): return
		# check for child tables
		for df in self.meta.get_table_fields():
			high_permlevel_fields = frappe.get_meta(df.options).get_high_permlevel_fields()
			if high_permlevel_fields:
				for d in self.get(df.fieldname):
					d.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
	def get_permlevel_access(self, permission_type='write'):
		"""Return the list of permlevels the session user's roles grant for `permission_type`.

		Results are cached per permission_type on `self._has_access_to`."""
		if not hasattr(self, "_has_access_to"):
			self._has_access_to = {}
		# NOTE(review): this recomputes (and overwrites) the cached list on every
		# call for the given permission_type — confirm whether that is intended
		self._has_access_to[permission_type] = []
		roles = frappe.get_roles()
		for perm in self.get_permissions():
			if perm.role in roles and perm.get(permission_type):
				if perm.permlevel not in self._has_access_to[permission_type]:
					self._has_access_to[permission_type].append(perm.permlevel)
		return self._has_access_to[permission_type]
def has_permlevel_access_to(self, fieldname, df=None, permission_type='read'):
if not df:
df = self.meta.get_field(fieldname)
return df.permlevel in self.get_permlevel_access(permission_type)
def get_permissions(self):
if self.meta.istable:
# use parent permissions
permissions = frappe.get_meta(self.parenttype).permissions
else:
permissions = self.meta.permissions
return permissions
	def _set_defaults(self):
		"""Fill unset fields (on self and child rows) with the doctype's default values."""
		if frappe.flags.in_import:
			return
		new_doc = frappe.new_doc(self.doctype, as_dict=True)
		self.update_if_missing(new_doc)
		# children
		for df in self.meta.get_table_fields():
			new_doc = frappe.new_doc(df.options, as_dict=True)
			value = self.get(df.fieldname)
			if isinstance(value, list):
				for d in value:
					d.update_if_missing(new_doc)
	def check_if_latest(self):
		"""Checks if `modified` timestamp provided by document being updated is same as the
		`modified` timestamp in the database. If there is a different, the document has been
		updated in the database after the current copy was read. Will throw an error if
		timestamps don't match.

		Will also validate document transitions (Save > Submit > Cancel) calling
		`self.check_docstatus_transition`."""
		conflict = False
		self._action = "save"
		if not self.get('__islocal'):
			if self.meta.issingle:
				# Singles keep `modified` as a row in tabSingles; lock it for update
				modified = frappe.db.sql("""select value from tabSingles
					where doctype=%s and field='modified' for update""", self.doctype)
				modified = modified and modified[0][0]
				if modified and modified != cstr(self._original_modified):
					conflict = True
			else:
				tmp = frappe.db.sql("""select modified, docstatus from `tab{0}`
					where name = %s for update""".format(self.doctype), self.name, as_dict=True)
				if not tmp:
					frappe.throw(_("Record does not exist"))
				else:
					tmp = tmp[0]
				modified = cstr(tmp.modified)
				if modified and modified != cstr(self._original_modified):
					conflict = True
				self.check_docstatus_transition(tmp.docstatus)
			if conflict:
				frappe.msgprint(_("Error: Document has been modified after you have opened it") \
				+ (" (%s, %s). " % (modified, self.modified)) \
				+ _("Please refresh to get the latest document."),
					raise_exception=frappe.TimestampMismatchError)
		else:
			# brand-new document: validate transition from Draft (0)
			self.check_docstatus_transition(0)
	def check_docstatus_transition(self, docstatus):
		"""Ensures valid `docstatus` transition.
		Valid transitions are (number in brackets is `docstatus`):

		- Save (0) > Save (0)
		- Save (0) > Submit (1)
		- Submit (1) > Submit (1)
		- Submit (1) > Cancel (2)

		:param docstatus: the docstatus currently stored in the database
		Sets `self._action` and checks submit/cancel permission as a side effect."""
		if not self.docstatus:
			self.docstatus = 0
		if docstatus==0:
			if self.docstatus==0:
				self._action = "save"
			elif self.docstatus==1:
				self._action = "submit"
				self.check_permission("submit")
			else:
				raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 0 to 2"))
		elif docstatus==1:
			if self.docstatus==1:
				self._action = "update_after_submit"
				self.check_permission("submit")
			elif self.docstatus==2:
				self._action = "cancel"
				self.check_permission("cancel")
			else:
				raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 1 to 0"))
		elif docstatus==2:
			raise frappe.ValidationError(_("Cannot edit cancelled document"))
def set_parent_in_children(self):
"""Updates `parent` and `parenttype` property in all children."""
for d in self.get_all_children():
d.parent = self.name
d.parenttype = self.doctype
def set_name_in_children(self):
# Set name for any new children
for d in self.get_all_children():
if not d.name:
set_new_name(d)
def validate_update_after_submit(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for d in self.get_all_children():
if d.is_new() and self.meta.get_field(d.parentfield).allow_on_submit:
# in case of a new row, don't validate allow on submit, if table is allow on submit
continue
d._validate_update_after_submit()
# TODO check only allowed values are updated
def _validate_mandatory(self):
if self.flags.ignore_mandatory:
return
missing = self._get_missing_mandatory_fields()
for d in self.get_all_children():
missing.extend(d._get_missing_mandatory_fields())
if not missing:
return
for fieldname, msg in missing:
msgprint(msg)
if frappe.flags.print_messages:
print(self.as_json().encode("utf-8"))
raise frappe.MandatoryError('[{doctype}, {name}]: {fields}'.format(
fields=", ".join((each[0] for each in missing)),
doctype=self.doctype,
name=self.name))
def _validate_links(self):
if self.flags.ignore_links or self._action == "cancel":
return
invalid_links, cancelled_links = self.get_invalid_links()
for d in self.get_all_children():
result = d.get_invalid_links(is_submittable=self.meta.is_submittable)
invalid_links.extend(result[0])
cancelled_links.extend(result[1])
if invalid_links:
msg = ", ".join((each[2] for each in invalid_links))
frappe.throw(_("Could not find {0}").format(msg),
frappe.LinkValidationError)
if cancelled_links:
msg = ", ".join((each[2] for each in cancelled_links))
frappe.throw(_("Cannot link cancelled document: {0}").format(msg),
frappe.CancelledLinkError)
def get_all_children(self, parenttype=None):
"""Returns all children documents from **Table** type field in a list."""
ret = []
for df in self.meta.get("fields", {"fieldtype": ['in', table_fields]}):
if parenttype:
if df.options==parenttype:
return self.get(df.fieldname)
value = self.get(df.fieldname)
if isinstance(value, list):
ret.extend(value)
return ret
def run_method(self, method, *args, **kwargs):
"""run standard triggers, plus those in hooks"""
if "flags" in kwargs:
del kwargs["flags"]
if hasattr(self, method) and hasattr(getattr(self, method), "__call__"):
fn = lambda self, *args, **kwargs: getattr(self, method)(*args, **kwargs)
else:
# hack! to run hooks even if method does not exist
fn = lambda self, *args, **kwargs: None
fn.__name__ = str(method)
out = Document.hook(fn)(self, *args, **kwargs)
self.run_notifications(method)
run_webhooks(self, method)
run_server_script_for_doc_event(self, method)
return out
def run_trigger(self, method, *args, **kwargs):
return self.run_method(method, *args, **kwargs)
def run_notifications(self, method):
"""Run notifications for this method"""
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
alerts = frappe.cache().hget('notifications', self.doctype)
if alerts==None:
alerts = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, alerts)
self.flags.notifications = alerts
if not self.flags.notifications:
return
def _evaluate_alert(alert):
if not alert.name in self.flags.notifications_executed:
evaluate_alert(self, alert.name, alert.event)
self.flags.notifications_executed.append(alert.name)
event_map = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
# value change is not applicable in insert
event_map['on_change'] = 'Value Change'
for alert in self.flags.notifications:
event = event_map.get(method, None)
if event and alert.event == event:
_evaluate_alert(alert)
elif alert.event=='Method' and method == alert.method:
_evaluate_alert(alert)
@whitelist.__func__
def _submit(self):
"""Submit the document. Sets `docstatus` = 1, then saves."""
self.docstatus = 1
self.save()
@whitelist.__func__
def _cancel(self):
"""Cancel the document. Sets `docstatus` = 2, then saves."""
self.docstatus = 2
self.save()
@whitelist.__func__
def submit(self):
"""Submit the document. Sets `docstatus` = 1, then saves."""
self._submit()
@whitelist.__func__
def cancel(self):
"""Cancel the document. Sets `docstatus` = 2, then saves."""
self._cancel()
def delete(self, ignore_permissions=False):
"""Delete document."""
frappe.delete_doc(self.doctype, self.name, ignore_permissions = ignore_permissions, flags=self.flags)
def run_before_save_methods(self):
"""Run standard methods before `INSERT` or `UPDATE`. Standard Methods are:
- `validate`, `before_save` for **Save**.
- `validate`, `before_submit` for **Submit**.
- `before_cancel` for **Cancel**
- `before_update_after_submit` for **Update after Submit**
Will also update title_field if set"""
self.load_doc_before_save()
self.reset_seen()
# before_validate method should be executed before ignoring validations
if self._action in ("save", "submit"):
self.run_method("before_validate")
if self.flags.ignore_validate:
return
if self._action=="save":
self.run_method("validate")
self.run_method("before_save")
elif self._action=="submit":
self.run_method("validate")
self.run_method("before_submit")
elif self._action=="cancel":
self.run_method("before_cancel")
elif self._action=="update_after_submit":
self.run_method("before_update_after_submit")
self.set_title_field()
def load_doc_before_save(self):
"""Save load document from db before saving"""
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
def run_post_save_methods(self):
"""Run standard methods after `INSERT` or `UPDATE`. Standard Methods are:
- `on_update` for **Save**.
- `on_update`, `on_submit` for **Submit**.
- `on_cancel` for **Cancel**
- `update_after_submit` for **Update after Submit**"""
doc_before_save = self.get_doc_before_save()
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
update_global_search(self)
self.save_version()
self.run_method('on_change')
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
def clear_cache(self):
frappe.clear_document_cache(self.doctype, self.name)
def reset_seen(self):
"""Clear _seen property and set current user as seen"""
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), update_modified=False)
def notify_update(self):
"""Publish realtime that the current document is modified"""
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
doctype=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
data = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", data, after_commit=True)
def db_set(self, fieldname, value=None, update_modified=True, notify=False, commit=False):
"""Set a value in the document object, update the timestamp and update the database.
WARNING: This method does not trigger controller validations and should
be used very carefully.
:param fieldname: fieldname of the property to be updated, or a {"field":"value"} dictionary
:param value: value of the property to be updated
:param update_modified: default True. updates the `modified` and `modified_by` properties
:param notify: default False. run doc.notify_updated() to send updates via socketio
:param commit: default False. run frappe.db.commit()
"""
if isinstance(fieldname, dict):
self.update(fieldname)
else:
self.set(fieldname, value)
if update_modified and (self.doctype, self.name) not in frappe.flags.currently_saving:
# don't update modified timestamp if called from post save methods
# like on_update or on_submit
self.set("modified", now())
self.set("modified_by", frappe.session.user)
self.load_doc_before_save()
# to trigger notification on value change
self.run_method('before_change')
frappe.db.set_value(self.doctype, self.name, fieldname, value,
self.modified, self.modified_by, update_modified=update_modified)
self.run_method('on_change')
if notify:
self.notify_update()
self.clear_cache()
if commit:
frappe.db.commit()
def db_get(self, fieldname):
"""get database value for this fieldname"""
return frappe.db.get_value(self.doctype, self.name, fieldname)
def check_no_back_links_exist(self):
"""Check if document links to any active document before Cancel."""
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, method="Cancel")
check_if_doc_is_dynamically_linked(self, method="Cancel")
def save_version(self):
"""Save version info"""
# don't track version under following conditions
if (not getattr(self.meta, 'track_changes', False)
or self.doctype == 'Version'
or self.flags.ignore_version
or frappe.flags.in_install
or (not self._doc_before_save and frappe.flags.in_patch)):
return
version = frappe.new_doc('Version')
if not self._doc_before_save:
version.for_insert(self)
version.insert(ignore_permissions=True)
elif version.set_diff(self._doc_before_save, self):
version.insert(ignore_permissions=True)
if not frappe.flags.in_migrate:
# follow since you made a change?
follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def hook(f):
"""Decorator: Make method `hookable` (i.e. extensible by another app).
Note: If each hooked method returns a value (dict), then all returns are
collated in one dict and returned. Ideally, don't return values in hookable
methods, set properties in the document."""
def add_to_return_value(self, new_return_value):
if isinstance(new_return_value, dict):
if not self.get("_return_value"):
self._return_value = {}
self._return_value.update(new_return_value)
else:
self._return_value = new_return_value or self.get("_return_value")
def compose(fn, *hooks):
def runner(self, method, *args, **kwargs):
add_to_return_value(self, fn(self, *args, **kwargs))
for f in hooks:
add_to_return_value(self, f(self, method, *args, **kwargs))
return self._return_value
return runner
def composer(self, *args, **kwargs):
hooks = []
method = f.__name__
doc_events = frappe.get_doc_hooks()
for handler in doc_events.get(self.doctype, {}).get(method, []) \
+ doc_events.get("*", {}).get(method, []):
hooks.append(frappe.get_attr(handler))
composed = compose(f, *hooks)
return composed(self, method, *args, **kwargs)
return composer
def is_whitelisted(self, method_name):
method = getattr(self, method_name, None)
if not fn:
raise NotFound("Method {0} not found".format(method_name))
is_whitelisted(getattr(method, '__func__', method))
def validate_value(self, fieldname, condition, val2, doc=None, raise_exception=None):
"""Check that value of fieldname should be 'condition' val2
else throw Exception."""
error_condition_map = {
"in": _("one of"),
"not in": _("none of"),
"^": _("beginning with"),
}
if not doc:
doc = self
val1 = doc.get_value(fieldname)
df = doc.meta.get_field(fieldname)
val2 = doc.cast(val2, df)
if not frappe.compare(val1, condition, val2):
label = doc.meta.get_label(fieldname)
condition_str = error_condition_map.get(condition, condition)
if doc.parentfield:
msg = _("Incorrect value in row {0}: {1} must be {2} {3}").format(doc.idx, label, condition_str, val2)
else:
msg = _("Incorrect value: {0} must be {1} {2}").format(label, condition_str, val2)
# raise passed exception or True
msgprint(msg, raise_exception=raise_exception or True)
def validate_table_has_rows(self, parentfield, raise_exception=None):
"""Raise exception if Table field is empty."""
if not (isinstance(self.get(parentfield), list) and len(self.get(parentfield)) > 0):
label = self.meta.get_label(parentfield)
frappe.throw(_("Table {0} cannot be empty").format(label), raise_exception or frappe.EmptyTableError)
def round_floats_in(self, doc, fieldnames=None):
"""Round floats for all `Currency`, `Float`, `Percent` fields for the given doc.
:param doc: Document whose numeric properties are to be rounded.
:param fieldnames: [Optional] List of fields to be rounded."""
if not fieldnames:
fieldnames = (df.fieldname for df in
doc.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for fieldname in fieldnames:
doc.set(fieldname, flt(doc.get(fieldname), self.precision(fieldname, doc.parentfield)))
def get_url(self):
"""Returns Desk URL for this document. `/app/Form/{doctype}/{name}`"""
return "/app/Form/{doctype}/{name}".format(doctype=self.doctype, name=self.name)
def add_comment(self, comment_type='Comment', text=None, comment_email=None, link_doctype=None, link_name=None, comment_by=None):
"""Add a comment to this document.
:param comment_type: e.g. `Comment`. See Communication for more info."""
out = frappe.get_doc({
"doctype":"Comment",
'comment_type': comment_type,
"comment_email": comment_email or frappe.session.user,
"comment_by": comment_by,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": text or comment_type,
"link_doctype": link_doctype,
"link_name": link_name
}).insert(ignore_permissions=True)
return out
def add_seen(self, user=None):
"""add the given/current user to list of users who have seen this document (_seen)"""
if not user:
user = frappe.session.user
if self.meta.track_seen:
_seen = self.get('_seen') or []
_seen = frappe.parse_json(_seen)
if user not in _seen:
_seen.append(user)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(_seen), update_modified=False)
frappe.local.flags.commit = True
def add_viewed(self, user=None):
"""add log to communication when a user views a document"""
if not user:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(ignore_permissions=True)
frappe.local.flags.commit = True
def get_signature(self):
"""Returns signature (hash) for private URL."""
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def get_liked_by(self):
liked_by = getattr(self, "_liked_by", None)
if liked_by:
return json.loads(liked_by)
else:
return []
def set_onload(self, key, value):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[key] = value
def get_onload(self, key=None):
if not key:
return self.get("__onload", frappe._dict())
return self.get('__onload')[key]
def queue_action(self, action, **kwargs):
"""Run an action in background. If the action has an inner function,
like _submit for submit, it will call that instead"""
# call _submit instead of submit, so you can override submit to call
# run_delayed based on some action
# See: Stock Reconciliation
from frappe.utils.background_jobs import enqueue
if hasattr(self, '_' + action):
action = '_' + action
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'))
self.lock()
enqueue('frappe.model.document.execute_action', doctype=self.doctype, name=self.name,
action=action, **kwargs)
def lock(self, timeout=None):
"""Creates a lock file for the given document. If timeout is set,
it will retry every 1 second for acquiring the lock again
:param timeout: Timeout in seconds, default 0"""
signature = self.get_signature()
if file_lock.lock_exists(signature):
lock_exists = True
if timeout:
for i in range(timeout):
time.sleep(1)
if not file_lock.lock_exists(signature):
lock_exists = False
break
if lock_exists:
raise frappe.DocumentLockedError
file_lock.create_lock(signature)
def unlock(self):
"""Delete the lock file for this document"""
file_lock.delete_lock(self.get_signature())
# validation helpers
def validate_from_to_dates(self, from_date_field, to_date_field):
"""
Generic validation to verify date sequence
"""
if date_diff(self.get(to_date_field), self.get(from_date_field)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(to_date_field)),
frappe.bold(self.meta.get_label(from_date_field)),
), frappe.exceptions.InvalidDates)
def get_assigned_users(self):
assignments = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
users = set([assignment.owner for assignment in assignments])
return users
def add_tag(self, tag):
"""Add a Tag to this document"""
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, tag)
def get_tags(self):
"""Return a list of Tags attached to this document"""
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def execute_action(doctype, name, action, **kwargs):
"""Execute an action on a document (called by background worker)"""
doc = frappe.get_doc(doctype, name)
doc.unlock()
try:
getattr(doc, action)(**kwargs)
except Exception:
frappe.db.rollback()
# add a comment (?)
if frappe.local.message_log:
msg = json.loads(frappe.local.message_log[-1]).get('message')
else:
msg = '<pre><code>' + frappe.get_traceback() + '</pre></code>'
doc.add_comment('Comment', _('Action Failed') + '<br><br>' + msg)
doc.notify_update()
| xss | {
"code": [
"from frappe import _, msgprint",
"\tdef whitelist(f):",
"\t\tf.whitelisted = True",
"\t\treturn f",
"\tdef is_whitelisted(self, method):",
"\t\tfn = getattr(self, method, None)",
"\t\t\traise NotFound(\"Method {0} not found\".format(method))",
"\t\telif not getattr(fn, \"whitelisted\", False):",
"\t\t\traise Forbidden(\"Method {0} not whitelisted\".format(method))"
],
"line_no": [
7,
129,
131,
132,
1151,
1152,
1154,
1155,
1156
]
} | {
"code": [
"from frappe import _, msgprint, is_whitelisted",
"\tdef whitelist(fn):",
"\t\tfrappe.whitelist()(fn)",
"\t\treturn fn",
"\tdef is_whitelisted(self, method_name):",
"\t\tmethod = getattr(self, method_name, None)",
"\t\t\traise NotFound(\"Method {0} not found\".format(method_name))",
"\t\tis_whitelisted(getattr(method, '__func__', method))"
],
"line_no": [
7,
129,
131,
132,
1151,
1152,
1154,
1156
]
} |
from __future__ import unicode_literals, print_function
import .frappe
import time
from frappe import _, msgprint
from frappe.utils import .flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import .set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, VAR_81
from frappe.model.workflow import .validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import .follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
def FUNC_0(*VAR_0, **VAR_1):
if VAR_0:
if isinstance(VAR_0[0], BaseDocument):
return VAR_0[0]
elif isinstance(VAR_0[0], string_types):
VAR_2 = VAR_0[0]
elif isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
else:
raise ValueError('First non keyword argument must be a string or dict')
if len(VAR_0) < 2 and VAR_1:
if 'doctype' in VAR_1:
VAR_2 = VAR_1['doctype']
else:
raise ValueError('"doctype" is a required key')
VAR_5 = get_controller(VAR_2)
if VAR_5:
return VAR_5(*VAR_0, **VAR_1)
raise ImportError(VAR_2)
class CLASS_0(BaseDocument):
def __init__(self, *VAR_0, **VAR_1):
self.doctype = self.name = None
self._default_new_docs = {}
self.flags = frappe._dict()
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], string_types):
if len(VAR_0)==1:
self.doctype = self.name = VAR_0[0]
else:
self.doctype = VAR_0[0]
if isinstance(VAR_0[1], dict):
self.name = frappe.db.get_value(VAR_0[0], VAR_0[1], "name")
if self.name is None:
frappe.throw(_("{0} {1} not found").format(_(VAR_0[0]), VAR_0[1]),
frappe.DoesNotExistError)
else:
self.name = VAR_0[1]
if 'for_update' in VAR_1:
self.flags.for_update = VAR_1.get('for_update')
self.load_from_db()
return
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
if VAR_1:
super(CLASS_0, self).__init__(VAR_1)
self.init_valid_columns()
else:
raise ValueError('Illegal arguments')
@staticmethod
def FUNC_2(VAR_6):
VAR_6.whitelisted = True
return VAR_6
def FUNC_3(self):
self.load_from_db()
def FUNC_4(self):
if not getattr(self, "_metaclass", False) and self.meta.issingle:
VAR_80 = frappe.db.get_singles_dict(self.doctype)
if not VAR_80:
single_doc = frappe.new_doc(self.doctype).as_dict()
VAR_80["name"] = self.doctype
del VAR_80["__islocal"]
super(CLASS_0, self).__init__(VAR_80)
self.init_valid_columns()
self._fix_numeric_types()
else:
VAR_21 = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
if not VAR_21:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(CLASS_0, self).__init__(VAR_21)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import DOCTYPE_TABLE_FIELDS
VAR_81 = DOCTYPE_TABLE_FIELDS
else:
VAR_81 = self.meta.get_table_fields()
for VAR_19 in VAR_81:
VAR_50 = frappe.db.get_values(VAR_19.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": VAR_19.fieldname},
"*", as_dict=True, order_by="idx asc")
if VAR_50:
self.set(VAR_19.fieldname, VAR_50)
else:
self.set(VAR_19.fieldname, [])
if hasattr(self, "__setup__"):
self.__setup__()
def FUNC_5(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def FUNC_6(self, VAR_7='read', VAR_8=None):
if not self.has_permission(VAR_7):
self.raise_no_permission_to(VAR_8 or VAR_7)
def FUNC_7(self, VAR_7="read", VAR_9=False):
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, VAR_7, self, VAR_9=verbose)
def FUNC_8(self, VAR_10):
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
def FUNC_9(self, VAR_11=None, VAR_12=None, VAR_13=False,
VAR_14=None, VAR_15=None, VAR_16=True):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
if VAR_12!=None:
self.flags.ignore_links = VAR_12
if VAR_14!=None:
self.flags.ignore_mandatory = VAR_14
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self._validate_links()
self.set_new_name(VAR_15=set_name, VAR_16=set_child_names)
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
try:
self.db_insert()
except frappe.DuplicateEntryError as e:
if not VAR_13:
raise e
for VAR_21 in self.get_all_children():
VAR_21.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
self.flags.update_log_for_doc_creation = True
self.run_post_save_methods()
self.flags.in_insert = False
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
follow_document(self.doctype, self.name, frappe.session.user)
return self
def FUNC_10(self, *VAR_0, **VAR_1):
return self._save(*VAR_0, **VAR_1)
def FUNC_11(self, VAR_11=None, VAR_17=None):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
self.flags.ignore_version = frappe.flags.in_test if VAR_17 is None else VAR_17
if self.get("__islocal") or not self.get("name"):
self.insert()
return
self.check_permission("write", "save")
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.set_parent_in_children()
self.set_name_in_children()
self.validate_higher_perm_levels()
self._validate_links()
self.run_before_save_methods()
if self._action != "cancel":
self._validate()
if self._action == "update_after_submit":
self.validate_update_after_submit()
self.set_docstatus()
if self.meta.issingle:
self.update_single(self.get_valid_dict())
else:
self.db_update()
self.update_children()
self.run_post_save_methods()
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
return self
def FUNC_12(self):
from frappe.desk.form.load import get_attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
VAR_82 = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
VAR_82.save()
def FUNC_13(self):
for VAR_19 in self.meta.get_table_fields():
self.update_child_table(VAR_19.fieldname, VAR_19)
def FUNC_14(self, VAR_18, VAR_19=None):
VAR_48 = []
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.db_update()
VAR_48.append(VAR_21.name)
if VAR_19.options in (self.flags.ignore_children_type or []):
return
if VAR_48:
VAR_83 = frappe.db.sql("""select VAR_3 from `tab{0}` where parent=%s
and VAR_24=%s and VAR_34=%s
and VAR_3 not in ({1})""".format(VAR_19.options, ','.join(['%s'] * len(VAR_48))),
[self.name, self.doctype, VAR_18] + VAR_48)
if len(VAR_83) > 0:
frappe.db.sql("""delete from `tab{0}` where VAR_3 in ({1})""".format(VAR_19.options,
','.join(['%s'] * len(VAR_83))), tuple(row[0] for row in VAR_83))
else:
frappe.db.sql("""delete from `tab{0}` where parent=%s
and VAR_24=%s and VAR_34=%s""".format(VAR_19.options),
(self.name, self.doctype, VAR_18))
def FUNC_15(self):
return getattr(self, '_doc_before_save', None)
def FUNC_16(self, VAR_18):
VAR_49 = self.get_doc_before_save()
return VAR_49.get(VAR_18)!=self.get(VAR_18) if VAR_49 else True
def FUNC_17(self, VAR_20=False, VAR_15=None, VAR_16=True):
if self.flags.name_set and not VAR_20:
return
if self.get("__newname"):
self.name = self.get("__newname")
self.flags.name_set = True
return
if VAR_15:
self.name = VAR_15
else:
FUNC_17(self)
if VAR_16:
for VAR_21 in self.get_all_children():
FUNC_17(VAR_21)
self.flags.name_set = True
def FUNC_18(self):
return self.get(self.meta.get_title_field())
def FUNC_19(self):
def FUNC_80():
VAR_84 = self.as_dict()
for VAR_43, VAR_26 in iteritems(VAR_84):
if VAR_26==None:
VAR_84[VAR_43] = ""
return VAR_84
if self.meta.get("title_field")=="title":
VAR_19 = self.meta.get_field(self.meta.title_field)
if VAR_19.options:
self.set(VAR_19.fieldname, VAR_19.options.format(**FUNC_80()))
elif self.is_new() and not self.get(VAR_19.fieldname) and VAR_19.default:
self.set(VAR_19.fieldname, VAR_19.default.format(**FUNC_80()))
def FUNC_20(self, VAR_21):
frappe.db.sql("""delete from `tabSingles` where VAR_2=%s""", self.doctype)
for field, VAR_26 in iteritems(VAR_21):
if field != "doctype":
frappe.db.sql("""insert into `tabSingles` (VAR_2, field, VAR_26)
VAR_84 (%s, %s, %s)""", (self.doctype, field, VAR_26))
if self.doctype in frappe.db.value_cache:
del frappe.db.value_cache[self.doctype]
def FUNC_21(self):
self._original_modified = self.modified
self.modified = now()
self.modified_by = frappe.session.user
if not self.creation:
self.creation = self.modified
if not self.owner:
self.owner = self.modified_by
for VAR_21 in self.get_all_children():
VAR_21.modified = self.modified
VAR_21.modified_by = self.modified_by
if not VAR_21.owner:
VAR_21.owner = self.owner
if not VAR_21.creation:
VAR_21.creation = self.creation
frappe.flags.currently_saving.append((self.doctype, self.name))
def FUNC_22(self):
if self.docstatus==None:
self.docstatus=0
for VAR_21 in self.get_all_children():
VAR_21.docstatus = self.docstatus
def FUNC_23(self):
self._validate_mandatory()
self._validate_data_fields()
self._validate_selects()
self._validate_non_negative()
self._validate_length()
self._extract_images_from_text_editor()
self._sanitize_content()
self._save_passwords()
self.validate_workflow()
VAR_50 = self.get_all_children()
for VAR_21 in VAR_50:
VAR_21._validate_data_fields()
VAR_21._validate_selects()
VAR_21._validate_non_negative()
VAR_21._validate_length()
VAR_21._extract_images_from_text_editor()
VAR_21._sanitize_content()
VAR_21._save_passwords()
if self.is_new():
for VAR_18 in optional_fields:
self.set(VAR_18, None)
else:
self.validate_set_only_once()
def FUNC_24(self):
def FUNC_81(VAR_19):
if self.parentfield:
return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(VAR_19.label)))
else:
return _("Value cannot be negative for {0}: {1}").format(_(VAR_19.parent), frappe.bold(_(VAR_19.label)))
for VAR_19 in self.meta.get('fields', {'non_negative': ('=', 1),
'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
if flt(self.get(VAR_19.fieldname)) < 0:
VAR_87 = FUNC_81(VAR_19)
frappe.throw(VAR_87, frappe.NonNegativeError, title=_("Negative Value"))
def FUNC_25(self):
if frappe.flags.in_install == 'frappe': return
VAR_51 = self.meta.get_workflow()
if VAR_51:
FUNC_25(self)
if not self._action == 'save':
set_workflow_state_on_action(self, VAR_51, self._action)
def FUNC_26(self):
VAR_52 = self.meta.get_set_only_once_fields()
if VAR_52 and self._doc_before_save:
for field in VAR_52:
VAR_97 = False
VAR_26 = self.get(field.fieldname)
VAR_53 = self._doc_before_save.get(field.fieldname)
if field.fieldtype in VAR_81:
VAR_97 = not self.is_child_table_same(field.fieldname)
elif field.fieldtype in ('Date', 'Datetime', 'Time'):
VAR_97 = str(VAR_26) != str(VAR_53)
else:
VAR_97 = VAR_26 != VAR_53
if VAR_97:
frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
frappe.CannotChangeConstantError)
return False
def FUNC_27(self, VAR_18):
VAR_26 = self.get(VAR_18)
VAR_53 = self._doc_before_save.get(VAR_18)
VAR_54 = True
if len(VAR_53) != len(VAR_26):
VAR_54 = False
else:
for i, VAR_21 in enumerate(VAR_53):
VAR_98 = VAR_26[i].as_dict(convert_dates_to_str = True)
VAR_99 = VAR_21.as_dict(convert_dates_to_str = True)
for VAR_43 in ('modified', 'modified_by', 'creation'):
del VAR_98[VAR_43]
del VAR_99[VAR_43]
if VAR_99 != VAR_98:
VAR_54 = False
break
return VAR_54
def FUNC_28(self):
if frappe.session.user == "Administrator":
return
VAR_55 = False
VAR_56 = self.meta.fields.copy()
for table_field in self.meta.get_table_fields():
VAR_56 += frappe.get_meta(table_field.options).fields or []
for VAR_19 in VAR_56:
if VAR_19.permlevel > 0:
VAR_55 = True
break
if not VAR_55:
return
VAR_57 = self.get_permlevel_access('read')
for VAR_19 in self.meta.fields:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_57:
self.set(VAR_19.fieldname, None)
for table_field in self.meta.get_table_fields():
for VAR_19 in frappe.get_meta(table_field.options).fields or []:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_57:
for child in self.get(table_field.fieldname) or []:
child.set(VAR_19.fieldname, None)
def FUNC_29(self):
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
VAR_57 = self.get_permlevel_access()
VAR_58 = self.meta.get_high_permlevel_fields()
if VAR_58:
self.reset_values_if_no_permlevel_access(VAR_57, VAR_58)
if self.is_new(): return
for VAR_19 in self.meta.get_table_fields():
VAR_58 = frappe.get_meta(VAR_19.options).get_high_permlevel_fields()
if VAR_58:
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.reset_values_if_no_permlevel_access(VAR_57, VAR_58)
def FUNC_30(self, VAR_22='write'):
if not hasattr(self, "_has_access_to"):
self._has_access_to = {}
self._has_access_to[VAR_22] = []
VAR_59 = frappe.get_roles()
for perm in self.get_permissions():
if perm.role in VAR_59 and perm.get(VAR_22):
if perm.permlevel not in self._has_access_to[VAR_22]:
self._has_access_to[VAR_22].append(perm.permlevel)
return self._has_access_to[VAR_22]
def FUNC_31(self, VAR_18, VAR_19=None, VAR_22='read'):
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
return VAR_19.permlevel in self.get_permlevel_access(VAR_22)
def FUNC_32(self):
if self.meta.istable:
permissions = frappe.get_meta(self.parenttype).permissions
else:
VAR_85 = self.meta.permissions
return VAR_85
def FUNC_33(self):
if frappe.flags.in_import:
return
VAR_60 = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(VAR_60)
for VAR_19 in self.meta.get_table_fields():
VAR_60 = frappe.new_doc(VAR_19.options, as_dict=True)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
for VAR_21 in VAR_26:
VAR_21.update_if_missing(VAR_60)
def FUNC_34(self):
VAR_61 = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
VAR_100 = frappe.db.sql("""select VAR_26 from tabSingles
where VAR_2=%s and field='modified' for update""", self.doctype)
VAR_100 = VAR_100 and VAR_100[0][0]
if VAR_100 and VAR_100 != cstr(self._original_modified):
VAR_61 = True
else:
VAR_101 = frappe.db.sql("""select VAR_100, VAR_23 from `tab{0}`
where VAR_3 = %s for update""".format(self.doctype), self.name, as_dict=True)
if not VAR_101:
frappe.throw(_("Record does not exist"))
else:
VAR_101 = tmp[0]
VAR_100 = cstr(VAR_101.modified)
if VAR_100 and VAR_100 != cstr(self._original_modified):
VAR_61 = True
self.check_docstatus_transition(VAR_101.docstatus)
if VAR_61:
frappe.msgprint(_("Error: CLASS_0 has been VAR_100 after you have opened it") \
+ (" (%s, %s). " % (VAR_100, self.modified)) \
+ _("Please refresh to get the latest document."),
VAR_33=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
def FUNC_35(self, VAR_23):
if not self.docstatus:
self.docstatus = 0
if VAR_23==0:
if self.docstatus==0:
self._action = "save"
elif self.docstatus==1:
self._action = "submit"
self.check_permission("submit")
else:
raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 0 to 2"))
elif VAR_23==1:
if self.docstatus==1:
self._action = "update_after_submit"
self.check_permission("submit")
elif self.docstatus==2:
self._action = "cancel"
self.check_permission("cancel")
else:
raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 1 to 0"))
elif VAR_23==2:
raise frappe.ValidationError(_("Cannot edit cancelled document"))
def FUNC_36(self):
for VAR_21 in self.get_all_children():
VAR_21.parent = self.name
VAR_21.parenttype = self.doctype
def FUNC_37(self):
for VAR_21 in self.get_all_children():
if not VAR_21.name:
FUNC_17(VAR_21)
def FUNC_38(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for VAR_21 in self.get_all_children():
if VAR_21.is_new() and self.meta.get_field(VAR_21.parentfield).allow_on_submit:
continue
VAR_21._validate_update_after_submit()
def FUNC_39(self):
if self.flags.ignore_mandatory:
return
VAR_62 = self._get_missing_mandatory_fields()
for VAR_21 in self.get_all_children():
VAR_62.extend(VAR_21._get_missing_mandatory_fields())
if not VAR_62:
return
for VAR_18, VAR_87 in VAR_62:
msgprint(VAR_87)
if frappe.flags.print_messages:
print(self.as_json().encode("utf-8"))
raise frappe.MandatoryError('[{VAR_2}, {VAR_3}]: {fields}'.format(
fields=", ".join((each[0] for each in VAR_62)),
VAR_2=self.doctype,
VAR_3=self.name))
def FUNC_40(self):
if self.flags.ignore_links or self._action == "cancel":
return
VAR_63, VAR_64 = self.get_invalid_links()
for VAR_21 in self.get_all_children():
VAR_86 = VAR_21.get_invalid_links(is_submittable=self.meta.is_submittable)
VAR_63.extend(VAR_86[0])
VAR_64.extend(VAR_86[1])
if VAR_63:
VAR_87 = ", ".join((each[2] for each in VAR_63))
frappe.throw(_("Could not find {0}").format(VAR_87),
frappe.LinkValidationError)
if VAR_64:
VAR_87 = ", ".join((each[2] for each in VAR_64))
frappe.throw(_("Cannot link cancelled document: {0}").format(VAR_87),
frappe.CancelledLinkError)
def FUNC_41(self, VAR_24=None):
VAR_65 = []
for VAR_19 in self.meta.get("fields", {"fieldtype": ['in', VAR_81]}):
if VAR_24:
if VAR_19.options==VAR_24:
return self.get(VAR_19.fieldname)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
VAR_65.extend(VAR_26)
return VAR_65
def FUNC_42(self, VAR_25, *VAR_0, **VAR_1):
if "flags" in VAR_1:
del VAR_1["flags"]
if hasattr(self, VAR_25) and hasattr(getattr(self, VAR_25), "__call__"):
VAR_72 = lambda self, *VAR_0, **VAR_1: getattr(self, VAR_25)(*VAR_0, **VAR_1)
else:
VAR_72 = lambda self, *VAR_0, **VAR_1: None
VAR_72.__name__ = str(VAR_25)
VAR_66 = CLASS_0.hook(VAR_72)(self, *VAR_0, **VAR_1)
self.run_notifications(VAR_25)
run_webhooks(self, VAR_25)
run_server_script_for_doc_event(self, VAR_25)
return VAR_66
def FUNC_43(self, VAR_25, *VAR_0, **VAR_1):
return self.run_method(VAR_25, *VAR_0, **VAR_1)
def FUNC_44(self, VAR_25):
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
VAR_88 = frappe.cache().hget('notifications', self.doctype)
if VAR_88==None:
VAR_88 = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, VAR_88)
self.flags.notifications = VAR_88
if not self.flags.notifications:
return
def FUNC_82(VAR_67):
if not VAR_67.name in self.flags.notifications_executed:
evaluate_alert(self, VAR_67.name, VAR_67.event)
self.flags.notifications_executed.append(VAR_67.name)
VAR_68 = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
VAR_68['on_change'] = 'Value Change'
for VAR_67 in self.flags.notifications:
VAR_89 = VAR_68.get(VAR_25, None)
if VAR_89 and VAR_67.event == VAR_89:
FUNC_82(VAR_67)
elif VAR_67.event=='Method' and VAR_25 == VAR_67.method:
FUNC_82(VAR_67)
@FUNC_2.__func__
def FUNC_45(self):
self.docstatus = 1
self.save()
@FUNC_2.__func__
def FUNC_46(self):
self.docstatus = 2
self.save()
@FUNC_2.__func__
def FUNC_47(self):
self._submit()
@FUNC_2.__func__
def FUNC_48(self):
self._cancel()
def FUNC_49(self, VAR_11=False):
frappe.delete_doc(self.doctype, self.name, VAR_11 = ignore_permissions, flags=self.flags)
def FUNC_50(self):
self.load_doc_before_save()
self.reset_seen()
if self._action in ("save", "submit"):
self.run_method("before_validate")
if self.flags.ignore_validate:
return
if self._action=="save":
self.run_method("validate")
self.run_method("before_save")
elif self._action=="submit":
self.run_method("validate")
self.run_method("before_submit")
elif self._action=="cancel":
self.run_method("before_cancel")
elif self._action=="update_after_submit":
self.run_method("before_update_after_submit")
self.set_title_field()
def FUNC_51(self):
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
def FUNC_52(self):
VAR_69 = self.get_doc_before_save()
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
update_global_search(self)
self.save_version()
self.run_method('on_change')
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
def FUNC_53(self):
frappe.clear_document_cache(self.doctype, self.name)
def FUNC_54(self):
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), VAR_27=False)
def FUNC_55(self):
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
VAR_2=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
VAR_90 = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", VAR_90, after_commit=True)
def FUNC_56(self, VAR_18, VAR_26=None, VAR_27=True, VAR_28=False, VAR_29=False):
if isinstance(VAR_18, dict):
self.update(VAR_18)
else:
self.set(VAR_18, VAR_26)
if VAR_27 and (self.doctype, self.name) not in frappe.flags.currently_saving:
self.set("modified", now())
self.set("modified_by", frappe.session.user)
self.load_doc_before_save()
self.run_method('before_change')
frappe.db.set_value(self.doctype, self.name, VAR_18, VAR_26,
self.modified, self.modified_by, VAR_27=update_modified)
self.run_method('on_change')
if VAR_28:
self.notify_update()
self.clear_cache()
if VAR_29:
frappe.db.commit()
def FUNC_57(self, VAR_18):
return frappe.db.get_value(self.doctype, self.name, VAR_18)
def FUNC_58(self):
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, VAR_25="Cancel")
check_if_doc_is_dynamically_linked(self, VAR_25="Cancel")
def FUNC_59(self):
if (not getattr(self.meta, 'track_changes', False)
or self.doctype == 'Version'
or self.flags.ignore_version
or frappe.flags.in_install
or (not self._doc_before_save and frappe.flags.in_patch)):
return
VAR_70 = frappe.new_doc('Version')
if not self._doc_before_save:
VAR_70.for_insert(self)
VAR_70.insert(VAR_11=True)
elif VAR_70.set_diff(self._doc_before_save, self):
VAR_70.insert(VAR_11=True)
if not frappe.flags.in_migrate:
follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def FUNC_60(VAR_6):
def FUNC_83(self, VAR_71):
if isinstance(VAR_71, dict):
if not self.get("_return_value"):
self._return_value = {}
self._return_value.update(VAR_71)
else:
self._return_value = VAR_71 or self.get("_return_value")
def FUNC_84(VAR_72, *VAR_73):
def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):
FUNC_83(self, VAR_72(self, *VAR_0, **VAR_1))
for VAR_6 in VAR_73:
FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **VAR_1))
return self._return_value
return FUNC_86
def FUNC_85(self, *VAR_0, **VAR_1):
VAR_73 = []
VAR_25 = VAR_6.__name__
VAR_91 = frappe.get_doc_hooks()
for handler in VAR_91.get(self.doctype, {}).get(VAR_25, []) \
+ VAR_91.get("*", {}).get(VAR_25, []):
VAR_73.append(frappe.get_attr(handler))
VAR_92 = FUNC_84(VAR_6, *VAR_73)
return VAR_92(self, VAR_25, *VAR_0, **VAR_1)
return FUNC_85
def FUNC_61(self, VAR_25):
VAR_72 = getattr(self, VAR_25, None)
if not VAR_72:
raise NotFound("Method {0} not found".format(VAR_25))
elif not getattr(VAR_72, "whitelisted", False):
raise Forbidden("Method {0} not whitelisted".format(VAR_25))
def FUNC_62(self, VAR_18, VAR_30, VAR_31, VAR_32=None, VAR_33=None):
VAR_74 = {
"in": _("one of"),
"not in": _("none of"),
"^": _("beginning with"),
}
if not VAR_32:
doc = self
VAR_75 = VAR_32.get_value(VAR_18)
VAR_19 = VAR_32.meta.get_field(VAR_18)
VAR_31 = VAR_32.cast(VAR_31, VAR_19)
if not frappe.compare(VAR_75, VAR_30, VAR_31):
VAR_93 = VAR_32.meta.get_label(VAR_18)
VAR_94 = VAR_74.get(VAR_30, condition)
if VAR_32.parentfield:
VAR_87 = _("Incorrect VAR_26 in row {0}: {1} must be {2} {3}").format(VAR_32.idx, VAR_93, VAR_94, VAR_31)
else:
VAR_87 = _("Incorrect VAR_26: {0} must be {1} {2}").format(VAR_93, VAR_94, VAR_31)
msgprint(VAR_87, VAR_33=raise_exception or True)
def FUNC_63(self, VAR_34, VAR_33=None):
if not (isinstance(self.get(VAR_34), list) and len(self.get(VAR_34)) > 0):
VAR_93 = self.meta.get_label(VAR_34)
frappe.throw(_("Table {0} cannot be empty").format(VAR_93), VAR_33 or frappe.EmptyTableError)
def FUNC_64(self, VAR_32, VAR_35=None):
if not VAR_35:
fieldnames = (VAR_19.fieldname for VAR_19 in
VAR_32.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for VAR_18 in VAR_35:
VAR_32.set(VAR_18, flt(VAR_32.get(VAR_18), self.precision(VAR_18, VAR_32.parentfield)))
def FUNC_65(self):
return "/app/Form/{VAR_2}/{VAR_3}".format(VAR_2=self.doctype, VAR_3=self.name)
def FUNC_66(self, VAR_36='Comment', VAR_37=None, VAR_38=None, VAR_39=None, VAR_40=None, VAR_41=None):
VAR_66 = frappe.get_doc({
"doctype":"Comment",
'comment_type': VAR_36,
"comment_email": VAR_38 or frappe.session.user,
"comment_by": VAR_41,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": VAR_37 or VAR_36,
"link_doctype": VAR_39,
"link_name": VAR_40
}).insert(VAR_11=True)
return VAR_66
def FUNC_67(self, VAR_42=None):
if not VAR_42:
user = frappe.session.user
if self.meta.track_seen:
VAR_95 = self.get('_seen') or []
VAR_95 = frappe.parse_json(VAR_95)
if VAR_42 not in VAR_95:
_seen.append(VAR_42)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(VAR_95), VAR_27=False)
frappe.local.flags.commit = True
def FUNC_68(self, VAR_42=None):
if not VAR_42:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(VAR_11=True)
frappe.local.flags.commit = True
def FUNC_69(self):
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def FUNC_70(self):
VAR_76 = getattr(self, "_liked_by", None)
if VAR_76:
return json.loads(VAR_76)
else:
return []
def FUNC_71(self, VAR_43, VAR_26):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[VAR_43] = VAR_26
def FUNC_72(self, VAR_43=None):
if not VAR_43:
return self.get("__onload", frappe._dict())
return self.get('__onload')[VAR_43]
def FUNC_73(self, VAR_4, **VAR_1):
from frappe.utils.background_jobs import enqueue
if hasattr(self, '_' + VAR_4):
action = '_' + VAR_4
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'))
self.lock()
enqueue('frappe.model.document.execute_action', VAR_2=self.doctype, VAR_3=self.name,
VAR_4=action, **VAR_1)
def FUNC_74(self, VAR_44=None):
VAR_77 = self.get_signature()
if file_lock.lock_exists(VAR_77):
VAR_96 = True
if VAR_44:
for i in range(VAR_44):
time.sleep(1)
if not file_lock.lock_exists(VAR_77):
VAR_96 = False
break
if VAR_96:
raise frappe.DocumentLockedError
file_lock.create_lock(VAR_77)
def FUNC_75(self):
file_lock.delete_lock(self.get_signature())
def FUNC_76(self, VAR_45, VAR_46):
if date_diff(self.get(VAR_46), self.get(VAR_45)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(VAR_46)),
frappe.bold(self.meta.get_label(VAR_45)),
), frappe.exceptions.InvalidDates)
def FUNC_77(self):
VAR_78 = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
VAR_79 = set([assignment.owner for assignment in VAR_78])
return VAR_79
def FUNC_78(self, VAR_47):
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, VAR_47)
def FUNC_79(self):
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def FUNC_1(VAR_2, VAR_3, VAR_4, **VAR_1):
VAR_32 = frappe.get_doc(VAR_2, VAR_3)
VAR_32.unlock()
try:
getattr(VAR_32, VAR_4)(**VAR_1)
except Exception:
frappe.db.rollback()
if frappe.local.message_log:
VAR_87 = json.loads(frappe.local.message_log[-1]).get('message')
else:
VAR_87 = '<pre><code>' + frappe.get_traceback() + '</pre></code>'
VAR_32.add_comment('Comment', _('Action Failed') + '<br><br>' + VAR_87)
VAR_32.notify_update()
|
from __future__ import unicode_literals, print_function
import .frappe
import time
from frappe import _, msgprint, FUNC_61
from frappe.utils import .flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import .set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, VAR_82
from frappe.model.workflow import .validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import .follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
def FUNC_0(*VAR_0, **VAR_1):
if VAR_0:
if isinstance(VAR_0[0], BaseDocument):
return VAR_0[0]
elif isinstance(VAR_0[0], string_types):
VAR_2 = VAR_0[0]
elif isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
else:
raise ValueError('First non keyword argument must be a string or dict')
if len(VAR_0) < 2 and VAR_1:
if 'doctype' in VAR_1:
VAR_2 = VAR_1['doctype']
else:
raise ValueError('"doctype" is a required key')
VAR_5 = get_controller(VAR_2)
if VAR_5:
return VAR_5(*VAR_0, **VAR_1)
raise ImportError(VAR_2)
class CLASS_0(BaseDocument):
def __init__(self, *VAR_0, **VAR_1):
self.doctype = self.name = None
self._default_new_docs = {}
self.flags = frappe._dict()
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], string_types):
if len(VAR_0)==1:
self.doctype = self.name = VAR_0[0]
else:
self.doctype = VAR_0[0]
if isinstance(VAR_0[1], dict):
self.name = frappe.db.get_value(VAR_0[0], VAR_0[1], "name")
if self.name is None:
frappe.throw(_("{0} {1} not found").format(_(VAR_0[0]), VAR_0[1]),
frappe.DoesNotExistError)
else:
self.name = VAR_0[1]
if 'for_update' in VAR_1:
self.flags.for_update = VAR_1.get('for_update')
self.load_from_db()
return
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
if VAR_1:
super(CLASS_0, self).__init__(VAR_1)
self.init_valid_columns()
else:
raise ValueError('Illegal arguments')
@staticmethod
def FUNC_2(VAR_6):
frappe.whitelist()(VAR_6)
return VAR_6
def FUNC_3(self):
self.load_from_db()
def FUNC_4(self):
if not getattr(self, "_metaclass", False) and self.meta.issingle:
VAR_81 = frappe.db.get_singles_dict(self.doctype)
if not VAR_81:
single_doc = frappe.new_doc(self.doctype).as_dict()
VAR_81["name"] = self.doctype
del VAR_81["__islocal"]
super(CLASS_0, self).__init__(VAR_81)
self.init_valid_columns()
self._fix_numeric_types()
else:
VAR_21 = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
if not VAR_21:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(CLASS_0, self).__init__(VAR_21)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import DOCTYPE_TABLE_FIELDS
VAR_82 = DOCTYPE_TABLE_FIELDS
else:
VAR_82 = self.meta.get_table_fields()
for VAR_19 in VAR_82:
VAR_52 = frappe.db.get_values(VAR_19.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": VAR_19.fieldname},
"*", as_dict=True, order_by="idx asc")
if VAR_52:
self.set(VAR_19.fieldname, VAR_52)
else:
self.set(VAR_19.fieldname, [])
if hasattr(self, "__setup__"):
self.__setup__()
def FUNC_5(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def FUNC_6(self, VAR_7='read', VAR_8=None):
if not self.has_permission(VAR_7):
self.raise_no_permission_to(VAR_8 or VAR_7)
def FUNC_7(self, VAR_7="read", VAR_9=False):
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, VAR_7, self, VAR_9=verbose)
def FUNC_8(self, VAR_10):
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
def FUNC_9(self, VAR_11=None, VAR_12=None, VAR_13=False,
VAR_14=None, VAR_15=None, VAR_16=True):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
if VAR_12!=None:
self.flags.ignore_links = VAR_12
if VAR_14!=None:
self.flags.ignore_mandatory = VAR_14
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self._validate_links()
self.set_new_name(VAR_15=set_name, VAR_16=set_child_names)
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
try:
self.db_insert()
except frappe.DuplicateEntryError as e:
if not VAR_13:
raise e
for VAR_21 in self.get_all_children():
VAR_21.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
self.flags.update_log_for_doc_creation = True
self.run_post_save_methods()
self.flags.in_insert = False
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
follow_document(self.doctype, self.name, frappe.session.user)
return self
def FUNC_10(self, *VAR_0, **VAR_1):
return self._save(*VAR_0, **VAR_1)
def FUNC_11(self, VAR_11=None, VAR_17=None):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
self.flags.ignore_version = frappe.flags.in_test if VAR_17 is None else VAR_17
if self.get("__islocal") or not self.get("name"):
self.insert()
return
self.check_permission("write", "save")
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.set_parent_in_children()
self.set_name_in_children()
self.validate_higher_perm_levels()
self._validate_links()
self.run_before_save_methods()
if self._action != "cancel":
self._validate()
if self._action == "update_after_submit":
self.validate_update_after_submit()
self.set_docstatus()
if self.meta.issingle:
self.update_single(self.get_valid_dict())
else:
self.db_update()
self.update_children()
self.run_post_save_methods()
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
return self
def FUNC_12(self):
from frappe.desk.form.load import get_attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
VAR_83 = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
VAR_83.save()
def FUNC_13(self):
for VAR_19 in self.meta.get_table_fields():
self.update_child_table(VAR_19.fieldname, VAR_19)
def FUNC_14(self, VAR_18, VAR_19=None):
VAR_50 = []
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.db_update()
VAR_50.append(VAR_21.name)
if VAR_19.options in (self.flags.ignore_children_type or []):
return
if VAR_50:
VAR_84 = frappe.db.sql("""select VAR_3 from `tab{0}` where parent=%s
and VAR_24=%s and VAR_36=%s
and VAR_3 not in ({1})""".format(VAR_19.options, ','.join(['%s'] * len(VAR_50))),
[self.name, self.doctype, VAR_18] + VAR_50)
if len(VAR_84) > 0:
frappe.db.sql("""delete from `tab{0}` where VAR_3 in ({1})""".format(VAR_19.options,
','.join(['%s'] * len(VAR_84))), tuple(row[0] for row in VAR_84))
else:
frappe.db.sql("""delete from `tab{0}` where parent=%s
and VAR_24=%s and VAR_36=%s""".format(VAR_19.options),
(self.name, self.doctype, VAR_18))
def FUNC_15(self):
return getattr(self, '_doc_before_save', None)
def FUNC_16(self, VAR_18):
VAR_51 = self.get_doc_before_save()
return VAR_51.get(VAR_18)!=self.get(VAR_18) if VAR_51 else True
def FUNC_17(self, VAR_20=False, VAR_15=None, VAR_16=True):
if self.flags.name_set and not VAR_20:
return
if self.get("__newname"):
self.name = self.get("__newname")
self.flags.name_set = True
return
if VAR_15:
self.name = VAR_15
else:
FUNC_17(self)
if VAR_16:
for VAR_21 in self.get_all_children():
FUNC_17(VAR_21)
self.flags.name_set = True
def FUNC_18(self):
return self.get(self.meta.get_title_field())
def FUNC_19(self):
def FUNC_80():
VAR_85 = self.as_dict()
for VAR_45, VAR_26 in iteritems(VAR_85):
if VAR_26==None:
VAR_85[VAR_45] = ""
return VAR_85
if self.meta.get("title_field")=="title":
VAR_19 = self.meta.get_field(self.meta.title_field)
if VAR_19.options:
self.set(VAR_19.fieldname, VAR_19.options.format(**FUNC_80()))
elif self.is_new() and not self.get(VAR_19.fieldname) and VAR_19.default:
self.set(VAR_19.fieldname, VAR_19.default.format(**FUNC_80()))
def FUNC_20(self, VAR_21):
frappe.db.sql("""delete from `tabSingles` where VAR_2=%s""", self.doctype)
for field, VAR_26 in iteritems(VAR_21):
if field != "doctype":
frappe.db.sql("""insert into `tabSingles` (VAR_2, field, VAR_26)
VAR_85 (%s, %s, %s)""", (self.doctype, field, VAR_26))
if self.doctype in frappe.db.value_cache:
del frappe.db.value_cache[self.doctype]
def FUNC_21(self):
self._original_modified = self.modified
self.modified = now()
self.modified_by = frappe.session.user
if not self.creation:
self.creation = self.modified
if not self.owner:
self.owner = self.modified_by
for VAR_21 in self.get_all_children():
VAR_21.modified = self.modified
VAR_21.modified_by = self.modified_by
if not VAR_21.owner:
VAR_21.owner = self.owner
if not VAR_21.creation:
VAR_21.creation = self.creation
frappe.flags.currently_saving.append((self.doctype, self.name))
def FUNC_22(self):
if self.docstatus==None:
self.docstatus=0
for VAR_21 in self.get_all_children():
VAR_21.docstatus = self.docstatus
def FUNC_23(self):
self._validate_mandatory()
self._validate_data_fields()
self._validate_selects()
self._validate_non_negative()
self._validate_length()
self._extract_images_from_text_editor()
self._sanitize_content()
self._save_passwords()
self.validate_workflow()
VAR_52 = self.get_all_children()
for VAR_21 in VAR_52:
VAR_21._validate_data_fields()
VAR_21._validate_selects()
VAR_21._validate_non_negative()
VAR_21._validate_length()
VAR_21._extract_images_from_text_editor()
VAR_21._sanitize_content()
VAR_21._save_passwords()
if self.is_new():
for VAR_18 in optional_fields:
self.set(VAR_18, None)
else:
self.validate_set_only_once()
def FUNC_24(self):
def FUNC_81(VAR_19):
if self.parentfield:
return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(VAR_19.label)))
else:
return _("Value cannot be negative for {0}: {1}").format(_(VAR_19.parent), frappe.bold(_(VAR_19.label)))
for VAR_19 in self.meta.get('fields', {'non_negative': ('=', 1),
'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
if flt(self.get(VAR_19.fieldname)) < 0:
VAR_88 = FUNC_81(VAR_19)
frappe.throw(VAR_88, frappe.NonNegativeError, title=_("Negative Value"))
def FUNC_25(self):
if frappe.flags.in_install == 'frappe': return
VAR_53 = self.meta.get_workflow()
if VAR_53:
FUNC_25(self)
if not self._action == 'save':
set_workflow_state_on_action(self, VAR_53, self._action)
def FUNC_26(self):
VAR_54 = self.meta.get_set_only_once_fields()
if VAR_54 and self._doc_before_save:
for field in VAR_54:
VAR_98 = False
VAR_26 = self.get(field.fieldname)
VAR_55 = self._doc_before_save.get(field.fieldname)
if field.fieldtype in VAR_82:
VAR_98 = not self.is_child_table_same(field.fieldname)
elif field.fieldtype in ('Date', 'Datetime', 'Time'):
VAR_98 = str(VAR_26) != str(VAR_55)
else:
VAR_98 = VAR_26 != VAR_55
if VAR_98:
frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
frappe.CannotChangeConstantError)
return False
def FUNC_27(self, VAR_18):
VAR_26 = self.get(VAR_18)
VAR_55 = self._doc_before_save.get(VAR_18)
VAR_56 = True
if len(VAR_55) != len(VAR_26):
VAR_56 = False
else:
for i, VAR_21 in enumerate(VAR_55):
VAR_99 = VAR_26[i].as_dict(convert_dates_to_str = True)
VAR_100 = VAR_21.as_dict(convert_dates_to_str = True)
for VAR_45 in ('modified', 'modified_by', 'creation'):
del VAR_99[VAR_45]
del VAR_100[VAR_45]
if VAR_100 != VAR_99:
VAR_56 = False
break
return VAR_56
def FUNC_28(self):
if frappe.session.user == "Administrator":
return
VAR_57 = False
VAR_58 = self.meta.fields.copy()
for table_field in self.meta.get_table_fields():
VAR_58 += frappe.get_meta(table_field.options).fields or []
for VAR_19 in VAR_58:
if VAR_19.permlevel > 0:
VAR_57 = True
break
if not VAR_57:
return
VAR_59 = self.get_permlevel_access('read')
for VAR_19 in self.meta.fields:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_59:
self.set(VAR_19.fieldname, None)
for table_field in self.meta.get_table_fields():
for VAR_19 in frappe.get_meta(table_field.options).fields or []:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_59:
for child in self.get(table_field.fieldname) or []:
child.set(VAR_19.fieldname, None)
def FUNC_29(self):
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
VAR_59 = self.get_permlevel_access()
VAR_60 = self.meta.get_high_permlevel_fields()
if VAR_60:
self.reset_values_if_no_permlevel_access(VAR_59, VAR_60)
if self.is_new(): return
for VAR_19 in self.meta.get_table_fields():
VAR_60 = frappe.get_meta(VAR_19.options).get_high_permlevel_fields()
if VAR_60:
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.reset_values_if_no_permlevel_access(VAR_59, VAR_60)
def FUNC_30(self, VAR_22='write'):
if not hasattr(self, "_has_access_to"):
self._has_access_to = {}
self._has_access_to[VAR_22] = []
VAR_61 = frappe.get_roles()
for perm in self.get_permissions():
if perm.role in VAR_61 and perm.get(VAR_22):
if perm.permlevel not in self._has_access_to[VAR_22]:
self._has_access_to[VAR_22].append(perm.permlevel)
return self._has_access_to[VAR_22]
def FUNC_31(self, VAR_18, VAR_19=None, VAR_22='read'):
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
return VAR_19.permlevel in self.get_permlevel_access(VAR_22)
def FUNC_32(self):
if self.meta.istable:
permissions = frappe.get_meta(self.parenttype).permissions
else:
VAR_86 = self.meta.permissions
return VAR_86
def FUNC_33(self):
if frappe.flags.in_import:
return
VAR_62 = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(VAR_62)
for VAR_19 in self.meta.get_table_fields():
VAR_62 = frappe.new_doc(VAR_19.options, as_dict=True)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
for VAR_21 in VAR_26:
VAR_21.update_if_missing(VAR_62)
def FUNC_34(self):
VAR_63 = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
VAR_101 = frappe.db.sql("""select VAR_26 from tabSingles
where VAR_2=%s and field='modified' for update""", self.doctype)
VAR_101 = VAR_101 and VAR_101[0][0]
if VAR_101 and VAR_101 != cstr(self._original_modified):
VAR_63 = True
else:
VAR_102 = frappe.db.sql("""select VAR_101, VAR_23 from `tab{0}`
where VAR_3 = %s for update""".format(self.doctype), self.name, as_dict=True)
if not VAR_102:
frappe.throw(_("Record does not exist"))
else:
VAR_102 = tmp[0]
VAR_101 = cstr(VAR_102.modified)
if VAR_101 and VAR_101 != cstr(self._original_modified):
VAR_63 = True
self.check_docstatus_transition(VAR_102.docstatus)
if VAR_63:
frappe.msgprint(_("Error: CLASS_0 has been VAR_101 after you have opened it") \
+ (" (%s, %s). " % (VAR_101, self.modified)) \
+ _("Please refresh to get the latest document."),
VAR_35=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
def FUNC_35(self, VAR_23):
if not self.docstatus:
self.docstatus = 0
if VAR_23==0:
if self.docstatus==0:
self._action = "save"
elif self.docstatus==1:
self._action = "submit"
self.check_permission("submit")
else:
raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 0 to 2"))
elif VAR_23==1:
if self.docstatus==1:
self._action = "update_after_submit"
self.check_permission("submit")
elif self.docstatus==2:
self._action = "cancel"
self.check_permission("cancel")
else:
raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 1 to 0"))
elif VAR_23==2:
raise frappe.ValidationError(_("Cannot edit cancelled document"))
def FUNC_36(self):
for VAR_21 in self.get_all_children():
VAR_21.parent = self.name
VAR_21.parenttype = self.doctype
def FUNC_37(self):
for VAR_21 in self.get_all_children():
if not VAR_21.name:
FUNC_17(VAR_21)
def FUNC_38(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for VAR_21 in self.get_all_children():
if VAR_21.is_new() and self.meta.get_field(VAR_21.parentfield).allow_on_submit:
continue
VAR_21._validate_update_after_submit()
def FUNC_39(self):
if self.flags.ignore_mandatory:
return
VAR_64 = self._get_missing_mandatory_fields()
for VAR_21 in self.get_all_children():
VAR_64.extend(VAR_21._get_missing_mandatory_fields())
if not VAR_64:
return
for VAR_18, VAR_88 in VAR_64:
msgprint(VAR_88)
if frappe.flags.print_messages:
print(self.as_json().encode("utf-8"))
raise frappe.MandatoryError('[{VAR_2}, {VAR_3}]: {fields}'.format(
fields=", ".join((each[0] for each in VAR_64)),
VAR_2=self.doctype,
VAR_3=self.name))
def FUNC_40(self):
if self.flags.ignore_links or self._action == "cancel":
return
VAR_65, VAR_66 = self.get_invalid_links()
for VAR_21 in self.get_all_children():
VAR_87 = VAR_21.get_invalid_links(is_submittable=self.meta.is_submittable)
VAR_65.extend(VAR_87[0])
VAR_66.extend(VAR_87[1])
if VAR_65:
VAR_88 = ", ".join((each[2] for each in VAR_65))
frappe.throw(_("Could not find {0}").format(VAR_88),
frappe.LinkValidationError)
if VAR_66:
VAR_88 = ", ".join((each[2] for each in VAR_66))
frappe.throw(_("Cannot link cancelled document: {0}").format(VAR_88),
frappe.CancelledLinkError)
def FUNC_41(self, VAR_24=None):
VAR_67 = []
for VAR_19 in self.meta.get("fields", {"fieldtype": ['in', VAR_82]}):
if VAR_24:
if VAR_19.options==VAR_24:
return self.get(VAR_19.fieldname)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
VAR_67.extend(VAR_26)
return VAR_67
def FUNC_42(self, VAR_25, *VAR_0, **VAR_1):
if "flags" in VAR_1:
del VAR_1["flags"]
if hasattr(self, VAR_25) and hasattr(getattr(self, VAR_25), "__call__"):
VAR_6 = lambda self, *VAR_0, **VAR_1: getattr(self, VAR_25)(*VAR_0, **VAR_1)
else:
VAR_6 = lambda self, *VAR_0, **VAR_1: None
VAR_6.__name__ = str(VAR_25)
VAR_68 = CLASS_0.hook(VAR_6)(self, *VAR_0, **VAR_1)
self.run_notifications(VAR_25)
run_webhooks(self, VAR_25)
run_server_script_for_doc_event(self, VAR_25)
return VAR_68
def FUNC_43(self, VAR_25, *VAR_0, **VAR_1):
return self.run_method(VAR_25, *VAR_0, **VAR_1)
def FUNC_44(self, VAR_25):
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
VAR_89 = frappe.cache().hget('notifications', self.doctype)
if VAR_89==None:
VAR_89 = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, VAR_89)
self.flags.notifications = VAR_89
if not self.flags.notifications:
return
def FUNC_82(VAR_69):
if not VAR_69.name in self.flags.notifications_executed:
evaluate_alert(self, VAR_69.name, VAR_69.event)
self.flags.notifications_executed.append(VAR_69.name)
VAR_70 = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
VAR_70['on_change'] = 'Value Change'
for VAR_69 in self.flags.notifications:
VAR_90 = VAR_70.get(VAR_25, None)
if VAR_90 and VAR_69.event == VAR_90:
FUNC_82(VAR_69)
elif VAR_69.event=='Method' and VAR_25 == VAR_69.method:
FUNC_82(VAR_69)
@FUNC_2.__func__
def FUNC_45(self):
self.docstatus = 1
self.save()
@FUNC_2.__func__
def FUNC_46(self):
self.docstatus = 2
self.save()
@FUNC_2.__func__
def FUNC_47(self):
self._submit()
@FUNC_2.__func__
def FUNC_48(self):
self._cancel()
def FUNC_49(self, VAR_11=False):
frappe.delete_doc(self.doctype, self.name, VAR_11 = ignore_permissions, flags=self.flags)
def FUNC_50(self):
self.load_doc_before_save()
self.reset_seen()
if self._action in ("save", "submit"):
self.run_method("before_validate")
if self.flags.ignore_validate:
return
if self._action=="save":
self.run_method("validate")
self.run_method("before_save")
elif self._action=="submit":
self.run_method("validate")
self.run_method("before_submit")
elif self._action=="cancel":
self.run_method("before_cancel")
elif self._action=="update_after_submit":
self.run_method("before_update_after_submit")
self.set_title_field()
def FUNC_51(self):
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
def FUNC_52(self):
VAR_71 = self.get_doc_before_save()
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
update_global_search(self)
self.save_version()
self.run_method('on_change')
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
def FUNC_53(self):
frappe.clear_document_cache(self.doctype, self.name)
def FUNC_54(self):
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), VAR_27=False)
def FUNC_55(self):
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
VAR_2=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
VAR_91 = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", VAR_91, after_commit=True)
def FUNC_56(self, VAR_18, VAR_26=None, VAR_27=True, VAR_28=False, VAR_29=False):
if isinstance(VAR_18, dict):
self.update(VAR_18)
else:
self.set(VAR_18, VAR_26)
if VAR_27 and (self.doctype, self.name) not in frappe.flags.currently_saving:
self.set("modified", now())
self.set("modified_by", frappe.session.user)
self.load_doc_before_save()
self.run_method('before_change')
frappe.db.set_value(self.doctype, self.name, VAR_18, VAR_26,
self.modified, self.modified_by, VAR_27=update_modified)
self.run_method('on_change')
if VAR_28:
self.notify_update()
self.clear_cache()
if VAR_29:
frappe.db.commit()
def FUNC_57(self, VAR_18):
return frappe.db.get_value(self.doctype, self.name, VAR_18)
def FUNC_58(self):
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, VAR_25="Cancel")
check_if_doc_is_dynamically_linked(self, VAR_25="Cancel")
def FUNC_59(self):
if (not getattr(self.meta, 'track_changes', False)
or self.doctype == 'Version'
or self.flags.ignore_version
or frappe.flags.in_install
or (not self._doc_before_save and frappe.flags.in_patch)):
return
VAR_72 = frappe.new_doc('Version')
if not self._doc_before_save:
VAR_72.for_insert(self)
VAR_72.insert(VAR_11=True)
elif VAR_72.set_diff(self._doc_before_save, self):
VAR_72.insert(VAR_11=True)
if not frappe.flags.in_migrate:
follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def FUNC_60(VAR_30):
def FUNC_83(self, VAR_73):
if isinstance(VAR_73, dict):
if not self.get("_return_value"):
self._return_value = {}
self._return_value.update(VAR_73)
else:
self._return_value = VAR_73 or self.get("_return_value")
def FUNC_84(VAR_6, *VAR_74):
def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):
FUNC_83(self, VAR_6(self, *VAR_0, **VAR_1))
for VAR_30 in VAR_74:
FUNC_83(self, VAR_30(self, VAR_25, *VAR_0, **VAR_1))
return self._return_value
return FUNC_86
def FUNC_85(self, *VAR_0, **VAR_1):
VAR_74 = []
VAR_25 = VAR_30.__name__
VAR_92 = frappe.get_doc_hooks()
for handler in VAR_92.get(self.doctype, {}).get(VAR_25, []) \
+ VAR_92.get("*", {}).get(VAR_25, []):
VAR_74.append(frappe.get_attr(handler))
VAR_93 = FUNC_84(VAR_30, *VAR_74)
return VAR_93(self, VAR_25, *VAR_0, **VAR_1)
return FUNC_85
def FUNC_61(self, VAR_31):
VAR_25 = getattr(self, VAR_31, None)
if not VAR_6:
raise NotFound("Method {0} not found".format(VAR_31))
FUNC_61(getattr(VAR_25, '__func__', VAR_25))
def FUNC_62(self, VAR_18, VAR_32, VAR_33, VAR_34=None, VAR_35=None):
VAR_75 = {
"in": _("one of"),
"not in": _("none of"),
"^": _("beginning with"),
}
if not VAR_34:
doc = self
VAR_76 = VAR_34.get_value(VAR_18)
VAR_19 = VAR_34.meta.get_field(VAR_18)
VAR_33 = VAR_34.cast(VAR_33, VAR_19)
if not frappe.compare(VAR_76, VAR_32, VAR_33):
VAR_94 = VAR_34.meta.get_label(VAR_18)
VAR_95 = VAR_75.get(VAR_32, condition)
if VAR_34.parentfield:
VAR_88 = _("Incorrect VAR_26 in row {0}: {1} must be {2} {3}").format(VAR_34.idx, VAR_94, VAR_95, VAR_33)
else:
VAR_88 = _("Incorrect VAR_26: {0} must be {1} {2}").format(VAR_94, VAR_95, VAR_33)
msgprint(VAR_88, VAR_35=raise_exception or True)
def FUNC_63(self, VAR_36, VAR_35=None):
if not (isinstance(self.get(VAR_36), list) and len(self.get(VAR_36)) > 0):
VAR_94 = self.meta.get_label(VAR_36)
frappe.throw(_("Table {0} cannot be empty").format(VAR_94), VAR_35 or frappe.EmptyTableError)
def FUNC_64(self, VAR_34, VAR_37=None):
if not VAR_37:
fieldnames = (VAR_19.fieldname for VAR_19 in
VAR_34.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for VAR_18 in VAR_37:
VAR_34.set(VAR_18, flt(VAR_34.get(VAR_18), self.precision(VAR_18, VAR_34.parentfield)))
def FUNC_65(self):
return "/app/Form/{VAR_2}/{VAR_3}".format(VAR_2=self.doctype, VAR_3=self.name)
def FUNC_66(self, VAR_38='Comment', VAR_39=None, VAR_40=None, VAR_41=None, VAR_42=None, VAR_43=None):
VAR_68 = frappe.get_doc({
"doctype":"Comment",
'comment_type': VAR_38,
"comment_email": VAR_40 or frappe.session.user,
"comment_by": VAR_43,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": VAR_39 or VAR_38,
"link_doctype": VAR_41,
"link_name": VAR_42
}).insert(VAR_11=True)
return VAR_68
def FUNC_67(self, VAR_44=None):
if not VAR_44:
user = frappe.session.user
if self.meta.track_seen:
VAR_96 = self.get('_seen') or []
VAR_96 = frappe.parse_json(VAR_96)
if VAR_44 not in VAR_96:
_seen.append(VAR_44)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(VAR_96), VAR_27=False)
frappe.local.flags.commit = True
def FUNC_68(self, VAR_44=None):
if not VAR_44:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(VAR_11=True)
frappe.local.flags.commit = True
def FUNC_69(self):
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def FUNC_70(self):
VAR_77 = getattr(self, "_liked_by", None)
if VAR_77:
return json.loads(VAR_77)
else:
return []
def FUNC_71(self, VAR_45, VAR_26):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[VAR_45] = VAR_26
def FUNC_72(self, VAR_45=None):
if not VAR_45:
return self.get("__onload", frappe._dict())
return self.get('__onload')[VAR_45]
def FUNC_73(self, VAR_4, **VAR_1):
from frappe.utils.background_jobs import enqueue
if hasattr(self, '_' + VAR_4):
action = '_' + VAR_4
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'))
self.lock()
enqueue('frappe.model.document.execute_action', VAR_2=self.doctype, VAR_3=self.name,
VAR_4=action, **VAR_1)
def FUNC_74(self, VAR_46=None):
VAR_78 = self.get_signature()
if file_lock.lock_exists(VAR_78):
VAR_97 = True
if VAR_46:
for i in range(VAR_46):
time.sleep(1)
if not file_lock.lock_exists(VAR_78):
VAR_97 = False
break
if VAR_97:
raise frappe.DocumentLockedError
file_lock.create_lock(VAR_78)
def FUNC_75(self):
file_lock.delete_lock(self.get_signature())
def FUNC_76(self, VAR_47, VAR_48):
if date_diff(self.get(VAR_48), self.get(VAR_47)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(VAR_48)),
frappe.bold(self.meta.get_label(VAR_47)),
), frappe.exceptions.InvalidDates)
def FUNC_77(self):
VAR_79 = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
VAR_80 = set([assignment.owner for assignment in VAR_79])
return VAR_80
def FUNC_78(self, VAR_49):
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, VAR_49)
def FUNC_79(self):
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def FUNC_1(VAR_2, VAR_3, VAR_4, **VAR_1):
VAR_34 = frappe.get_doc(VAR_2, VAR_3)
VAR_34.unlock()
try:
getattr(VAR_34, VAR_4)(**VAR_1)
except Exception:
frappe.db.rollback()
if frappe.local.message_log:
VAR_88 = json.loads(frappe.local.message_log[-1]).get('message')
else:
VAR_88 = '<pre><code>' + frappe.get_traceback() + '</pre></code>'
VAR_34.add_comment('Comment', _('Action Failed') + '<br><br>' + VAR_88)
VAR_34.notify_update()
| [
1,
2,
3,
21,
22,
23,
24,
27,
31,
33,
34,
36,
37,
45,
46,
48,
49,
54,
58,
60,
62,
65,
71,
75,
77,
82,
85,
92,
94,
96,
101,
108,
111,
114,
116,
118,
120,
123,
125,
127,
133,
137,
147,
151,
156,
158,
164,
173,
174,
177,
182,
187,
191,
196,
201,
207,
211,
213,
216,
219,
222,
224,
235,
241,
242,
243,
244,
253,
254,
257,
260,
263,
264,
265,
269,
270,
273,
274,
277,
281,
285,
289,
292,
297,
299,
302,
304,
308,
310,
316,
320,
323,
326,
328,
329,
334,
337,
338,
341,
343,
347,
348,
350,
351,
360,
361,
366,
372,
376,
378,
379,
381,
383,
389,
392,
394,
398,
401,
406,
409,
412,
413,
418,
423,
425,
428,
430,
434,
439,
444,
447,
451,
453,
461,
464,
473,
481,
483,
487,
490,
501,
512,
517,
525,
528,
532,
541,
545,
547,
552,
559,
563,
565,
571,
575,
579,
580,
584,
588,
590,
593,
596,
598,
602,
607,
610,
612,
616,
622,
627,
630,
633,
636,
637,
639,
640,
646,
650,
657,
659,
663,
665,
668,
672,
674,
678,
681,
682,
689,
695,
710,
715,
717,
720,
722,
730,
734,
739,
751,
761,
764,
770,
772,
776,
780,
784,
786,
788,
789,
790,
794,
798,
801,
804,
807,
812,
816,
818,
823,
828,
833,
845,
850,
854,
856,
859,
863,
865,
868,
873,
876,
878,
886,
889,
894,
901,
903,
905,
912,
918,
924,
929,
934,
938,
941,
946,
948,
951,
952,
955,
958,
969,
971,
981,
984,
989,
991,
1002,
1003,
1006,
1008,
1010,
1012,
1015,
1017,
1020,
1025,
1029,
1032,
1041,
1044,
1047,
1058,
1060,
1061,
1064,
1066,
1068,
1071,
1073,
1076,
1080,
1084,
1091,
1094,
1095,
1102,
1110,
1112,
1116,
1127,
1133,
1135,
1137,
1145,
1148,
1150,
1157,
1166,
1169,
1171,
1174,
1182,
1183,
1185,
1191,
1194,
1200,
1203,
1207,
1210,
1212,
1225,
1230,
1234,
1239,
1244,
1253,
1257,
1264,
1269,
1273,
1275,
1279,
1280,
1281,
1283,
1286,
1290,
1294,
1298,
1312,
1316,
1317,
1327,
1336,
1339,
1344,
1349,
1358,
1359,
1364,
1367,
1368,
1369,
1370,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
79,
1351,
81,
82,
83,
84,
85,
86,
87,
88,
130,
135,
139,
140,
184,
189,
190,
191,
192,
198,
204,
205,
206,
207,
208,
283,
287,
288,
289,
290,
291,
292,
293,
294,
345,
363,
368,
403,
408,
432,
436,
455,
534,
543,
567,
592,
624,
691,
692,
693,
694,
695,
696,
697,
732,
733,
734,
735,
736,
737,
738,
739,
740,
766,
835,
847,
870,
915,
921,
927,
932,
936,
940,
941,
942,
943,
944,
945,
946,
947,
973,
983,
984,
985,
986,
987,
988,
1022,
1027,
1043,
1044,
1045,
1046,
1047,
1048,
1049,
1050,
1051,
1052,
1053,
1082,
1086,
1093,
1115,
1116,
1117,
1118,
1119,
1159,
1160,
1187,
1193,
1194,
1195,
1196,
1205,
1209,
1210,
1211,
1227,
1241,
1255,
1277,
1278,
1296,
1297,
1298,
1299,
1314,
1319,
1320,
1321,
1341,
1346
] | [
1,
2,
3,
21,
22,
23,
24,
27,
31,
33,
34,
36,
37,
45,
46,
48,
49,
54,
58,
60,
62,
65,
71,
75,
77,
82,
85,
92,
94,
96,
101,
108,
111,
114,
116,
118,
120,
123,
125,
127,
133,
137,
147,
151,
156,
158,
164,
173,
174,
177,
182,
187,
191,
196,
201,
207,
211,
213,
216,
219,
222,
224,
235,
241,
242,
243,
244,
253,
254,
257,
260,
263,
264,
265,
269,
270,
273,
274,
277,
281,
285,
289,
292,
297,
299,
302,
304,
308,
310,
316,
320,
323,
326,
328,
329,
334,
337,
338,
341,
343,
347,
348,
350,
351,
360,
361,
366,
372,
376,
378,
379,
381,
383,
389,
392,
394,
398,
401,
406,
409,
412,
413,
418,
423,
425,
428,
430,
434,
439,
444,
447,
451,
453,
461,
464,
473,
481,
483,
487,
490,
501,
512,
517,
525,
528,
532,
541,
545,
547,
552,
559,
563,
565,
571,
575,
579,
580,
584,
588,
590,
593,
596,
598,
602,
607,
610,
612,
616,
622,
627,
630,
633,
636,
637,
639,
640,
646,
650,
657,
659,
663,
665,
668,
672,
674,
678,
681,
682,
689,
695,
710,
715,
717,
720,
722,
730,
734,
739,
751,
761,
764,
770,
772,
776,
780,
784,
786,
788,
789,
790,
794,
798,
801,
804,
807,
812,
816,
818,
823,
828,
833,
845,
850,
854,
856,
859,
863,
865,
868,
873,
876,
878,
886,
889,
894,
901,
903,
905,
912,
918,
924,
929,
934,
938,
941,
946,
948,
951,
952,
955,
958,
969,
971,
981,
984,
989,
991,
1002,
1003,
1006,
1008,
1010,
1012,
1015,
1017,
1020,
1025,
1029,
1032,
1041,
1044,
1047,
1058,
1060,
1061,
1064,
1066,
1068,
1071,
1073,
1076,
1080,
1084,
1091,
1094,
1095,
1102,
1110,
1112,
1116,
1127,
1133,
1135,
1137,
1145,
1148,
1150,
1155,
1157,
1166,
1169,
1171,
1174,
1182,
1183,
1185,
1191,
1194,
1200,
1203,
1207,
1210,
1212,
1225,
1230,
1234,
1239,
1244,
1253,
1257,
1264,
1269,
1273,
1275,
1279,
1280,
1281,
1283,
1286,
1290,
1294,
1298,
1312,
1316,
1317,
1327,
1336,
1339,
1344,
1349,
1358,
1359,
1364,
1367,
1368,
1369,
1370,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
79,
1351,
81,
82,
83,
84,
85,
86,
87,
88,
130,
135,
139,
140,
184,
189,
190,
191,
192,
198,
204,
205,
206,
207,
208,
283,
287,
288,
289,
290,
291,
292,
293,
294,
345,
363,
368,
403,
408,
432,
436,
455,
534,
543,
567,
592,
624,
691,
692,
693,
694,
695,
696,
697,
732,
733,
734,
735,
736,
737,
738,
739,
740,
766,
835,
847,
870,
915,
921,
927,
932,
936,
940,
941,
942,
943,
944,
945,
946,
947,
973,
983,
984,
985,
986,
987,
988,
1022,
1027,
1043,
1044,
1045,
1046,
1047,
1048,
1049,
1050,
1051,
1052,
1053,
1082,
1086,
1093,
1115,
1116,
1117,
1118,
1119,
1159,
1160,
1187,
1193,
1194,
1195,
1196,
1205,
1209,
1210,
1211,
1227,
1241,
1255,
1277,
1278,
1296,
1297,
1298,
1299,
1314,
1319,
1320,
1321,
1341,
1346
] |
1CWE-79
| from __future__ import unicode_literals
import base64
import calendar
import datetime
import re
import sys
import unicodedata
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils import six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.functional import keep_lazy_text
from django.utils.six.moves.urllib.parse import (
quote, quote_plus, unquote, unquote_plus, urlencode as original_urlencode,
urlparse,
)
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
RFC1123_DATE = re.compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = re.compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))
RFC3986_GENDELIMS = str(":/?#[]@")
RFC3986_SUBDELIMS = str("!$&'()*+,;=")
PROTOCOL_TO_PORT = {
'http': 80,
'https': 443,
}
@keep_lazy_text
def urlquote(url, safe='/'):
"""
A version of Python's urllib.quote() function that can operate on unicode
strings. The url is first UTF-8 encoded before quoting. The returned string
can safely be used as part of an argument to a subsequent iri_to_uri() call
without double-quoting occurring.
"""
return force_text(quote(force_str(url), force_str(safe)))
@keep_lazy_text
def urlquote_plus(url, safe=''):
"""
A version of Python's urllib.quote_plus() function that can operate on
unicode strings. The url is first UTF-8 encoded before quoting. The
returned string can safely be used as part of an argument to a subsequent
iri_to_uri() call without double-quoting occurring.
"""
return force_text(quote_plus(force_str(url), force_str(safe)))
@keep_lazy_text
def urlunquote(quoted_url):
"""
A wrapper for Python's urllib.unquote() function that can operate on
the result of django.utils.http.urlquote().
"""
return force_text(unquote(force_str(quoted_url)))
@keep_lazy_text
def urlunquote_plus(quoted_url):
"""
A wrapper for Python's urllib.unquote_plus() function that can operate on
the result of django.utils.http.urlquote_plus().
"""
return force_text(unquote_plus(force_str(quoted_url)))
def urlencode(query, doseq=0):
"""
A version of Python's urllib.urlencode() function that can operate on
unicode strings. The parameters are first cast to UTF-8 encoded strings and
then encoded as per normal.
"""
if isinstance(query, MultiValueDict):
query = query.lists()
elif hasattr(query, 'items'):
query = query.items()
return original_urlencode(
[(force_str(k),
[force_str(i) for i in v] if isinstance(v, (list, tuple)) else force_str(v))
for k, v in query],
doseq)
def cookie_date(epoch_seconds=None):
"""
Formats the time to ensure compatibility with Netscape's cookie standard.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
"""
rfcdate = formatdate(epoch_seconds)
return '%s-%s-%s GMT' % (rfcdate[:7], rfcdate[8:11], rfcdate[12:25])
def http_date(epoch_seconds=None):
"""
Formats the time to match the RFC1123 date format as specified by HTTP
RFC2616 section 3.3.1.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
"""
return formatdate(epoch_seconds, usegmt=True)
def parse_http_date(date):
"""
Parses a date format as specified by HTTP RFC2616 section 3.3.1.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Returns an integer expressed in seconds since the epoch, in UTC.
"""
# emails.Util.parsedate does the job for RFC1123 dates; unfortunately
# RFC2616 makes it mandatory to support RFC850 dates too. So we roll
# our own RFC-compliant parsing.
for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
m = regex.match(date)
if m is not None:
break
else:
raise ValueError("%r is not in a valid HTTP date format" % date)
try:
year = int(m.group('year'))
if year < 100:
if year < 70:
year += 2000
else:
year += 1900
month = MONTHS.index(m.group('mon').lower()) + 1
day = int(m.group('day'))
hour = int(m.group('hour'))
min = int(m.group('min'))
sec = int(m.group('sec'))
result = datetime.datetime(year, month, day, hour, min, sec)
return calendar.timegm(result.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % date), sys.exc_info()[2])
def parse_http_date_safe(date):
"""
Same as parse_http_date, but returns None if the input is invalid.
"""
try:
return parse_http_date(date)
except Exception:
pass
# Base 36 functions: useful for generating compact URLs
def base36_to_int(s):
"""
Converts a base 36 string to an ``int``. Raises ``ValueError` if the
input won't fit into an int.
"""
# To prevent overconsumption of server resources, reject any
# base36 string that is long than 13 base36 digits (13 digits
# is sufficient to base36-encode any 64-bit integer)
if len(s) > 13:
raise ValueError("Base36 input too large")
value = int(s, 36)
# ... then do a final check that the value will fit into an int to avoid
# returning a long (#15067). The long type was removed in Python 3.
if six.PY2 and value > sys.maxint:
raise ValueError("Base36 input too large")
return value
def int_to_base36(i):
"""
Converts an integer to a base36 string
"""
char_set = '0123456789abcdefghijklmnopqrstuvwxyz'
if i < 0:
raise ValueError("Negative base36 conversion input.")
if six.PY2:
if not isinstance(i, six.integer_types):
raise TypeError("Non-integer base36 conversion input.")
if i > sys.maxint:
raise ValueError("Base36 conversion input too large.")
if i < 36:
return char_set[i]
b36 = ''
while i != 0:
i, n = divmod(i, 36)
b36 = char_set[n] + b36
return b36
def urlsafe_base64_encode(s):
"""
Encodes a bytestring in base64 for use in URLs, stripping any trailing
equal signs.
"""
return base64.urlsafe_b64encode(s).rstrip(b'\n=')
def urlsafe_base64_decode(s):
"""
Decodes a base64 encoded string, adding back any trailing equal signs that
might have been stripped.
"""
s = force_bytes(s)
try:
return base64.urlsafe_b64decode(s.ljust(len(s) + len(s) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def parse_etags(etag_str):
"""
Parses a string with one or several etags passed in If-None-Match and
If-Match headers by the rules in RFC 2616. Returns a list of etags
without surrounding double quotes (") and unescaped from \<CHAR>.
"""
etags = ETAG_MATCH.findall(etag_str)
if not etags:
# etag_str has wrong format, treat it as an opaque string then
return [etag_str]
etags = [e.encode('ascii').decode('unicode_escape') for e in etags]
return etags
def quote_etag(etag):
"""
Wraps a string in double quotes escaping contents as necessary.
"""
return '"%s"' % etag.replace('\\', '\\\\').replace('"', '\\"')
def unquote_etag(etag):
"""
Unquote an ETag string; i.e. revert quote_etag().
"""
return etag.strip('"').replace('\\"', '"').replace('\\\\', '\\') if etag else etag
def is_same_domain(host, pattern):
"""
Return ``True`` if the host is either an exact match or a match
to the wildcard pattern.
Any pattern beginning with a period matches a domain and all of its
subdomains. (e.g. ``.example.com`` matches ``example.com`` and
``foo.example.com``). Anything else is an exact string match.
"""
if not pattern:
return False
pattern = pattern.lower()
return (
pattern[0] == '.' and (host.endswith(pattern) or host == pattern[1:]) or
pattern == host
)
def is_safe_url(url, host=None):
"""
Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
a different host and uses a safe scheme).
Always returns ``False`` on an empty url.
"""
if url is not None:
url = url.strip()
if not url:
return False
# Chrome treats \ completely as /
url = url.replace('\\', '/')
# Chrome considers any URL with more than two slashes to be absolute, but
# urlparse is not so flexible. Treat any url with three slashes as unsafe.
if url.startswith('///'):
return False
url_info = urlparse(url)
# Forbid URLs like http:///example.com - with a scheme, but without a hostname.
# In that URL, example.com is not the hostname but, a path component. However,
# Chrome will still consider example.com to be the hostname, so we must not
# allow this syntax.
if not url_info.netloc and url_info.scheme:
return False
# Forbid URLs that start with control characters. Some browsers (like
# Chrome) ignore quite a few control characters at the start of a
# URL and might consider the URL as scheme relative.
if unicodedata.category(url[0])[0] == 'C':
return False
return ((not url_info.netloc or url_info.netloc == host) and
(not url_info.scheme or url_info.scheme in ['http', 'https']))
| from __future__ import unicode_literals
import base64
import calendar
import datetime
import re
import sys
import unicodedata
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils import six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.functional import keep_lazy_text
from django.utils.six.moves.urllib.parse import (
quote, quote_plus, unquote, unquote_plus, urlencode as original_urlencode,
urlparse,
)
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
RFC1123_DATE = re.compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = re.compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))
RFC3986_GENDELIMS = str(":/?#[]@")
RFC3986_SUBDELIMS = str("!$&'()*+,;=")
PROTOCOL_TO_PORT = {
'http': 80,
'https': 443,
}
@keep_lazy_text
def urlquote(url, safe='/'):
"""
A version of Python's urllib.quote() function that can operate on unicode
strings. The url is first UTF-8 encoded before quoting. The returned string
can safely be used as part of an argument to a subsequent iri_to_uri() call
without double-quoting occurring.
"""
return force_text(quote(force_str(url), force_str(safe)))
@keep_lazy_text
def urlquote_plus(url, safe=''):
"""
A version of Python's urllib.quote_plus() function that can operate on
unicode strings. The url is first UTF-8 encoded before quoting. The
returned string can safely be used as part of an argument to a subsequent
iri_to_uri() call without double-quoting occurring.
"""
return force_text(quote_plus(force_str(url), force_str(safe)))
@keep_lazy_text
def urlunquote(quoted_url):
"""
A wrapper for Python's urllib.unquote() function that can operate on
the result of django.utils.http.urlquote().
"""
return force_text(unquote(force_str(quoted_url)))
@keep_lazy_text
def urlunquote_plus(quoted_url):
"""
A wrapper for Python's urllib.unquote_plus() function that can operate on
the result of django.utils.http.urlquote_plus().
"""
return force_text(unquote_plus(force_str(quoted_url)))
def urlencode(query, doseq=0):
"""
A version of Python's urllib.urlencode() function that can operate on
unicode strings. The parameters are first cast to UTF-8 encoded strings and
then encoded as per normal.
"""
if isinstance(query, MultiValueDict):
query = query.lists()
elif hasattr(query, 'items'):
query = query.items()
return original_urlencode(
[(force_str(k),
[force_str(i) for i in v] if isinstance(v, (list, tuple)) else force_str(v))
for k, v in query],
doseq)
def cookie_date(epoch_seconds=None):
"""
Formats the time to ensure compatibility with Netscape's cookie standard.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
"""
rfcdate = formatdate(epoch_seconds)
return '%s-%s-%s GMT' % (rfcdate[:7], rfcdate[8:11], rfcdate[12:25])
def http_date(epoch_seconds=None):
"""
Formats the time to match the RFC1123 date format as specified by HTTP
RFC2616 section 3.3.1.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
"""
return formatdate(epoch_seconds, usegmt=True)
def parse_http_date(date):
"""
Parses a date format as specified by HTTP RFC2616 section 3.3.1.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Returns an integer expressed in seconds since the epoch, in UTC.
"""
# emails.Util.parsedate does the job for RFC1123 dates; unfortunately
# RFC2616 makes it mandatory to support RFC850 dates too. So we roll
# our own RFC-compliant parsing.
for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
m = regex.match(date)
if m is not None:
break
else:
raise ValueError("%r is not in a valid HTTP date format" % date)
try:
year = int(m.group('year'))
if year < 100:
if year < 70:
year += 2000
else:
year += 1900
month = MONTHS.index(m.group('mon').lower()) + 1
day = int(m.group('day'))
hour = int(m.group('hour'))
min = int(m.group('min'))
sec = int(m.group('sec'))
result = datetime.datetime(year, month, day, hour, min, sec)
return calendar.timegm(result.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % date), sys.exc_info()[2])
def parse_http_date_safe(date):
"""
Same as parse_http_date, but returns None if the input is invalid.
"""
try:
return parse_http_date(date)
except Exception:
pass
# Base 36 functions: useful for generating compact URLs
def base36_to_int(s):
"""
Converts a base 36 string to an ``int``. Raises ``ValueError` if the
input won't fit into an int.
"""
# To prevent overconsumption of server resources, reject any
# base36 string that is long than 13 base36 digits (13 digits
# is sufficient to base36-encode any 64-bit integer)
if len(s) > 13:
raise ValueError("Base36 input too large")
value = int(s, 36)
# ... then do a final check that the value will fit into an int to avoid
# returning a long (#15067). The long type was removed in Python 3.
if six.PY2 and value > sys.maxint:
raise ValueError("Base36 input too large")
return value
def int_to_base36(i):
"""
Converts an integer to a base36 string
"""
char_set = '0123456789abcdefghijklmnopqrstuvwxyz'
if i < 0:
raise ValueError("Negative base36 conversion input.")
if six.PY2:
if not isinstance(i, six.integer_types):
raise TypeError("Non-integer base36 conversion input.")
if i > sys.maxint:
raise ValueError("Base36 conversion input too large.")
if i < 36:
return char_set[i]
b36 = ''
while i != 0:
i, n = divmod(i, 36)
b36 = char_set[n] + b36
return b36
def urlsafe_base64_encode(s):
"""
Encodes a bytestring in base64 for use in URLs, stripping any trailing
equal signs.
"""
return base64.urlsafe_b64encode(s).rstrip(b'\n=')
def urlsafe_base64_decode(s):
"""
Decodes a base64 encoded string, adding back any trailing equal signs that
might have been stripped.
"""
s = force_bytes(s)
try:
return base64.urlsafe_b64decode(s.ljust(len(s) + len(s) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def parse_etags(etag_str):
"""
Parses a string with one or several etags passed in If-None-Match and
If-Match headers by the rules in RFC 2616. Returns a list of etags
without surrounding double quotes (") and unescaped from \<CHAR>.
"""
etags = ETAG_MATCH.findall(etag_str)
if not etags:
# etag_str has wrong format, treat it as an opaque string then
return [etag_str]
etags = [e.encode('ascii').decode('unicode_escape') for e in etags]
return etags
def quote_etag(etag):
"""
Wraps a string in double quotes escaping contents as necessary.
"""
return '"%s"' % etag.replace('\\', '\\\\').replace('"', '\\"')
def unquote_etag(etag):
"""
Unquote an ETag string; i.e. revert quote_etag().
"""
return etag.strip('"').replace('\\"', '"').replace('\\\\', '\\') if etag else etag
def is_same_domain(host, pattern):
"""
Return ``True`` if the host is either an exact match or a match
to the wildcard pattern.
Any pattern beginning with a period matches a domain and all of its
subdomains. (e.g. ``.example.com`` matches ``example.com`` and
``foo.example.com``). Anything else is an exact string match.
"""
if not pattern:
return False
pattern = pattern.lower()
return (
pattern[0] == '.' and (host.endswith(pattern) or host == pattern[1:]) or
pattern == host
)
def is_safe_url(url, host=None):
"""
Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
a different host and uses a safe scheme).
Always returns ``False`` on an empty url.
"""
if url is not None:
url = url.strip()
if not url:
return False
# Chrome treats \ completely as / in paths but it could be part of some
# basic auth credentials so we need to check both URLs.
return _is_safe_url(url, host) and _is_safe_url(url.replace('\\', '/'), host)
def _is_safe_url(url, host):
# Chrome considers any URL with more than two slashes to be absolute, but
# urlparse is not so flexible. Treat any url with three slashes as unsafe.
if url.startswith('///'):
return False
url_info = urlparse(url)
# Forbid URLs like http:///example.com - with a scheme, but without a hostname.
# In that URL, example.com is not the hostname but, a path component. However,
# Chrome will still consider example.com to be the hostname, so we must not
# allow this syntax.
if not url_info.netloc and url_info.scheme:
return False
# Forbid URLs that start with control characters. Some browsers (like
# Chrome) ignore quite a few control characters at the start of a
# URL and might consider the URL as scheme relative.
if unicodedata.category(url[0])[0] == 'C':
return False
return ((not url_info.netloc or url_info.netloc == host) and
(not url_info.scheme or url_info.scheme in ['http', 'https']))
| xss | {
"code": [
" url = url.replace('\\\\', '/')"
],
"line_no": [
294
]
} | {
"code": [
"def _is_safe_url(url, host):"
],
"line_no": [
298
]
} | from __future__ import unicode_literals
import base64
import calendar
import .datetime
import re
import .sys
import unicodedata
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils import .six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.functional import keep_lazy_text
from django.utils.six.moves.urllib.parse import (
quote, quote_plus, unquote, unquote_plus, FUNC_4 as original_urlencode,
urlparse,
)
VAR_0 = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
VAR_1 = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
VAR_2 = r'(?P<VAR_36>\d{2})'
VAR_3 = r'(?P<VAR_36>[ \d]\d)'
VAR_4 = r'(?P<mon>\w{3})'
VAR_5 = r'(?P<VAR_34>\d{4})'
VAR_6 = r'(?P<VAR_34>\d{2})'
VAR_7 = r'(?P<VAR_37>\d{2}):(?P<VAR_38>\d{2}):(?P<VAR_39>\d{2})'
VAR_8 = re.compile(r'^\w{3}, %VAR_21 %VAR_21 %VAR_21 %s GMT$' % (VAR_2, VAR_4, VAR_5, VAR_7))
VAR_9 = re.compile(r'^\w{6,9}, %VAR_21-%VAR_21-%VAR_21 %s GMT$' % (VAR_2, VAR_4, VAR_6, VAR_7))
VAR_10 = re.compile(r'^\w{3} %VAR_21 %VAR_21 %VAR_21 %s$' % (VAR_4, VAR_3, VAR_7, VAR_5))
VAR_11 = str(":/?#[]@")
VAR_12 = str("!$&'()*+,;=")
VAR_13 = {
'http': 80,
'https': 443,
}
@keep_lazy_text
def FUNC_0(VAR_14, VAR_15='/'):
return force_text(quote(force_str(VAR_14), force_str(VAR_15)))
@keep_lazy_text
def FUNC_1(VAR_14, VAR_15=''):
return force_text(quote_plus(force_str(VAR_14), force_str(VAR_15)))
@keep_lazy_text
def FUNC_2(VAR_16):
return force_text(unquote(force_str(VAR_16)))
@keep_lazy_text
def FUNC_3(VAR_16):
return force_text(unquote_plus(force_str(VAR_16)))
def FUNC_4(VAR_17, VAR_18=0):
if isinstance(VAR_17, MultiValueDict):
VAR_17 = query.lists()
elif hasattr(VAR_17, 'items'):
VAR_17 = query.items()
return original_urlencode(
[(force_str(k),
[force_str(VAR_22) for VAR_22 in v] if isinstance(v, (list, tuple)) else force_str(v))
for k, v in VAR_17],
VAR_18)
def FUNC_5(VAR_19=None):
VAR_27 = formatdate(VAR_19)
return '%VAR_21-%s-%VAR_21 GMT' % (VAR_27[:7], VAR_27[8:11], VAR_27[12:25])
def FUNC_6(VAR_19=None):
return formatdate(VAR_19, usegmt=True)
def FUNC_7(VAR_20):
for regex in VAR_8, VAR_9, VAR_10:
VAR_33 = regex.match(VAR_20)
if VAR_33 is not None:
break
else:
raise ValueError("%r is not in a valid HTTP VAR_20 format" % VAR_20)
try:
VAR_34 = int(VAR_33.group('year'))
if VAR_34 < 100:
if VAR_34 < 70:
VAR_34 += 2000
else:
VAR_34 += 1900
VAR_35 = VAR_1.index(VAR_33.group('mon').lower()) + 1
VAR_36 = int(VAR_33.group('day'))
VAR_37 = int(VAR_33.group('hour'))
VAR_38 = int(VAR_33.group('min'))
VAR_39 = int(VAR_33.group('sec'))
VAR_40 = datetime.datetime(VAR_34, VAR_35, VAR_36, VAR_37, VAR_38, VAR_39)
return calendar.timegm(VAR_40.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % VAR_20), sys.exc_info()[2])
def FUNC_8(VAR_20):
try:
return FUNC_7(VAR_20)
except Exception:
pass
def FUNC_9(VAR_21):
if len(VAR_21) > 13:
raise ValueError("Base36 input too large")
VAR_28 = int(VAR_21, 36)
if six.PY2 and VAR_28 > sys.maxint:
raise ValueError("Base36 input too large")
return VAR_28
def FUNC_10(VAR_22):
VAR_29 = '0123456789abcdefghijklmnopqrstuvwxyz'
if VAR_22 < 0:
raise ValueError("Negative base36 conversion input.")
if six.PY2:
if not isinstance(VAR_22, six.integer_types):
raise TypeError("Non-integer base36 conversion input.")
if VAR_22 > sys.maxint:
raise ValueError("Base36 conversion input too large.")
if VAR_22 < 36:
return VAR_29[VAR_22]
VAR_30 = ''
while VAR_22 != 0:
VAR_22, VAR_41 = divmod(VAR_22, 36)
VAR_30 = VAR_29[VAR_41] + VAR_30
return VAR_30
def FUNC_11(VAR_21):
return base64.urlsafe_b64encode(VAR_21).rstrip(b'\VAR_41=')
def FUNC_12(VAR_21):
VAR_21 = force_bytes(VAR_21)
try:
return base64.urlsafe_b64decode(VAR_21.ljust(len(VAR_21) + len(VAR_21) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def FUNC_13(VAR_23):
VAR_31 = VAR_0.findall(VAR_23)
if not VAR_31:
return [VAR_23]
VAR_31 = [e.encode('ascii').decode('unicode_escape') for e in VAR_31]
return VAR_31
def FUNC_14(VAR_24):
return '"%s"' % VAR_24.replace('\\', '\\\\').replace('"', '\\"')
def FUNC_15(VAR_24):
return VAR_24.strip('"').replace('\\"', '"').replace('\\\\', '\\') if VAR_24 else VAR_24
def FUNC_16(VAR_25, VAR_26):
if not VAR_26:
return False
VAR_26 = pattern.lower()
return (
VAR_26[0] == '.' and (VAR_25.endswith(VAR_26) or VAR_25 == VAR_26[1:]) or
VAR_26 == VAR_25
)
def FUNC_17(VAR_14, VAR_25=None):
if VAR_14 is not None:
VAR_14 = VAR_14.strip()
if not VAR_14:
return False
VAR_14 = VAR_14.replace('\\', '/')
if VAR_14.startswith('///'):
return False
VAR_32 = urlparse(VAR_14)
if not VAR_32.netloc and VAR_32.scheme:
return False
if unicodedata.category(VAR_14[0])[0] == 'C':
return False
return ((not VAR_32.netloc or VAR_32.netloc == VAR_25) and
(not VAR_32.scheme or VAR_32.scheme in ['http', 'https']))
| from __future__ import unicode_literals
import base64
import calendar
import .datetime
import re
import .sys
import unicodedata
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils import .six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.functional import keep_lazy_text
from django.utils.six.moves.urllib.parse import (
quote, quote_plus, unquote, unquote_plus, FUNC_4 as original_urlencode,
urlparse,
)
VAR_0 = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
VAR_1 = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
VAR_2 = r'(?P<VAR_36>\d{2})'
VAR_3 = r'(?P<VAR_36>[ \d]\d)'
VAR_4 = r'(?P<mon>\w{3})'
VAR_5 = r'(?P<VAR_34>\d{4})'
VAR_6 = r'(?P<VAR_34>\d{2})'
VAR_7 = r'(?P<VAR_37>\d{2}):(?P<VAR_38>\d{2}):(?P<VAR_39>\d{2})'
VAR_8 = re.compile(r'^\w{3}, %VAR_21 %VAR_21 %VAR_21 %s GMT$' % (VAR_2, VAR_4, VAR_5, VAR_7))
VAR_9 = re.compile(r'^\w{6,9}, %VAR_21-%VAR_21-%VAR_21 %s GMT$' % (VAR_2, VAR_4, VAR_6, VAR_7))
VAR_10 = re.compile(r'^\w{3} %VAR_21 %VAR_21 %VAR_21 %s$' % (VAR_4, VAR_3, VAR_7, VAR_5))
VAR_11 = str(":/?#[]@")
VAR_12 = str("!$&'()*+,;=")
VAR_13 = {
'http': 80,
'https': 443,
}
@keep_lazy_text
def FUNC_0(VAR_14, VAR_15='/'):
return force_text(quote(force_str(VAR_14), force_str(VAR_15)))
@keep_lazy_text
def FUNC_1(VAR_14, VAR_15=''):
return force_text(quote_plus(force_str(VAR_14), force_str(VAR_15)))
@keep_lazy_text
def FUNC_2(VAR_16):
return force_text(unquote(force_str(VAR_16)))
@keep_lazy_text
def FUNC_3(VAR_16):
return force_text(unquote_plus(force_str(VAR_16)))
def FUNC_4(VAR_17, VAR_18=0):
if isinstance(VAR_17, MultiValueDict):
VAR_17 = query.lists()
elif hasattr(VAR_17, 'items'):
VAR_17 = query.items()
return original_urlencode(
[(force_str(k),
[force_str(VAR_22) for VAR_22 in v] if isinstance(v, (list, tuple)) else force_str(v))
for k, v in VAR_17],
VAR_18)
def FUNC_5(VAR_19=None):
VAR_27 = formatdate(VAR_19)
return '%VAR_21-%s-%VAR_21 GMT' % (VAR_27[:7], VAR_27[8:11], VAR_27[12:25])
def FUNC_6(VAR_19=None):
return formatdate(VAR_19, usegmt=True)
def FUNC_7(VAR_20):
for regex in VAR_8, VAR_9, VAR_10:
VAR_33 = regex.match(VAR_20)
if VAR_33 is not None:
break
else:
raise ValueError("%r is not in a valid HTTP VAR_20 format" % VAR_20)
try:
VAR_34 = int(VAR_33.group('year'))
if VAR_34 < 100:
if VAR_34 < 70:
VAR_34 += 2000
else:
VAR_34 += 1900
VAR_35 = VAR_1.index(VAR_33.group('mon').lower()) + 1
VAR_36 = int(VAR_33.group('day'))
VAR_37 = int(VAR_33.group('hour'))
VAR_38 = int(VAR_33.group('min'))
VAR_39 = int(VAR_33.group('sec'))
VAR_40 = datetime.datetime(VAR_34, VAR_35, VAR_36, VAR_37, VAR_38, VAR_39)
return calendar.timegm(VAR_40.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % VAR_20), sys.exc_info()[2])
def FUNC_8(VAR_20):
try:
return FUNC_7(VAR_20)
except Exception:
pass
def FUNC_9(VAR_21):
if len(VAR_21) > 13:
raise ValueError("Base36 input too large")
VAR_28 = int(VAR_21, 36)
if six.PY2 and VAR_28 > sys.maxint:
raise ValueError("Base36 input too large")
return VAR_28
def FUNC_10(VAR_22):
VAR_29 = '0123456789abcdefghijklmnopqrstuvwxyz'
if VAR_22 < 0:
raise ValueError("Negative base36 conversion input.")
if six.PY2:
if not isinstance(VAR_22, six.integer_types):
raise TypeError("Non-integer base36 conversion input.")
if VAR_22 > sys.maxint:
raise ValueError("Base36 conversion input too large.")
if VAR_22 < 36:
return VAR_29[VAR_22]
VAR_30 = ''
while VAR_22 != 0:
VAR_22, VAR_41 = divmod(VAR_22, 36)
VAR_30 = VAR_29[VAR_41] + VAR_30
return VAR_30
def FUNC_11(VAR_21):
return base64.urlsafe_b64encode(VAR_21).rstrip(b'\VAR_41=')
def FUNC_12(VAR_21):
VAR_21 = force_bytes(VAR_21)
try:
return base64.urlsafe_b64decode(VAR_21.ljust(len(VAR_21) + len(VAR_21) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def FUNC_13(VAR_23):
VAR_31 = VAR_0.findall(VAR_23)
if not VAR_31:
return [VAR_23]
VAR_31 = [e.encode('ascii').decode('unicode_escape') for e in VAR_31]
return VAR_31
def FUNC_14(VAR_24):
return '"%s"' % VAR_24.replace('\\', '\\\\').replace('"', '\\"')
def FUNC_15(VAR_24):
return VAR_24.strip('"').replace('\\"', '"').replace('\\\\', '\\') if VAR_24 else VAR_24
def FUNC_16(VAR_25, VAR_26):
if not VAR_26:
return False
VAR_26 = pattern.lower()
return (
VAR_26[0] == '.' and (VAR_25.endswith(VAR_26) or VAR_25 == VAR_26[1:]) or
VAR_26 == VAR_25
)
def FUNC_17(VAR_14, VAR_25=None):
if VAR_14 is not None:
VAR_14 = VAR_14.strip()
if not VAR_14:
return False
return FUNC_18(VAR_14, VAR_25) and FUNC_18(VAR_14.replace('\\', '/'), VAR_25)
def FUNC_18(VAR_14, VAR_25):
if VAR_14.startswith('///'):
return False
VAR_32 = urlparse(VAR_14)
if not VAR_32.netloc and VAR_32.scheme:
return False
if unicodedata.category(VAR_14[0])[0] == 'C':
return False
return ((not VAR_32.netloc or VAR_32.netloc == VAR_25) and
(not VAR_32.scheme or VAR_32.scheme in ['http', 'https']))
| [
2,
11,
20,
22,
33,
36,
41,
42,
52,
53,
63,
64,
72,
73,
81,
82,
98,
99,
103,
107,
112,
113,
118,
122,
126,
127,
131,
134,
137,
138,
139,
162,
163,
172,
173,
174,
175,
181,
182,
183,
187,
188,
192,
193,
213,
214,
221,
222,
233,
234,
243,
247,
248,
254,
255,
261,
262,
267,
274,
280,
281,
286,
293,
295,
296,
300,
301,
302,
303,
306,
307,
308,
313,
45,
46,
47,
48,
49,
50,
56,
57,
58,
59,
60,
61,
67,
68,
69,
70,
76,
77,
78,
79,
84,
85,
86,
87,
88,
101,
102,
103,
104,
105,
106,
107,
108,
109,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
129,
130,
131,
132,
133,
134,
135,
136,
165,
166,
167,
177,
178,
179,
180,
195,
196,
197,
216,
217,
218,
219,
224,
225,
226,
227,
236,
237,
238,
239,
240,
250,
251,
252,
257,
258,
259,
264,
265,
266,
267,
268,
269,
270,
271,
283,
284,
285,
286,
287,
288
] | [
2,
11,
20,
22,
33,
36,
41,
42,
52,
53,
63,
64,
72,
73,
81,
82,
98,
99,
103,
107,
112,
113,
118,
122,
126,
127,
131,
134,
137,
138,
139,
162,
163,
172,
173,
174,
175,
181,
182,
183,
187,
188,
192,
193,
213,
214,
221,
222,
233,
234,
243,
247,
248,
254,
255,
261,
262,
267,
274,
280,
281,
286,
293,
294,
296,
297,
299,
300,
304,
305,
306,
307,
310,
311,
312,
317,
45,
46,
47,
48,
49,
50,
56,
57,
58,
59,
60,
61,
67,
68,
69,
70,
76,
77,
78,
79,
84,
85,
86,
87,
88,
101,
102,
103,
104,
105,
106,
107,
108,
109,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
129,
130,
131,
132,
133,
134,
135,
136,
165,
166,
167,
177,
178,
179,
180,
195,
196,
197,
216,
217,
218,
219,
224,
225,
226,
227,
236,
237,
238,
239,
240,
250,
251,
252,
257,
258,
259,
264,
265,
266,
267,
268,
269,
270,
271,
283,
284,
285,
286,
287,
288
] |
0CWE-22
| import os
import jwt
import json
import requests
import time
import select
import termios
import struct
import fcntl
import subprocess
import pty
import sys
import datetime
from functools import wraps
from werkzeug.utils import secure_filename
from werkzeug.security import generate_password_hash, check_password_hash
from flask import Flask, request, jsonify, render_template, flash, redirect, url_for, session, send_file, make_response
from flask_login import login_user, login_required, logout_user, current_user, UserMixin
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_socketio import SocketIO
from GangaGUI.gui.config import Config
# ******************** Initialisation of Flask App for GUI ******************** #
# GUI Flask App and set configuration from ./config.py file
gui = Flask(__name__)
gui.config.from_object(Config)
# Database object which is used to interact with the "gui.sqlite" in gangadir/gui folder
# NOTE: IT HAS NO RELATION WITH THE GANGA PERSISTENT DATABASE
db = SQLAlchemy(gui)
# Login manage for the view routes
login = LoginManager(gui)
login.login_view = "login"
login.login_message = "Please Login to Access this Page."
login.login_message_category = "warning"
# For websocket, for communication between frontend and backend
socketio = SocketIO(gui)
# ******************** The user class for database and authentication ******************** #
# ORM Class to represent Users - used to access the GUI & API resources
class User(UserMixin, db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
public_id = db.Column(db.String(64), unique=True)
user = db.Column(db.String(32), unique=True)
password_hash = db.Column(db.String(64))
role = db.Column(db.String(32))
pinned_jobs = db.Column(db.Text)
def store_password_hash(self, password: str):
self.password_hash = generate_password_hash(password)
def verify_password(self, password: str) -> bool:
return check_password_hash(self.password_hash, password)
def generate_auth_token(self, expires_in_days: int = 5) -> str:
return jwt.encode(
{"public_id": self.public_id, "exp": datetime.datetime.utcnow() + datetime.timedelta(days=expires_in_days)},
gui.config["SECRET_KEY"], algorithm="HS256")
def __repr__(self):
return "User {}: {} (Public ID: {}, Role: {})".format(self.id, self.user, self.public_id, self.role)
# User Loader Function for Flask Login
@login.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
# ******************** Global Variables ******************** #
# Colors showed for different job statuses in the GUI based on Bootstrap CSS
status_color = {
"new": "info",
"completed": "success",
"completed_frozen" : "success",
"failed": "danger",
"failed_frozen" : "danger",
"running": "primary",
"submitted": "secondary",
"killed": "warning"
}
# Allowed extensions when uploading any files to GUI
ALLOWED_EXTENSIONS = {"txt", "py"}
# Variables to globally store plugins and actions
actions = {}
plugins = {}
# ******************** Run Before First Request ******************** #
# Execute before first request
@gui.before_first_request
def initial_run():
"""
This function runs before first request. It stores actions and plugins information from the ganga. It create default session cookies. If WEB_CLI is also started then it also starts a Ganga session.
"""
global actions, plugins
# Start ganga if WEB_CLI mode is True
if gui.config['WEB_CLI'] is True:
start_ganga(gui.config['INTERNAL_PORT'], args=gui.config["GANGA_ARGS"])
session["WEB_CLI"] = True
elif gui.config['INTERNAL_PORT'] is None:
gui.config['INTERNAL_PORT'] = os.environ['INTERNAL_PORT']
# If user is authenticated, log them out. This happens after a fresh start of the GUI server.
if current_user.is_authenticated:
logout_user()
# Create user session defaults
create_session_defaults()
# Check if internal server is online, exit after 20s of retrying
if not ping_internal():
print("INTERNAL SERVER UNAVAILABLE, TERMINATING...")
sys.exit(1)
# Get job actions and plugins information from ganga
try:
# Get actions and plugins data once
actions = query_internal_api("/internal/jobs/actions", "get")
plugins = query_internal_api("/internal/plugins", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
# ******************** View Routes ******************** #
# Login View
@gui.route("/login", methods=["GET", "POST"])
def login():
"""
Handles login route of the GUI.
"""
# If already authenticated, logout
if current_user.is_authenticated:
return redirect(url_for("dashboard"))
# Login user
if request.method == "POST":
# Form data
username = request.form.get("username")
password = request.form.get("password")
# Database query
user = User.query.filter_by(user=username).first()
# If valid user, login
if user and user.verify_password(password):
login_user(user, True)
flash("Login successful", "success")
return redirect(url_for("dashboard"))
flash("Error identifying the user", "danger")
# Get users from the database
users = User.query.all()
return render_template("login.html", title="Login", users=users)
# Logout View
@gui.route("/logout", methods=["GET"])
def logout():
"""
Logout user from GUI
"""
# Logout
if current_user.is_authenticated:
logout_user()
return redirect(url_for("login"))
# Dashboard view
@gui.route("/")
@login_required
def dashboard():
"""
Handles the dashboard route of the GUI.
"""
quick_statistics = {}
recent_jobs_info = []
pinned_jobs_info = []
try:
# Query overall statistics
quick_statistics = query_internal_api("/internal/jobs/statistics", "get")
# Query recent 10 jobs
recent_jobs_info = query_internal_api("/internal/jobs/recent", "get")
# Query pinned jobs
u = current_user
pinned_jobs_info = query_internal_api("/internal/jobs", "get", params={
"ids": u.pinned_jobs if u.pinned_jobs is not None else json.dumps([]),
"auto-validate-ids": True})
except Exception as err:
# Flash the error in the GUI
flash(str(err), "danger")
return render_template("dashboard.html",
title="Dashboard",
quick_statistics=quick_statistics,
recent_jobs_info=recent_jobs_info,
pinned_jobs_info=pinned_jobs_info,
status_color=status_color)
# Config view
@gui.route("/config", methods=["GET", "POST"])
@login_required
def config_page():
"""
Handles the config route of the GUI.
"""
full_config_info = []
config_info = []
section = None
# When GUI request for specific section
if request.method == "POST":
# Get section name for request form data
section = request.form.get("section")
section = None if section in ["", None] else section
try:
# Query full config
full_config_info = query_internal_api("/internal/config", "get")
# If asked for specific section, add only that for displaying
config_info = full_config_info if section is None else [s for s in full_config_info if s["name"] == section]
except Exception as err:
# Flash the error in the GUI
flash(str(err), "danger")
return render_template("config.html", title="Config", full_config_info=full_config_info, config_info=config_info)
#Edit gangarc
@gui.route("/config_edit",methods=["GET", "POST"])
@login_required
def edit_config_page():
"""
Edit gangarc file from the GUI
"""
gui_rc = gui.config["GANGA_RC"]
with open(gui_rc, "rt") as f:
ganga_config = f.read()
if request.method == 'POST':
config_ganga = request.form['config-data']
with open(gui_rc, 'w') as f1:
f1.write(str(config_ganga))
flash(".gangarc Edited", "success")
with open(gui_rc, "rt") as f2:
ganga_config = f2.read()
return render_template("config_edit.html", title="Edit gangarc", ganga_config=ganga_config)
@login_required
# Create view
@gui.route("/create", methods=["GET", "POST"])
def create_page():
"""
Handles create route of the GUI.
"""
# Handle file uploads
if request.method == "POST":
# Load from the uploaded file
if "loadfile" in request.files:
loadfile = request.files["loadfile"]
if loadfile.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
# If valid file, the save the file
if loadfile and allowed_file(loadfile.filename):
save_path = os.path.join(gui.config["UPLOAD_FOLDER"], "loadfile.txt")
loadfile.save(save_path)
# Load the file
try:
# Query to load the file
response_info = query_internal_api("/internal/load", "get", params={"path": save_path})
except Exception as err:
# Display error in the GUI
flash(str(err), "danger")
return redirect(request.url)
# Success message
flash(response_info.get("message"), "success")
return redirect(request.url)
# Run file using the runfile GPI function
if "runfile" in request.files:
runfile = request.files["runfile"]
if runfile.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
# If valid file, save the file
if runfile and allowed_file(runfile.filename):
save_path = os.path.join(gui.config["UPLOAD_FOLDER"], "runfile.py")
runfile.save(save_path)
# Run the file
try:
# Query ganga to run the file
response_info = query_internal_api("/internal/runfile", "get", params={"path": save_path})
except Exception as err:
# Display error back to GUI
flash(str(err), "danger")
return redirect(request.url)
# Success message
flash(response_info.get("message"), "success")
return redirect(request.url)
# No file case
flash("No file, retry!", "warning")
return redirect(request.url)
try:
# Query templates info
templates_info = query_internal_api("/internal/templates", "get",
params={"recent": True, "length": "6"})
except Exception as err:
# Display error to GUI
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("create.html", title="Create", templates_info=templates_info)
# Runfile view
@gui.route("/create/runfile", methods=["GET", "POST"])
@login_required
def runfile_page():
"""
Quick create a runfile to be run using the runfile GPI function.
"""
# Runfile path
runfile_path = os.path.join(gui.config["UPLOAD_FOLDER"], "runfile.py")
# Save runfile data from frontend
if request.method == "POST":
runfile_data = request.form.get("runfile-data")
with open(runfile_path, "w+") as f:
f.write(runfile_data)
# Run the file
try:
# Query ganga to run the file
response_info = query_internal_api("/internal/runfile", "get", params={"path": runfile_path})
flash(response_info["message"], "success")
except Exception as err:
# Display error back in the GUI
flash(str(err), "danger")
return redirect(request.url)
return render_template("runfile.html", title="Runfile")
# Templates view
@gui.route("/templates", methods=["GET", "POST"])
@login_required
def templates_page():
"""
Handles the templates route of the GUI. Displays templates in a tabular form.
"""
# Update filter values
if request.method == "POST":
# Add filter data to user session
session["templates_per_page"] = int(request.form.get("templates-per-page"))
session["templates_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["application", "backend"], ["template-application", "template-backend"])}
# Current page
current_page = int(request.args.get("page")) if request.args.get("page") is not None else 0
# Get user defined value from session
templates_per_page = session["templates_per_page"]
try:
# Query total number of templates
templates_length = query_internal_api("/internal/templates/length", "get", params=session["templates_filter"])
# Calculate number of max pages
number_of_pages = (int(templates_length) // int(templates_per_page)) + 1
# if current page exceeds last possible page, redirect to last page
if current_page >= number_of_pages:
return redirect(url_for("templates_page", page=number_of_pages - 1))
# Add templates filters and range options for query params
params = session["templates_filter"].copy()
params.update({
"recent": True,
"length": templates_per_page,
"offset": current_page
})
# Query templates information
templates_info = query_internal_api("/internal/templates", "get", params=params)
except Exception as err:
# Flash error if any
flash(str(err), "danger")
return redirect(url_for("create_page"))
return render_template("templates.html",
title="Templates",
number_of_pages=number_of_pages,
current_page=current_page,
backends=plugins["backends"],
applications=plugins["applications"],
templates_info=templates_info)
# Jobs view
@gui.route("/jobs", methods=["GET", "POST"])
@login_required
def jobs_page():
"""
Handles jobs route of the GUI. Displays jobs in a tabular view.
"""
# Update filter values
if request.method == "POST":
# Add form data to user session
session["jobs_per_page"] = int(request.form.get("jobs-per-page"))
session["jobs_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["status", "application", "backend"],
["job-status", "job-application", "job-backend"])}
# Current page
current_page = int(request.args.get("page")) if request.args.get("page") is not None else 0
# Get user defined value from user session
jobs_per_page = session["jobs_per_page"]
try:
# Query total number of jobs
jobs_length = query_internal_api("/internal/jobs/length", "get", params=session["jobs_filter"])
# Calculate number of max pages
number_of_pages = (int(jobs_length) // int(jobs_per_page)) + 1
# if current page exceeds last possible page, redirect to last page
if current_page >= number_of_pages:
return redirect(url_for("jobs_page", page=number_of_pages - 1))
# Add jobs filters and range options for query params
params = session["jobs_filter"].copy()
params.update({
"recent": True,
"length": jobs_per_page,
"offset": current_page
})
# Query jobs information
jobs_info = query_internal_api("/internal/jobs", "get", params=params)
except Exception as err:
# Display error back to GUI
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("jobs.html",
title="Jobs",
jobs_info=jobs_info,
backends=plugins["backends"],
applications=plugins["applications"],
number_of_pages=number_of_pages,
current_page=current_page,
status_color=status_color)
# Job view
@gui.route('/jobs/<int:job_id>')
@login_required
def job_page(job_id: int):
"""
Handles job route of the GUI. Displays all the information about the job.
:param job_id: int
"""
stdout = None
stderr = None
try:
# Query job information
job_info = query_internal_api(f"/internal/jobs/{job_id}", "get")
# Query full print of the job
full_print_info = query_internal_api(f"/internal/jobs/{job_id}/full-print", "get")
# stdout and stderr path
stdout_path = os.path.join(job_info["outputdir"], "stdout")
stderr_path = os.path.join(job_info["outputdir"], "stderr")
# Get stdout
if os.path.exists(stdout_path):
with open(stdout_path) as f:
stdout = f.read()
# Get stderr
if os.path.exists(stderr_path):
with open(stderr_path) as f:
stderr = f.read()
except Exception as err:
# Display error on the GUI
flash(str(err), "danger")
return redirect(url_for("jobs_page"))
return render_template("job.html",
title=f"Job {job_id}",
job_info=job_info,
status_color=status_color,
attribute_actions=actions.get("attributes"),
method_actions=actions.get("methods"),
stdout=stdout,
stderr=stderr,
full_print_info=full_print_info)
# Export job
@gui.route("/jobs/<int:job_id>/export")
@login_required
def job_export(job_id: int):
"""
Sends the job file which is generated using export function of GPI.
:param job_id: int
"""
# Path to save file using export GPI function
export_path = os.path.join(gui.config["UPLOAD_FOLDER"], f"export.txt")
try:
# Query to export the job at export path
response_info = query_internal_api(f"/internal/jobs/{job_id}/export", "get", params={"path": export_path})
# Send file
return send_file(export_path, as_attachment=True, cache_timeout=0, attachment_filename=f"Job_{job_id}.txt")
except Exception as err:
# Display error back to GUI
flash(str(err), "danger")
return redirect(url_for("job_page", job_id=job_id))
# Edit job
@gui.route("/jobs/<int:job_id>/edit", methods=["GET", "POST"])
@login_required
def job_edit(job_id: int):
"""
Show the exported job text on the GUI for it to be edited and submit. Will create a new job after submission.
:param job_id: int
"""
# Save paths
loadfile_path = os.path.join(gui.config["UPLOAD_FOLDER"], "loadfile.txt")
export_path = os.path.join(gui.config["UPLOAD_FOLDER"], "export.txt")
# Create a new job with the submitted information
if request.method == "POST":
# Save the edited job info
edited_job_info = request.form.get("edited-job-info")
with open(loadfile_path, "w+") as f:
f.write(edited_job_info)
# Load the file
try:
# Query to load the job
response_info = query_internal_api("/internal/load", "get", params={"path": loadfile_path})
flash(response_info["message"], "success")
except Exception as err:
# Display error on the GUI
flash(str(err), "danger")
return redirect(request.url)
try:
# Query to export the job text
response_info = query_internal_api(f"/internal/jobs/{job_id}/export", "get", params={"path": export_path})
# Read exported job file to display
with open(export_path) as f:
exported_data = f.read()
except Exception as err:
# Display error on the GUI
flash(str(err), "danger")
return redirect(url_for("job_page", job_id=job_id))
return render_template("edit_job.html", title=f"Edit Job {job_id}", job_id=job_id, exported_data=exported_data)
# Browse job directory
@gui.route("/job/<int:job_id>/browse", defaults={"path": ""})
@gui.route("/job/<int:job_id>/browse/<path:path>")
@login_required
def job_browse(job_id: int, path):
"""
Browse directory of the job.
:param job_id: int
:param path: str
"""
try:
# Query job information
job_info = query_internal_api(f"/internal/jobs/{job_id}", "get")
# Base directory of the job
job_base_dir = os.path.dirname(os.path.dirname(job_info["outputdir"]))
except Exception as err:
# Display error on the GUI
flash(str(err), "danger")
return redirect(url_for("job_page", job_id=job_id))
# Join the base and the requested path
abs_path = os.path.join(job_base_dir, path)
# URL path variable for going back
back_path = os.path.dirname(abs_path).replace(job_base_dir, "")
# If path doesn't exist
if not os.path.exists(abs_path):
flash("Directory for this job does not exist.", "warning")
return redirect(url_for("job_page", job_id=job_id))
# Check if path is a file and send
if os.path.isfile(abs_path):
return send_file(abs_path)
files_info = []
# Show directory contents
files = os.listdir(abs_path)
# Store directory information
for file in files:
files_info.append({
"file": file,
"directory": os.path.isdir(os.path.join(abs_path, file))
})
return render_template('job_dir.html', title=f"Job {job_id} Directory",
job_id=job_id,
abs_path=abs_path,
files_info=files_info,
back_path=back_path)
# Subjobs view
@gui.route("/jobs/<int:job_id>/subjobs", methods=["GET", "POST"])
@login_required
def subjobs_page(job_id: int):
"""
Handles subjobs view of the GUI. Displays subjobs of a job in a tabular form.
:param job_id: int
"""
# Change filter values
if request.method == "POST":
# Add form data to client session
session["subjobs_per_page"] = int(request.form.get("subjobs-per-page"))
session["subjobs_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["status", "application", "backend"],
["subjob-status", "subjob-application", "subjob-backend"])}
# Current page
current_page = int(request.args.get("page")) if request.args.get("page") is not None else 0
# Get user defined value from session
subjobs_per_page = session["subjobs_per_page"]
try:
# Query total number of subjobs
subjobs_length = query_internal_api(f"/internal/jobs/{job_id}/subjobs/length", "get",
params=session["subjobs_filter"])
# Calculate number of max pages
number_of_pages = (int(subjobs_length) // int(subjobs_per_page)) + 1
# if current page exceeds last possible page, redirect to last page
if current_page >= number_of_pages:
return redirect(url_for("subjobs_page", page=number_of_pages - 1, job_id=job_id))
# Add subjobs filters and range options for query params
params = session["subjobs_filter"].copy()
params.update({
"recent": True,
"length": subjobs_per_page,
"offset": current_page
})
# Query subjobs information
subjobs_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs", "get", params=params)
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("job_page", job_id=job_id))
return render_template("subjobs.html",
title=f"Subjobs - Job {job_id}",
status_color=status_color,
number_of_pages=number_of_pages,
current_page=current_page,
backends=plugins["backends"],
applications=plugins["applications"],
subjobs_info=subjobs_info,
job_id=job_id)
# Subjob view
@gui.route("/jobs/<int:job_id>/subjobs/<int:subjob_id>", methods=["GET"])
@login_required
def subjob_page(job_id: int, subjob_id: int):
"""
Handles subjob route of the GUI. Displays extensive details of a subjob.
:param job_id: int
:param subjob_id: int
"""
stdout = None
stderr = None
try:
# Query job information
job_outputdir = query_internal_api(f"/internal/jobs/{job_id}/outputdir", "get")
# Query subjob information
subjob_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}", "get")
# Query full print of the job
full_print_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/full-print", "get")
# Extract browse path that can be used by job_browse route
job_dir_basepath = os.path.dirname(os.path.dirname(job_outputdir["outputdir"]))
subjob_dir_basepath = os.path.dirname(os.path.dirname(subjob_info["outputdir"]))
browse_path = subjob_dir_basepath.replace(job_dir_basepath, "")
# stdout and stderr path
stdout_path = os.path.join(subjob_info["outputdir"], "stdout")
stderr_path = os.path.join(subjob_info["outputdir"], "stderr")
# Get stdout
if os.path.exists(stdout_path):
with open(stdout_path) as f:
stdout = f.read()
# Get stderr
if os.path.exists(stderr_path):
with open(stderr_path) as f:
stderr = f.read()
except Exception as err:
# Display error on the GUI
flash(str(err), "danger")
return redirect(url_for("subjobs_page", job_id=job_id))
return render_template("subjob.html",
title=f"Subjob {subjob_id} - Job {job_id}",
subjob_info=subjob_info,
status_color=status_color,
attribute_actions=actions["attributes"],
method_actions=actions["methods"],
stdout=stdout,
stderr=stderr,
full_print_info=full_print_info,
job_id=job_id,
browse_path=browse_path)
# Credential view
@gui.route("/credentials")
@login_required
def credentials_page():
"""
Handles credential store view of the GUI. Displays credentials in a tabular form.
"""
try:
# Query credential store information
credentials_info = query_internal_api("/internal/credentials", "get")
except Exception as err:
# Display error on the GUI
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('credentials.html', credential_info_list=credentials_info)
@gui.route("/queue", methods=["GET"])
@login_required
def queue_page():
"""
Displays queues information
"""
try:
queue_info = query_internal_api("/internal/queue", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('queue.html', queue_info_list=queue_info)
# Plugins view
@gui.route('/plugins')
@login_required
def plugins_page():
"""
Handles plugins route of the GUI. Displays the list of plugins.
"""
try:
# Store plugins information
plugins_info = plugins
except Exception as err:
# Display error on the GUI
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('plugins.html', plugins_info=plugins_info)
# Plugin view
@gui.route("/plugin/<plugin_name>")
@login_required
def plugin_page(plugin_name: str):
"""
Displays information about the plugin like it's docstring.
:param plugin_name: str
"""
try:
# Query plugin information
plugin_info = query_internal_api(f"/internal/plugins/{plugin_name}", "get")
except Exception as err:
# Display error on the GUI
flash(str(err), "danger")
return redirect(url_for("plugins_page"))
return render_template("plugin.html", title=f"{plugin_name}", plugin_info=plugin_info)
# Ganga logs view
@gui.route("/logs")
@login_required
def logs_page():
"""
Diplay ganga log file.
:return:
"""
ganga_log_path = gui.config["GANGA_LOG"]
gui_accesslog_path = gui.config["ACCESS_LOG"]
gui_errorlog_path = gui.config["ERROR_LOG"]
try:
# Get ganga log
with open(ganga_log_path, "rt") as f:
ganga_log_data = f.read()
# Get GUI access log
with open(gui_accesslog_path, "rt") as f:
gui_accesslog_data = f.read()
# Get GUI error log
with open(gui_errorlog_path, "rt") as f:
gui_errorlog_data = f.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("logs.html", title="Logs", ganga_log_data=ganga_log_data,
gui_accesslog_data=gui_accesslog_data, gui_errorlog_data=gui_errorlog_data)
@gui.route("/storage", defaults={"path": ""}, methods=["GET", "POST"])
@gui.route("/storage/<path:path>", methods=["GET", "POST"])
@login_required
def storage_page(path):
"""
A convenience feature to store some file remotely in gangadir/storage
"""
# Storage folder path
storage_folder = gui.config["STORAGE_FOLDER"]
# Join the storage path and the requested path
abs_path = os.path.join(storage_folder, path)
# Handle file uploads
if request.method == "POST":
# Uploaded file
if "storagefile" in request.files:
storagefile = request.files["storagefile"]
if storagefile.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
# If valid file, the save the file
if storagefile:
# Directory check
if not os.path.isdir(abs_path):
flash("Error while uploading the file", "danger")
return redirect(request.url)
filename = secure_filename(storagefile.filename)
save_path = os.path.join(abs_path, filename)
storagefile.save(save_path)
# Success message
flash("Successfully uploaded the file.", "success")
return redirect(request.url)
# No file case
flash("No file, retry!", "warning")
return redirect(request.url)
# URL path variable for going back
back_path = os.path.dirname(abs_path).replace(storage_folder, "")
# If path doesn't exist
if not os.path.exists(abs_path):
flash("Directory does not exist.", "warning")
return redirect(url_for("dashboard"))
# Check if path is a file and send
if os.path.isfile(abs_path):
return send_file(abs_path)
files_info = []
# Show directory contents
files = os.listdir(abs_path)
# Store directory information
for file in files:
files_info.append({
"file": file,
"directory": os.path.isdir(os.path.join(abs_path, file))
})
return render_template("storage.html", title="Storage",
abs_path=abs_path,
files_info=files_info,
back_path=back_path)
    # Serve CLI
    @gui.route("/cli")
    @login_required
    def serve_cli():
        """Render the in-browser terminal (CLI) page."""
        return render_template("cli.html")
# Establish a websocket connection from the frontend to the server
@socketio.on("connect", namespace="/pty")
def connect():
"""
New client connected, start reading and writing from the pseudo terminal.
"""
if gui.config["CHILD_PID"] and current_user.is_authenticated:
# Start background reading and emitting the output of the pseudo terminal
socketio.start_background_task(target=read_and_forward_pty_output)
return
# Input from the frontend
@socketio.on("pty-input", namespace="/pty")
def pty_input(data):
"""
Write to the child pty. The pty sees this as if you are typing in a real terminal.
"""
if gui.config["FD"] and current_user.is_authenticated:
os.write(gui.config["FD"], data["input"].encode())
# Resize the pseudo terminal when the frontend is resized
@socketio.on("resize", namespace="/pty")
def resize(data):
"""
Resize the pseudo terminal according to the dimension at the frontend.
:param data: contains information about rows and cols of the frontend terminal.
"""
if gui.config["FD"] and current_user.is_authenticated:
set_windowsize(gui.config["FD"], data["rows"], data["cols"])
# ******************** Token Based Authentication ******************** #
# Generate token for API authentication - token validity 5 days
@gui.route("/token", methods=["POST"])
def generate_token():
"""
Using the 'user' and 'password' data from the form body, validates the user and returns a JSON Web Token (JWT).
"""
# Request form data
request_json = request.json if request.json else {}
request_user = request_json.get("username")
request_password = request_json.get("password")
# Handle no user or no password case
if not request_user or not request_password:
response_data = {"success": False, "message": "Could not verify user."}
return jsonify(response_data), 401
# Verify user and accordingly return the token
user = User.query.filter_by(user=request_user).first()
if user and user.verify_password(request_password):
token = user.generate_auth_token().decode("UTF-8")
response_data = {"token": token}
return jsonify(response_data)
# If authentication fails, return 401 HTTP code
response_data = {"success": False, "message": "Could not verify user."}
return jsonify(response_data), 401
# ******************** Token Authentication Decorator ******************** #
# Decorator for token protected routes
def token_required(f):
"""
Decorator which validates the request header token in 'X-Acess-Token' field, and returns the user.
"""
@wraps(f)
def decorated(*args, **kwargs):
token = None
# Extract token from headers
if "X-Access-Token" in request.headers:
token = request.headers["X-Access-Token"]
if not token:
return jsonify({"success": False, "message": "Token is missing"}), 401
# Decode the token and subsequently identify the user
try:
data = jwt.decode(token, gui.config["SECRET_KEY"], algorithms=["HS256"])
current_api_user = User.query.filter_by(public_id=data["public_id"]).first()
if current_api_user is None:
return jsonify({"success": False, "message": "Token is old. Please renew"}), 401
except jwt.ExpiredSignatureError:
return jsonify({"success": False, "message": "Token is expired"}), 401
except jwt.InvalidTokenError:
return jsonify({"success": False, "message": "Token is invalid"}), 401
except:
return jsonify({"success": False, "message": "Could not verify token"}), 401
return f(current_api_user, *args, **kwargs)
return decorated
# ******************** Job API ******************** #
# Single job information API - GET Method
@gui.route("/api/jobs/<int:job_id>", methods=["GET"])
@token_required
def job_endpoint(current_api_user, job_id: int):
"""
Given the job_id, returns the general information related to the job in JSON format.
:param job_id: int
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query job information to the GPI
job_info = query_internal_api(f"/internal/jobs/{job_id}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(job_info)
# Single job attribute information API - GET Method
@gui.route("/api/jobs/<int:job_id>/<attribute>", methods=["GET"])
@token_required
def job_attribute_endpoint(current_api_user, job_id: int, attribute: str):
"""
Given the job_id and attribute, returns the attribute information in the JSON format.
:param job_id: int
:param attribute: str
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query job attribute information from ganga
job_attribute_info = query_internal_api(f"/internal/jobs/{job_id}/{attribute}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(job_attribute_info)
# Single job full print API - GET Method
@gui.route("/api/jobs/<int:job_id>/full-print", methods=["GET"])
@token_required
def job_full_print_endpoint(current_api_user, job_id: int):
"""
Return full print of the job.
:param job_id: int
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query job full print from ganga
full_print_info = query_internal_api(f"/internal/jobs/{job_id}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(full_print_info)
# Create job using template API - POST Method
@gui.route("/api/jobs/create", methods=["POST"])
@token_required
def job_create_endpoint(current_api_user):
"""
Create a new job using the existing template.
IMPORTANT: template_id NEEDS to be provided in the request body. job_name can optionally be provided in the request body.
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
# Request data
data = {
"template_id": request.json.get("template_id"),
"job_name": request.json.get("job_name")
}
try:
# Query ganga to create a job using the template id
response_info = query_internal_api("/internal/jobs/create", "post", json=data)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(response_info)
# Copy job API - PUT Method
@gui.route("/api/jobs/<int:job_id>/copy", methods=["PUT"])
@token_required
def job_copy_endpoint(current_api_user, job_id: int):
"""
Create a copy of the job.
:param current_api_user: Information of the current_api_user based on the request's JWT token
:param job_id: int
"""
try:
# Query ganga to copy the job
response_info = query_internal_api(f"/internal/jobs/{job_id}/copy", "put")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(response_info)
    # Job action API - PUT Method
    @gui.route("/api/jobs/<int:job_id>/<action>", methods=["PUT"])
    @token_required
    def job_action_endpoint(current_api_user, job_id: int, action: str):
        """
        Given the job_id and action in the endpoint, perform the action on the job.
        The action can be any method or attribute change that can be called on the Job object.
        Example:
        1)
            PUT http://localhost:5000/job/13/resubmit

            The above request will resubmit the job with ID 13.
        2)
            PUT http://localhost:5000/job/13/force_status
            {"force_status":"failed"}

            The above request will force status of the job with ID 13 to failed. If unsuccessful will return back the error.
        3)
            PUT http://localhost:5000/job/13/name
            {"name":"New Name"}

            The above request will change the name of the job with ID 13 to "New Name". Notice how the required values
            are passed in the request body with the same name as action.

        NOTE: It is NECESSARY to send the body in JSON format for the request to be parsed in JSON.

        :param job_id: int
        :param action: str
        :param current_api_user: Information of the current_api_user based on the request's JWT token
        """
        # Request data: forwarded verbatim as arguments for the action
        request_data = request.json
        try:
            # Query ganga to perform the action
            response_info = query_internal_api(f"/internal/jobs/{job_id}/{action}", "put", json=request_data)
        except Exception as err:
            return jsonify({"success": False, "message": str(err)}), 400
        return jsonify(response_info)
# Job delete API - DELETE Method
@gui.route("/api/jobs/<int:job_id>", methods=["DELETE"])
@token_required
def job_delete_endpoint(current_api_user, job_id: int):
"""
Given the job id, removes the job from the job repository.
:param job_id: int
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query ganga to delete the job
response_info = query_internal_api(f"/internal/jobs/{job_id}", "delete")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(response_info)
    # Pin the Job
    @gui.route("/api/jobs/<int:job_id>/pin", methods=["PUT"])
    @token_required
    def job_pin_endpoint(current_api_user, job_id: int):
        """
        Pin the given job, which is then shown in the dashboard.

        :param job_id: int
        :param current_api_user: Information of the current_api_user based on the request's JWT token
        """
        # Get current user
        # NOTE(review): this uses the session-based `current_user` (flask-login)
        # rather than `current_api_user` supplied by @token_required — confirm
        # this is intentional for purely token-authenticated API calls
        u = current_user
        # Load pinned jobs of the user from the database (stored as a JSON list)
        pinned_jobs = json.loads(u.pinned_jobs) if u.pinned_jobs is not None else []
        # Pin job only if it is not pinned already (idempotent)
        if job_id not in pinned_jobs:
            pinned_jobs.append(job_id)
        # Add new pinned jobs to the database
        u.pinned_jobs = json.dumps(pinned_jobs)
        db.session.add(u)
        db.session.commit()
        return jsonify({"success": True, "message": f"Successfully pinned Job (ID={job_id})."})
    # Unpin the job
    @gui.route("/api/jobs/<int:job_id>/unpin", methods=["PUT"])
    @token_required
    def job_unpin_endpoint(current_api_user, job_id: int):
        """
        Unpin the job, and make the required change to the GUI database.

        :param job_id: int
        :param current_api_user: Information of the current_api_user based on the request's JWT token
        """
        # Get the user from the database
        # NOTE(review): like job_pin_endpoint this uses the session-based
        # `current_user` instead of `current_api_user` — confirm intentional
        u = current_user
        # Load user's pinned job from the database (stored as a JSON list)
        pinned_jobs = json.loads(u.pinned_jobs) if u.pinned_jobs is not None else []
        # Unpin the job only if it is currently pinned (idempotent)
        if job_id in pinned_jobs:
            pinned_jobs.remove(job_id)
        # Commit changes to the database
        u.pinned_jobs = json.dumps(pinned_jobs)
        db.session.add(u)
        db.session.commit()
        return jsonify({"success": True, "message": f"Successfully unpinned Job (ID={job_id})."})
# ******************** Subjobs API ******************** #
# Subjobs API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs", methods=["GET"])
@token_required
def subjobs_endpoint(current_api_user, job_id: int):
"""
Returns a list subjobs of a particular job in a similar way as Jobs API.
The parameter accepted are:
* ids: provide a JSON string of list of IDs
* status: provide subjob status as a string for filter
* application: provide subjob application as a string for filter
* backend: provide backend application as a string for filter
* recent: if provided, starts a list from recent subjobs to old
* length: number of subjobs to be returned, provide as a int
* offset: how many subjobs to skip before returning the specified length of subjobs. Provide as int.
offset works as: number of subjobs skipped = offset * length
:param job_id: int
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
params = {
"ids": request.args.get("ids"),
"status": request.args.get("status"),
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset")
}
try:
# Query ganga for subjobs information
subjobs_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs", "get", params=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(subjobs_info)
# Single subjob info API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>", methods=["GET"])
@token_required
def subjob_endpoint(current_api_user, job_id: int, subjob_id: int):
"""
Returns information of a single subjob related to a particular job
:param job_id: int
:param subjob_id: int
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query subjob information to ganga
subjob_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(subjob_info)
# Single Subjob Attribute Info API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/<attribute>", methods=["GET"])
@token_required
def subjob_attribute_endpoint(current_api_user, job_id: int, subjob_id: int, attribute: str):
"""
Given the job id, subjob id and attribute; return the attribute information in the string format via JSON.
:param job_id: int
:param subjob_id: int
:param attribute: str
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query attribute information from ganga
subjob_attribute_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/{attribute}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(subjob_attribute_info)
# Single subjob full print API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/full-print", methods=["GET"])
@token_required
def subjob_full_print_endpoint(current_api_user, job_id: int, subjob_id: int):
"""
Return full print of the subjob.
:param subjob_id: int
:param job_id: int
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query subjob full print from ganga
full_print_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(full_print_info)
# Copy subjob API - PUT Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/copy", methods=["PUT"])
@token_required
def subjob_copy_endpoint(current_api_user, job_id: int, subjob_id: int):
"""
Create a copy of the subjob into a new job.
:param job_id:
:param subjob_id:
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query ganga to copy subjob
response_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/copy", "put")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(response_info)
# ******************** Jobs API ******************** #
# Jobs API - GET Method
@gui.route("/api/jobs", methods=["GET"])
@token_required
def jobs_endpoint(current_api_user):
"""
Returns a list of jobs with general information in JSON format.
The parameter accepted are:
* ids: provide a JSON string of list of IDs
* status: provide job status as a string for filter
* application: provide job application as a string for filter
* backend: provide backend application as a string for filter
* recent: if provided, starts a list from recent job to old
* length: number of job to be returned, provide as a int
* offset: how many job to skip before returning the specified length of job. Provide as int.
offset works like: number of job skipped = offset * length
* auto-validate-ids: If ids provided in ids parameters does not exist in job repository, then skip those ids.
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
params = {
"ids": request.args.get("ids"),
"status": request.args.get("status"),
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset"),
"auto-validate-ids": request.args.get("auto-validate-ids")
}
try:
# Get jobs information according to select filter and range filter
jobs_info = query_internal_api(f"/internal/jobs", "get", params=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(jobs_info)
# Jobs statistics API - GET Method
@gui.route("/api/jobs/statistics", methods=["GET"])
@token_required
def jobs_statistics_endpoint(current_api_user):
"""
Returns the number of jobs in new, running, completed, killed, failed status.
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query ganga to get statistics information
statistics = query_internal_api("/internal/jobs/statistics", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(statistics)
@gui.route("/api/queue", methods=["GET"])
@token_required
def queue_endpoint(current_api_user):
try:
queue_info = query_internal_api("/internal/queue", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(queue_info)
    @gui.route("/api/queue/chart", methods=["GET","POST"])
    def queue_chart_endpoint():
        """
        Return queue chart data as a JSON response.

        NOTE(review): unlike the sibling /api routes this endpoint carries no
        @token_required / @login_required decorator — confirm whether it is
        intentionally unauthenticated.
        """
        chart_info = query_internal_api("/internal/queue/data", "get")
        response = make_response(json.dumps(chart_info))
        response.content_type = 'application/json'
        return response
# Job incomplete ids API - GET Method
@gui.route("/api/jobs/incomplete_ids", methods=["GET"])
@token_required
def jobs_incomplete_ids_endpoint(current_api_user):
"""
Returns a list of incomplete job ids in JSON format.
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query ganga to get incomplete ids list
incomplete_ids_list = query_internal_api("/internal/jobs/incomplete-ids", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(incomplete_ids_list)
# ******************** Config API ******************** #
# Config API - GET Method
@gui.route("/api/config", methods=["GET"], defaults={"section": ""})
@gui.route("/api/config/<section>", methods=["GET"])
@token_required
def config_endpoint(current_api_user, section: str):
"""
Returns a list of all the section of the configuration and their options as well as the values in JSON format.
If section is provide, returns information about the section in JSON format.
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query ganga to get config information
if section != "":
config_info = query_internal_api(f"/internal/config/{section}", "get")
else:
config_info = query_internal_api("/internal/config", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(config_info)
# ******************** Templates API ******************** #
# Templates API - GET Method
@gui.route("/api/templates", methods=["GET"])
@token_required
def templates_endpoint(current_api_user):
"""
Returns a list of objects containing template info in JSON format.
* ids: provide a JSON string of list of IDs
* status: provide template status as a string for filter
* application: provide template application as a string for filter
* backend: provide backend application as a string for filter
* recent: if provided, starts a list from recent template to old
* length: number of template to be returned, provide as a int
* offset: how many template to skip before returning the specified length of template. Provide as int.
offset works like: number of template skipped = offset * length
:param current_api_user: Information of the current_user based on the request's JWT token
"""
params = {
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset"),
}
try:
# Query ganga for templates information
templates_info = query_internal_api("/internal/templates", "get", params=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(templates_info)
# Single template full print API - GET Method
@gui.route("/api/templates/<int:template_id>/full-print", methods=["GET"])
@token_required
def template_full_print_endpoint(current_api_user, template_id: int):
"""
Return full print of the template.
:param template_id: int
:param current_api_user: Information of the current_api_user based on the request's JWT token
"""
try:
# Query template full print from ganga
full_print_info = query_internal_api(f"/internal/templates/{template_id}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(full_print_info)
# Template API - DELETE Method
@gui.route("/api/templates/<int:template_id>", methods=["DELETE"])
@token_required
def delete_template_endpoint(current_api_user, template_id: int):
    """
    Remove the template with the given id from the template repository.

    :param template_id: int
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Forward the delete request to the internal API server
        result = query_internal_api(f"/internal/templates/{template_id}", "delete")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(result)
# ******************** Credentials API ******************** #
# Credential store API - GET Method
@gui.route("/api/credentials", methods=["GET"])
@token_required
def credentials_endpoint(current_api_user):
    """
    Return the list of credentials and their information in JSON format.

    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Credential store contents, as reported by ganga
        store_info = query_internal_api("/internal/credentials", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(store_info)
# Credential Store API - PUT Method - Renew all credentials
@gui.route("/api/credentials/renew", methods=["PUT"])
@token_required
def credentials_renew_endpoint(current_api_user):
    """
    Renew every credential in the credential store.

    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Ask ganga to renew all credentials
        renew_result = query_internal_api("/internal/credentials/renew", "put")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(renew_result)
# ******************** Job Tree API ******************** #
# Job tree API - GET Method
@gui.route("/api/jobtree", methods=["GET"])
@token_required
def jobtree_endpoint(current_api_user):
    """
    Return the job tree folder structure as JSON (python dict serialisation).

    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Job tree structure from the internal API server
        tree = query_internal_api("/internal/jobtree", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(tree)
# ******************** Job Tree API ******************** #
# Plugins API - GET Method
@gui.route("/api/plugins", methods=["GET"])
@token_required
def plugins_endpoint(current_api_user):
    """
    Return plugin information: categories and the plugin names in each category.

    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Plugin catalogue from the internal API server
        catalogue = query_internal_api("/internal/plugins", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(catalogue)
# Plugin API - GET Method
@gui.route("/api/plugins/<plugin_name>", methods=["GET"])
@token_required
def plugin_endpoint(current_api_user, plugin_name: str):
    """
    Return a single plugin's information (name and docstring).

    :param plugin_name: str
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Details for this one plugin from the internal API server
        details = query_internal_api(f"/internal/plugins/{plugin_name}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(details)
# ******************** Helper Functions ******************** #
# Validate uploaded filename.
def allowed_file(filename):
    """Return True when *filename* carries an extension in ALLOWED_EXTENSIONS."""
    if "." not in filename:
        return False
    extension = filename.rsplit(".", 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
# Make HTTP request to the Internal Flask Server which is running on a GangaThread which has access to ganga namespace.
def query_internal_api(route: str, method: str, **kwargs):
    """
    Make an HTTP request to the Internal API Flask server (which runs on a
    GangaThread with access to the ganga namespace) and return its JSON data.

    :param route: str - route on the internal server
    :param method: str - one of "get", "post", "put", "delete"
    :param kwargs: dict - forwarded to requests (params, json, etc.)
    :return: dict
    Raises an Exception carrying the server's error message when the reply
    status is not 200, or when an unsupported HTTP method is requested.
    """
    # Base url of the internal server running on a GangaThread
    INTERNAL_URL = f"http://localhost:{gui.config['INTERNAL_PORT']}"
    # Only a small set of HTTP verbs is supported
    if method not in ("get", "post", "put", "delete"):
        raise Exception(f"Unsupported method: {method}")
    # Dispatch to requests.get/post/put/delete with the caller's arguments
    response = getattr(requests, method)(INTERNAL_URL + route, **kwargs)
    # Surface a non-OK reply to the caller as an exception
    if response.status_code != 200:
        raise Exception(response.json().get("message"))
    return response.json()
def create_session_defaults():
    """
    Ensure every session key used by the GUI tables exists, filling in defaults.
    """
    # Default page sizes and "any" filters for the templates/jobs/subjobs tables
    defaults = {
        "templates_per_page": 10,
        "templates_filter": {key: "any" for key in ["application", "backend"]},
        "jobs_per_page": 10,
        "jobs_filter": {key: "any" for key in ["status", "application", "backend"]},
        "subjobs_per_page": 10,
        "subjobs_filter": {key: "any" for key in ["status", "application", "backend"]},
    }
    # Only write keys that are not present, leaving user choices untouched
    for key, value in defaults.items():
        if key not in session:
            session[key] = value
# Ping internal API server
def ping_internal():
    """
    Ping the internal API server until it answers.

    Retries every 2 seconds for up to 20 trials (~40s), since the internal
    server usually only becomes reachable once Ganga has finished booting.

    :return: True once the server answers the ping, False after 20 failed trials
    """
    trials = 0
    while True:
        try:
            ping = query_internal_api("/ping", "get")
            if ping is True:
                return True
        # except Exception (not a bare except) so Ctrl-C / SystemExit can still
        # interrupt the retry loop instead of being swallowed
        except Exception:
            time.sleep(2)
            print("Internal API server not online (mostly because Ganga is booting up), retrying...")
            trials += 1
            if trials > 20:
                return False
def start_ganga(internal_port: int, args: str = ""):
    """
    Start a ganga session in a pseudo terminal and store the terminal's file
    descriptor and the ganga session's PID in the app config ("FD"/"CHILD_PID").

    :param args: str - extra command line arguments to provide to ganga
    :param internal_port: int - port for the internal API server, handed to
        ganga through the INTERNAL_PORT environment variable
    """
    # Create child process attached to a pty that we can read from and write to
    (child_pid, fd) = pty.fork()
    if child_pid == 0:
        # This is the child process fork. Anything printed here will show up in the pty, including the output of this subprocess
        ganga_env = os.environ.copy()
        ganga_env["WEB_CLI"] = "True"
        ganga_env["INTERNAL_PORT"] = str(internal_port)
        # NOTE(review): shell=True with interpolated args — args is expected to
        # come from the server operator's command line, not from web users; confirm
        subprocess.run(f"ganga --webgui {args}", shell=True, env=ganga_env)
    else:
        # This is the parent process fork. Store fd (connected to the child’s controlling terminal) and child pid
        # so read_and_forward_pty_output() can relay terminal output later
        gui.config["FD"] = fd
        gui.config["CHILD_PID"] = child_pid
        # Give the terminal an initial 50x50 size; the frontend can resize it later
        set_windowsize(fd, 50, 50)
        print("Ganga started, PID: ", child_pid)
# Set the window size of the pseudo terminal according to the size in the frontend
def set_windowsize(fd, row, col, xpix=0, ypix=0):
    """Resize the pty referred to by *fd* to row x col via TIOCSWINSZ."""
    dimensions = struct.pack("HHHH", row, col, xpix, ypix)
    fcntl.ioctl(fd, termios.TIOCSWINSZ, dimensions)
# Read and forward that data from the pseudo terminal to the frontend
def read_and_forward_pty_output():
    """
    Background task: poll the ganga pty forever and forward any pending output
    to the frontend as "pty-output" websocket events on the /pty namespace.
    """
    max_read_bytes = 1024 * 20
    while True:
        # Yield to the socketio event loop between polls
        socketio.sleep(0.01)
        if gui.config["FD"]:
            # select() with timeout 0 makes this a non-blocking readiness check
            timeout_sec = 0
            (data_ready, _, _) = select.select([gui.config["FD"]], [], [], timeout_sec)
            if data_ready:
                output = os.read(gui.config["FD"], max_read_bytes).decode()
                socketio.emit("pty-output", {"output": output}, namespace="/pty")
def start_web_cli(host: str, port: int, internal_port: int, log_output=True, ganga_args: str = ""):
    """
    Start the web server on eventlet serving the terminal on the specified port. (Production ready server)

    :param ganga_args: str - arguments to be passed to ganga
    :param host: str
    :param port: int
    :param internal_port: int
    :param log_output: whether socketio should log requests
    """
    from GangaGUI.start import create_default_user
    # Create default user
    gui_user, gui_password = create_default_user()
    print(f"Starting the GUI server on http://{host}:{port}")
    # Fixed typo in the user-facing message: "You login" -> "Your login"
    print(f"Your login information for the GUI is: Username: {gui_user.user} Password: {gui_password}")
    # Stash runtime settings read later by initial_run()/start_ganga()
    gui.config["INTERNAL_PORT"] = internal_port
    gui.config["WEB_CLI"] = True
    gui.config["GANGA_ARGS"] = ganga_args
    socketio.run(gui, host=host, port=port, log_output=log_output)
# ******************** Shutdown Function ******************** #
# Route used to shutdown the Internal API server and GUI server
@gui.route("/shutdown", methods=["GET"])
def shutdown():
    """Shut down the internal API server and the GUI server."""
    # In WEB_CLI mode this server also hosts the terminal; refuse to self-terminate
    if gui.config["WEB_CLI"] is True:
        flash("WEB CLI Mode is on, cannot self shutdown server. Consider doing manually.", "warning")
        return redirect(url_for("dashboard"))
    try:
        # The internal server's /shutdown route takes the backend down
        query_internal_api("/shutdown", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return "GUI Shutdown Successful."
# ******************** EOF ******************** #
| import os
import jwt
import json
import requests
import time
import select
import termios
import struct
import fcntl
import subprocess
import pty
import sys
import datetime
from functools import wraps
from werkzeug.utils import secure_filename, safe_join
from werkzeug.security import generate_password_hash, check_password_hash
from flask import Flask, request, jsonify, render_template, flash, redirect, url_for, session, send_file, make_response
from flask_login import login_user, login_required, logout_user, current_user, UserMixin
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_socketio import SocketIO
from GangaGUI.gui.config import Config
# ******************** Initialisation of Flask App for GUI ******************** #
# GUI Flask app, configured from ./config.py
gui = Flask(__name__)
gui.config.from_object(Config)
# Database handle used to interact with "gui.sqlite" in the gangadir/gui folder
# NOTE: IT HAS NO RELATION WITH THE GANGA PERSISTENT DATABASE
db = SQLAlchemy(gui)
# Login manager for the view routes (redirects anonymous users to "login")
login = LoginManager(gui)
login.login_view = "login"
login.login_message = "Please Login to Access this Page."
login.login_message_category = "warning"
# Websocket layer for frontend<->backend communication (e.g. pty output events)
socketio = SocketIO(gui)
# ******************** The user class for database and authentication ******************** #
# ORM Class to represent Users - used to access the GUI & API resources
class User(UserMixin, db.Model):
    """
    ORM model representing a GUI/API user account (table "users").

    Stored in the GUI's own sqlite database — unrelated to the ganga
    persistent job repository.
    """
    __tablename__ = "users"
    id = db.Column(db.Integer, primary_key=True)
    # Opaque public identifier embedded in auth tokens (see generate_auth_token)
    public_id = db.Column(db.String(64), unique=True)
    # Login name
    user = db.Column(db.String(32), unique=True)
    # Werkzeug password hash — the plain password is never stored
    password_hash = db.Column(db.String(64))
    role = db.Column(db.String(32))
    # JSON string of job ids pinned on the dashboard (see dashboard view)
    pinned_jobs = db.Column(db.Text)
    def store_password_hash(self, password: str):
        """Hash *password* and store the hash on this user."""
        self.password_hash = generate_password_hash(password)
    def verify_password(self, password: str) -> bool:
        """Return True when *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)
    def generate_auth_token(self, expires_in_days: int = 5) -> str:
        """Return a signed JWT carrying public_id, valid for *expires_in_days* days."""
        return jwt.encode(
            {"public_id": self.public_id, "exp": datetime.datetime.utcnow() + datetime.timedelta(days=expires_in_days)},
            gui.config["SECRET_KEY"], algorithm="HS256")
    def __repr__(self):
        return "User {}: {} (Public ID: {}, Role: {})".format(self.id, self.user, self.public_id, self.role)
# User Loader Function for Flask Login
@login.user_loader
def load_user(user_id):
    """Flask-Login callback: fetch the User row for the given primary key."""
    primary_key = int(user_id)
    return User.query.get(primary_key)
# ******************** Global Variables ******************** #
# Bootstrap CSS context class used to colour each job status in the templates
status_color = {
    "new": "info",
    "completed": "success",
    "completed_frozen" : "success",
    "failed": "danger",
    "failed_frozen" : "danger",
    "running": "primary",
    "submitted": "secondary",
    "killed": "warning"
}
# Extensions accepted for any file uploaded to the GUI (see allowed_file)
ALLOWED_EXTENSIONS = {"txt", "py"}
# Module-level caches filled once in initial_run() from the internal API server
actions = {}
plugins = {}
# ******************** Run Before First Request ******************** #
# Execute before first request
@gui.before_first_request
def initial_run():
    """
    Runs once, before the first request. Starts a ganga session when WEB_CLI
    mode is on, logs out any stale login, creates default session values,
    waits for the internal API server, and caches job actions/plugins info.
    """
    global actions, plugins
    # Start ganga if WEB_CLI mode is True
    if gui.config['WEB_CLI'] is True:
        start_ganga(gui.config['INTERNAL_PORT'], args=gui.config["GANGA_ARGS"])
        session["WEB_CLI"] = True
    elif gui.config['INTERNAL_PORT'] is None:
        # No port configured: take it from the environment (set by the launcher)
        gui.config['INTERNAL_PORT'] = os.environ['INTERNAL_PORT']
    # If user is authenticated, log them out. This happens after a fresh start of the GUI server.
    if current_user.is_authenticated:
        logout_user()
    # Create user session defaults
    create_session_defaults()
    # Check if internal server is online; ping_internal gives up after ~40s (20 trials)
    if not ping_internal():
        print("INTERNAL SERVER UNAVAILABLE, TERMINATING...")
        sys.exit(1)
    # Get job actions and plugins information from ganga
    try:
        # Fetch once and cache in the module-level dicts used by the views
        actions = query_internal_api("/internal/jobs/actions", "get")
        plugins = query_internal_api("/internal/plugins", "get")
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
# ******************** View Routes ******************** #
# Login View
@gui.route("/login", methods=["GET", "POST"])
def login():
    """
    Handles the login route of the GUI.
    """
    # An already-authenticated user has nothing to do here
    if current_user.is_authenticated:
        return redirect(url_for("dashboard"))
    if request.method == "POST":
        # Credentials from the submitted form
        username = request.form.get("username")
        password = request.form.get("password")
        # Look the account up and check the password hash
        account = User.query.filter_by(user=username).first()
        if account and account.verify_password(password):
            login_user(account, True)
            flash("Login successful", "success")
            return redirect(url_for("dashboard"))
        flash("Error identifying the user", "danger")
    # GET (or failed POST): render the login form with the known users
    users = User.query.all()
    return render_template("login.html", title="Login", users=users)
# Logout View
@gui.route("/logout", methods=["GET"])
def logout():
    """
    Log the current user out of the GUI.
    """
    # Only an authenticated session needs tearing down
    if current_user.is_authenticated:
        logout_user()
    return redirect(url_for("login"))
# Dashboard view
@gui.route("/")
@login_required
def dashboard():
    """
    Handles the dashboard route of the GUI.
    """
    quick_statistics = {}
    recent_jobs_info = []
    pinned_jobs_info = []
    try:
        # Overall job statistics
        quick_statistics = query_internal_api("/internal/jobs/statistics", "get")
        # Most recent jobs
        recent_jobs_info = query_internal_api("/internal/jobs/recent", "get")
        # Jobs this user pinned on the dashboard (empty JSON list when none)
        pinned = current_user.pinned_jobs
        pinned_jobs_info = query_internal_api("/internal/jobs", "get", params={
            "ids": pinned if pinned is not None else json.dumps([]),
            "auto-validate-ids": True})
    except Exception as err:
        # Flash the error in the GUI
        flash(str(err), "danger")
    return render_template("dashboard.html",
                           title="Dashboard",
                           quick_statistics=quick_statistics,
                           recent_jobs_info=recent_jobs_info,
                           pinned_jobs_info=pinned_jobs_info,
                           status_color=status_color)
# Config view
@gui.route("/config", methods=["GET", "POST"])
@login_required
def config_page():
    """
    Handles the config route of the GUI.
    """
    full_config_info = []
    config_info = []
    section = None
    # A POST narrows the display down to a single config section
    if request.method == "POST":
        requested = request.form.get("section")
        section = requested if requested not in ["", None] else None
    try:
        # Always fetch the complete config from ganga
        full_config_info = query_internal_api("/internal/config", "get")
        # Narrow to the requested section, if any
        if section is None:
            config_info = full_config_info
        else:
            config_info = [s for s in full_config_info if s["name"] == section]
    except Exception as err:
        # Flash the error in the GUI
        flash(str(err), "danger")
    return render_template("config.html", title="Config", full_config_info=full_config_info, config_info=config_info)
#Edit gangarc
@gui.route("/config_edit", methods=["GET", "POST"])
@login_required
def edit_config_page():
    """
    Edit the .gangarc file from the GUI.
    """
    gui_rc = gui.config["GANGA_RC"]
    if request.method == 'POST':
        # Overwrite the rc file with the text submitted from the editor
        config_ganga = request.form['config-data']
        with open(gui_rc, 'w') as rc_file:
            rc_file.write(str(config_ganga))
        flash(".gangarc Edited", "success")
    # Read the (possibly just-updated) contents for display
    with open(gui_rc, "rt") as rc_file:
        ganga_config = rc_file.read()
    return render_template("config_edit.html", title="Edit gangarc", ganga_config=ganga_config)
# Create view
# NOTE: @login_required must sit BELOW @gui.route — Flask registers whatever
# function the route decorator receives, so placing @login_required above it
# (as before) registered the unwrapped view and left /create unauthenticated.
@gui.route("/create", methods=["GET", "POST"])
@login_required
def create_page():
    """
    Handles the create route of the GUI.

    GET renders the create page with the most recent templates.
    POST accepts either a "loadfile" upload (job text loaded via the load GPI
    function) or a "runfile" upload (script run via the runfile GPI function).
    """
    # Handle file uploads
    if request.method == "POST":
        # Load a job from the uploaded file
        if "loadfile" in request.files:
            loadfile = request.files["loadfile"]
            if loadfile.filename == "":
                flash("No file selected", "warning")
                return redirect(request.url)
            # If valid file, save it and ask ganga to load it
            if loadfile and allowed_file(loadfile.filename):
                save_path = os.path.join(gui.config["UPLOAD_FOLDER"], "loadfile.txt")
                loadfile.save(save_path)
                try:
                    # Query to load the file
                    response_info = query_internal_api("/internal/load", "get", params={"path": save_path})
                except Exception as err:
                    # Display error in the GUI
                    flash(str(err), "danger")
                    return redirect(request.url)
                # Success message
                flash(response_info.get("message"), "success")
                return redirect(request.url)
        # Run a script using the runfile GPI function
        if "runfile" in request.files:
            runfile = request.files["runfile"]
            if runfile.filename == "":
                flash("No file selected", "warning")
                return redirect(request.url)
            # If valid file, save it and ask ganga to run it
            if runfile and allowed_file(runfile.filename):
                save_path = os.path.join(gui.config["UPLOAD_FOLDER"], "runfile.py")
                runfile.save(save_path)
                try:
                    # Query ganga to run the file
                    response_info = query_internal_api("/internal/runfile", "get", params={"path": save_path})
                except Exception as err:
                    # Display error back to GUI
                    flash(str(err), "danger")
                    return redirect(request.url)
                # Success message
                flash(response_info.get("message"), "success")
                return redirect(request.url)
        # Neither upload field was present
        flash("No file, retry!", "warning")
        return redirect(request.url)
    # GET: show the six most recent templates on the create page
    try:
        templates_info = query_internal_api("/internal/templates", "get",
                                            params={"recent": True, "length": "6"})
    except Exception as err:
        # Display error to GUI
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    return render_template("create.html", title="Create", templates_info=templates_info)
# Runfile view
@gui.route("/create/runfile", methods=["GET", "POST"])
@login_required
def runfile_page():
    """
    Quick-create a runfile, which is then executed via the runfile GPI function.
    """
    # The runfile always lives at a fixed location inside the upload folder
    runfile_path = os.path.join(gui.config["UPLOAD_FOLDER"], "runfile.py")
    if request.method == "POST":
        # Persist the editor contents, then ask ganga to run the file
        with open(runfile_path, "w+") as f:
            f.write(request.form.get("runfile-data"))
        try:
            response_info = query_internal_api("/internal/runfile", "get", params={"path": runfile_path})
            flash(response_info["message"], "success")
        except Exception as err:
            # Display error back in the GUI
            flash(str(err), "danger")
        return redirect(request.url)
    return render_template("runfile.html", title="Runfile")
# Templates view
@gui.route("/templates", methods=["GET", "POST"])
@login_required
def templates_page():
    """
    Handles the templates route of the GUI. Displays templates in a tabular form.
    """
    # A POST updates the user's page-size and filter preferences
    if request.method == "POST":
        session["templates_per_page"] = int(request.form.get("templates-per-page"))
        session["templates_filter"] = {key: request.form.get(form_name) for key, form_name in
                                       zip(["application", "backend"], ["template-application", "template-backend"])}
    # Requested page, defaulting to the first one
    page_arg = request.args.get("page")
    current_page = int(page_arg) if page_arg is not None else 0
    templates_per_page = session["templates_per_page"]
    try:
        # Total templates matching the filters -> page count
        templates_length = query_internal_api("/internal/templates/length", "get", params=session["templates_filter"])
        number_of_pages = (int(templates_length) // int(templates_per_page)) + 1
        # Clamp out-of-range page requests to the last page
        if current_page >= number_of_pages:
            return redirect(url_for("templates_page", page=number_of_pages - 1))
        # Filters plus the paging window for the actual query
        params = dict(session["templates_filter"], recent=True, length=templates_per_page, offset=current_page)
        templates_info = query_internal_api("/internal/templates", "get", params=params)
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("create_page"))
    return render_template("templates.html",
                           title="Templates",
                           number_of_pages=number_of_pages,
                           current_page=current_page,
                           backends=plugins["backends"],
                           applications=plugins["applications"],
                           templates_info=templates_info)
# Jobs view
@gui.route("/jobs", methods=["GET", "POST"])
@login_required
def jobs_page():
    """
    Handles the jobs route of the GUI. Displays jobs in a tabular view.
    """
    # A POST updates the user's page-size and filter preferences
    if request.method == "POST":
        session["jobs_per_page"] = int(request.form.get("jobs-per-page"))
        session["jobs_filter"] = {key: request.form.get(form_name) for key, form_name in
                                  zip(["status", "application", "backend"],
                                      ["job-status", "job-application", "job-backend"])}
    # Requested page, defaulting to the first one
    page_arg = request.args.get("page")
    current_page = int(page_arg) if page_arg is not None else 0
    jobs_per_page = session["jobs_per_page"]
    try:
        # Total jobs matching the filters -> page count
        jobs_length = query_internal_api("/internal/jobs/length", "get", params=session["jobs_filter"])
        number_of_pages = (int(jobs_length) // int(jobs_per_page)) + 1
        # Clamp out-of-range page requests to the last page
        if current_page >= number_of_pages:
            return redirect(url_for("jobs_page", page=number_of_pages - 1))
        # Filters plus the paging window for the actual query
        params = dict(session["jobs_filter"], recent=True, length=jobs_per_page, offset=current_page)
        jobs_info = query_internal_api("/internal/jobs", "get", params=params)
    except Exception as err:
        # Display error back to GUI
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    return render_template("jobs.html",
                           title="Jobs",
                           jobs_info=jobs_info,
                           backends=plugins["backends"],
                           applications=plugins["applications"],
                           number_of_pages=number_of_pages,
                           current_page=current_page,
                           status_color=status_color)
# Job view
@gui.route('/jobs/<int:job_id>')
@login_required
def job_page(job_id: int):
    """
    Handles the job route of the GUI. Displays all the information about the job.
    :param job_id: int
    """
    def read_if_present(path):
        # Return file contents, or None when the file does not exist
        if os.path.exists(path):
            with open(path) as f:
                return f.read()
        return None

    stdout = None
    stderr = None
    try:
        # Job attributes and its full print, from the internal server
        job_info = query_internal_api(f"/internal/jobs/{job_id}", "get")
        full_print_info = query_internal_api(f"/internal/jobs/{job_id}/full-print", "get")
        # stdout/stderr live inside the job's output directory
        stdout = read_if_present(os.path.join(job_info["outputdir"], "stdout"))
        stderr = read_if_present(os.path.join(job_info["outputdir"], "stderr"))
    except Exception as err:
        # Display error on the GUI
        flash(str(err), "danger")
        return redirect(url_for("jobs_page"))
    return render_template("job.html",
                           title=f"Job {job_id}",
                           job_info=job_info,
                           status_color=status_color,
                           attribute_actions=actions.get("attributes"),
                           method_actions=actions.get("methods"),
                           stdout=stdout,
                           stderr=stderr,
                           full_print_info=full_print_info)
# Export job
@gui.route("/jobs/<int:job_id>/export")
@login_required
def job_export(job_id: int):
    """
    Send the job file generated by the export function of the GPI.
    :param job_id: int
    """
    # The export GPI call writes the job text to this fixed path
    export_path = os.path.join(gui.config["UPLOAD_FOLDER"], f"export.txt")
    try:
        # Export the job, then stream the resulting file back to the browser
        query_internal_api(f"/internal/jobs/{job_id}/export", "get", params={"path": export_path})
        return send_file(export_path, as_attachment=True, cache_timeout=0, attachment_filename=f"Job_{job_id}.txt")
    except Exception as err:
        # Display error back to GUI
        flash(str(err), "danger")
        return redirect(url_for("job_page", job_id=job_id))
# Edit job
@gui.route("/jobs/<int:job_id>/edit", methods=["GET", "POST"])
@login_required
def job_edit(job_id: int):
    """
    Show the exported job text in an editor; submitting the edited text loads
    it back into ganga, creating a new job.
    :param job_id: int
    """
    # Fixed scratch locations inside the upload folder
    loadfile_path = os.path.join(gui.config["UPLOAD_FOLDER"], "loadfile.txt")
    export_path = os.path.join(gui.config["UPLOAD_FOLDER"], "export.txt")
    if request.method == "POST":
        # Store the edited job text, then ask ganga to load it
        with open(loadfile_path, "w+") as f:
            f.write(request.form.get("edited-job-info"))
        try:
            response_info = query_internal_api("/internal/load", "get", params={"path": loadfile_path})
            flash(response_info["message"], "success")
        except Exception as err:
            # Display error on the GUI
            flash(str(err), "danger")
        return redirect(request.url)
    try:
        # Export the job text and read it back for display in the editor
        query_internal_api(f"/internal/jobs/{job_id}/export", "get", params={"path": export_path})
        with open(export_path) as f:
            exported_data = f.read()
    except Exception as err:
        # Display error on the GUI
        flash(str(err), "danger")
        return redirect(url_for("job_page", job_id=job_id))
    return render_template("edit_job.html", title=f"Edit Job {job_id}", job_id=job_id, exported_data=exported_data)
# Browse job directory
# Browse job directory
@gui.route("/job/<int:job_id>/browse", defaults={"path": ""})
@gui.route("/job/<int:job_id>/browse/<path:path>")
@login_required
def job_browse(job_id: int, path):
    """
    Browse the directory of the job.
    :param job_id: int
    :param path: str - path relative to the job's base directory
    """
    try:
        # Query job information
        job_info = query_internal_api(f"/internal/jobs/{job_id}", "get")
        # Base directory of the job
        job_base_dir = os.path.dirname(os.path.dirname(job_info["outputdir"]))
    except Exception as err:
        # Display error on the GUI
        flash(str(err), "danger")
        return redirect(url_for("job_page", job_id=job_id))
    # Join the base and the requested path. safe_join returns None when the
    # requested path would escape job_base_dir (path traversal attempt); without
    # this guard os.path.dirname(None) below raises and the request 500s.
    abs_path = safe_join(job_base_dir, path)
    if abs_path is None:
        flash("Directory for this job does not exist.", "warning")
        return redirect(url_for("job_page", job_id=job_id))
    # URL path variable for going back
    back_path = os.path.dirname(abs_path).replace(job_base_dir, "")
    # If path doesn't exist
    if not os.path.exists(abs_path):
        flash("Directory for this job does not exist.", "warning")
        return redirect(url_for("job_page", job_id=job_id))
    # Check if path is a file and send
    if os.path.isfile(abs_path):
        return send_file(abs_path)
    # Show directory contents, marking sub-directories so the template can link them
    files_info = [{"file": entry, "directory": os.path.isdir(os.path.join(abs_path, entry))}
                  for entry in os.listdir(abs_path)]
    return render_template('job_dir.html', title=f"Job {job_id} Directory",
                           job_id=job_id,
                           abs_path=abs_path,
                           files_info=files_info,
                           back_path=back_path)
# Subjobs view
@gui.route("/jobs/<int:job_id>/subjobs", methods=["GET", "POST"])
@login_required
def subjobs_page(job_id: int):
    """
    Handles the subjobs view of the GUI. Displays subjobs of a job in a tabular form.
    :param job_id: int
    """
    # A POST updates the user's page-size and filter preferences
    if request.method == "POST":
        session["subjobs_per_page"] = int(request.form.get("subjobs-per-page"))
        session["subjobs_filter"] = {key: request.form.get(form_name) for key, form_name in
                                     zip(["status", "application", "backend"],
                                         ["subjob-status", "subjob-application", "subjob-backend"])}
    # Requested page, defaulting to the first one
    page_arg = request.args.get("page")
    current_page = int(page_arg) if page_arg is not None else 0
    subjobs_per_page = session["subjobs_per_page"]
    try:
        # Total subjobs matching the filters -> page count
        subjobs_length = query_internal_api(f"/internal/jobs/{job_id}/subjobs/length", "get",
                                            params=session["subjobs_filter"])
        number_of_pages = (int(subjobs_length) // int(subjobs_per_page)) + 1
        # Clamp out-of-range page requests to the last page
        if current_page >= number_of_pages:
            return redirect(url_for("subjobs_page", page=number_of_pages - 1, job_id=job_id))
        # Filters plus the paging window for the actual query
        params = dict(session["subjobs_filter"], recent=True, length=subjobs_per_page, offset=current_page)
        subjobs_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs", "get", params=params)
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("job_page", job_id=job_id))
    return render_template("subjobs.html",
                           title=f"Subjobs - Job {job_id}",
                           status_color=status_color,
                           number_of_pages=number_of_pages,
                           current_page=current_page,
                           backends=plugins["backends"],
                           applications=plugins["applications"],
                           subjobs_info=subjobs_info,
                           job_id=job_id)
# Subjob view
@gui.route("/jobs/<int:job_id>/subjobs/<int:subjob_id>", methods=["GET"])
@login_required
def subjob_page(job_id: int, subjob_id: int):
    """
    Handles the subjob route of the GUI. Displays extensive details of a subjob.
    :param job_id: int
    :param subjob_id: int
    """
    def read_if_present(path):
        # Return file contents, or None when the file does not exist
        if os.path.exists(path):
            with open(path) as f:
                return f.read()
        return None

    stdout = None
    stderr = None
    try:
        # Parent job's output dir, the subjob itself, and its full print
        job_outputdir = query_internal_api(f"/internal/jobs/{job_id}/outputdir", "get")
        subjob_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}", "get")
        full_print_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/full-print", "get")
        # Browse path (relative to the job dir) usable by the job_browse route
        job_dir_basepath = os.path.dirname(os.path.dirname(job_outputdir["outputdir"]))
        subjob_dir_basepath = os.path.dirname(os.path.dirname(subjob_info["outputdir"]))
        browse_path = subjob_dir_basepath.replace(job_dir_basepath, "")
        # stdout/stderr live inside the subjob's output directory
        stdout = read_if_present(os.path.join(subjob_info["outputdir"], "stdout"))
        stderr = read_if_present(os.path.join(subjob_info["outputdir"], "stderr"))
    except Exception as err:
        # Display error on the GUI
        flash(str(err), "danger")
        return redirect(url_for("subjobs_page", job_id=job_id))
    return render_template("subjob.html",
                           title=f"Subjob {subjob_id} - Job {job_id}",
                           subjob_info=subjob_info,
                           status_color=status_color,
                           attribute_actions=actions["attributes"],
                           method_actions=actions["methods"],
                           stdout=stdout,
                           stderr=stderr,
                           full_print_info=full_print_info,
                           job_id=job_id,
                           browse_path=browse_path)
# Credential view
@gui.route("/credentials")
@login_required
def credentials_page():
    """
    Handles the credential store view of the GUI. Displays credentials in a tabular form.
    """
    try:
        # Credential store contents from the internal server
        store_contents = query_internal_api("/internal/credentials", "get")
    except Exception as err:
        # Display error on the GUI
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    return render_template('credentials.html', credential_info_list=store_contents)
@gui.route("/queue", methods=["GET"])
@login_required
def queue_page():
    """
    Displays information about the ganga queues.
    """
    try:
        # Queue details from the internal server
        queue_listing = query_internal_api("/internal/queue", "get")
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    return render_template('queue.html', queue_info_list=queue_listing)
# Plugins view
@gui.route('/plugins')
@login_required
def plugins_page():
    """
    Handles the plugins route of the GUI. Displays the list of plugins.
    """
    try:
        # Plugins were cached at startup in the module-level `plugins` dict
        plugins_info = plugins
    except Exception as err:
        # Display error on the GUI
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    return render_template('plugins.html', plugins_info=plugins_info)
# Plugin view
@gui.route("/plugin/<plugin_name>")
@login_required
def plugin_page(plugin_name: str):
    """
    Render details for a single plugin (e.g. its docstring).

    :param plugin_name: str
    """
    try:
        # Fetch plugin information from the internal API server
        details = query_internal_api(f"/internal/plugins/{plugin_name}", "get")
    except Exception as err:
        # Surface the failure and return to the plugins list
        flash(str(err), "danger")
        return redirect(url_for("plugins_page"))
    else:
        return render_template("plugin.html", title=f"{plugin_name}", plugin_info=details)
# Ganga logs view
@gui.route("/logs")
@login_required
def logs_page():
    """
    Render the Ganga log file along with the GUI access and error logs.
    """
    # Paths of the three logs, in the order they are read
    log_paths = {
        "ganga": gui.config["GANGA_LOG"],
        "access": gui.config["ACCESS_LOG"],
        "error": gui.config["ERROR_LOG"],
    }
    contents = {}
    try:
        for key, log_path in log_paths.items():
            with open(log_path, "rt") as handle:
                contents[key] = handle.read()
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    return render_template("logs.html", title="Logs", ganga_log_data=contents["ganga"],
                           gui_accesslog_data=contents["access"], gui_errorlog_data=contents["error"])
@gui.route("/storage", defaults={"path": ""}, methods=["GET", "POST"])
@gui.route("/storage/<path:path>", methods=["GET", "POST"])
@login_required
def storage_page(path):
    """
    Browse, download and upload files below the gangadir/storage folder.

    GET renders a directory listing, or sends the file when the requested
    path is a file; POST uploads a file into the requested directory.

    :param path: str - path relative to the storage folder
    """
    # Storage folder path
    storage_folder = gui.config["STORAGE_FOLDER"]
    # SECURITY FIX: the previous os.path.join(storage_folder, path) let a
    # crafted path containing "../" escape the storage folder (directory
    # traversal, CWE-22) for both uploads and send_file downloads. Resolve
    # the path and reject anything outside the storage root.
    storage_root = os.path.abspath(storage_folder)
    abs_path = os.path.abspath(os.path.join(storage_root, path))
    if abs_path != storage_root and not abs_path.startswith(storage_root + os.sep):
        flash("Invalid path.", "danger")
        return redirect(url_for("dashboard"))
    # Handle file uploads
    if request.method == "POST":
        # Uploaded file
        if "storagefile" in request.files:
            storagefile = request.files["storagefile"]
            if storagefile.filename == "":
                flash("No file selected", "warning")
                return redirect(request.url)
            # If valid file, then save the file
            if storagefile:
                # The upload target must be an existing directory
                if not os.path.isdir(abs_path):
                    flash("Error while uploading the file", "danger")
                    return redirect(request.url)
                # secure_filename strips path separators from the client-supplied name
                filename = secure_filename(storagefile.filename)
                save_path = os.path.join(abs_path, filename)
                storagefile.save(save_path)
                # Success message
                flash("Successfully uploaded the file.", "success")
                return redirect(request.url)
        # No file case
        flash("No file, retry!", "warning")
        return redirect(request.url)
    # URL path variable for going back one level
    back_path = os.path.dirname(abs_path).replace(storage_root, "")
    # If path doesn't exist
    if not os.path.exists(abs_path):
        flash("Directory does not exist.", "warning")
        return redirect(url_for("dashboard"))
    # Check if path is a file and send
    if os.path.isfile(abs_path):
        return send_file(abs_path)
    # Otherwise list the directory contents
    files_info = []
    for entry in os.listdir(abs_path):
        files_info.append({
            "file": entry,
            "directory": os.path.isdir(os.path.join(abs_path, entry))
        })
    return render_template("storage.html", title="Storage",
                           abs_path=abs_path,
                           files_info=files_info,
                           back_path=back_path)
# Serve CLI
@gui.route("/cli")
@login_required
def serve_cli():
    """Render the in-browser terminal (CLI) page; the terminal itself talks over the /pty socketio namespace."""
    return render_template("cli.html")
# Establish a websocket connection from the frontend to the server
@socketio.on("connect", namespace="/pty")
def connect():
    """
    Handle a new websocket client: start forwarding pseudo-terminal output.
    """
    # Only stream when a Ganga child exists and the user is logged in
    if not (gui.config["CHILD_PID"] and current_user.is_authenticated):
        return
    # Emit pty output to the client from a background task
    socketio.start_background_task(target=read_and_forward_pty_output)
    return
# Input from the frontend
@socketio.on("pty-input", namespace="/pty")
def pty_input(data):
    """
    Forward keystrokes from the frontend into the child pty, as if the
    user were typing in a real terminal.
    """
    if not (gui.config["FD"] and current_user.is_authenticated):
        return
    os.write(gui.config["FD"], data["input"].encode())
# Resize the pseudo terminal when the frontend is resized
@socketio.on("resize", namespace="/pty")
def resize(data):
    """
    Match the pty dimensions to the frontend terminal.

    :param data: dict with the frontend terminal's rows and cols.
    """
    if not (gui.config["FD"] and current_user.is_authenticated):
        return
    set_windowsize(gui.config["FD"], data["rows"], data["cols"])
# ******************** Token Based Authentication ******************** #

# Generate token for API authentication - token validity 5 days
@gui.route("/token", methods=["POST"])
def generate_token():
    """
    Validate the credentials posted in the JSON body and return a JWT.
    """
    # Credentials from the JSON request body
    body = request.json if request.json else {}
    username = body.get("username")
    password = body.get("password")
    # Shared 401 payload for every failure path
    failure = {"success": False, "message": "Could not verify user."}
    # Both fields are mandatory
    if not username or not password:
        return jsonify(failure), 401
    # Look the user up and verify the password before issuing a token
    user = User.query.filter_by(user=username).first()
    if user and user.verify_password(password):
        token = user.generate_auth_token().decode("UTF-8")
        return jsonify({"token": token})
    # Authentication failed
    return jsonify(failure), 401
# ******************** Token Authentication Decorator ******************** #

# Decorator for token protected routes
def token_required(f):
    """
    Decorator which validates the JWT supplied in the "X-Access-Token"
    request header and passes the resolved user to the wrapped view.

    Responds with HTTP 401 when the token is missing, expired, invalid,
    stale (unknown user) or cannot be verified for any other reason.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        token = None
        # Extract token from headers
        if "X-Access-Token" in request.headers:
            token = request.headers["X-Access-Token"]
        if not token:
            return jsonify({"success": False, "message": "Token is missing"}), 401
        # Decode the token and subsequently identify the user
        try:
            data = jwt.decode(token, gui.config["SECRET_KEY"], algorithms=["HS256"])
            current_api_user = User.query.filter_by(public_id=data["public_id"]).first()
            if current_api_user is None:
                return jsonify({"success": False, "message": "Token is old. Please renew"}), 401
        except jwt.ExpiredSignatureError:
            return jsonify({"success": False, "message": "Token is expired"}), 401
        except jwt.InvalidTokenError:
            return jsonify({"success": False, "message": "Token is invalid"}), 401
        except Exception:
            # FIX: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            return jsonify({"success": False, "message": "Could not verify token"}), 401
        return f(current_api_user, *args, **kwargs)
    return decorated
# ******************** Job API ******************** #

# Single job information API - GET Method
@gui.route("/api/jobs/<int:job_id>", methods=["GET"])
@token_required
def job_endpoint(current_api_user, job_id: int):
    """
    Return general information about the given job as JSON.

    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api(f"/internal/jobs/{job_id}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Single job attribute information API - GET Method
@gui.route("/api/jobs/<int:job_id>/<attribute>", methods=["GET"])
@token_required
def job_attribute_endpoint(current_api_user, job_id: int, attribute: str):
    """
    Return the given attribute of the given job as JSON.

    :param job_id: int
    :param attribute: str
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api(f"/internal/jobs/{job_id}/{attribute}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Single job full print API - GET Method
@gui.route("/api/jobs/<int:job_id>/full-print", methods=["GET"])
@token_required
def job_full_print_endpoint(current_api_user, job_id: int):
    """
    Return the full print of the given job as JSON.

    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api(f"/internal/jobs/{job_id}/full-print", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Create job using template API - POST Method
@gui.route("/api/jobs/create", methods=["POST"])
@token_required
def job_create_endpoint(current_api_user):
    """
    Create a new job from an existing template.

    The request body MUST contain "template_id" and MAY contain "job_name".

    :param current_api_user: user resolved from the request's JWT token
    """
    # Creation parameters from the request body
    data = {
        "template_id": request.json.get("template_id"),
        "job_name": request.json.get("job_name")
    }
    try:
        payload = query_internal_api("/internal/jobs/create", "post", json=data)
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Copy job API - PUT Method
@gui.route("/api/jobs/<int:job_id>/copy", methods=["PUT"])
@token_required
def job_copy_endpoint(current_api_user, job_id: int):
    """
    Create a copy of the given job.

    :param current_api_user: user resolved from the request's JWT token
    :param job_id: int
    """
    try:
        payload = query_internal_api(f"/internal/jobs/{job_id}/copy", "put")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Job action API - PUT Method
@gui.route("/api/jobs/<int:job_id>/<action>", methods=["PUT"])
@token_required
def job_action_endpoint(current_api_user, job_id: int, action: str):
    """
    Perform an action on the given job. The action may be any method or
    attribute change that can be applied to a Job object.

    Examples:
    1)
        PUT http://localhost:5000/job/13/resubmit
        Resubmits the job with ID 13.
    2)
        PUT http://localhost:5000/job/13/force_status
        {"force_status":"failed"}
        Forces the status of job 13; on failure the error is returned.
    3)
        PUT http://localhost:5000/job/13/name
        {"name"="New Name"}
        Renames job 13 to "New Name". Required values are passed in the
        request body under the same name as the action.

    NOTE: the request body must be JSON for it to be parsed as JSON.

    :param job_id: int
    :param action: str
    :param current_api_user: user resolved from the request's JWT token
    """
    # Action arguments from the request body
    action_args = request.json
    try:
        payload = query_internal_api(f"/internal/jobs/{job_id}/{action}", "put", json=action_args)
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Job delete API - DELETE Method
@gui.route("/api/jobs/<int:job_id>", methods=["DELETE"])
@token_required
def job_delete_endpoint(current_api_user, job_id: int):
    """
    Remove the given job from the job repository.

    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api(f"/internal/jobs/{job_id}", "delete")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Pin the Job
@gui.route("/api/jobs/<int:job_id>/pin", methods=["PUT"])
@token_required
def job_pin_endpoint(current_api_user, job_id: int):
    """
    Pin the given job so it appears on the dashboard.

    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    user_record = current_user
    # Deserialize the user's pinned jobs (None means nothing pinned yet)
    pinned_jobs = json.loads(user_record.pinned_jobs) if user_record.pinned_jobs is not None else []
    # Add the job if it is not already pinned
    if job_id not in pinned_jobs:
        pinned_jobs.append(job_id)
    # Persist the updated list
    user_record.pinned_jobs = json.dumps(pinned_jobs)
    db.session.add(user_record)
    db.session.commit()
    return jsonify({"success": True, "message": f"Successfully pinned Job (ID={job_id})."})
# Unpin the job
@gui.route("/api/jobs/<int:job_id>/unpin", methods=["PUT"])
@token_required
def job_unpin_endpoint(current_api_user, job_id: int):
    """
    Unpin the given job and persist the change in the GUI database.

    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    user_record = current_user
    # Deserialize the user's pinned jobs (None means nothing pinned yet)
    pinned_jobs = json.loads(user_record.pinned_jobs) if user_record.pinned_jobs is not None else []
    # Drop the job if it is currently pinned
    if job_id in pinned_jobs:
        pinned_jobs.remove(job_id)
    # Persist the updated list
    user_record.pinned_jobs = json.dumps(pinned_jobs)
    db.session.add(user_record)
    db.session.commit()
    return jsonify({"success": True, "message": f"Successfully unpinned Job (ID={job_id})."})
# ******************** Subjobs API ******************** #

# Subjobs API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs", methods=["GET"])
@token_required
def subjobs_endpoint(current_api_user, job_id: int):
    """
    Return a list of subjobs of a particular job, analogous to the Jobs API.

    Accepted query parameters:
    * ids: JSON string holding a list of IDs
    * status: filter by subjob status
    * application: filter by subjob application
    * backend: filter by backend
    * recent: if provided, list recent subjobs first
    * length: number of subjobs to return (int)
    * offset: pages of `length` subjobs to skip (int);
        number of subjobs skipped = offset * length

    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    params = {
        "ids": request.args.get("ids"),
        "status": request.args.get("status"),
        "application": request.args.get("application"),
        "backend": request.args.get("backend"),
        "recent": request.args.get("recent"),
        "length": request.args.get("length"),
        "offset": request.args.get("offset")
    }
    try:
        payload = query_internal_api(f"/internal/jobs/{job_id}/subjobs", "get", params=params)
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Single subjob info API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>", methods=["GET"])
@token_required
def subjob_endpoint(current_api_user, job_id: int, subjob_id: int):
    """
    Return information about a single subjob of a particular job.

    :param job_id: int
    :param subjob_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Single Subjob Attribute Info API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/<attribute>", methods=["GET"])
@token_required
def subjob_attribute_endpoint(current_api_user, job_id: int, subjob_id: int, attribute: str):
    """
    Return the given attribute of a subjob as a string wrapped in JSON.

    :param job_id: int
    :param subjob_id: int
    :param attribute: str
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/{attribute}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Single subjob full print API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/full-print", methods=["GET"])
@token_required
def subjob_full_print_endpoint(current_api_user, job_id: int, subjob_id: int):
    """
    Return the full print of the given subjob.

    :param subjob_id: int
    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/full-print", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Copy subjob API - PUT Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/copy", methods=["PUT"])
@token_required
def subjob_copy_endpoint(current_api_user, job_id: int, subjob_id: int):
    """
    Copy the given subjob into a new job.

    :param job_id:
    :param subjob_id:
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/copy", "put")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# ******************** Jobs API ******************** #

# Jobs API - GET Method
@gui.route("/api/jobs", methods=["GET"])
@token_required
def jobs_endpoint(current_api_user):
    """
    Return a list of jobs with general information as JSON.

    Accepted query parameters:
    * ids: JSON string holding a list of IDs
    * status: filter by job status
    * application: filter by job application
    * backend: filter by backend
    * recent: if provided, list recent jobs first
    * length: number of jobs to return (int)
    * offset: pages of `length` jobs to skip (int);
        number of jobs skipped = offset * length
    * auto-validate-ids: skip requested ids that are absent from the repository

    :param current_api_user: user resolved from the request's JWT token
    """
    params = {
        "ids": request.args.get("ids"),
        "status": request.args.get("status"),
        "application": request.args.get("application"),
        "backend": request.args.get("backend"),
        "recent": request.args.get("recent"),
        "length": request.args.get("length"),
        "offset": request.args.get("offset"),
        "auto-validate-ids": request.args.get("auto-validate-ids")
    }
    try:
        payload = query_internal_api("/internal/jobs", "get", params=params)
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Jobs statistics API - GET Method
@gui.route("/api/jobs/statistics", methods=["GET"])
@token_required
def jobs_statistics_endpoint(current_api_user):
    """
    Return the number of jobs in new, running, completed, killed and failed states.

    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api("/internal/jobs/statistics", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
@gui.route("/api/queue", methods=["GET"])
@token_required
def queue_endpoint(current_api_user):
    """Return current queue information as JSON."""
    try:
        payload = query_internal_api("/internal/queue", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
@gui.route("/api/queue/chart", methods=["GET","POST"])
def queue_chart_endpoint():
    """
    Return queue monitoring data for the dashboard chart as JSON.

    NOTE(review): unlike its siblings this endpoint carries no
    @token_required / @login_required decorator — presumably so the chart
    can poll it; confirm that is intentional.
    """
    try:
        # FIX: every sibling endpoint guards this call; previously an internal
        # API failure here surfaced as an unhandled 500.
        chart_info = query_internal_api("/internal/queue/data", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    response = make_response(json.dumps(chart_info))
    response.content_type = 'application/json'
    return response
# Job incomplete ids API - GET Method
@gui.route("/api/jobs/incomplete_ids", methods=["GET"])
@token_required
def jobs_incomplete_ids_endpoint(current_api_user):
    """
    Return the list of incomplete job ids as JSON.

    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api("/internal/jobs/incomplete-ids", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# ******************** Config API ******************** #

# Config API - GET Method
@gui.route("/api/config", methods=["GET"], defaults={"section": ""})
@gui.route("/api/config/<section>", methods=["GET"])
@token_required
def config_endpoint(current_api_user, section: str):
    """
    Without a section: return every configuration section with its options
    and values. With a section: return that section only.

    :param current_api_user: user resolved from the request's JWT token
    """
    # An empty section means "everything"
    route = f"/internal/config/{section}" if section != "" else "/internal/config"
    try:
        payload = query_internal_api(route, "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# ******************** Templates API ******************** #

# Templates API - GET Method
@gui.route("/api/templates", methods=["GET"])
@token_required
def templates_endpoint(current_api_user):
    """
    Return a list of template info objects as JSON.

    Accepted query parameters:
    * application: filter by template application
    * backend: filter by backend
    * recent: if provided, list recent templates first
    * length: number of templates to return (int)
    * offset: pages of `length` templates to skip (int);
        number of templates skipped = offset * length

    :param current_api_user: user resolved from the request's JWT token
    """
    params = {
        "application": request.args.get("application"),
        "backend": request.args.get("backend"),
        "recent": request.args.get("recent"),
        "length": request.args.get("length"),
        "offset": request.args.get("offset"),
    }
    try:
        payload = query_internal_api("/internal/templates", "get", params=params)
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Single template full print API - GET Method
@gui.route("/api/templates/<int:template_id>/full-print", methods=["GET"])
@token_required
def template_full_print_endpoint(current_api_user, template_id: int):
    """
    Return the full print of the given template.

    :param template_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api(f"/internal/templates/{template_id}/full-print", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Template API - DELETE Method
@gui.route("/api/templates/<int:template_id>", methods=["DELETE"])
@token_required
def delete_template_endpoint(current_api_user, template_id: int):
    """
    Delete the given template from the template repository.

    :param template_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api(f"/internal/templates/{template_id}", "delete")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# ******************** Credentials API ******************** #

# Credential store API - GET Method
@gui.route("/api/credentials", methods=["GET"])
@token_required
def credentials_endpoint(current_api_user):
    """
    Return the list of credentials and their information as JSON.

    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api("/internal/credentials", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Credential Store API - PUT Method - Renew all credentials
@gui.route("/api/credentials/renew", methods=["PUT"])
@token_required
def credentials_renew_endpoint(current_api_user):
    """
    Renew every credential in the credential store.

    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api("/internal/credentials/renew", "put")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# ******************** Job Tree API ******************** #

# Job tree API - GET Method
@gui.route("/api/jobtree", methods=["GET"])
@token_required
def jobtree_endpoint(current_api_user):
    """
    Return the job tree folder structure as a JSON-encoded dict.

    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api("/internal/jobtree", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# ******************** Plugins API ******************** #

# Plugins API - GET Method
@gui.route("/api/plugins", methods=["GET"])
@token_required
def plugins_endpoint(current_api_user):
    """
    Return plugin information: categories and the plugin names in each.

    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api("/internal/plugins", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# Plugin API - GET Method
@gui.route("/api/plugins/<plugin_name>", methods=["GET"])
@token_required
def plugin_endpoint(current_api_user, plugin_name: str):
    """
    Return information about a single plugin, such as its name and docstring.

    :param plugin_name: str
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        payload = query_internal_api(f"/internal/plugins/{plugin_name}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    else:
        return jsonify(payload)
# ******************** Helper Functions ******************** #

# Validate uploaded filename.
def allowed_file(filename):
    """Return True when `filename` has an extension listed in ALLOWED_EXTENSIONS."""
    if "." not in filename:
        return False
    extension = filename.rsplit(".", 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
# Make HTTP request to the Internal Flask Server which is running on a GangaThread which has access to ganga namespace.
def query_internal_api(route: str, method: str, **kwargs):
    """
    Perform an HTTP request against the internal API Flask server (which
    runs on a GangaThread with access to the ganga namespace) and return
    the decoded JSON payload.

    :param route: str - endpoint path, e.g. "/internal/jobs"
    :param method: str - one of "get", "post", "put", "delete"
    :param kwargs: dict - forwarded verbatim to `requests` (params, json, ...)
    :return: dict
    :raises Exception: on an unsupported method or a non-200 response
    """
    # The internal server listens on localhost on a configurable port
    INTERNAL_URL = f"http://localhost:{gui.config['INTERNAL_PORT']}"
    supported_methods = ("get", "post", "put", "delete")
    if method not in supported_methods:
        raise Exception(f"Unsupported method: {method}")
    # Dispatch to requests.get / .post / .put / .delete
    res = getattr(requests, method)(INTERNAL_URL + route, **kwargs)
    # Anything other than 200 carries an error message in its JSON body
    if res.status_code != 200:
        raise Exception(res.json().get("message"))
    return res.json()
def create_session_defaults():
    """
    Populate any missing per-user session keys with their default values.
    """
    # Default page sizes and filters for the templates, jobs and subjobs views
    defaults = {
        "templates_per_page": 10,
        "templates_filter": {key: "any" for key in ["application", "backend"]},
        "jobs_per_page": 10,
        "jobs_filter": {key: "any" for key in ["status", "application", "backend"]},
        "subjobs_per_page": 10,
        "subjobs_filter": {key: "any" for key in ["status", "application", "backend"]},
    }
    # Only fill in keys the session does not already have
    for name, value in defaults.items():
        if name not in session:
            session[name] = value
# Ping internal API server
def ping_internal():
    """
    Poll the internal API server until it responds to a ping.

    :return: True once the server answers, False after more than 20
        failed attempts (the server usually takes a while because Ganga
        is still booting).
    """
    trials = 0
    while True:
        try:
            if query_internal_api("/ping", "get") is True:
                return True
        except Exception:
            # FIX: was a bare `except:` (also caught KeyboardInterrupt/SystemExit)
            print("Internal API server not online (mostly because Ganga is booting up), retrying...")
        # FIX: previously the sleep and trial accounting lived inside the
        # except block, so a reachable server returning a non-True ping spun
        # in a busy loop forever without ever hitting the retry limit.
        time.sleep(2)
        trials += 1
        if trials > 20:
            return False
def start_ganga(internal_port: int, args: str = ""):
    """
    Start a ganga session in a pseudo terminal and stores the file descriptor of the terminal as well as the PID of the ganga session.
    :param args: str - str of arguments to provide to ganga
    :param internal_port: int
    """
    # Create child process attached to a pty that we can read from and write to
    (child_pid, fd) = pty.fork()
    if child_pid == 0:
        # This is the child process fork. Anything printed here will show up in the pty, including the output of this subprocess
        # WEB_CLI/INTERNAL_PORT tell the ganga process it is driven by this GUI
        ganga_env = os.environ.copy()
        ganga_env["WEB_CLI"] = "True"
        ganga_env["INTERNAL_PORT"] = str(internal_port)
        subprocess.run(f"ganga --webgui {args}", shell=True, env=ganga_env)
    else:
        # This is the parent process fork. Store fd (connected to the child’s controlling terminal) and child pid
        # for later use by the pty read/write/resize handlers
        gui.config["FD"] = fd
        gui.config["CHILD_PID"] = child_pid
        set_windowsize(fd, 50, 50)
        print("Ganga started, PID: ", child_pid)
# Set the window size of the pseudo terminal according to the size in the frontend
def set_windowsize(fd, row, col, xpix=0, ypix=0):
    """Resize the pty behind `fd` to row x col via the TIOCSWINSZ ioctl."""
    # struct winsize is four unsigned shorts: rows, cols, xpixel, ypixel
    winsize = struct.pack("HHHH", row, col, xpix, ypix)
    fcntl.ioctl(fd, termios.TIOCSWINSZ, winsize)
# Read and forward that data from the pseudo terminal to the frontend
def read_and_forward_pty_output():
    """Background task: poll the pty and emit its output over socketio forever."""
    max_read_bytes = 1024 * 20
    while True:
        # Yield to the socketio event loop between polls
        socketio.sleep(0.01)
        if gui.config["FD"]:
            # Non-blocking readiness check (timeout 0) so the loop never stalls
            timeout_sec = 0
            (data_ready, _, _) = select.select([gui.config["FD"]], [], [], timeout_sec)
            if data_ready:
                output = os.read(gui.config["FD"], max_read_bytes).decode()
                socketio.emit("pty-output", {"output": output}, namespace="/pty")
def start_web_cli(host: str, port: int, internal_port: int, log_output=True, ganga_args: str = ""):
    """
    Serve the GUI (including the web terminal) on eventlet at the given
    host/port — the production-ready server.

    :param ganga_args: str - arguments to be passed to ganga
    :param host: str
    :param port: int
    :param internal_port: int
    """
    from GangaGUI.start import create_default_user

    # Ensure a default GUI account exists and report its credentials
    gui_user, gui_password = create_default_user()
    print(f"Starting the GUI server on http://{host}:{port}")
    print(f"You login information for the GUI is: Username: {gui_user.user} Password: {gui_password}")
    # Record the runtime configuration before starting the server
    gui.config["INTERNAL_PORT"] = internal_port
    gui.config["WEB_CLI"] = True
    gui.config["GANGA_ARGS"] = ganga_args
    socketio.run(gui, host=host, port=port, log_output=log_output)  # TODO
# ******************** Shutdown Function ******************** #

# Route used to shutdown the Internal API server and GUI server
@gui.route("/shutdown", methods=["GET"])
def shutdown():
    """Ask the internal API server to shut down, then report success."""
    # Self-shutdown is disabled in WEB CLI mode
    if gui.config["WEB_CLI"] is True:
        flash("WEB CLI Mode is on, cannot self shutdown server. Consider doing manually.", "warning")
        return redirect(url_for("dashboard"))
    try:
        query_internal_api("/shutdown", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return "GUI Shutdown Successful."

# ******************** EOF ******************** #
| path_disclosure | {
"code": [
"from werkzeug.utils import secure_filename",
" abs_path = os.path.join(job_base_dir, path)"
],
"line_no": [
15,
659
]
} | {
"code": [
"from werkzeug.utils import secure_filename, safe_join",
" abs_path = safe_join(job_base_dir, path)"
],
"line_no": [
15,
659
]
} | import os
import jwt
import json
import requests
import time
import select
import termios
import struct
import .fcntl
import subprocess
import pty
import sys
import datetime
from functools import wraps
from werkzeug.utils import secure_filename
from werkzeug.security import generate_password_hash, check_password_hash
from flask import Flask, request, jsonify, render_template, flash, redirect, url_for, VAR_84, send_file, make_response
from flask_login import .login_user, login_required, logout_user, current_user, UserMixin
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_socketio import SocketIO
from GangaGUI.gui.config import Config
VAR_0 = Flask(__name__)
VAR_0.config.from_object(Config)
VAR_1 = SQLAlchemy(VAR_0)
VAR_2 = LoginManager(VAR_0)
VAR_2.login_view = "login"
VAR_2.login_message = "Please Login to Access this Page."
VAR_2.login_message_category = "warning"
VAR_3 = SocketIO(VAR_0)
class CLASS_0(UserMixin, VAR_1.Model):
__tablename__ = "users"
VAR_35 = VAR_1.Column(VAR_1.Integer, primary_key=True)
VAR_36 = VAR_1.Column(VAR_1.String(64), unique=True)
VAR_37 = VAR_1.Column(VAR_1.String(32), unique=True)
VAR_38 = VAR_1.Column(VAR_1.String(64))
VAR_39 = VAR_1.Column(VAR_1.String(32))
VAR_40 = VAR_1.Column(VAR_1.Text)
def FUNC_66(self, VAR_41: str):
self.password_hash = generate_password_hash(VAR_41)
def FUNC_67(self, VAR_41: str) -> bool:
return check_password_hash(self.password_hash, VAR_41)
def FUNC_68(self, VAR_42: int = 5) -> str:
return jwt.encode(
{"public_id": self.public_id, "exp": datetime.datetime.utcnow() + datetime.timedelta(days=VAR_42)},
VAR_0.config["SECRET_KEY"], algorithm="HS256")
def __repr__(self):
return "User {}: {} (Public ID: {}, Role: {})".format(self.id, self.user, self.public_id, self.role)
@VAR_2.user_loader
def FUNC_0(VAR_4):
return CLASS_0.query.get(int(VAR_4))
VAR_5 = {
"new": "info",
"completed": "success",
"completed_frozen" : "success",
"failed": "danger",
"failed_frozen" : "danger",
"running": "primary",
"submitted": "secondary",
"killed": "warning"
}
VAR_6 = {"txt", "py"}
actions = {}
VAR_8 = {}
@VAR_0.before_first_request
def FUNC_1():
global VAR_7, VAR_8
if VAR_0.config['WEB_CLI'] is True:
FUNC_61(VAR_0.config['INTERNAL_PORT'], VAR_25=VAR_0.config["GANGA_ARGS"])
VAR_84["WEB_CLI"] = True
elif VAR_0.config['INTERNAL_PORT'] is None:
VAR_0.config['INTERNAL_PORT'] = os.environ['INTERNAL_PORT']
if current_user.is_authenticated:
logout_user()
FUNC_59()
if not FUNC_60():
print("INTERNAL SERVER UNAVAILABLE, TERMINATING...")
sys.exit(1)
try:
VAR_7 = FUNC_58("/internal/jobs/actions", "get")
VAR_8 = FUNC_58("/internal/plugins", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
@VAR_0.route("/login", methods=["GET", "POST"])
def VAR_2():
if current_user.is_authenticated:
return redirect(url_for("dashboard"))
if request.method == "POST":
VAR_85 = request.form.get("username")
VAR_41 = request.form.get("password")
VAR_37 = CLASS_0.query.filter_by(VAR_37=VAR_85).first()
if VAR_37 and VAR_37.verify_password(VAR_41):
login_user(VAR_37, True)
flash("Login successful", "success")
return redirect(url_for("dashboard"))
flash("Error identifying the user", "danger")
VAR_43 = CLASS_0.query.all()
return render_template("login.html", title="Login", VAR_43=users)
@VAR_0.route("/logout", methods=["GET"])
def FUNC_3():
if current_user.is_authenticated:
logout_user()
return redirect(url_for("login"))
@VAR_0.route("/")
@login_required
def FUNC_4():
VAR_44 = {}
VAR_45 = []
VAR_46 = []
try:
VAR_44 = FUNC_58("/internal/jobs/statistics", "get")
VAR_45 = FUNC_58("/internal/jobs/recent", "get")
VAR_72 = current_user
VAR_46 = FUNC_58("/internal/jobs", "get", VAR_73={
"ids": VAR_72.pinned_jobs if VAR_72.pinned_jobs is not None else json.dumps([]),
"auto-validate-ids": True})
except Exception as err:
flash(str(err), "danger")
return render_template("dashboard.html",
title="Dashboard",
VAR_44=quick_statistics,
VAR_45=recent_jobs_info,
VAR_46=pinned_jobs_info,
VAR_5=status_color)
@VAR_0.route("/config", methods=["GET", "POST"])
@login_required
def FUNC_5():
VAR_47 = []
VAR_48 = []
VAR_18 = None
if request.method == "POST":
VAR_18 = request.form.get("section")
VAR_18 = None if VAR_18 in ["", None] else VAR_18
try:
VAR_47 = FUNC_58("/internal/config", "get")
VAR_48 = VAR_47 if VAR_18 is None else [s for s in VAR_47 if s["name"] == VAR_18]
except Exception as err:
flash(str(err), "danger")
return render_template("config.html", title="Config", VAR_47=full_config_info, VAR_48=config_info)
@VAR_0.route("/config_edit",methods=["GET", "POST"])
@login_required
def FUNC_6():
VAR_49 = VAR_0.config["GANGA_RC"]
with open(VAR_49, "rt") as VAR_14:
VAR_86 = VAR_14.read()
if request.method == 'POST':
VAR_87 = request.form['config-data']
with open(VAR_49, 'w') as f1:
f1.write(str(VAR_87))
flash(".gangarc Edited", "success")
with open(VAR_49, "rt") as f2:
VAR_86 = f2.read()
return render_template("config_edit.html", title="Edit gangarc", VAR_86=ganga_config)
@login_required
@VAR_0.route("/create", methods=["GET", "POST"])
def FUNC_7():
if request.method == "POST":
if "loadfile" in request.files:
VAR_119 = request.files["loadfile"]
if VAR_119.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
if VAR_119 and FUNC_57(VAR_119.filename):
VAR_130 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "loadfile.txt")
VAR_119.save(VAR_130)
try:
VAR_98 = FUNC_58("/internal/load", "get", VAR_73={"path": VAR_130})
except Exception as err:
flash(str(err), "danger")
return redirect(request.url)
flash(VAR_98.get("message"), "success")
return redirect(request.url)
if "runfile" in request.files:
VAR_120 = request.files["runfile"]
if VAR_120.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
if VAR_120 and FUNC_57(VAR_120.filename):
VAR_130 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "runfile.py")
VAR_120.save(VAR_130)
try:
VAR_98 = FUNC_58("/internal/runfile", "get", VAR_73={"path": VAR_130})
except Exception as err:
flash(str(err), "danger")
return redirect(request.url)
flash(VAR_98.get("message"), "success")
return redirect(request.url)
flash("No file, retry!", "warning")
return redirect(request.url)
try:
VAR_88 = FUNC_58("/internal/templates", "get",
VAR_73={"recent": True, "length": "6"})
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("create.html", title="Create", VAR_88=templates_info)
@VAR_0.route("/create/runfile", methods=["GET", "POST"])
@login_required
def FUNC_8():
VAR_50 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "runfile.py")
if request.method == "POST":
VAR_89 = request.form.get("runfile-data")
with open(VAR_50, "w+") as VAR_14:
f.write(VAR_89)
try:
VAR_98 = FUNC_58("/internal/runfile", "get", VAR_73={"path": VAR_50})
flash(VAR_98["message"], "success")
except Exception as err:
flash(str(err), "danger")
return redirect(request.url)
return render_template("runfile.html", title="Runfile")
@VAR_0.route("/templates", methods=["GET", "POST"])
@login_required
def FUNC_9():
if request.method == "POST":
session["templates_per_page"] = int(request.form.get("templates-per-page"))
VAR_84["templates_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["application", "backend"], ["template-application", "template-backend"])}
VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
VAR_52 = VAR_84["templates_per_page"]
try:
VAR_90 = FUNC_58("/internal/templates/length", "get", VAR_73=VAR_84["templates_filter"])
VAR_91 = (int(VAR_90) // int(VAR_52)) + 1
if VAR_51 >= VAR_91:
return redirect(url_for("templates_page", page=VAR_91 - 1))
params = VAR_84["templates_filter"].copy()
VAR_73.update({
"recent": True,
"length": VAR_52,
"offset": VAR_51
})
VAR_88 = FUNC_58("/internal/templates", "get", VAR_73=params)
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("create_page"))
return render_template("templates.html",
title="Templates",
VAR_91=number_of_pages,
VAR_51=current_page,
backends=VAR_8["backends"],
applications=VAR_8["applications"],
VAR_88=templates_info)
@VAR_0.route("/jobs", methods=["GET", "POST"])
@login_required
def FUNC_10():
if request.method == "POST":
session["jobs_per_page"] = int(request.form.get("jobs-per-page"))
VAR_84["jobs_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["status", "application", "backend"],
["job-status", "job-application", "job-backend"])}
VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
VAR_53 = VAR_84["jobs_per_page"]
try:
VAR_92 = FUNC_58("/internal/jobs/length", "get", VAR_73=VAR_84["jobs_filter"])
VAR_91 = (int(VAR_92) // int(VAR_53)) + 1
if VAR_51 >= VAR_91:
return redirect(url_for("jobs_page", page=VAR_91 - 1))
params = VAR_84["jobs_filter"].copy()
VAR_73.update({
"recent": True,
"length": VAR_53,
"offset": VAR_51
})
VAR_93 = FUNC_58("/internal/jobs", "get", VAR_73=params)
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("jobs.html",
title="Jobs",
VAR_93=jobs_info,
backends=VAR_8["backends"],
applications=VAR_8["applications"],
VAR_91=number_of_pages,
VAR_51=current_page,
VAR_5=status_color)
@VAR_0.route('/jobs/<int:VAR_9>')
@login_required
def FUNC_11(VAR_9: int):
VAR_54 = None
VAR_55 = None
try:
VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/full-print", "get")
VAR_96 = os.path.join(VAR_94["outputdir"], "stdout")
VAR_97 = os.path.join(VAR_94["outputdir"], "stderr")
if os.path.exists(VAR_96):
with open(VAR_96) as VAR_14:
VAR_54 = VAR_14.read()
if os.path.exists(VAR_97):
with open(VAR_97) as VAR_14:
VAR_55 = VAR_14.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("jobs_page"))
return render_template("job.html",
title=f"Job {VAR_9}",
VAR_94=job_info,
VAR_5=status_color,
attribute_actions=VAR_7.get("attributes"),
method_actions=VAR_7.get("methods"),
VAR_54=stdout,
VAR_55=stderr,
VAR_95=full_print_info)
@VAR_0.route("/jobs/<int:VAR_9>/export")
@login_required
def FUNC_12(VAR_9: int):
VAR_56 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], f"export.txt")
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/export", "get", VAR_73={"path": VAR_56})
return send_file(VAR_56, as_attachment=True, cache_timeout=0, attachment_filename=f"Job_{VAR_9}.txt")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("job_page", VAR_9=job_id))
@VAR_0.route("/jobs/<int:VAR_9>/edit", methods=["GET", "POST"])
@login_required
def FUNC_13(VAR_9: int):
VAR_57 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "loadfile.txt")
VAR_56 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "export.txt")
if request.method == "POST":
VAR_99 = request.form.get("edited-job-info")
with open(VAR_57, "w+") as VAR_14:
f.write(VAR_99)
try:
VAR_98 = FUNC_58("/internal/load", "get", VAR_73={"path": VAR_57})
flash(VAR_98["message"], "success")
except Exception as err:
flash(str(err), "danger")
return redirect(request.url)
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/export", "get", VAR_73={"path": VAR_56})
with open(VAR_56) as VAR_14:
VAR_121 = VAR_14.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("job_page", VAR_9=job_id))
return render_template("edit_job.html", title=f"Edit Job {VAR_9}", VAR_9=job_id, VAR_121=exported_data)
@VAR_0.route("/job/<int:VAR_9>/browse", defaults={"path": ""})
@VAR_0.route("/job/<int:VAR_9>/browse/<VAR_10:path>")
@login_required
def FUNC_14(VAR_9: int, VAR_10):
try:
VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
VAR_100 = os.path.dirname(os.path.dirname(VAR_94["outputdir"]))
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("job_page", VAR_9=job_id))
VAR_58 = os.path.join(VAR_100, VAR_10)
VAR_59 = os.path.dirname(VAR_58).replace(VAR_100, "")
if not os.path.exists(VAR_58):
flash("Directory for this job does not exist.", "warning")
return redirect(url_for("job_page", VAR_9=job_id))
if os.path.isfile(VAR_58):
return send_file(VAR_58)
VAR_60 = []
VAR_61 = os.listdir(VAR_58)
for file in VAR_61:
VAR_60.append({
"file": file,
"directory": os.path.isdir(os.path.join(VAR_58, file))
})
return render_template('job_dir.html', title=f"Job {VAR_9} Directory",
VAR_9=job_id,
VAR_58=abs_path,
VAR_60=files_info,
VAR_59=back_path)
@VAR_0.route("/jobs/<int:VAR_9>/subjobs", methods=["GET", "POST"])
@login_required
def FUNC_15(VAR_9: int):
if request.method == "POST":
session["subjobs_per_page"] = int(request.form.get("subjobs-per-page"))
VAR_84["subjobs_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["status", "application", "backend"],
["subjob-status", "subjob-application", "subjob-backend"])}
VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
VAR_62 = VAR_84["subjobs_per_page"]
try:
VAR_101 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/length", "get",
VAR_73=VAR_84["subjobs_filter"])
VAR_91 = (int(VAR_101) // int(VAR_62)) + 1
if VAR_51 >= VAR_91:
return redirect(url_for("subjobs_page", page=VAR_91 - 1, VAR_9=job_id))
params = VAR_84["subjobs_filter"].copy()
VAR_73.update({
"recent": True,
"length": VAR_62,
"offset": VAR_51
})
VAR_102 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs", "get", VAR_73=params)
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("job_page", VAR_9=job_id))
return render_template("subjobs.html",
title=f"Subjobs - Job {VAR_9}",
VAR_5=status_color,
VAR_91=number_of_pages,
VAR_51=current_page,
backends=VAR_8["backends"],
applications=VAR_8["applications"],
VAR_102=subjobs_info,
VAR_9=job_id)
@VAR_0.route("/jobs/<int:VAR_9>/subjobs/<int:VAR_11>", methods=["GET"])
@login_required
def FUNC_16(VAR_9: int, VAR_11: int):
VAR_54 = None
VAR_55 = None
try:
VAR_103 = FUNC_58(f"/internal/jobs/{VAR_9}/outputdir", "get")
VAR_104 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}", "get")
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/full-print", "get")
VAR_105 = os.path.dirname(os.path.dirname(VAR_103["outputdir"]))
VAR_106 = os.path.dirname(os.path.dirname(VAR_104["outputdir"]))
VAR_107 = VAR_106.replace(VAR_105, "")
VAR_96 = os.path.join(VAR_104["outputdir"], "stdout")
VAR_97 = os.path.join(VAR_104["outputdir"], "stderr")
if os.path.exists(VAR_96):
with open(VAR_96) as VAR_14:
VAR_54 = VAR_14.read()
if os.path.exists(VAR_97):
with open(VAR_97) as VAR_14:
VAR_55 = VAR_14.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("subjobs_page", VAR_9=job_id))
return render_template("subjob.html",
title=f"Subjob {VAR_11} - Job {VAR_9}",
VAR_104=subjob_info,
VAR_5=status_color,
attribute_actions=VAR_7["attributes"],
method_actions=VAR_7["methods"],
VAR_54=stdout,
VAR_55=stderr,
VAR_95=full_print_info,
VAR_9=job_id,
VAR_107=browse_path)
@VAR_0.route("/credentials")
@login_required
def FUNC_17():
try:
VAR_108 = FUNC_58("/internal/credentials", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('credentials.html', credential_info_list=VAR_108)
@VAR_0.route("/queue", methods=["GET"])
@login_required
def FUNC_18():
try:
VAR_109 = FUNC_58("/internal/queue", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('queue.html', queue_info_list=VAR_109)
@VAR_0.route('/plugins')
@login_required
def FUNC_19():
try:
VAR_110 = VAR_8
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('plugins.html', VAR_110=plugins_info)
@VAR_0.route("/plugin/<VAR_12>")
@login_required
def FUNC_20(VAR_12: str):
try:
VAR_111 = FUNC_58(f"/internal/VAR_8/{VAR_12}", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("plugins_page"))
return render_template("plugin.html", title=f"{VAR_12}", VAR_111=plugin_info)
@VAR_0.route("/logs")
@login_required
def FUNC_21():
VAR_63 = VAR_0.config["GANGA_LOG"]
VAR_64 = VAR_0.config["ACCESS_LOG"]
VAR_65 = VAR_0.config["ERROR_LOG"]
try:
with open(VAR_63, "rt") as VAR_14:
VAR_122 = VAR_14.read()
with open(VAR_64, "rt") as VAR_14:
VAR_123 = VAR_14.read()
with open(VAR_65, "rt") as VAR_14:
VAR_124 = VAR_14.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("logs.html", title="Logs", VAR_122=ganga_log_data,
VAR_123=gui_accesslog_data, VAR_124=gui_errorlog_data)
@VAR_0.route("/storage", defaults={"path": ""}, methods=["GET", "POST"])
@VAR_0.route("/storage/<VAR_10:path>", methods=["GET", "POST"])
@login_required
def FUNC_22(VAR_10):
VAR_66 = VAR_0.config["STORAGE_FOLDER"]
VAR_58 = os.path.join(VAR_66, VAR_10)
if request.method == "POST":
if "storagefile" in request.files:
VAR_125 = request.files["storagefile"]
if VAR_125.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
if VAR_125:
if not os.path.isdir(VAR_58):
flash("Error while uploading the file", "danger")
return redirect(request.url)
VAR_20 = secure_filename(VAR_125.filename)
VAR_130 = os.path.join(VAR_58, VAR_20)
VAR_125.save(VAR_130)
flash("Successfully uploaded the file.", "success")
return redirect(request.url)
flash("No file, retry!", "warning")
return redirect(request.url)
VAR_59 = os.path.dirname(VAR_58).replace(VAR_66, "")
if not os.path.exists(VAR_58):
flash("Directory does not exist.", "warning")
return redirect(url_for("dashboard"))
if os.path.isfile(VAR_58):
return send_file(VAR_58)
VAR_60 = []
VAR_61 = os.listdir(VAR_58)
for file in VAR_61:
VAR_60.append({
"file": file,
"directory": os.path.isdir(os.path.join(VAR_58, file))
})
return render_template("storage.html", title="Storage",
VAR_58=abs_path,
VAR_60=files_info,
VAR_59=back_path)
@VAR_0.route("/cli")
@login_required
def FUNC_23():
return render_template("cli.html")
@VAR_3.on("connect", namespace="/pty")
def FUNC_24():
if VAR_0.config["CHILD_PID"] and current_user.is_authenticated:
VAR_3.start_background_task(target=FUNC_63)
return
@VAR_3.on("pty-input", namespace="/pty")
def FUNC_25(VAR_13):
if VAR_0.config["FD"] and current_user.is_authenticated:
os.write(VAR_0.config["FD"], VAR_13["input"].encode())
@VAR_3.on("resize", namespace="/pty")
def FUNC_26(VAR_13):
if VAR_0.config["FD"] and current_user.is_authenticated:
FUNC_62(VAR_0.config["FD"], VAR_13["rows"], VAR_13["cols"])
@VAR_0.route("/token", methods=["POST"])
def FUNC_27():
VAR_67 = request.json if request.json else {}
VAR_68 = VAR_67.get("username")
VAR_69 = VAR_67.get("password")
if not VAR_68 or not VAR_69:
VAR_70 = {"success": False, "message": "Could not verify VAR_37."}
return jsonify(VAR_70), 401
VAR_37 = CLASS_0.query.filter_by(VAR_37=VAR_68).first()
if VAR_37 and VAR_37.verify_password(VAR_69):
VAR_112 = VAR_37.generate_auth_token().decode("UTF-8")
VAR_70 = {"token": VAR_112}
return jsonify(VAR_70)
VAR_70 = {"success": False, "message": "Could not verify VAR_37."}
return jsonify(VAR_70), 401
def FUNC_28(VAR_14):
@wraps(VAR_14)
def FUNC_69(*VAR_25, **VAR_23):
VAR_112 = None
if "X-Access-Token" in request.headers:
VAR_112 = request.headers["X-Access-Token"]
if not VAR_112:
return jsonify({"success": False, "message": "Token is missing"}), 401
try:
VAR_13 = jwt.decode(VAR_112, VAR_0.config["SECRET_KEY"], algorithms=["HS256"])
VAR_15 = CLASS_0.query.filter_by(VAR_36=VAR_13["public_id"]).first()
if VAR_15 is None:
return jsonify({"success": False, "message": "Token is old. Please renew"}), 401
except jwt.ExpiredSignatureError:
return jsonify({"success": False, "message": "Token is expired"}), 401
except jwt.InvalidTokenError:
return jsonify({"success": False, "message": "Token is invalid"}), 401
except:
return jsonify({"success": False, "message": "Could not verify token"}), 401
return VAR_14(VAR_15, *VAR_25, **VAR_23)
return FUNC_69
@VAR_0.route("/api/jobs/<int:VAR_9>", methods=["GET"])
@FUNC_28
def FUNC_29(VAR_15, VAR_9: int):
try:
VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_94)
@VAR_0.route("/api/jobs/<int:VAR_9>/<VAR_16>", methods=["GET"])
@FUNC_28
def FUNC_30(VAR_15, VAR_9: int, VAR_16: str):
try:
VAR_113 = FUNC_58(f"/internal/jobs/{VAR_9}/{VAR_16}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_113)
@VAR_0.route("/api/jobs/<int:VAR_9>/full-print", methods=["GET"])
@FUNC_28
def FUNC_31(VAR_15, VAR_9: int):
try:
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_95)
@VAR_0.route("/api/jobs/create", methods=["POST"])
@FUNC_28
def FUNC_32(VAR_15):
VAR_13 = {
"template_id": request.json.get("template_id"),
"job_name": request.json.get("job_name")
}
try:
VAR_98 = FUNC_58("/internal/jobs/create", "post", json=VAR_13)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>/copy", methods=["PUT"])
@FUNC_28
def FUNC_33(VAR_15, VAR_9: int):
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/copy", "put")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>/<VAR_17>", methods=["PUT"])
@FUNC_28
def FUNC_34(VAR_15, VAR_9: int, VAR_17: str):
VAR_71 = request.json
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/{VAR_17}", "put", json=VAR_71)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>", methods=["DELETE"])
@FUNC_28
def FUNC_35(VAR_15, VAR_9: int):
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}", "delete")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>/pin", methods=["PUT"])
@FUNC_28
def FUNC_36(VAR_15, VAR_9: int):
VAR_72 = current_user
VAR_40 = json.loads(VAR_72.pinned_jobs) if VAR_72.pinned_jobs is not None else []
if VAR_9 not in VAR_40:
pinned_jobs.append(VAR_9)
VAR_72.pinned_jobs = json.dumps(VAR_40)
VAR_1.session.add(VAR_72)
VAR_1.session.commit()
return jsonify({"success": True, "message": f"Successfully pinned Job (ID={VAR_9})."})
@VAR_0.route("/api/jobs/<int:VAR_9>/unpin", methods=["PUT"])
@FUNC_28
def FUNC_37(VAR_15, VAR_9: int):
VAR_72 = current_user
VAR_40 = json.loads(VAR_72.pinned_jobs) if VAR_72.pinned_jobs is not None else []
if VAR_9 in VAR_40:
pinned_jobs.remove(VAR_9)
VAR_72.pinned_jobs = json.dumps(VAR_40)
VAR_1.session.add(VAR_72)
VAR_1.session.commit()
return jsonify({"success": True, "message": f"Successfully unpinned Job (ID={VAR_9})."})
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs", methods=["GET"])
@FUNC_28
def FUNC_38(VAR_15, VAR_9: int):
VAR_73 = {
"ids": request.args.get("ids"),
"status": request.args.get("status"),
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset")
}
try:
VAR_102 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs", "get", VAR_73=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_102)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>", methods=["GET"])
@FUNC_28
def FUNC_39(VAR_15, VAR_9: int, VAR_11: int):
try:
VAR_104 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_104)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/<VAR_16>", methods=["GET"])
@FUNC_28
def FUNC_40(VAR_15, VAR_9: int, VAR_11: int, VAR_16: str):
try:
VAR_114 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/{VAR_16}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_114)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/full-print", methods=["GET"])
@FUNC_28
def FUNC_41(VAR_15, VAR_9: int, VAR_11: int):
try:
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_95)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/copy", methods=["PUT"])
@FUNC_28
def FUNC_42(VAR_15, VAR_9: int, VAR_11: int):
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/copy", "put")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs", methods=["GET"])
@FUNC_28
def FUNC_43(VAR_15):
VAR_73 = {
"ids": request.args.get("ids"),
"status": request.args.get("status"),
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset"),
"auto-validate-ids": request.args.get("auto-validate-ids")
}
try:
VAR_93 = FUNC_58(f"/internal/jobs", "get", VAR_73=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_93)
@VAR_0.route("/api/jobs/statistics", methods=["GET"])
@FUNC_28
def FUNC_44(VAR_15):
try:
VAR_115 = FUNC_58("/internal/jobs/statistics", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_115)
@VAR_0.route("/api/queue", methods=["GET"])
@FUNC_28
def FUNC_45(VAR_15):
try:
VAR_109 = FUNC_58("/internal/queue", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_109)
@VAR_0.route("/api/queue/chart", methods=["GET","POST"])
def FUNC_46():
VAR_74 = FUNC_58("/internal/queue/data", "get")
VAR_75 = make_response(json.dumps(VAR_74))
VAR_75.content_type = 'application/json'
return VAR_75
@VAR_0.route("/api/jobs/incomplete_ids", methods=["GET"])
@FUNC_28
def FUNC_47(VAR_15):
try:
VAR_116 = FUNC_58("/internal/jobs/incomplete-ids", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_116)
@VAR_0.route("/api/config", methods=["GET"], defaults={"section": ""})
@VAR_0.route("/api/config/<VAR_18>", methods=["GET"])
@FUNC_28
def FUNC_48(VAR_15, VAR_18: str):
try:
if VAR_18 != "":
VAR_48 = FUNC_58(f"/internal/config/{VAR_18}", "get")
else:
VAR_48 = FUNC_58("/internal/config", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_48)
@VAR_0.route("/api/templates", methods=["GET"])
@FUNC_28
def FUNC_49(VAR_15):
VAR_73 = {
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset"),
}
try:
VAR_88 = FUNC_58("/internal/templates", "get", VAR_73=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_88)
@VAR_0.route("/api/templates/<int:VAR_19>/full-print", methods=["GET"])
@FUNC_28
def FUNC_50(VAR_15, VAR_19: int):
try:
VAR_95 = FUNC_58(f"/internal/templates/{VAR_19}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_95)
@VAR_0.route("/api/templates/<int:VAR_19>", methods=["DELETE"])
@FUNC_28
def FUNC_51(VAR_15, VAR_19: int):
try:
VAR_98 = FUNC_58(f"/internal/templates/{VAR_19}", "delete")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/credentials", methods=["GET"])
@FUNC_28
def FUNC_52(VAR_15):
try:
VAR_108 = FUNC_58("/internal/credentials", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_108)
@VAR_0.route("/api/credentials/renew", methods=["PUT"])
@FUNC_28
def FUNC_53(VAR_15):
try:
VAR_98 = FUNC_58("/internal/credentials/renew", "put")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobtree", methods=["GET"])
@FUNC_28
def FUNC_54(VAR_15):
try:
VAR_117 = FUNC_58("/internal/jobtree", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_117)
@VAR_0.route("/api/plugins", methods=["GET"])
@FUNC_28
def FUNC_55(VAR_15):
try:
VAR_110 = FUNC_58("/internal/plugins", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_110)
@VAR_0.route("/api/VAR_8/<VAR_12>", methods=["GET"])
@FUNC_28
def FUNC_56(VAR_15, VAR_12: str):
try:
VAR_111 = FUNC_58(f"/internal/VAR_8/{VAR_12}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_111)
def FUNC_57(VAR_20):
return "." in VAR_20 and VAR_20.rsplit(".", 1)[1].lower() in VAR_6
def FUNC_58(VAR_21: str, VAR_22: str, **VAR_23):
VAR_76 = f"http://localhost:{VAR_0.config['INTERNAL_PORT']}"
if VAR_22 not in ["get", "post", "put", "delete"]:
raise Exception(f"Unsupported VAR_22: {method}")
VAR_77 = getattr(requests, VAR_22)(VAR_76 + VAR_21, **VAR_23)
if VAR_77.status_code != 200:
raise Exception(VAR_77.json().get("message"))
return VAR_77.json()
def FUNC_59():
if "templates_per_page" not in VAR_84:
session["templates_per_page"] = 10
if "templates_filter" not in VAR_84:
session["templates_filter"] = {key: "any" for key in ["application", "backend"]}
if "jobs_per_page" not in VAR_84:
session["jobs_per_page"] = 10
if "jobs_filter" not in VAR_84:
session["jobs_filter"] = {key: "any" for key in ["status", "application", "backend"]}
if "subjobs_per_page" not in VAR_84:
session["subjobs_per_page"] = 10
if "subjobs_filter" not in VAR_84:
session["subjobs_filter"] = {key: "any" for key in ["status", "application", "backend"]}
def FUNC_60():
VAR_78 = 0
while True:
try:
VAR_126 = FUNC_58("/ping", "get")
if VAR_126 is True:
return True
except:
time.sleep(2)
print("Internal API server not online (mostly because Ganga is booting up), retrying...")
VAR_78 += 1
if VAR_78 > 20:
return False
def FUNC_61(VAR_24: int, VAR_25: str = ""):
(VAR_79, VAR_26) = pty.fork()
if VAR_79 == 0:
VAR_118 = os.environ.copy()
VAR_118["WEB_CLI"] = "True"
VAR_118["INTERNAL_PORT"] = str(VAR_24)
subprocess.run(f"ganga --webgui {VAR_25}", shell=True, env=VAR_118)
else:
VAR_0.config["FD"] = VAR_26
VAR_0.config["CHILD_PID"] = VAR_79
FUNC_62(VAR_26, 50, 50)
print("Ganga started, PID: ", VAR_79)
def FUNC_62(VAR_26, VAR_27, VAR_28, VAR_29=0, VAR_30=0):
VAR_80 = struct.pack("HHHH", VAR_27, VAR_28, VAR_29, VAR_30)
fcntl.ioctl(VAR_26, termios.TIOCSWINSZ, VAR_80)
def FUNC_63():
VAR_81 = 1024 * 20
while True:
VAR_3.sleep(0.01)
if VAR_0.config["FD"]:
VAR_127 = 0
(VAR_128, VAR_129, _) = select.select([VAR_0.config["FD"]], [], [], VAR_127)
if VAR_128:
VAR_131 = os.read(VAR_0.config["FD"], VAR_81).decode()
VAR_3.emit("pty-output", {"output": VAR_131}, namespace="/pty")
def FUNC_64(VAR_31: str, VAR_32: int, VAR_24: int, VAR_33=True, VAR_34: str = ""):
from GangaGUI.start import create_default_user
VAR_82, VAR_83 = create_default_user()
print(f"Starting the GUI server on http://{VAR_31}:{VAR_32}")
print(f"You VAR_2 information for the GUI is: Username: {VAR_82.user} Password: {VAR_83}")
VAR_0.config["INTERNAL_PORT"] = VAR_24
VAR_0.config["WEB_CLI"] = True
VAR_0.config["GANGA_ARGS"] = VAR_34
VAR_3.run(VAR_0, VAR_31=host, VAR_32=port, VAR_33=log_output) # TODO
@VAR_0.route("/shutdown", methods=["GET"])
def FUNC_65():
if VAR_0.config["WEB_CLI"] is True:
flash("WEB CLI Mode is on, cannot self FUNC_65 server. Consider doing manually.", "warning")
return redirect(url_for("dashboard"))
try:
VAR_98 = FUNC_58("/shutdown", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return "GUI Shutdown Successful."
# Standard library
import datetime
import fcntl
import json
import os
import pty
import select
import struct
import subprocess
import sys
import time
from functools import wraps

# Third-party
import jwt
import requests
from flask import Flask, request, jsonify, render_template, flash, redirect, url_for, VAR_84, send_file, make_response
from flask_login import login_user, login_required, logout_user, current_user, UserMixin
from flask_login import LoginManager
from flask_socketio import SocketIO
from flask_sqlalchemy import SQLAlchemy
from werkzeug.security import generate_password_hash, check_password_hash
from werkzeug.utils import secure_filename, safe_join

# Local application
from GangaGUI.gui.config import Config
# Module-wide singletons: Flask app, ORM, login manager and socket.io server.
VAR_0 = Flask(__name__)
VAR_0.config.from_object(Config)
VAR_1 = SQLAlchemy(VAR_0)
VAR_2 = LoginManager(VAR_0)
VAR_2.login_view = "login"  # endpoint unauthenticated users are redirected to
VAR_2.login_message = "Please Login to Access this Page."
VAR_2.login_message_category = "warning"
VAR_3 = SocketIO(VAR_0)
class CLASS_0(UserMixin, VAR_1.Model):
    """GUI user account: login credentials, role, and pinned-job list.

    NOTE(review): the methods read attributes (id, user, role, public_id,
    password_hash, pinned_jobs) while the columns are declared under the
    obfuscated names VAR_35..VAR_40 — confirm the name mapping before
    changing this model.
    """

    __tablename__ = "users"
    VAR_35 = VAR_1.Column(VAR_1.Integer, primary_key=True)
    VAR_36 = VAR_1.Column(VAR_1.String(64), unique=True)
    VAR_37 = VAR_1.Column(VAR_1.String(32), unique=True)
    VAR_38 = VAR_1.Column(VAR_1.String(64))
    VAR_39 = VAR_1.Column(VAR_1.String(32))
    VAR_40 = VAR_1.Column(VAR_1.Text)

    def FUNC_66(self, VAR_41: str):
        """Store a salted hash of the plaintext password VAR_41."""
        self.password_hash = generate_password_hash(VAR_41)

    def FUNC_67(self, VAR_41: str) -> bool:
        """Return True when plaintext VAR_41 matches the stored hash."""
        return check_password_hash(self.password_hash, VAR_41)

    def FUNC_68(self, VAR_42: int = 5) -> str:
        """Create an HS256 JWT carrying public_id, valid for VAR_42 days."""
        return jwt.encode(
            {"public_id": self.public_id, "exp": datetime.datetime.utcnow() + datetime.timedelta(days=VAR_42)},
            VAR_0.config["SECRET_KEY"], algorithm="HS256")

    def __repr__(self):
        return "User {}: {} (Public ID: {}, Role: {})".format(self.id, self.user, self.public_id, self.role)
@VAR_2.user_loader
def FUNC_0(VAR_4):
    """Flask-Login user loader: resolve a session user id to a CLASS_0 row."""
    return CLASS_0.query.get(int(VAR_4))
# Bootstrap contextual colour used by templates for each job status badge.
VAR_5 = {
    "new": "info",
    "completed": "success",
    "completed_frozen" : "success",
    "failed": "danger",
    "failed_frozen" : "danger",
    "running": "primary",
    "submitted": "secondary",
    "killed": "warning"
}
# File extensions accepted by the upload handlers (see FUNC_57).
VAR_6 = {"txt", "py"}
# Caches filled on first request from the internal Ganga API.
# NOTE(review): elsewhere this cache is declared `global VAR_7` and assigned,
# while it is created here as `actions` — looks like a rename artifact;
# confirm the intended module-level name.
actions = {}
VAR_8 = {}
@VAR_0.before_first_request
def FUNC_1():
    """One-time bootstrap run before the first request is served.

    Starts Ganga in web-CLI mode (or picks the internal API port up from the
    environment), clears any stale login, seeds session defaults, waits for
    the internal API to come online, then caches the job-action and plugin
    catalogues for the UI.
    """
    global VAR_7, VAR_8
    if VAR_0.config['WEB_CLI'] is True:
        # GUI owns the Ganga process: fork it on a pty (see FUNC_61).
        FUNC_61(VAR_0.config['INTERNAL_PORT'], VAR_25=VAR_0.config["GANGA_ARGS"])
        VAR_84["WEB_CLI"] = True
    elif VAR_0.config['INTERNAL_PORT'] is None:
        VAR_0.config['INTERNAL_PORT'] = os.environ['INTERNAL_PORT']
    if current_user.is_authenticated:
        logout_user()
    FUNC_59()
    if not FUNC_60():
        # Internal API never answered: abort the whole process.
        print("INTERNAL SERVER UNAVAILABLE, TERMINATING...")
        sys.exit(1)
    try:
        VAR_7 = FUNC_58("/internal/jobs/actions", "get")
        VAR_8 = FUNC_58("/internal/plugins", "get")
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
@VAR_0.route("/login", methods=["GET", "POST"])
def VAR_2():
    """Login view: authenticate on POST, otherwise render the login form.

    NOTE(review): this def rebinds the module-level name VAR_2 (the
    LoginManager instance), and the final render passes the undefined name
    `users` as a keyword value — both appear to be rename artifacts of an
    obfuscation pass; confirm against the original sources.
    """
    if current_user.is_authenticated:
        return redirect(url_for("dashboard"))
    if request.method == "POST":
        VAR_85 = request.form.get("username")
        VAR_41 = request.form.get("password")
        VAR_37 = CLASS_0.query.filter_by(VAR_37=VAR_85).first()
        if VAR_37 and VAR_37.verify_password(VAR_41):
            # Second arg True => "remember me" cookie.
            login_user(VAR_37, True)
            flash("Login successful", "success")
            return redirect(url_for("dashboard"))
        flash("Error identifying the user", "danger")
    VAR_43 = CLASS_0.query.all()
    return render_template("login.html", title="Login", VAR_43=users)
@VAR_0.route("/logout", methods=["GET"])
def FUNC_3():
    """Log out the active user, if any, then return to the login page."""
    logged_in = current_user.is_authenticated
    if logged_in:
        logout_user()
    return redirect(url_for("login"))
@VAR_0.route("/")
@login_required
def FUNC_4():
    """Dashboard: job statistics, recent jobs and the user's pinned jobs.

    NOTE(review): the render_template keyword values (quick_statistics,
    recent_jobs_info, pinned_jobs_info, status_color) are undefined names
    here — apparent rename artifacts of the locals VAR_44/VAR_45/VAR_46 and
    module constant VAR_5; confirm against the original sources.
    """
    VAR_44 = {}
    VAR_45 = []
    VAR_46 = []
    try:
        VAR_44 = FUNC_58("/internal/jobs/statistics", "get")
        VAR_45 = FUNC_58("/internal/jobs/recent", "get")
        VAR_72 = current_user
        # Pinned jobs are stored on the user row as a JSON list of ids.
        VAR_46 = FUNC_58("/internal/jobs", "get", VAR_73={
            "ids": VAR_72.pinned_jobs if VAR_72.pinned_jobs is not None else json.dumps([]),
            "auto-validate-ids": True})
    except Exception as err:
        flash(str(err), "danger")
    return render_template("dashboard.html",
                           title="Dashboard",
                           VAR_44=quick_statistics,
                           VAR_45=recent_jobs_info,
                           VAR_46=pinned_jobs_info,
                           VAR_5=status_color)
@VAR_0.route("/config", methods=["GET", "POST"])
@login_required
def FUNC_5():
VAR_47 = []
VAR_48 = []
VAR_18 = None
if request.method == "POST":
VAR_18 = request.form.get("section")
VAR_18 = None if VAR_18 in ["", None] else VAR_18
try:
VAR_47 = FUNC_58("/internal/config", "get")
VAR_48 = VAR_47 if VAR_18 is None else [s for s in VAR_47 if s["name"] == VAR_18]
except Exception as err:
flash(str(err), "danger")
return render_template("config.html", title="Config", VAR_47=full_config_info, VAR_48=config_info)
@VAR_0.route("/config_edit",methods=["GET", "POST"])
@login_required
def FUNC_6():
VAR_49 = VAR_0.config["GANGA_RC"]
with open(VAR_49, "rt") as VAR_14:
VAR_86 = VAR_14.read()
if request.method == 'POST':
VAR_87 = request.form['config-data']
with open(VAR_49, 'w') as f1:
f1.write(str(VAR_87))
flash(".gangarc Edited", "success")
with open(VAR_49, "rt") as f2:
VAR_86 = f2.read()
return render_template("config_edit.html", title="Edit gangarc", VAR_86=ganga_config)
@login_required
@VAR_0.route("/create", methods=["GET", "POST"])
def FUNC_7():
if request.method == "POST":
if "loadfile" in request.files:
VAR_119 = request.files["loadfile"]
if VAR_119.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
if VAR_119 and FUNC_57(VAR_119.filename):
VAR_130 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "loadfile.txt")
VAR_119.save(VAR_130)
try:
VAR_98 = FUNC_58("/internal/load", "get", VAR_73={"path": VAR_130})
except Exception as err:
flash(str(err), "danger")
return redirect(request.url)
flash(VAR_98.get("message"), "success")
return redirect(request.url)
if "runfile" in request.files:
VAR_120 = request.files["runfile"]
if VAR_120.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
if VAR_120 and FUNC_57(VAR_120.filename):
VAR_130 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "runfile.py")
VAR_120.save(VAR_130)
try:
VAR_98 = FUNC_58("/internal/runfile", "get", VAR_73={"path": VAR_130})
except Exception as err:
flash(str(err), "danger")
return redirect(request.url)
flash(VAR_98.get("message"), "success")
return redirect(request.url)
flash("No file, retry!", "warning")
return redirect(request.url)
try:
VAR_88 = FUNC_58("/internal/templates", "get",
VAR_73={"recent": True, "length": "6"})
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("create.html", title="Create", VAR_88=templates_info)
@VAR_0.route("/create/runfile", methods=["GET", "POST"])
@login_required
def FUNC_8():
    """Runfile editor: on POST save the submitted code and ask the internal
    API to run it; on GET render the editor page.

    BUG FIX: the POST branch wrote through the undefined name `f`; the file
    handle bound by the with-statement is VAR_14 (NameError at runtime).
    """
    VAR_50 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "runfile.py")
    if request.method == "POST":
        VAR_89 = request.form.get("runfile-data")
        with open(VAR_50, "w+") as VAR_14:
            VAR_14.write(VAR_89)
        try:
            VAR_98 = FUNC_58("/internal/runfile", "get", VAR_73={"path": VAR_50})
            flash(VAR_98["message"], "success")
        except Exception as err:
            flash(str(err), "danger")
        return redirect(request.url)
    return render_template("runfile.html", title="Runfile")
@VAR_0.route("/templates", methods=["GET", "POST"])
@login_required
def FUNC_9():
    """Paginated, filterable templates list.

    POST stores the page size and filters in the session; GET renders the
    requested page. BUG FIXES (rename artifacts that raise NameError):
    - writes went through the undefined name `session` instead of the flask
      session imported here as VAR_84;
    - the query dict was created as `params` but updated/passed as VAR_73;
    - render_template keyword values referenced undefined names instead of
      the locals (keywords restored to the leftover template-facing names).
    """
    if request.method == "POST":
        VAR_84["templates_per_page"] = int(request.form.get("templates-per-page"))
        VAR_84["templates_filter"] = {key: request.form.get(form_name) for key, form_name in
                                      zip(["application", "backend"], ["template-application", "template-backend"])}
    VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
    VAR_52 = VAR_84["templates_per_page"]
    try:
        VAR_90 = FUNC_58("/internal/templates/length", "get", VAR_73=VAR_84["templates_filter"])
        VAR_91 = (int(VAR_90) // int(VAR_52)) + 1
        if VAR_51 >= VAR_91:
            # Requested page past the end: clamp to the last page.
            return redirect(url_for("templates_page", page=VAR_91 - 1))
        VAR_73 = VAR_84["templates_filter"].copy()
        VAR_73.update({
            "recent": True,
            "length": VAR_52,
            "offset": VAR_51
        })
        VAR_88 = FUNC_58("/internal/templates", "get", VAR_73=VAR_73)
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("create_page"))
    return render_template("templates.html",
                           title="Templates",
                           number_of_pages=VAR_91,
                           current_page=VAR_51,
                           backends=VAR_8["backends"],
                           applications=VAR_8["applications"],
                           templates_info=VAR_88)
@VAR_0.route("/jobs", methods=["GET", "POST"])
@login_required
def FUNC_10():
if request.method == "POST":
session["jobs_per_page"] = int(request.form.get("jobs-per-page"))
VAR_84["jobs_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["status", "application", "backend"],
["job-status", "job-application", "job-backend"])}
VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
VAR_53 = VAR_84["jobs_per_page"]
try:
VAR_92 = FUNC_58("/internal/jobs/length", "get", VAR_73=VAR_84["jobs_filter"])
VAR_91 = (int(VAR_92) // int(VAR_53)) + 1
if VAR_51 >= VAR_91:
return redirect(url_for("jobs_page", page=VAR_91 - 1))
params = VAR_84["jobs_filter"].copy()
VAR_73.update({
"recent": True,
"length": VAR_53,
"offset": VAR_51
})
VAR_93 = FUNC_58("/internal/jobs", "get", VAR_73=params)
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("jobs.html",
title="Jobs",
VAR_93=jobs_info,
backends=VAR_8["backends"],
applications=VAR_8["applications"],
VAR_91=number_of_pages,
VAR_51=current_page,
VAR_5=status_color)
@VAR_0.route('/jobs/<int:VAR_9>')
@login_required
def FUNC_11(VAR_9: int):
VAR_54 = None
VAR_55 = None
try:
VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/full-print", "get")
VAR_96 = os.path.join(VAR_94["outputdir"], "stdout")
VAR_97 = os.path.join(VAR_94["outputdir"], "stderr")
if os.path.exists(VAR_96):
with open(VAR_96) as VAR_14:
VAR_54 = VAR_14.read()
if os.path.exists(VAR_97):
with open(VAR_97) as VAR_14:
VAR_55 = VAR_14.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("jobs_page"))
return render_template("job.html",
title=f"Job {VAR_9}",
VAR_94=job_info,
VAR_5=status_color,
attribute_actions=VAR_7.get("attributes"),
method_actions=VAR_7.get("methods"),
VAR_54=stdout,
VAR_55=stderr,
VAR_95=full_print_info)
@VAR_0.route("/jobs/<int:VAR_9>/export")
@login_required
def FUNC_12(VAR_9: int):
VAR_56 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], f"export.txt")
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/export", "get", VAR_73={"path": VAR_56})
return send_file(VAR_56, as_attachment=True, cache_timeout=0, attachment_filename=f"Job_{VAR_9}.txt")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("job_page", VAR_9=job_id))
@VAR_0.route("/jobs/<int:VAR_9>/edit", methods=["GET", "POST"])
@login_required
def FUNC_13(VAR_9: int):
    """Edit job VAR_9: POST saves the edited text and loads it into Ganga;
    GET exports the job as text for editing.

    BUG FIXES (rename artifacts): the POST branch wrote through the
    undefined name `f` instead of the bound handle VAR_14, and the
    url_for/render_template calls used undefined names as keyword values.
    """
    VAR_57 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "loadfile.txt")
    VAR_56 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "export.txt")
    if request.method == "POST":
        VAR_99 = request.form.get("edited-job-info")
        with open(VAR_57, "w+") as VAR_14:
            VAR_14.write(VAR_99)
        try:
            VAR_98 = FUNC_58("/internal/load", "get", VAR_73={"path": VAR_57})
            flash(VAR_98["message"], "success")
        except Exception as err:
            flash(str(err), "danger")
        return redirect(request.url)
    try:
        VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/export", "get", VAR_73={"path": VAR_56})
        with open(VAR_56) as VAR_14:
            VAR_121 = VAR_14.read()
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("job_page", VAR_9=VAR_9))
    return render_template("edit_job.html", title=f"Edit Job {VAR_9}", job_id=VAR_9, exported_data=VAR_121)
@VAR_0.route("/job/<int:VAR_9>/browse", defaults={"VAR_10": ""})
@VAR_0.route("/job/<int:VAR_9>/browse/<path:VAR_10>")
@login_required
def FUNC_14(VAR_9: int, VAR_10):
    """Browse job VAR_9's working directory; files are sent, dirs listed.

    Uses werkzeug's safe_join so a crafted VAR_10 cannot escape the job
    directory. BUG FIXES: the route converter was written `<VAR_10:path>`
    (invalid — converter and name swapped), the `defaults` key did not match
    the parameter name, safe_join's None result (traversal attempt) was not
    handled before os.path.dirname, and the url_for/render_template keyword
    values referenced undefined names.
    """
    try:
        VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
        # Job root is two levels above the output directory.
        VAR_100 = os.path.dirname(os.path.dirname(VAR_94["outputdir"]))
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("job_page", VAR_9=VAR_9))
    # safe_join returns None when VAR_10 would escape VAR_100 (path traversal).
    VAR_58 = safe_join(VAR_100, VAR_10)
    if VAR_58 is None or not os.path.exists(VAR_58):
        flash("Directory for this job does not exist.", "warning")
        return redirect(url_for("job_page", VAR_9=VAR_9))
    VAR_59 = os.path.dirname(VAR_58).replace(VAR_100, "")
    if os.path.isfile(VAR_58):
        return send_file(VAR_58)
    VAR_60 = []
    for file in os.listdir(VAR_58):
        VAR_60.append({
            "file": file,
            "directory": os.path.isdir(os.path.join(VAR_58, file))
        })
    return render_template('job_dir.html', title=f"Job {VAR_9} Directory",
                           job_id=VAR_9,
                           abs_path=VAR_58,
                           files_info=VAR_60,
                           back_path=VAR_59)
@VAR_0.route("/jobs/<int:VAR_9>/subjobs", methods=["GET", "POST"])
@login_required
def FUNC_15(VAR_9: int):
if request.method == "POST":
session["subjobs_per_page"] = int(request.form.get("subjobs-per-page"))
VAR_84["subjobs_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["status", "application", "backend"],
["subjob-status", "subjob-application", "subjob-backend"])}
VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
VAR_62 = VAR_84["subjobs_per_page"]
try:
VAR_101 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/length", "get",
VAR_73=VAR_84["subjobs_filter"])
VAR_91 = (int(VAR_101) // int(VAR_62)) + 1
if VAR_51 >= VAR_91:
return redirect(url_for("subjobs_page", page=VAR_91 - 1, VAR_9=job_id))
params = VAR_84["subjobs_filter"].copy()
VAR_73.update({
"recent": True,
"length": VAR_62,
"offset": VAR_51
})
VAR_102 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs", "get", VAR_73=params)
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("job_page", VAR_9=job_id))
return render_template("subjobs.html",
title=f"Subjobs - Job {VAR_9}",
VAR_5=status_color,
VAR_91=number_of_pages,
VAR_51=current_page,
backends=VAR_8["backends"],
applications=VAR_8["applications"],
VAR_102=subjobs_info,
VAR_9=job_id)
@VAR_0.route("/jobs/<int:VAR_9>/subjobs/<int:VAR_11>", methods=["GET"])
@login_required
def FUNC_16(VAR_9: int, VAR_11: int):
VAR_54 = None
VAR_55 = None
try:
VAR_103 = FUNC_58(f"/internal/jobs/{VAR_9}/outputdir", "get")
VAR_104 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}", "get")
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/full-print", "get")
VAR_105 = os.path.dirname(os.path.dirname(VAR_103["outputdir"]))
VAR_106 = os.path.dirname(os.path.dirname(VAR_104["outputdir"]))
VAR_107 = VAR_106.replace(VAR_105, "")
VAR_96 = os.path.join(VAR_104["outputdir"], "stdout")
VAR_97 = os.path.join(VAR_104["outputdir"], "stderr")
if os.path.exists(VAR_96):
with open(VAR_96) as VAR_14:
VAR_54 = VAR_14.read()
if os.path.exists(VAR_97):
with open(VAR_97) as VAR_14:
VAR_55 = VAR_14.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("subjobs_page", VAR_9=job_id))
return render_template("subjob.html",
title=f"Subjob {VAR_11} - Job {VAR_9}",
VAR_104=subjob_info,
VAR_5=status_color,
attribute_actions=VAR_7["attributes"],
method_actions=VAR_7["methods"],
VAR_54=stdout,
VAR_55=stderr,
VAR_95=full_print_info,
VAR_9=job_id,
VAR_107=browse_path)
@VAR_0.route("/credentials")
@login_required
def FUNC_17():
try:
VAR_108 = FUNC_58("/internal/credentials", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('credentials.html', credential_info_list=VAR_108)
@VAR_0.route("/queue", methods=["GET"])
@login_required
def FUNC_18():
try:
VAR_109 = FUNC_58("/internal/queue", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('queue.html', queue_info_list=VAR_109)
@VAR_0.route('/plugins')
@login_required
def FUNC_19():
try:
VAR_110 = VAR_8
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('plugins.html', VAR_110=plugins_info)
@VAR_0.route("/plugin/<VAR_12>")
@login_required
def FUNC_20(VAR_12: str):
try:
VAR_111 = FUNC_58(f"/internal/VAR_8/{VAR_12}", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("plugins_page"))
return render_template("plugin.html", title=f"{VAR_12}", VAR_111=plugin_info)
@VAR_0.route("/logs")
@login_required
def FUNC_21():
VAR_63 = VAR_0.config["GANGA_LOG"]
VAR_64 = VAR_0.config["ACCESS_LOG"]
VAR_65 = VAR_0.config["ERROR_LOG"]
try:
with open(VAR_63, "rt") as VAR_14:
VAR_122 = VAR_14.read()
with open(VAR_64, "rt") as VAR_14:
VAR_123 = VAR_14.read()
with open(VAR_65, "rt") as VAR_14:
VAR_124 = VAR_14.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("logs.html", title="Logs", VAR_122=ganga_log_data,
VAR_123=gui_accesslog_data, VAR_124=gui_errorlog_data)
@VAR_0.route("/storage", defaults={"VAR_10": ""}, methods=["GET", "POST"])
@VAR_0.route("/storage/<path:VAR_10>", methods=["GET", "POST"])
@login_required
def FUNC_22(VAR_10):
    """Browse and upload files under the GUI storage folder.

    SECURITY FIX (path traversal, CWE-22): the requested path is now joined
    with werkzeug's safe_join instead of os.path.join, so a request such as
    /storage/../../etc/passwd can no longer read or write outside
    STORAGE_FOLDER; a traversal attempt (safe_join -> None) is rejected.
    Also fixes the invalid route converter `<VAR_10:path>` and the
    mismatched `defaults` key, and the undefined render keyword values.
    """
    VAR_66 = VAR_0.config["STORAGE_FOLDER"]
    VAR_58 = safe_join(VAR_66, VAR_10)
    if VAR_58 is None:
        # Path tried to escape the storage root.
        flash("Directory does not exist.", "warning")
        return redirect(url_for("dashboard"))
    if request.method == "POST":
        if "storagefile" in request.files:
            VAR_125 = request.files["storagefile"]
            if VAR_125.filename == "":
                flash("No file selected", "warning")
                return redirect(request.url)
            if VAR_125:
                if not os.path.isdir(VAR_58):
                    flash("Error while uploading the file", "danger")
                    return redirect(request.url)
                # secure_filename strips path separators from the client name.
                VAR_20 = secure_filename(VAR_125.filename)
                VAR_130 = os.path.join(VAR_58, VAR_20)
                VAR_125.save(VAR_130)
                flash("Successfully uploaded the file.", "success")
                return redirect(request.url)
        flash("No file, retry!", "warning")
        return redirect(request.url)
    VAR_59 = os.path.dirname(VAR_58).replace(VAR_66, "")
    if not os.path.exists(VAR_58):
        flash("Directory does not exist.", "warning")
        return redirect(url_for("dashboard"))
    if os.path.isfile(VAR_58):
        return send_file(VAR_58)
    VAR_60 = []
    VAR_61 = os.listdir(VAR_58)
    for file in VAR_61:
        VAR_60.append({
            "file": file,
            "directory": os.path.isdir(os.path.join(VAR_58, file))
        })
    return render_template("storage.html", title="Storage",
                           abs_path=VAR_58,
                           files_info=VAR_60,
                           back_path=VAR_59)
@VAR_0.route("/cli")
@login_required
def FUNC_23():
return render_template("cli.html")
@VAR_3.on("connect", namespace="/pty")
def FUNC_24():
if VAR_0.config["CHILD_PID"] and current_user.is_authenticated:
VAR_3.start_background_task(target=FUNC_63)
return
@VAR_3.on("pty-input", namespace="/pty")
def FUNC_25(VAR_13):
if VAR_0.config["FD"] and current_user.is_authenticated:
os.write(VAR_0.config["FD"], VAR_13["input"].encode())
@VAR_3.on("resize", namespace="/pty")
def FUNC_26(VAR_13):
if VAR_0.config["FD"] and current_user.is_authenticated:
FUNC_62(VAR_0.config["FD"], VAR_13["rows"], VAR_13["cols"])
@VAR_0.route("/token", methods=["POST"])
def FUNC_27():
VAR_67 = request.json if request.json else {}
VAR_68 = VAR_67.get("username")
VAR_69 = VAR_67.get("password")
if not VAR_68 or not VAR_69:
VAR_70 = {"success": False, "message": "Could not verify VAR_37."}
return jsonify(VAR_70), 401
VAR_37 = CLASS_0.query.filter_by(VAR_37=VAR_68).first()
if VAR_37 and VAR_37.verify_password(VAR_69):
VAR_112 = VAR_37.generate_auth_token().decode("UTF-8")
VAR_70 = {"token": VAR_112}
return jsonify(VAR_70)
VAR_70 = {"success": False, "message": "Could not verify VAR_37."}
return jsonify(VAR_70), 401
def FUNC_28(VAR_14):
    """Decorator for /api routes: require a valid JWT in X-Access-Token.

    On success the resolved CLASS_0 row is passed to the wrapped view as its
    first positional argument; otherwise a 401 JSON error is returned.
    FIX: the final bare `except:` is narrowed to `except Exception:` so
    SystemExit/KeyboardInterrupt are not swallowed.
    """
    @wraps(VAR_14)
    def FUNC_69(*VAR_25, **VAR_23):
        VAR_112 = None
        if "X-Access-Token" in request.headers:
            VAR_112 = request.headers["X-Access-Token"]
        if not VAR_112:
            return jsonify({"success": False, "message": "Token is missing"}), 401
        try:
            VAR_13 = jwt.decode(VAR_112, VAR_0.config["SECRET_KEY"], algorithms=["HS256"])
            VAR_15 = CLASS_0.query.filter_by(VAR_36=VAR_13["public_id"]).first()
            if VAR_15 is None:
                return jsonify({"success": False, "message": "Token is old. Please renew"}), 401
        except jwt.ExpiredSignatureError:
            return jsonify({"success": False, "message": "Token is expired"}), 401
        except jwt.InvalidTokenError:
            return jsonify({"success": False, "message": "Token is invalid"}), 401
        except Exception:
            return jsonify({"success": False, "message": "Could not verify token"}), 401
        return VAR_14(VAR_15, *VAR_25, **VAR_23)
    return FUNC_69
@VAR_0.route("/api/jobs/<int:VAR_9>", methods=["GET"])
@FUNC_28
def FUNC_29(VAR_15, VAR_9: int):
try:
VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_94)
@VAR_0.route("/api/jobs/<int:VAR_9>/<VAR_16>", methods=["GET"])
@FUNC_28
def FUNC_30(VAR_15, VAR_9: int, VAR_16: str):
try:
VAR_113 = FUNC_58(f"/internal/jobs/{VAR_9}/{VAR_16}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_113)
@VAR_0.route("/api/jobs/<int:VAR_9>/full-print", methods=["GET"])
@FUNC_28
def FUNC_31(VAR_15, VAR_9: int):
try:
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_95)
@VAR_0.route("/api/jobs/create", methods=["POST"])
@FUNC_28
def FUNC_32(VAR_15):
VAR_13 = {
"template_id": request.json.get("template_id"),
"job_name": request.json.get("job_name")
}
try:
VAR_98 = FUNC_58("/internal/jobs/create", "post", json=VAR_13)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>/copy", methods=["PUT"])
@FUNC_28
def FUNC_33(VAR_15, VAR_9: int):
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/copy", "put")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>/<VAR_17>", methods=["PUT"])
@FUNC_28
def FUNC_34(VAR_15, VAR_9: int, VAR_17: str):
VAR_71 = request.json
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/{VAR_17}", "put", json=VAR_71)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>", methods=["DELETE"])
@FUNC_28
def FUNC_35(VAR_15, VAR_9: int):
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}", "delete")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>/pin", methods=["PUT"])
@FUNC_28
def FUNC_36(VAR_15, VAR_9: int):
    """API: pin job VAR_9 to the current user's dashboard list.

    BUG FIX: the append went through the undefined name `pinned_jobs`
    instead of the decoded list VAR_40 (NameError at runtime).
    """
    VAR_72 = current_user
    VAR_40 = json.loads(VAR_72.pinned_jobs) if VAR_72.pinned_jobs is not None else []
    if VAR_9 not in VAR_40:
        VAR_40.append(VAR_9)
    VAR_72.pinned_jobs = json.dumps(VAR_40)
    VAR_1.session.add(VAR_72)
    VAR_1.session.commit()
    return jsonify({"success": True, "message": f"Successfully pinned Job (ID={VAR_9})."})
@VAR_0.route("/api/jobs/<int:VAR_9>/unpin", methods=["PUT"])
@FUNC_28
def FUNC_37(VAR_15, VAR_9: int):
    """API: remove job VAR_9 from the current user's pinned list.

    BUG FIX: the removal went through the undefined name `pinned_jobs`
    instead of the decoded list VAR_40 (NameError at runtime).
    """
    VAR_72 = current_user
    VAR_40 = json.loads(VAR_72.pinned_jobs) if VAR_72.pinned_jobs is not None else []
    if VAR_9 in VAR_40:
        VAR_40.remove(VAR_9)
    VAR_72.pinned_jobs = json.dumps(VAR_40)
    VAR_1.session.add(VAR_72)
    VAR_1.session.commit()
    return jsonify({"success": True, "message": f"Successfully unpinned Job (ID={VAR_9})."})
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs", methods=["GET"])
@FUNC_28
def FUNC_38(VAR_15, VAR_9: int):
VAR_73 = {
"ids": request.args.get("ids"),
"status": request.args.get("status"),
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset")
}
try:
VAR_102 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs", "get", VAR_73=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_102)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>", methods=["GET"])
@FUNC_28
def FUNC_39(VAR_15, VAR_9: int, VAR_11: int):
try:
VAR_104 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_104)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/<VAR_16>", methods=["GET"])
@FUNC_28
def FUNC_40(VAR_15, VAR_9: int, VAR_11: int, VAR_16: str):
try:
VAR_114 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/{VAR_16}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_114)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/full-print", methods=["GET"])
@FUNC_28
def FUNC_41(VAR_15, VAR_9: int, VAR_11: int):
try:
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_95)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/copy", methods=["PUT"])
@FUNC_28
def FUNC_42(VAR_15, VAR_9: int, VAR_11: int):
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/copy", "put")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs", methods=["GET"])
@FUNC_28
def FUNC_43(VAR_15):
VAR_73 = {
"ids": request.args.get("ids"),
"status": request.args.get("status"),
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset"),
"auto-validate-ids": request.args.get("auto-validate-ids")
}
try:
VAR_93 = FUNC_58(f"/internal/jobs", "get", VAR_73=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_93)
@VAR_0.route("/api/jobs/statistics", methods=["GET"])
@FUNC_28
def FUNC_44(VAR_15):
try:
VAR_115 = FUNC_58("/internal/jobs/statistics", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_115)
@VAR_0.route("/api/queue", methods=["GET"])
@FUNC_28
def FUNC_45(VAR_15):
try:
VAR_109 = FUNC_58("/internal/queue", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_109)
@VAR_0.route("/api/queue/chart", methods=["GET","POST"])
def FUNC_46():
    """Return queue chart data as JSON for the dashboard chart widget.

    NOTE(review): unlike every other /api route this one is not wrapped with
    the FUNC_28 token check — confirm whether it is meant to be public.
    """
    VAR_74 = FUNC_58("/internal/queue/data", "get")
    VAR_75 = make_response(json.dumps(VAR_74))
    VAR_75.content_type = 'application/json'
    return VAR_75
@VAR_0.route("/api/jobs/incomplete_ids", methods=["GET"])
@FUNC_28
def FUNC_47(VAR_15):
try:
VAR_116 = FUNC_58("/internal/jobs/incomplete-ids", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_116)
@VAR_0.route("/api/config", methods=["GET"], defaults={"section": ""})
@VAR_0.route("/api/config/<VAR_18>", methods=["GET"])
@FUNC_28
def FUNC_48(VAR_15, VAR_18: str):
try:
if VAR_18 != "":
VAR_48 = FUNC_58(f"/internal/config/{VAR_18}", "get")
else:
VAR_48 = FUNC_58("/internal/config", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_48)
@VAR_0.route("/api/templates", methods=["GET"])
@FUNC_28
def FUNC_49(VAR_15):
VAR_73 = {
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset"),
}
try:
VAR_88 = FUNC_58("/internal/templates", "get", VAR_73=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_88)
@VAR_0.route("/api/templates/<int:VAR_19>/full-print", methods=["GET"])
@FUNC_28
def FUNC_50(VAR_15, VAR_19: int):
try:
VAR_95 = FUNC_58(f"/internal/templates/{VAR_19}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_95)
@VAR_0.route("/api/templates/<int:VAR_19>", methods=["DELETE"])
@FUNC_28
def FUNC_51(VAR_15, VAR_19: int):
try:
VAR_98 = FUNC_58(f"/internal/templates/{VAR_19}", "delete")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/credentials", methods=["GET"])
@FUNC_28
def FUNC_52(VAR_15):
try:
VAR_108 = FUNC_58("/internal/credentials", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_108)
@VAR_0.route("/api/credentials/renew", methods=["PUT"])
@FUNC_28
def FUNC_53(VAR_15):
try:
VAR_98 = FUNC_58("/internal/credentials/renew", "put")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobtree", methods=["GET"])
@FUNC_28
def FUNC_54(VAR_15):
try:
VAR_117 = FUNC_58("/internal/jobtree", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_117)
@VAR_0.route("/api/plugins", methods=["GET"])
@FUNC_28
def FUNC_55(VAR_15):
try:
VAR_110 = FUNC_58("/internal/plugins", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_110)
@VAR_0.route("/api/VAR_8/<VAR_12>", methods=["GET"])
@FUNC_28
def FUNC_56(VAR_15, VAR_12: str):
try:
VAR_111 = FUNC_58(f"/internal/VAR_8/{VAR_12}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_111)
def FUNC_57(VAR_20):
    """Return True when filename VAR_20 carries an extension listed in VAR_6."""
    if "." not in VAR_20:
        return False
    extension = VAR_20.rsplit(".", 1)[1]
    return extension.lower() in VAR_6
def FUNC_58(VAR_21: str, VAR_22: str, **VAR_23):
    """Proxy a request to the internal Ganga API server on localhost.

    VAR_21: path on the internal server; VAR_22: lowercase HTTP verb name;
    VAR_23 is forwarded to the matching `requests` function. Raises Exception
    on unsupported verbs and on non-200 replies (with the server's message).
    BUG FIX: the unsupported-verb message interpolated the undefined name
    `method` (NameError); it now reports the actual verb VAR_22.
    """
    VAR_76 = f"http://localhost:{VAR_0.config['INTERNAL_PORT']}"
    if VAR_22 not in ["get", "post", "put", "delete"]:
        raise Exception(f"Unsupported VAR_22: {VAR_22}")
    VAR_77 = getattr(requests, VAR_22)(VAR_76 + VAR_21, **VAR_23)
    if VAR_77.status_code != 200:
        raise Exception(VAR_77.json().get("message"))
    return VAR_77.json()
def FUNC_59():
    """Seed per-session pagination sizes and filters with defaults.

    BUG FIX: the membership tests read the flask session (imported in this
    module as VAR_84) while the assignments went through the undefined name
    `session` (NameError); both sides now use the same object. The stored
    keys and defaults are unchanged.
    """
    if "templates_per_page" not in VAR_84:
        VAR_84["templates_per_page"] = 10
    if "templates_filter" not in VAR_84:
        VAR_84["templates_filter"] = {key: "any" for key in ["application", "backend"]}
    if "jobs_per_page" not in VAR_84:
        VAR_84["jobs_per_page"] = 10
    if "jobs_filter" not in VAR_84:
        VAR_84["jobs_filter"] = {key: "any" for key in ["status", "application", "backend"]}
    if "subjobs_per_page" not in VAR_84:
        VAR_84["subjobs_per_page"] = 10
    if "subjobs_filter" not in VAR_84:
        VAR_84["subjobs_filter"] = {key: "any" for key in ["status", "application", "backend"]}
def FUNC_60():
    """Poll the internal API /ping endpoint until it answers True.

    Returns True as soon as the server responds, False after ~20 failed
    attempts (~40 s). FIXES: the bare `except:` is narrowed to Exception,
    and a reply that is not True now also sleeps and counts as a failed
    attempt — previously that case busy-looped forever without ever
    incrementing the attempt counter.
    """
    VAR_78 = 0
    while True:
        try:
            VAR_126 = FUNC_58("/ping", "get")
            if VAR_126 is True:
                return True
        except Exception:
            pass
        time.sleep(2)
        print("Internal API server not online (mostly because Ganga is booting up), retrying...")
        VAR_78 += 1
        if VAR_78 > 20:
            return False
def FUNC_61(VAR_24: int, VAR_25: str = ""):
    """Fork a pty and launch Ganga with the web GUI flag in the child.

    VAR_24: internal API port exported to the child process; VAR_25: extra
    command-line arguments appended to the `ganga --webgui` invocation.
    The parent stores the pty fd and child pid in the app config (used by
    the web terminal) and sets an initial 50x50 window size.
    NOTE(review): VAR_25 is interpolated into a shell=True command line —
    ensure it never carries untrusted input (shell-injection risk).
    """
    (VAR_79, VAR_26) = pty.fork()
    if VAR_79 == 0:
        # Child: mark the environment so Ganga knows it is web-CLI driven.
        VAR_118 = os.environ.copy()
        VAR_118["WEB_CLI"] = "True"
        VAR_118["INTERNAL_PORT"] = str(VAR_24)
        subprocess.run(f"ganga --webgui {VAR_25}", shell=True, env=VAR_118)
    else:
        # Parent: keep handles for the terminal bridge (FUNC_63 / FUNC_25).
        VAR_0.config["FD"] = VAR_26
        VAR_0.config["CHILD_PID"] = VAR_79
        FUNC_62(VAR_26, 50, 50)
        print("Ganga started, PID: ", VAR_79)
def FUNC_62(VAR_26, VAR_27, VAR_28, VAR_29=0, VAR_30=0):
    """Set the pty window size on fd VAR_26: VAR_27 rows x VAR_28 cols
    (pixel dimensions VAR_29/VAR_30 optional, default 0)."""
    geometry = struct.pack("HHHH", VAR_27, VAR_28, VAR_29, VAR_30)
    fcntl.ioctl(VAR_26, termios.TIOCSWINSZ, geometry)
def FUNC_63():
    """Background task: pump output from the Ganga pty to the browser.

    Polls the pty fd with a zero-timeout select (never blocks the socketio
    loop) and emits chunks of up to 20 KiB on the "pty-output" event in the
    /pty namespace.
    """
    VAR_81 = 1024 * 20  # max bytes forwarded per read
    while True:
        VAR_3.sleep(0.01)
        if VAR_0.config["FD"]:
            VAR_127 = 0  # zero timeout => non-blocking poll
            (VAR_128, VAR_129, _) = select.select([VAR_0.config["FD"]], [], [], VAR_127)
            if VAR_128:
                VAR_131 = os.read(VAR_0.config["FD"], VAR_81).decode()
                VAR_3.emit("pty-output", {"output": VAR_131}, namespace="/pty")
def FUNC_64(VAR_31: str, VAR_32: int, VAR_24: int, VAR_33=True, VAR_34: str = ""):
    """Start the GUI server (entry point used by Ganga's CLI launcher).

    VAR_31/VAR_32: bind host and port; VAR_24: internal API port; VAR_33:
    forward request logs; VAR_34: extra arguments passed through to Ganga.
    BUG FIX: socketio.run was called with keyword/value names swapped
    (`VAR_31=host` etc., where `host`/`port`/`log_output` are undefined);
    it now passes this function's parameters under the keyword names that
    Flask-SocketIO's run() accepts.
    """
    from GangaGUI.start import create_default_user
    VAR_82, VAR_83 = create_default_user()
    print(f"Starting the GUI server on http://{VAR_31}:{VAR_32}")
    print(f"You VAR_2 information for the GUI is: Username: {VAR_82.user} Password: {VAR_83}")
    VAR_0.config["INTERNAL_PORT"] = VAR_24
    VAR_0.config["WEB_CLI"] = True
    VAR_0.config["GANGA_ARGS"] = VAR_34
    VAR_3.run(VAR_0, host=VAR_31, port=VAR_32, log_output=VAR_33)
@VAR_0.route("/shutdown", methods=["GET"])
def FUNC_65():
    # Shut down the GUI by asking the internal API to stop.  Refused when
    # running in web-CLI mode, where the server must be stopped manually.
    if VAR_0.config["WEB_CLI"] is True:
        flash("WEB CLI Mode is on, cannot self FUNC_65 server. Consider doing manually.", "warning")
        return redirect(url_for("dashboard"))
    try:
        VAR_98 = FUNC_58("/shutdown", "get")
    except Exception as err:
        # Internal API refused or was unreachable; report it to the caller.
        return jsonify({"success": False, "message": str(err)}), 400
    return "GUI Shutdown Successful."
| [
23,
24,
25,
26,
27,
30,
31,
32,
34,
35,
40,
41,
43,
44,
45,
46,
47,
56,
59,
62,
67,
70,
71,
72,
76,
77,
78,
79,
80,
81,
92,
93,
95,
96,
99,
100,
101,
102,
103,
104,
110,
112,
113,
119,
120,
123,
124,
126,
127,
131,
132,
134,
140,
141,
142,
143,
144,
145,
151,
152,
155,
156,
158,
159,
162,
163,
165,
166,
171,
173,
174,
176,
178,
179,
180,
186,
187,
190,
192,
193,
194,
201,
205,
207,
209,
210,
212,
213,
218,
220,
222,
229,
230,
231,
238,
242,
243,
245,
248,
250,
252,
253,
255,
257,
259,
261,
262,
280,
282,
288,
289,
291,
292,
298,
299,
303,
304,
306,
308,
310,
313,
314,
317,
318,
324,
325,
329,
330,
332,
334,
336,
339,
340,
343,
344,
347,
349,
352,
354,
357,
359,
360,
361,
368,
369,
371,
372,
377,
378,
380,
383,
385,
388,
390,
391,
392,
399,
400,
402,
406,
407,
409,
410,
412,
414,
415,
417,
418,
420,
421,
424,
425,
432,
433,
435,
437,
440,
448,
449,
450,
457,
458,
460,
465,
466,
468,
469,
471,
473,
474,
476,
477,
479,
480,
483,
484,
491,
492,
494,
496,
499,
508,
509,
510,
518,
521,
523,
524,
526,
527,
529,
530,
533,
534,
538,
539,
543,
545,
548,
558,
559,
560,
568,
569,
571,
573,
574,
576,
577,
579,
581,
583,
585,
586,
587,
595,
596,
599,
600,
602,
603,
607,
608,
610,
613,
615,
618,
620,
622,
623,
626,
628,
631,
633,
634,
635,
645,
647,
649,
650,
652,
654,
657,
658,
660,
661,
663,
664,
668,
669,
672,
674,
675,
677,
678,
684,
690,
691,
692,
700,
701,
703,
708,
709,
711,
712,
714,
716,
719,
720,
722,
723,
726,
727,
734,
735,
737,
741,
751,
752,
753,
762,
765,
767,
768,
770,
771,
773,
774,
776,
777,
781,
782,
785,
786,
790,
791,
795,
797,
800,
812,
813,
814,
821,
823,
825,
827,
830,
832,
844,
846,
847,
854,
856,
858,
860,
863,
865,
866,
867,
875,
877,
879,
881,
884,
886,
887,
888,
896,
900,
902,
905,
906,
909,
910,
913,
917,
920,
921,
929,
930,
932,
933,
935,
936,
938,
939,
945,
946,
948,
952,
956,
957,
960,
961,
964,
965,
967,
968,
972,
973,
976,
978,
979,
981,
982,
988,
993,
994,
995,
1000,
1001,
1002,
1008,
1010,
1013,
1014,
1015,
1021,
1024,
1025,
1026,
1033,
1036,
1037,
1038,
1039,
1040,
1046,
1047,
1051,
1052,
1056,
1057,
1063,
1064,
1067,
1068,
1069,
1070,
1071,
1076,
1079,
1081,
1082,
1085,
1088,
1089,
1101,
1103,
1105,
1106,
1107,
1108,
1109,
1110,
1116,
1120,
1122,
1124,
1127,
1129,
1130,
1131,
1137,
1142,
1144,
1146,
1149,
1151,
1152,
1153,
1159,
1163,
1165,
1167,
1170,
1172,
1173,
1174,
1180,
1182,
1185,
1186,
1191,
1193,
1195,
1198,
1200,
1201,
1202,
1211,
1213,
1215,
1218,
1220,
1221,
1222,
1228,
1230,
1234,
1236,
1240,
1242,
1246,
1249,
1251,
1256,
1257,
1259,
1261,
1263,
1266,
1268,
1269,
1270,
1276,
1280,
1282,
1284,
1287,
1289,
1290,
1291,
1300,
1301,
1303,
1304,
1306,
1307,
1310,
1311,
1315,
1317,
1318,
1319,
1328,
1329,
1331,
1332,
1334,
1335,
1338,
1339,
1343,
1345,
1346,
1347,
1348,
1349,
1355,
1365,
1369,
1379,
1381,
1383,
1386,
1388,
1389,
1390,
1396,
1401,
1403,
1405,
1408,
1410,
1411,
1412,
1418,
1424,
1426,
1428,
1431,
1433,
1434,
1435,
1441,
1446,
1448,
1450,
1453,
1455,
1456,
1457,
1467,
1469,
1471,
1474,
1476,
1477,
1478,
1479,
1480,
1486,
1497,
1500,
1511,
1513,
1515,
1518,
1520,
1521,
1522,
1530,
1532,
1536,
1538,
1542,
1545,
1548,
1550,
1553,
1554,
1559,
1560,
1568,
1570,
1572,
1575,
1577,
1578,
1579,
1580,
1581,
1588,
1590,
1593,
1595,
1600,
1603,
1605,
1606,
1607,
1608,
1609,
1615,
1624,
1627,
1635,
1637,
1639,
1642,
1644,
1645,
1646,
1652,
1656,
1658,
1660,
1663,
1665,
1666,
1667,
1672,
1674,
1678,
1680,
1682,
1685,
1687,
1688,
1689,
1690,
1691,
1697,
1700,
1702,
1704,
1707,
1709,
1710,
1711,
1717,
1720,
1722,
1724,
1727,
1729,
1730,
1731,
1732,
1733,
1739,
1742,
1744,
1746,
1749,
1751,
1752,
1753,
1754,
1755,
1761,
1764,
1766,
1768,
1771,
1773,
1774,
1775,
1781,
1785,
1787,
1789,
1792,
1794,
1795,
1796,
1797,
1798,
1801,
1802,
1803,
1810,
1813,
1816,
1817,
1819,
1820,
1823,
1824,
1826,
1827,
1830,
1831,
1833,
1834,
1839,
1840,
1845,
1846,
1851,
1852,
1857,
1858,
1859,
1864,
1873,
1878,
1879,
1886,
1887,
1889,
1891,
1897,
1902,
1903,
1904,
1908,
1909,
1910,
1921,
1922,
1931,
1933,
1934,
1936,
1939,
1944,
1945,
1946,
1947,
1948,
1951,
1955,
1960,
1962,
1963,
1964,
107,
108,
109,
148,
149,
150,
183,
184,
185,
198,
199,
200,
235,
236,
237,
266,
267,
268,
285,
286,
287,
365,
366,
367,
396,
397,
398,
454,
455,
456,
514,
515,
516,
517,
564,
565,
566,
567,
591,
592,
593,
594,
640,
641,
642,
643,
644,
696,
697,
698,
699,
757,
758,
759,
760,
761,
818,
819,
820,
836,
837,
838,
851,
852,
853,
871,
872,
873,
874,
892,
893,
894,
895,
926,
927,
928,
1005,
1006,
1007,
1018,
1019,
1020,
1029,
1030,
1031,
1032,
1043,
1044,
1045,
1073,
1074,
1075,
1114,
1115,
1116,
1117,
1118,
1119,
1135,
1136,
1137,
1138,
1139,
1140,
1141,
1157,
1158,
1159,
1160,
1161,
1162,
1178,
1179,
1180,
1181,
1182,
1183,
1184,
1206,
1207,
1208,
1209,
1210,
1226,
1227,
1228,
1229,
1230,
1231,
1232,
1233,
1234,
1235,
1236,
1237,
1238,
1239,
1240,
1241,
1242,
1243,
1244,
1245,
1246,
1247,
1248,
1249,
1250,
1251,
1252,
1253,
1254,
1255,
1274,
1275,
1276,
1277,
1278,
1279,
1295,
1296,
1297,
1298,
1299,
1323,
1324,
1325,
1326,
1327,
1353,
1354,
1355,
1356,
1357,
1358,
1359,
1360,
1361,
1362,
1363,
1364,
1365,
1366,
1367,
1368,
1394,
1395,
1396,
1397,
1398,
1399,
1400,
1416,
1417,
1418,
1419,
1420,
1421,
1422,
1423,
1439,
1440,
1441,
1442,
1443,
1444,
1445,
1461,
1462,
1463,
1464,
1465,
1466,
1484,
1485,
1486,
1487,
1488,
1489,
1490,
1491,
1492,
1493,
1494,
1495,
1496,
1497,
1498,
1499,
1526,
1527,
1528,
1529,
1564,
1565,
1566,
1567,
1586,
1587,
1588,
1589,
1590,
1591,
1592,
1613,
1614,
1615,
1616,
1617,
1618,
1619,
1620,
1621,
1622,
1623,
1624,
1625,
1626,
1650,
1651,
1652,
1653,
1654,
1655,
1671,
1672,
1673,
1674,
1675,
1676,
1677,
1695,
1696,
1697,
1698,
1699,
1715,
1716,
1717,
1718,
1719,
1737,
1738,
1739,
1740,
1741,
1759,
1760,
1761,
1762,
1763,
1779,
1780,
1781,
1782,
1783,
1784,
1805,
1806,
1807,
1808,
1809,
1810,
1811,
1812,
1813,
1814,
1815,
1836,
1837,
1838,
1861,
1862,
1863,
1881,
1882,
1883,
1884,
1885,
1924,
1925,
1926,
1927,
1928,
1929,
1930
] | [
23,
24,
25,
26,
27,
30,
31,
32,
34,
35,
40,
41,
43,
44,
45,
46,
47,
56,
59,
62,
67,
70,
71,
72,
76,
77,
78,
79,
80,
81,
92,
93,
95,
96,
99,
100,
101,
102,
103,
104,
110,
112,
113,
119,
120,
123,
124,
126,
127,
131,
132,
134,
140,
141,
142,
143,
144,
145,
151,
152,
155,
156,
158,
159,
162,
163,
165,
166,
171,
173,
174,
176,
178,
179,
180,
186,
187,
190,
192,
193,
194,
201,
205,
207,
209,
210,
212,
213,
218,
220,
222,
229,
230,
231,
238,
242,
243,
245,
248,
250,
252,
253,
255,
257,
259,
261,
262,
280,
282,
288,
289,
291,
292,
298,
299,
303,
304,
306,
308,
310,
313,
314,
317,
318,
324,
325,
329,
330,
332,
334,
336,
339,
340,
343,
344,
347,
349,
352,
354,
357,
359,
360,
361,
368,
369,
371,
372,
377,
378,
380,
383,
385,
388,
390,
391,
392,
399,
400,
402,
406,
407,
409,
410,
412,
414,
415,
417,
418,
420,
421,
424,
425,
432,
433,
435,
437,
440,
448,
449,
450,
457,
458,
460,
465,
466,
468,
469,
471,
473,
474,
476,
477,
479,
480,
483,
484,
491,
492,
494,
496,
499,
508,
509,
510,
518,
521,
523,
524,
526,
527,
529,
530,
533,
534,
538,
539,
543,
545,
548,
558,
559,
560,
568,
569,
571,
573,
574,
576,
577,
579,
581,
583,
585,
586,
587,
595,
596,
599,
600,
602,
603,
607,
608,
610,
613,
615,
618,
620,
622,
623,
626,
628,
631,
633,
634,
635,
645,
647,
649,
650,
652,
654,
657,
658,
660,
661,
663,
664,
668,
669,
672,
674,
675,
677,
678,
684,
690,
691,
692,
700,
701,
703,
708,
709,
711,
712,
714,
716,
719,
720,
722,
723,
726,
727,
734,
735,
737,
741,
751,
752,
753,
762,
765,
767,
768,
770,
771,
773,
774,
776,
777,
781,
782,
785,
786,
790,
791,
795,
797,
800,
812,
813,
814,
821,
823,
825,
827,
830,
832,
844,
846,
847,
854,
856,
858,
860,
863,
865,
866,
867,
875,
877,
879,
881,
884,
886,
887,
888,
896,
900,
902,
905,
906,
909,
910,
913,
917,
920,
921,
929,
930,
932,
933,
935,
936,
938,
939,
945,
946,
948,
952,
956,
957,
960,
961,
964,
965,
967,
968,
972,
973,
976,
978,
979,
981,
982,
988,
993,
994,
995,
1000,
1001,
1002,
1008,
1010,
1013,
1014,
1015,
1021,
1024,
1025,
1026,
1033,
1036,
1037,
1038,
1039,
1040,
1046,
1047,
1051,
1052,
1056,
1057,
1063,
1064,
1067,
1068,
1069,
1070,
1071,
1076,
1079,
1081,
1082,
1085,
1088,
1089,
1101,
1103,
1105,
1106,
1107,
1108,
1109,
1110,
1116,
1120,
1122,
1124,
1127,
1129,
1130,
1131,
1137,
1142,
1144,
1146,
1149,
1151,
1152,
1153,
1159,
1163,
1165,
1167,
1170,
1172,
1173,
1174,
1180,
1182,
1185,
1186,
1191,
1193,
1195,
1198,
1200,
1201,
1202,
1211,
1213,
1215,
1218,
1220,
1221,
1222,
1228,
1230,
1234,
1236,
1240,
1242,
1246,
1249,
1251,
1256,
1257,
1259,
1261,
1263,
1266,
1268,
1269,
1270,
1276,
1280,
1282,
1284,
1287,
1289,
1290,
1291,
1300,
1301,
1303,
1304,
1306,
1307,
1310,
1311,
1315,
1317,
1318,
1319,
1328,
1329,
1331,
1332,
1334,
1335,
1338,
1339,
1343,
1345,
1346,
1347,
1348,
1349,
1355,
1365,
1369,
1379,
1381,
1383,
1386,
1388,
1389,
1390,
1396,
1401,
1403,
1405,
1408,
1410,
1411,
1412,
1418,
1424,
1426,
1428,
1431,
1433,
1434,
1435,
1441,
1446,
1448,
1450,
1453,
1455,
1456,
1457,
1467,
1469,
1471,
1474,
1476,
1477,
1478,
1479,
1480,
1486,
1497,
1500,
1511,
1513,
1515,
1518,
1520,
1521,
1522,
1530,
1532,
1536,
1538,
1542,
1545,
1548,
1550,
1553,
1554,
1559,
1560,
1568,
1570,
1572,
1575,
1577,
1578,
1579,
1580,
1581,
1588,
1590,
1593,
1595,
1600,
1603,
1605,
1606,
1607,
1608,
1609,
1615,
1624,
1627,
1635,
1637,
1639,
1642,
1644,
1645,
1646,
1652,
1656,
1658,
1660,
1663,
1665,
1666,
1667,
1672,
1674,
1678,
1680,
1682,
1685,
1687,
1688,
1689,
1690,
1691,
1697,
1700,
1702,
1704,
1707,
1709,
1710,
1711,
1717,
1720,
1722,
1724,
1727,
1729,
1730,
1731,
1732,
1733,
1739,
1742,
1744,
1746,
1749,
1751,
1752,
1753,
1754,
1755,
1761,
1764,
1766,
1768,
1771,
1773,
1774,
1775,
1781,
1785,
1787,
1789,
1792,
1794,
1795,
1796,
1797,
1798,
1801,
1802,
1803,
1810,
1813,
1816,
1817,
1819,
1820,
1823,
1824,
1826,
1827,
1830,
1831,
1833,
1834,
1839,
1840,
1845,
1846,
1851,
1852,
1857,
1858,
1859,
1864,
1873,
1878,
1879,
1886,
1887,
1889,
1891,
1897,
1902,
1903,
1904,
1908,
1909,
1910,
1921,
1922,
1931,
1933,
1934,
1936,
1939,
1944,
1945,
1946,
1947,
1948,
1951,
1955,
1960,
1962,
1963,
1964,
107,
108,
109,
148,
149,
150,
183,
184,
185,
198,
199,
200,
235,
236,
237,
266,
267,
268,
285,
286,
287,
365,
366,
367,
396,
397,
398,
454,
455,
456,
514,
515,
516,
517,
564,
565,
566,
567,
591,
592,
593,
594,
640,
641,
642,
643,
644,
696,
697,
698,
699,
757,
758,
759,
760,
761,
818,
819,
820,
836,
837,
838,
851,
852,
853,
871,
872,
873,
874,
892,
893,
894,
895,
926,
927,
928,
1005,
1006,
1007,
1018,
1019,
1020,
1029,
1030,
1031,
1032,
1043,
1044,
1045,
1073,
1074,
1075,
1114,
1115,
1116,
1117,
1118,
1119,
1135,
1136,
1137,
1138,
1139,
1140,
1141,
1157,
1158,
1159,
1160,
1161,
1162,
1178,
1179,
1180,
1181,
1182,
1183,
1184,
1206,
1207,
1208,
1209,
1210,
1226,
1227,
1228,
1229,
1230,
1231,
1232,
1233,
1234,
1235,
1236,
1237,
1238,
1239,
1240,
1241,
1242,
1243,
1244,
1245,
1246,
1247,
1248,
1249,
1250,
1251,
1252,
1253,
1254,
1255,
1274,
1275,
1276,
1277,
1278,
1279,
1295,
1296,
1297,
1298,
1299,
1323,
1324,
1325,
1326,
1327,
1353,
1354,
1355,
1356,
1357,
1358,
1359,
1360,
1361,
1362,
1363,
1364,
1365,
1366,
1367,
1368,
1394,
1395,
1396,
1397,
1398,
1399,
1400,
1416,
1417,
1418,
1419,
1420,
1421,
1422,
1423,
1439,
1440,
1441,
1442,
1443,
1444,
1445,
1461,
1462,
1463,
1464,
1465,
1466,
1484,
1485,
1486,
1487,
1488,
1489,
1490,
1491,
1492,
1493,
1494,
1495,
1496,
1497,
1498,
1499,
1526,
1527,
1528,
1529,
1564,
1565,
1566,
1567,
1586,
1587,
1588,
1589,
1590,
1591,
1592,
1613,
1614,
1615,
1616,
1617,
1618,
1619,
1620,
1621,
1622,
1623,
1624,
1625,
1626,
1650,
1651,
1652,
1653,
1654,
1655,
1671,
1672,
1673,
1674,
1675,
1676,
1677,
1695,
1696,
1697,
1698,
1699,
1715,
1716,
1717,
1718,
1719,
1737,
1738,
1739,
1740,
1741,
1759,
1760,
1761,
1762,
1763,
1779,
1780,
1781,
1782,
1783,
1784,
1805,
1806,
1807,
1808,
1809,
1810,
1811,
1812,
1813,
1814,
1815,
1836,
1837,
1838,
1861,
1862,
1863,
1881,
1882,
1883,
1884,
1885,
1924,
1925,
1926,
1927,
1928,
1929,
1930
] |
1CWE-79
| """
This module defines the things that are used in setup.py for building the notebook
This includes:
* Functions for finding things like packages, package data, etc.
* A function for checking dependencies.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import re
import pipes
import shutil
import sys
from distutils import log
from distutils.cmd import Command
from fnmatch import fnmatch
from glob import glob
from multiprocessing.pool import ThreadPool
from subprocess import check_call
if sys.platform == 'win32':
    from subprocess import list2cmdline
else:
    # POSIX fallback: subprocess.list2cmdline is Windows-only, so emulate
    # it by shell-quoting each argument and joining with spaces.
    def list2cmdline(cmd_list):
        return ' '.join(map(pipes.quote, cmd_list))
#-------------------------------------------------------------------------------
# Useful globals and utility functions
#-------------------------------------------------------------------------------
# A few handy globals
isfile = os.path.isfile
pjoin = os.path.join
repo_root = os.path.dirname(os.path.abspath(__file__))
is_repo = os.path.isdir(pjoin(repo_root, '.git'))
def oscmd(s):
    """Echo a shell command to stdout, then run it through the system shell."""
    print("> " + s)
    os.system(s)
# Py3 compatibility hacks, without assuming IPython itself is installed with
# the full py3compat machinery.
try:
    execfile
except NameError:
    # Python 3 removed the execfile builtin; provide a minimal shim that
    # compiles the file and execs it in the given namespaces.
    def execfile(fname, globs, locs=None):
        locs = locs or globs
        exec(compile(open(fname).read(), fname, "exec"), globs, locs)
#---------------------------------------------------------------------------
# Basic project information
#---------------------------------------------------------------------------
name = 'notebook'
# release.py contains version, authors, license, url, keywords, etc.
version_ns = {}
execfile(pjoin(repo_root, name, '_version.py'), version_ns)
version = version_ns['__version__']
# vendored from pep440 package, we allow `.dev` suffix without trailing number.
loose_pep440re = re.compile(r'^([1-9]\d*!)?(0|[1-9]\d*)(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*)?)?$')
if not loose_pep440re.match(version):
raise ValueError('Invalid version number `%s`, please follow pep440 convention or pip will get confused about which package is more recent.' % version)
#---------------------------------------------------------------------------
# Find packages
#---------------------------------------------------------------------------
def find_packages():
    """Walk the project tree and return every dotted package name.

    A directory counts as a package only if it contains __init__.py.
    """
    return [
        top.replace(os.path.sep, '.')
        for top, _subdirs, files in os.walk(name)
        if '__init__.py' in files
    ]
#---------------------------------------------------------------------------
# Find package data
#---------------------------------------------------------------------------
def find_package_data():
    """
    Find package_data.

    Returns the setuptools package_data mapping for the notebook package:
    all static web resources, the selected vendored components, a trimmed
    MathJax tree, JS test files, bundler test resources, the services API
    spec and the compiled translations.
    """
    # This is not enough for these things to appear in a sdist.
    # We need to muck with the MANIFEST to get this to work
    # exclude components and less from the walk;
    # we will build the components separately
    excludes = [
        pjoin('static', 'components'),
        pjoin('static', '*', 'less'),
        pjoin('static', '*', 'node_modules')
    ]
    # walk notebook resources:
    # NOTE: this function chdir()s into 'notebook' (and later 'tests') and
    # restores the original cwd at the end -- paths below are relative.
    cwd = os.getcwd()
    os.chdir('notebook')
    static_data = []
    for parent, dirs, files in os.walk('static'):
        if any(fnmatch(parent, pat) for pat in excludes):
            # prevent descending into subdirs
            dirs[:] = []
            continue
        for f in files:
            static_data.append(pjoin(parent, f))
    # for verification purposes, explicitly add main.min.js
    # so that installation will fail if they are missing
    for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']:
        static_data.append(pjoin('static', app, 'js', 'main.min.js'))
    components = pjoin("static", "components")
    # select the components we actually need to install
    # (there are lots of resources we bundle for sdist-reasons that we don't actually use)
    static_data.extend([
        pjoin(components, "backbone", "backbone-min.js"),
        pjoin(components, "bootstrap", "dist", "js", "bootstrap.min.js"),
        pjoin(components, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
        pjoin(components, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
        pjoin(components, "create-react-class", "index.js"),
        pjoin(components, "font-awesome", "css", "*.css"),
        pjoin(components, "es6-promise", "*.js"),
        pjoin(components, "font-awesome", "fonts", "*.*"),
        pjoin(components, "google-caja", "html-css-sanitizer-minified.js"),
        pjoin(components, "jed", "jed.js"),
        pjoin(components, "jquery", "jquery.min.js"),
        pjoin(components, "jquery-typeahead", "dist", "jquery.typeahead.min.js"),
        pjoin(components, "jquery-typeahead", "dist", "jquery.typeahead.min.css"),
        pjoin(components, "jquery-ui", "jquery-ui.min.js"),
        pjoin(components, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
        pjoin(components, "jquery-ui", "themes", "smoothness", "images", "*"),
        pjoin(components, "marked", "lib", "marked.js"),
        pjoin(components, "react", "react.production.min.js"),
        pjoin(components, "react", "react-dom.production.min.js"),
        pjoin(components, "requirejs", "require.js"),
        pjoin(components, "requirejs-plugins", "src", "json.js"),
        pjoin(components, "requirejs-text", "text.js"),
        pjoin(components, "underscore", "underscore-min.js"),
        pjoin(components, "moment", "moment.js"),
        pjoin(components, "moment", "min", "*.js"),
        pjoin(components, "xterm.js", "index.js"),
        pjoin(components, "xterm.js-css", "index.css"),
        pjoin(components, "xterm.js-fit", "index.js"),
        pjoin(components, "text-encoding", "lib", "encoding.js"),
    ])
    # Ship all of Codemirror's CSS and JS
    for parent, dirs, files in os.walk(pjoin(components, 'codemirror')):
        for f in files:
            if f.endswith(('.js', '.css')):
                static_data.append(pjoin(parent, f))
    # Trim mathjax
    mj = lambda *path: pjoin(components, 'MathJax', *path)
    static_data.extend([
        mj('MathJax.js'),
        mj('config', 'TeX-AMS-MML_HTMLorMML-full.js'),
        mj('config', 'Safe.js'),
    ])
    trees = []
    mj_out = mj('jax', 'output')
    if os.path.exists(mj_out):
        for output in os.listdir(mj_out):
            path = pjoin(mj_out, output)
            static_data.append(pjoin(path, '*.js'))
            autoload = pjoin(path, 'autoload')
            if os.path.isdir(autoload):
                trees.append(autoload)
    for tree in trees + [
        mj('localization'), # limit to en?
        mj('fonts', 'HTML-CSS', 'STIX-Web', 'woff'),
        mj('extensions'),
        mj('jax', 'input', 'TeX'),
        mj('jax', 'output', 'HTML-CSS', 'fonts', 'STIX-Web'),
        mj('jax', 'output', 'SVG', 'fonts', 'STIX-Web'),
        mj('jax', 'element', 'mml'),
    ]:
        for parent, dirs, files in os.walk(tree):
            for f in files:
                static_data.append(pjoin(parent, f))
    # collect the Javascript test files relative to the tests directory
    os.chdir(os.path.join('tests',))
    js_tests = glob('*.js') + glob('*/*.js')
    os.chdir(cwd)
    package_data = {
        'notebook' : ['templates/*'] + static_data,
        'notebook.tests' : js_tests,
        'notebook.bundler.tests': ['resources/*', 'resources/*/*', 'resources/*/*/.*'],
        'notebook.services.api': ['api.yaml'],
        'notebook.i18n': ['*/LC_MESSAGES/*.*'],
    }
    return package_data
def check_package_data(package_data):
    """verify that package_data globs make sense"""
    print("checking package data")
    for pkg, entries in package_data.items():
        # convert the dotted package name into a filesystem path
        root = os.path.join(*pkg.split('.'))
        for entry in entries:
            target = os.path.join(root, entry)
            if '*' in target:
                # glob patterns must match at least one file
                assert len(glob(target)) > 0, "No files match pattern %s" % target
            else:
                # literal paths must exist
                assert os.path.exists(target), "Missing package data: %s" % target
def check_package_data_first(command):
    """decorator for checking package_data before running a given command
    Probably only needs to wrap build_py
    """
    class DecoratedCommand(command):
        def run(self):
            # Fail fast if any declared package_data glob is broken,
            # then fall through to the wrapped command's own run().
            check_package_data(self.package_data)
            command.run(self)
    return DecoratedCommand
def update_package_data(distribution):
    """update package_data to catch changes during setup"""
    build_py = distribution.get_command_obj('build_py')
    # recompute the full package_data mapping from disk
    distribution.package_data = find_package_data()
    # re-init build_py options which load package_data
    build_py.finalize_options()
#---------------------------------------------------------------------------
# Notebook related
#---------------------------------------------------------------------------
try:
    from shutil import which
except ImportError:
    # Fallback for Python < 3.3, where shutil.which does not exist.
    ## which() function copied from Python 3.4.3; PSF license
    def which(cmd, mode=os.F_OK | os.X_OK, path=None):
        """Given a command, mode, and a PATH string, return the path which
        conforms to the given mode on the PATH, or None if there is no such
        file.
        `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
        of os.environ.get("PATH"), or can be overridden with a custom search
        path.
        """
        # Check that a given file can be accessed with the correct mode.
        # Additionally check that `file` is not a directory, as on Windows
        # directories pass the os.access check.
        def _access_check(fn, mode):
            return (os.path.exists(fn) and os.access(fn, mode)
                    and not os.path.isdir(fn))
        # If we're given a path with a directory part, look it up directly rather
        # than referring to PATH directories. This includes checking relative to the
        # current directory, e.g. ./script
        if os.path.dirname(cmd):
            if _access_check(cmd, mode):
                return cmd
            return None
        if path is None:
            path = os.environ.get("PATH", os.defpath)
        if not path:
            return None
        path = path.split(os.pathsep)
        if sys.platform == "win32":
            # The current directory takes precedence on Windows.
            if not os.curdir in path:
                path.insert(0, os.curdir)
            # PATHEXT is necessary to check on Windows.
            pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
            # See if the given file matches any of the expected path extensions.
            # This will allow us to short circuit when given "python.exe".
            # If it does match, only test that one, otherwise we have to try
            # others.
            if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
                files = [cmd]
            else:
                files = [cmd + ext for ext in pathext]
        else:
            # On other platforms you don't have things like PATHEXT to tell you
            # what file suffixes are executable, so just pass on cmd as-is.
            files = [cmd]
        seen = set()
        for dir in path:
            normdir = os.path.normcase(dir)
            if not normdir in seen:
                seen.add(normdir)
                for thefile in files:
                    name = os.path.join(dir, thefile)
                    if _access_check(name, mode):
                        return name
        return None
static = pjoin(repo_root, 'notebook', 'static')
npm_path = os.pathsep.join([
pjoin(repo_root, 'node_modules', '.bin'),
os.environ.get("PATH", os.defpath),
])
def mtime(path):
    """Shorthand: last-modification timestamp of *path* in seconds."""
    return os.path.getmtime(path)
def run(cmd, *args, **kwargs):
    """Echo a command before running it"""
    log.info('> ' + list2cmdline(cmd))
    # NOTE(review): any caller-supplied kwargs['shell'] is overwritten here;
    # a shell is forced on win32 -- presumably for npm's .cmd shims, confirm.
    kwargs['shell'] = (sys.platform == 'win32')
    return check_call(cmd, *args, **kwargs)
class CompileBackendTranslation(Command):
    """setup.py command: compile backend .po translations into .mo files."""

    description = "compile the .po files into .mo files, that contain the translations."
    user_options = []
    def initialize_options(self):
        pass
    def finalize_options(self):
        pass
    def run(self):
        # every notebook/i18n/<xx_XX> directory is a language
        paths = glob('notebook/i18n/??_??')
        for p in paths:
            LANG = p[-5:]  # the trailing xx_XX language code
            for component in ['notebook', 'nbui']:
                # pybabel compile -D <domain> -f -l <lang> -i <po> -o <mo>
                run(['pybabel', 'compile',
                    '-D', component,
                    '-f',
                    '-l', LANG,
                    '-i', pjoin('notebook', 'i18n', LANG, 'LC_MESSAGES', component+'.po'),
                    '-o', pjoin('notebook', 'i18n', LANG, 'LC_MESSAGES', component+'.mo')
                ])
class Bower(Command):
    """setup.py command: fetch static client-side components with bower."""

    description = "fetch static client-side components with bower"
    user_options = [
        ('force', 'f', "force fetching of bower dependencies"),
    ]
    def initialize_options(self):
        self.force = False
    def finalize_options(self):
        self.force = bool(self.force)
    # class-level paths: where bower puts components, and npm's node_modules
    bower_dir = pjoin(static, 'components')
    node_modules = pjoin(repo_root, 'node_modules')
    def should_run(self):
        # run if forced, never fetched, or bower.json is newer than the output
        if self.force:
            return True
        if not os.path.exists(self.bower_dir):
            return True
        return mtime(self.bower_dir) < mtime(pjoin(repo_root, 'bower.json'))
    def should_run_npm(self):
        # run npm install if npm exists and package.json is newer than node_modules
        if not which('npm'):
            print("npm unavailable", file=sys.stderr)
            return False
        if not os.path.exists(self.node_modules):
            return True
        return mtime(self.node_modules) < mtime(pjoin(repo_root, 'package.json'))
    def run(self):
        if not self.should_run():
            print("bower dependencies up to date")
            return
        if self.should_run_npm():
            print("installing build dependencies with npm")
            run(['npm', 'install'], cwd=repo_root)
            # touch node_modules so the freshness check above passes next time
            os.utime(self.node_modules, None)
        env = os.environ.copy()
        env['PATH'] = npm_path
        try:
            run(
                ['bower', 'install', '--allow-root', '--config.interactive=false'],
                cwd=repo_root,
                env=env
            )
        except OSError as e:
            print("Failed to run bower: %s" % e, file=sys.stderr)
            print("You can install js dependencies with `npm install`", file=sys.stderr)
            raise
        # self.npm_components()
        os.utime(self.bower_dir, None)
        # update package data in case this created new files
        update_package_data(self.distribution)
def patch_out_bootstrap_bw_print():
    """Hack! Manually patch out the bootstrap rule that forces printing in B&W.
    We haven't found a way to override this rule with another one.
    """
    print_less = pjoin(static, 'components', 'bootstrap', 'less', 'print.less')
    with open(print_less) as f:
        lines = f.readlines()
    # locate the offending rule by its identifying comment text
    for ix, line in enumerate(lines):
        if 'Black prints faster' in line:
            break
    else:
        return  # Already patched out, nothing to do.
    rmed = lines.pop(ix)
    print("Removed line", ix, "from bootstrap print.less:")
    print("-", rmed)
    print()
    # rewrite the file without the removed line
    with open(print_less, 'w') as f:
        f.writelines(lines)
class CompileCSS(Command):
    """Recompile Notebook CSS
    Regenerate the compiled CSS from LESS sources.
    Requires various dev dependencies, such as require and lessc.
    """
    description = "Recompile Notebook CSS"
    user_options = []
    def initialize_options(self):
        pass
    def finalize_options(self):
        pass
    # class-level source/target pairs for the two LESS entry points
    sources = []
    targets = []
    for name in ('ipython', 'style'):
        sources.append(pjoin(static, 'style', '%s.less' % name))
        targets.append(pjoin(static, 'style', '%s.min.css' % name))
    def run(self):
        # make sure bower components exist before compiling
        self.run_command('jsdeps')
        env = os.environ.copy()
        env['PATH'] = npm_path
        patch_out_bootstrap_bw_print()
        for src, dst in zip(self.sources, self.targets):
            try:
                run(['lessc',
                    '--source-map',
                    '--include-path=%s' % pipes.quote(static),
                    src,
                    dst,
                ], cwd=repo_root, env=env)
            except OSError as e:
                print("Failed to build css: %s" % e, file=sys.stderr)
                print("You can install js dependencies with `npm install`", file=sys.stderr)
                raise
        # update package data in case this created new files
        update_package_data(self.distribution)
class CompileJS(Command):
    """Rebuild Notebook Javascript main.min.js files and translation files.
    Calls require via build-main.js
    """
    description = "Rebuild Notebook Javascript main.min.js files"
    user_options = [
        ('force', 'f', "force rebuilding js targets"),
    ]
    def initialize_options(self):
        self.force = False
    def finalize_options(self):
        self.force = bool(self.force)
    # the five application bundles and their build targets
    apps = ['notebook', 'tree', 'edit', 'terminal', 'auth']
    targets = [ pjoin(static, app, 'js', 'main.min.js') for app in apps ]
    def sources(self, name):
        """Generator yielding .js sources that an application depends on"""
        yield pjoin(repo_root, 'tools', 'build-main.js')
        yield pjoin(static, name, 'js', 'main.js')
        for sec in [name, 'base', 'auth']:
            for f in glob(pjoin(static, sec, 'js', '*.js')):
                if not f.endswith('.min.js'):
                    yield f
        yield pjoin(static, 'services', 'config.js')
        if name == 'notebook':
            for f in glob(pjoin(static, 'services', '*', '*.js')):
                yield f
        for parent, dirs, files in os.walk(pjoin(static, 'components')):
            if os.path.basename(parent) == 'MathJax':
                # don't look in MathJax, since it takes forever to walk it
                dirs[:] = []
                continue
            for f in files:
                yield pjoin(parent, f)
    def should_run(self, name, target):
        # rebuild if forced, missing, or any source is newer than the target
        if self.force or not os.path.exists(target):
            return True
        target_mtime = mtime(target)
        for source in self.sources(name):
            if mtime(source) > target_mtime:
                print(source, target)
                return True
        return False
    def build_main(self, name):
        """Build main.min.js"""
        target = pjoin(static, name, 'js', 'main.min.js')
        if not self.should_run(name, target):
            log.info("%s up to date" % target)
            return
        log.info("Rebuilding %s" % target)
        run(['node', 'tools/build-main.js', name])
    def build_jstranslation(self, trd):
        # convert a language's nbjs.po into the nbjs.json the frontend loads
        lang = trd[-5:]
        run([
            pjoin('node_modules', '.bin', 'po2json'),
            '-p', '-F',
            '-f', 'jed1.x',
            '-d', 'nbjs',
            pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', 'nbjs.po'),
            pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', 'nbjs.json'),
        ])
    def run(self):
        self.run_command('jsdeps')
        env = os.environ.copy()
        env['PATH'] = npm_path
        # build the app bundles and translations concurrently
        pool = ThreadPool()
        pool.map(self.build_main, self.apps)
        pool.map(self.build_jstranslation, glob('notebook/i18n/??_??'))
        # update package data in case this created new files
        update_package_data(self.distribution)
class JavascriptVersion(Command):
    """write the javascript version to notebook javascript"""
    description = "Write Jupyter version to javascript"
    user_options = []
    def initialize_options(self):
        pass
    def finalize_options(self):
        pass
    def run(self):
        nsfile = pjoin(repo_root, "notebook", "static", "base", "js", "namespace.js")
        with open(nsfile) as f:
            lines = f.readlines()
        # rewrite namespace.js in place, replacing the Jupyter.version line
        with open(nsfile, 'w') as f:
            found = False
            for line in lines:
                if line.strip().startswith("Jupyter.version"):
                    line = ' Jupyter.version = "{0}";\n'.format(version)
                    found = True
                f.write(line)
            if not found:
                raise RuntimeError("Didn't find Jupyter.version line in %s" % nsfile)
def css_js_prerelease(command, strict=False):
    """decorator for building minified js/css prior to another command"""
    class DecoratedCommand(command):
        def run(self):
            self.distribution.run_command('jsversion')
            jsdeps = self.distribution.get_command_obj('jsdeps')
            js = self.distribution.get_command_obj('js')
            css = self.distribution.get_command_obj('css')
            jsdeps.force = js.force = strict
            # everything the js/css builds are expected to produce
            targets = [ jsdeps.bower_dir ]
            targets.extend(js.targets)
            targets.extend(css.targets)
            missing = [ t for t in targets if not os.path.exists(t) ]
            if not is_repo and not missing:
                # If we're an sdist, we aren't a repo and everything should be present.
                # Don't rebuild js/css in that case.
                command.run(self)
                return
            try:
                self.distribution.run_command('js')
                self.distribution.run_command('css')
                self.distribution.run_command('backendtranslations')
            except Exception as e:
                # refresh missing
                missing = [ t for t in targets if not os.path.exists(t) ]
                if strict or missing:
                    # die if strict or any targets didn't build
                    prefix = os.path.commonprefix([repo_root + os.sep] + missing)
                    missing = [ m[len(prefix):] for m in missing ]
                    log.warn("rebuilding js and css failed. The following required files are missing: %s" % missing)
                    raise e
                else:
                    # best-effort build failed but all targets exist; warn only
                    log.warn("rebuilding js and css failed (not a problem)")
                    log.warn(str(e))
            # check again for missing targets, just in case:
            missing = [ t for t in targets if not os.path.exists(t) ]
            if missing:
                # command succeeded, but targets still missing (?!)
                prefix = os.path.commonprefix([repo_root + os.sep] + missing)
                missing = [ m[len(prefix):] for m in missing ]
                raise ValueError("The following required files are missing: %s" % missing)
            command.run(self)
    return DecoratedCommand
| """
This module defines the things that are used in setup.py for building the notebook
This includes:
* Functions for finding things like packages, package data, etc.
* A function for checking dependencies.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import re
import pipes
import shutil
import sys
from distutils import log
from distutils.cmd import Command
from fnmatch import fnmatch
from glob import glob
from multiprocessing.pool import ThreadPool
from subprocess import check_call
if sys.platform == 'win32':
    from subprocess import list2cmdline
else:
    # POSIX fallback: subprocess.list2cmdline is Windows-only, so emulate
    # it by shell-quoting each argument and joining with spaces.
    def list2cmdline(cmd_list):
        return ' '.join(map(pipes.quote, cmd_list))
#-------------------------------------------------------------------------------
# Useful globals and utility functions
#-------------------------------------------------------------------------------
# A few handy globals
isfile = os.path.isfile  # shorthand alias
pjoin = os.path.join  # shorthand alias used throughout this module
# Absolute directory containing this file, i.e. the project checkout root.
repo_root = os.path.dirname(os.path.abspath(__file__))
# True when running from a git checkout (a .git dir exists) rather than an sdist.
is_repo = os.path.isdir(pjoin(repo_root, '.git'))
def oscmd(s):
    """Echo the shell command *s* to stdout, then execute it via os.system."""
    print(f"> {s}")
    os.system(s)
# Py3 compatibility hacks, without assuming IPython itself is installed with
# the full py3compat machinery.
try:
    # Python 2 has a builtin execfile; probing it raises NameError on Python 3.
    execfile
except NameError:
    def execfile(fname, globs, locs=None):
        """Execute the file *fname* in namespaces *globs*/*locs* (Py3 shim)."""
        locs = locs or globs
        exec(compile(open(fname).read(), fname, "exec"), globs, locs)
#---------------------------------------------------------------------------
# Basic project information
#---------------------------------------------------------------------------
# Name of the top-level python package being built.
name = 'notebook'
# release.py contains version, authors, license, url, keywords, etc.
version_ns = {}
# Execute notebook/_version.py in an empty namespace to pick up __version__
# without importing the (possibly not-yet-built) package.
execfile(pjoin(repo_root, name, '_version.py'), version_ns)
version = version_ns['__version__']
# vendored from pep440 package, we allow `.dev` suffix without trailing number.
loose_pep440re = re.compile(r'^([1-9]\d*!)?(0|[1-9]\d*)(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*)?)?$')
if not loose_pep440re.match(version):
    raise ValueError('Invalid version number `%s`, please follow pep440 convention or pip will get confused about which package is more recent.' % version)
#---------------------------------------------------------------------------
# Find packages
#---------------------------------------------------------------------------
def find_packages():
    """Return every dotted package name found under the top-level package.

    A directory only counts as a package when it contains ``__init__.py``.
    """
    packages = []
    for directory, _subdirs, filenames in os.walk(name):
        if '__init__.py' not in filenames:
            # plain data/resource directory, not an importable package
            continue
        packages.append(directory.replace(os.path.sep, '.'))
    return packages
#---------------------------------------------------------------------------
# Find package data
#---------------------------------------------------------------------------
def find_package_data():
    """
    Find package_data.

    Returns a dict mapping package name -> list of file globs (relative to
    each package directory) for setup()'s ``package_data`` argument.

    NOTE(review): this chdir's into ``notebook/`` (and later ``tests/``) while
    collecting paths and restores the original cwd at the end; paths in the
    result are therefore package-relative.
    """
    # This is not enough for these things to appear in a sdist.
    # We need to muck with the MANIFEST to get this to work
    # exclude components and less from the walk;
    # we will build the components separately
    excludes = [
        pjoin('static', 'components'),
        pjoin('static', '*', 'less'),
        pjoin('static', '*', 'node_modules')
    ]
    # walk notebook resources:
    cwd = os.getcwd()
    os.chdir('notebook')
    static_data = []
    for parent, dirs, files in os.walk('static'):
        if any(fnmatch(parent, pat) for pat in excludes):
            # prevent descending into subdirs
            dirs[:] = []
            continue
        for f in files:
            static_data.append(pjoin(parent, f))
    # for verification purposes, explicitly add main.min.js
    # so that installation will fail if they are missing
    for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']:
        static_data.append(pjoin('static', app, 'js', 'main.min.js'))
    components = pjoin("static", "components")
    # select the components we actually need to install
    # (there are lots of resources we bundle for sdist-reasons that we don't actually use)
    static_data.extend([
        pjoin(components, "backbone", "backbone-min.js"),
        pjoin(components, "bootstrap", "dist", "js", "bootstrap.min.js"),
        pjoin(components, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
        pjoin(components, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
        pjoin(components, "create-react-class", "index.js"),
        pjoin(components, "font-awesome", "css", "*.css"),
        pjoin(components, "es6-promise", "*.js"),
        pjoin(components, "font-awesome", "fonts", "*.*"),
        pjoin(components, "jed", "jed.js"),
        pjoin(components, "jquery", "jquery.min.js"),
        pjoin(components, "jquery-typeahead", "dist", "jquery.typeahead.min.js"),
        pjoin(components, "jquery-typeahead", "dist", "jquery.typeahead.min.css"),
        pjoin(components, "jquery-ui", "jquery-ui.min.js"),
        pjoin(components, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
        pjoin(components, "jquery-ui", "themes", "smoothness", "images", "*"),
        pjoin(components, "marked", "lib", "marked.js"),
        pjoin(components, "react", "react.production.min.js"),
        pjoin(components, "react", "react-dom.production.min.js"),
        pjoin(components, "requirejs", "require.js"),
        pjoin(components, "requirejs-plugins", "src", "json.js"),
        pjoin(components, "requirejs-text", "text.js"),
        pjoin(components, "sanitizer", "index.js"),
        pjoin(components, "underscore", "underscore-min.js"),
        pjoin(components, "moment", "moment.js"),
        pjoin(components, "moment", "min", "*.js"),
        pjoin(components, "xterm.js", "index.js"),
        pjoin(components, "xterm.js-css", "index.css"),
        pjoin(components, "xterm.js-fit", "index.js"),
        pjoin(components, "text-encoding", "lib", "encoding.js"),
    ])
    # Ship all of Codemirror's CSS and JS
    for parent, dirs, files in os.walk(pjoin(components, 'codemirror')):
        for f in files:
            if f.endswith(('.js', '.css')):
                static_data.append(pjoin(parent, f))
    # Trim mathjax: only ship the configs, fonts and jax actually used.
    mj = lambda *path: pjoin(components, 'MathJax', *path)
    static_data.extend([
        mj('MathJax.js'),
        mj('config', 'TeX-AMS-MML_HTMLorMML-full.js'),
        mj('config', 'Safe.js'),
    ])
    trees = []
    mj_out = mj('jax', 'output')
    if os.path.exists(mj_out):
        # ship every output renderer's top-level .js plus its autoload tree
        for output in os.listdir(mj_out):
            path = pjoin(mj_out, output)
            static_data.append(pjoin(path, '*.js'))
            autoload = pjoin(path, 'autoload')
            if os.path.isdir(autoload):
                trees.append(autoload)
    for tree in trees + [
        mj('localization'), # limit to en?
        mj('fonts', 'HTML-CSS', 'STIX-Web', 'woff'),
        mj('extensions'),
        mj('jax', 'input', 'TeX'),
        mj('jax', 'output', 'HTML-CSS', 'fonts', 'STIX-Web'),
        mj('jax', 'output', 'SVG', 'fonts', 'STIX-Web'),
        mj('jax', 'element', 'mml'),
    ]:
        for parent, dirs, files in os.walk(tree):
            for f in files:
                static_data.append(pjoin(parent, f))
    # collect javascript test files, relative to notebook/tests
    os.chdir(os.path.join('tests',))
    js_tests = glob('*.js') + glob('*/*.js')
    os.chdir(cwd)
    package_data = {
        'notebook' : ['templates/*'] + static_data,
        'notebook.tests' : js_tests,
        'notebook.bundler.tests': ['resources/*', 'resources/*/*', 'resources/*/*/.*'],
        'notebook.services.api': ['api.yaml'],
        'notebook.i18n': ['*/LC_MESSAGES/*.*'],
    }
    return package_data
def check_package_data(package_data):
    """Sanity-check that every package_data entry resolves to real files.

    Raises AssertionError on the first literal path that does not exist or
    glob pattern that matches nothing.
    """
    print("checking package data")
    for pkg, patterns in package_data.items():
        root = pjoin(*pkg.split('.'))
        for pattern in patterns:
            full = pjoin(root, pattern)
            if '*' not in full:
                assert os.path.exists(full), "Missing package data: %s" % full
            else:
                assert len(glob(full)) > 0, "No files match pattern %s" % full
def check_package_data_first(command):
    """Class decorator: validate package_data before *command*'s own run().

    Returns a subclass of *command* whose run() first asserts that every
    declared package_data entry exists, then delegates to the original run.
    Probably only needs to wrap build_py.
    """
    class DecoratedCommand(command):
        def run(self):
            # fail fast if any declared package_data entry is missing
            check_package_data(self.package_data)
            command.run(self)
    return DecoratedCommand
def update_package_data(distribution):
    """Refresh distribution.package_data so files generated during setup are seen."""
    cmd = distribution.get_command_obj('build_py')
    distribution.package_data = find_package_data()
    # re-initialise build_py's options, which load package_data
    cmd.finalize_options()
#---------------------------------------------------------------------------
# Notebook related
#---------------------------------------------------------------------------
# shutil.which exists on Python 3.3+; keep a vendored fallback for Python 2.
try:
    from shutil import which
except ImportError:
    ## which() function copied from Python 3.4.3; PSF license
    def which(cmd, mode=os.F_OK | os.X_OK, path=None):
        """Given a command, mode, and a PATH string, return the path which
        conforms to the given mode on the PATH, or None if there is no such
        file.
        `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
        of os.environ.get("PATH"), or can be overridden with a custom search
        path.
        """
        # Check that a given file can be accessed with the correct mode.
        # Additionally check that `file` is not a directory, as on Windows
        # directories pass the os.access check.
        def _access_check(fn, mode):
            return (os.path.exists(fn) and os.access(fn, mode)
                    and not os.path.isdir(fn))
        # If we're given a path with a directory part, look it up directly rather
        # than referring to PATH directories. This includes checking relative to the
        # current directory, e.g. ./script
        if os.path.dirname(cmd):
            if _access_check(cmd, mode):
                return cmd
            return None
        if path is None:
            path = os.environ.get("PATH", os.defpath)
        if not path:
            return None
        path = path.split(os.pathsep)
        if sys.platform == "win32":
            # The current directory takes precedence on Windows.
            if not os.curdir in path:
                path.insert(0, os.curdir)
            # PATHEXT is necessary to check on Windows.
            pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
            # See if the given file matches any of the expected path extensions.
            # This will allow us to short circuit when given "python.exe".
            # If it does match, only test that one, otherwise we have to try
            # others.
            if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
                files = [cmd]
            else:
                files = [cmd + ext for ext in pathext]
        else:
            # On other platforms you don't have things like PATHEXT to tell you
            # what file suffixes are executable, so just pass on cmd as-is.
            files = [cmd]
        seen = set()
        for dir in path:
            normdir = os.path.normcase(dir)
            if not normdir in seen:
                # skip duplicate PATH entries after case normalization
                seen.add(normdir)
                for thefile in files:
                    name = os.path.join(dir, thefile)
                    if _access_check(name, mode):
                        return name
        return None
# Root of the notebook package's static assets.
static = pjoin(repo_root, 'notebook', 'static')
# PATH with the repo-local node_modules/.bin prepended so locally-installed
# node tools (bower, lessc, ...) are found first.
npm_path = os.pathsep.join([
    pjoin(repo_root, 'node_modules', '.bin'),
    os.environ.get("PATH", os.defpath),
])
def mtime(path):
    """Return *path*'s last-modification timestamp."""
    return os.path.getmtime(path)
def run(cmd, *args, **kwargs):
    """Echo a command before running it with subprocess.check_call.

    Overrides any caller-supplied ``shell`` kwarg: shell=True on Windows,
    shell=False elsewhere. Raises CalledProcessError on non-zero exit.
    """
    log.info('> ' + list2cmdline(cmd))
    # presumably needed so .cmd/.bat shims (npm, bower) resolve on Windows
    kwargs['shell'] = (sys.platform == 'win32')
    return check_call(cmd, *args, **kwargs)
class CompileBackendTranslation(Command):
    """Compile the backend .po translation catalogs into binary .mo files."""

    description = "compile the .po files into .mo files, that contain the translations."

    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # one directory per locale, e.g. notebook/i18n/fr_FR
        for lang_dir in glob('notebook/i18n/??_??'):
            lang = lang_dir[-5:]
            for domain in ('notebook', 'nbui'):
                po = pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', domain + '.po')
                mo = pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', domain + '.mo')
                run(['pybabel', 'compile',
                     '-D', domain,
                     '-f',
                     '-l', lang,
                     '-i', po,
                     '-o', mo,
                     ])
class Bower(Command):
    """distutils command: fetch static client-side components with bower,
    installing npm build dependencies first when needed."""
    description = "fetch static client-side components with bower"
    user_options = [
        ('force', 'f', "force fetching of bower dependencies"),
    ]
    def initialize_options(self):
        self.force = False
    def finalize_options(self):
        # distutils may hand the option through as a string/int; normalise
        self.force = bool(self.force)
    # Paths computed once at class-definition time.
    bower_dir = pjoin(static, 'components')
    node_modules = pjoin(repo_root, 'node_modules')
    sanitizer_dir = pjoin(bower_dir, 'sanitizer')
    def should_run(self):
        """Return True when the bower components need (re)fetching/building."""
        if self.force:
            return True
        if not os.path.exists(self.bower_dir):
            return True
        if not os.path.exists(self.sanitizer_dir):
            return True
        # stale if bower.json changed after the last fetch
        bower_stale = mtime(self.bower_dir) < mtime(pjoin(repo_root, 'bower.json'))
        if bower_stale:
            return True
        # stale if webpack config changed after the sanitizer bundle was built
        return mtime(self.sanitizer_dir) < mtime(pjoin(repo_root, 'webpack.config.js'))
    def should_run_npm(self):
        """Return True when `npm install` should be run before bower."""
        if not which('npm'):
            print("npm unavailable", file=sys.stderr)
            return False
        if not os.path.exists(self.node_modules):
            return True
        return mtime(self.node_modules) < mtime(pjoin(repo_root, 'package.json'))
    def run(self):
        if not self.should_run():
            print("bower dependencies up to date")
            return
        if self.should_run_npm():
            print("installing build dependencies with npm")
            run(['npm', 'install'], cwd=repo_root)
            # touch node_modules so the mtime comparison passes next time
            os.utime(self.node_modules, None)
        env = os.environ.copy()
        env['PATH'] = npm_path
        try:
            run(
                ['bower', 'install', '--allow-root', '--config.interactive=false'],
                cwd=repo_root,
                env=env
            )
        except OSError as e:
            print("Failed to run bower: %s" % e, file=sys.stderr)
            print("You can install js dependencies with `npm install`", file=sys.stderr)
            raise
        # self.npm_components()
        if not os.path.exists(self.sanitizer_dir):
            run(['npm', 'run', 'build:webpack'], cwd=repo_root, env=env)
        # touch bower_dir so the staleness check passes next time
        os.utime(self.bower_dir, None)
        # update package data in case this created new files
        update_package_data(self.distribution)
def patch_out_bootstrap_bw_print():
    """Hack! Manually patch out the bootstrap rule that forces printing in B&W.

    We haven't found a way to override this rule with another one, so we
    delete the offending line from bootstrap's print.less in place. Running
    this twice is safe: the second pass finds nothing and returns early.
    """
    print_less = pjoin(static, 'components', 'bootstrap', 'less', 'print.less')
    with open(print_less) as f:
        lines = f.readlines()

    ix = next(
        (i for i, line in enumerate(lines) if 'Black prints faster' in line),
        None,
    )
    if ix is None:
        return  # Already patched out, nothing to do.

    rmed = lines.pop(ix)
    print("Removed line", ix, "from bootstrap print.less:")
    print("-", rmed)
    print()
    with open(print_less, 'w') as f:
        f.writelines(lines)
class CompileCSS(Command):
    """Recompile Notebook CSS
    Regenerate the compiled CSS from LESS sources.
    Requires various dev dependencies, such as require and lessc.
    """
    description = "Recompile Notebook CSS"
    user_options = []
    def initialize_options(self):
        pass
    def finalize_options(self):
        pass
    # sources/targets are built at class-definition time.
    # NOTE(review): this loop variable deliberately-or-not reuses the module
    # global name `name` inside the class body and leaves it bound as a class
    # attribute ('style') — do not rename without checking for dependents.
    sources = []
    targets = []
    for name in ('ipython', 'style'):
        sources.append(pjoin(static, 'style', '%s.less' % name))
        targets.append(pjoin(static, 'style', '%s.min.css' % name))
    def run(self):
        # make sure bower components are present before compiling
        self.run_command('jsdeps')
        env = os.environ.copy()
        env['PATH'] = npm_path
        patch_out_bootstrap_bw_print()
        for src, dst in zip(self.sources, self.targets):
            try:
                run(['lessc',
                    '--source-map',
                    '--include-path=%s' % pipes.quote(static),
                    src,
                    dst,
                ], cwd=repo_root, env=env)
            except OSError as e:
                print("Failed to build css: %s" % e, file=sys.stderr)
                print("You can install js dependencies with `npm install`", file=sys.stderr)
                raise
        # update package data in case this created new files
        update_package_data(self.distribution)
class CompileJS(Command):
    """Rebuild Notebook Javascript main.min.js files and translation files.
    Calls require via build-main.js
    """
    description = "Rebuild Notebook Javascript main.min.js files"
    user_options = [
        ('force', 'f', "force rebuilding js targets"),
    ]
    def initialize_options(self):
        self.force = False
    def finalize_options(self):
        self.force = bool(self.force)
    apps = ['notebook', 'tree', 'edit', 'terminal', 'auth']
    targets = [ pjoin(static, app, 'js', 'main.min.js') for app in apps ]
    def sources(self, name):
        """Generator yielding .js sources that an application depends on"""
        yield pjoin(repo_root, 'tools', 'build-main.js')
        yield pjoin(static, name, 'js', 'main.js')
        for sec in [name, 'base', 'auth']:
            for f in glob(pjoin(static, sec, 'js', '*.js')):
                if not f.endswith('.min.js'):
                    yield f
        yield pjoin(static, 'services', 'config.js')
        if name == 'notebook':
            for f in glob(pjoin(static, 'services', '*', '*.js')):
                yield f
        for parent, dirs, files in os.walk(pjoin(static, 'components')):
            if os.path.basename(parent) == 'MathJax':
                # don't look in MathJax, since it takes forever to walk it
                dirs[:] = []
                continue
            for f in files:
                yield pjoin(parent, f)
    def should_run(self, name, target):
        """Return True when *target* is missing or older than any source."""
        if self.force or not os.path.exists(target):
            return True
        target_mtime = mtime(target)
        for source in self.sources(name):
            if mtime(source) > target_mtime:
                # log which source triggered the rebuild
                print(source, target)
                return True
        return False
    def build_main(self, name):
        """Build main.min.js"""
        target = pjoin(static, name, 'js', 'main.min.js')
        if not self.should_run(name, target):
            log.info("%s up to date" % target)
            return
        log.info("Rebuilding %s" % target)
        run(['node', 'tools/build-main.js', name])
    def build_jstranslation(self, trd):
        """Convert one locale's nbjs.po catalog to nbjs.json via po2json."""
        # trd is a locale directory path; its last 5 chars are e.g. 'fr_FR'
        lang = trd[-5:]
        run([
            pjoin('node_modules', '.bin', 'po2json'),
            '-p', '-F',
            '-f', 'jed1.x',
            '-d', 'nbjs',
            pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', 'nbjs.po'),
            pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', 'nbjs.json'),
        ])
    def run(self):
        # make sure bower components are present before building
        self.run_command('jsdeps')
        env = os.environ.copy()
        env['PATH'] = npm_path
        # build all apps and all translations concurrently
        pool = ThreadPool()
        pool.map(self.build_main, self.apps)
        pool.map(self.build_jstranslation, glob('notebook/i18n/??_??'))
        # update package data in case this created new files
        update_package_data(self.distribution)
class JavascriptVersion(Command):
    """Rewrite the Jupyter.version assignment in namespace.js to match _version.py."""

    description = "Write Jupyter version to javascript"

    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        nsfile = pjoin(repo_root, "notebook", "static", "base", "js", "namespace.js")
        with open(nsfile) as f:
            lines = f.readlines()
        found = False
        with open(nsfile, 'w') as f:
            for line in lines:
                if line.strip().startswith("Jupyter.version"):
                    # substitute the whole assignment line
                    line = '    Jupyter.version = "{0}";\n'.format(version)
                    found = True
                f.write(line)
        if not found:
            raise RuntimeError("Didn't find Jupyter.version line in %s" % nsfile)
def css_js_prerelease(command, strict=False):
    """decorator for building minified js/css prior to another command

    Returns a subclass of *command* whose run() first tries to rebuild the
    js/css/translation artifacts. With strict=True (or when required build
    targets are missing) a failed rebuild aborts; otherwise it is logged and
    ignored (e.g. building an sdist without node available).
    """
    class DecoratedCommand(command):
        def run(self):
            self.distribution.run_command('jsversion')
            jsdeps = self.distribution.get_command_obj('jsdeps')
            js = self.distribution.get_command_obj('js')
            css = self.distribution.get_command_obj('css')
            # in strict mode, force the sub-commands to rebuild everything
            jsdeps.force = js.force = strict
            targets = [ jsdeps.bower_dir ]
            targets.extend(js.targets)
            targets.extend(css.targets)
            missing = [ t for t in targets if not os.path.exists(t) ]
            if not is_repo and not missing:
                # If we're an sdist, we aren't a repo and everything should be present.
                # Don't rebuild js/css in that case.
                command.run(self)
                return
            try:
                self.distribution.run_command('js')
                self.distribution.run_command('css')
                self.distribution.run_command('backendtranslations')
            except Exception as e:
                # refresh missing
                missing = [ t for t in targets if not os.path.exists(t) ]
                if strict or missing:
                    # die if strict or any targets didn't build
                    # report paths relative to the repo root for readability
                    prefix = os.path.commonprefix([repo_root + os.sep] + missing)
                    missing = [ m[len(prefix):] for m in missing ]
                    log.warn("rebuilding js and css failed. The following required files are missing: %s" % missing)
                    raise e
                else:
                    log.warn("rebuilding js and css failed (not a problem)")
                    log.warn(str(e))
            # check again for missing targets, just in case:
            missing = [ t for t in targets if not os.path.exists(t) ]
            if missing:
                # command succeeded, but targets still missing (?!)
                prefix = os.path.commonprefix([repo_root + os.sep] + missing)
                missing = [ m[len(prefix):] for m in missing ]
                raise ValueError("The following required files are missing: %s" % missing)
            command.run(self)
    return DecoratedCommand
| xss | {
"code": [
" pjoin(components, \"google-caja\", \"html-css-sanitizer-minified.js\"),",
" return mtime(self.bower_dir) < mtime(pjoin(repo_root, 'bower.json'))"
],
"line_no": [
140,
384
]
} | {
"code": [
" pjoin(components, \"sanitizer\", \"index.js\"),",
" if not os.path.exists(self.sanitizer_dir):",
" bower_stale = mtime(self.bower_dir) < mtime(pjoin(repo_root, 'bower.json'))",
" if bower_stale:",
" return True",
" return mtime(self.sanitizer_dir) < mtime(pjoin(repo_root, 'webpack.config.js'))",
" run(['npm', 'run', 'build:webpack'], cwd=repo_root, env=env)"
],
"line_no": [
153,
384,
387,
388,
389,
391,
426
]
} |
import os
import re
import pipes
import .shutil
import .sys
from distutils import log
from distutils.cmd import Command
from fnmatch import .fnmatch
from glob import glob
from multiprocessing.pool import ThreadPool
from subprocess import check_call
if sys.platform == 'win32':
from subprocess import .list2cmdline
else:
def FUNC_10(VAR_19):
return ' '.join(map(pipes.quote, VAR_19))
VAR_0 = os.path.isfile
VAR_1 = os.path.join
VAR_2 = os.path.dirname(os.path.abspath(__file__))
VAR_3 = os.path.isdir(VAR_1(VAR_2, '.git'))
def FUNC_0(VAR_4):
print(">", VAR_4)
os.system(VAR_4)
try:
FUNC_18
except NameError:
def FUNC_18(VAR_41, VAR_42, VAR_43=None):
VAR_43 = locs or VAR_42
exec(compile(open(VAR_41).read(), VAR_41, "exec"), VAR_42, VAR_43)
VAR_5 = 'notebook'
VAR_6 = {}
FUNC_18(VAR_1(VAR_2, VAR_5, '_version.py'), VAR_6)
VAR_7 = VAR_6['__version__']
VAR_8 = re.compile(r'^([1-9]\d*!)?(0|[1-9]\d*)(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*)?)?$')
if not VAR_8.match(VAR_7):
raise ValueError('Invalid VAR_7 number `%VAR_4`, please follow pep440 convention or pip will get confused about FUNC_19 VAR_44 is more recent.' % VAR_7)
def FUNC_1():
VAR_20 = []
for dir,subdirs,VAR_65 in os.walk(VAR_5):
VAR_44 = dir.replace(os.path.sep, '.')
if '__init__.py' not in VAR_65:
continue
VAR_20.append(VAR_44)
return VAR_20
def FUNC_2():
VAR_21 = [
VAR_1('static', 'components'),
VAR_1('static', '*', 'less'),
VAR_1('static', '*', 'node_modules')
]
VAR_22 = os.getcwd()
os.chdir('notebook')
VAR_23 = []
for parent, VAR_54, VAR_65 in os.walk('static'):
if any(fnmatch(parent, pat) for pat in VAR_21):
VAR_54[:] = []
continue
for f in VAR_65:
VAR_23.append(VAR_1(parent, f))
for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']:
VAR_23.append(VAR_1('static', app, 'js', 'main.min.js'))
VAR_24 = VAR_1("static", "components")
VAR_23.extend([
VAR_1(VAR_24, "backbone", "backbone-min.js"),
VAR_1(VAR_24, "bootstrap", "dist", "js", "bootstrap.min.js"),
VAR_1(VAR_24, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
VAR_1(VAR_24, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
VAR_1(VAR_24, "create-react-class", "index.js"),
VAR_1(VAR_24, "font-awesome", "css", "*.css"),
VAR_1(VAR_24, "es6-promise", "*.js"),
VAR_1(VAR_24, "font-awesome", "fonts", "*.*"),
VAR_1(VAR_24, "google-caja", "html-VAR_62-sanitizer-minified.js"),
VAR_1(VAR_24, "jed", "jed.js"),
VAR_1(VAR_24, "jquery", "jquery.min.js"),
VAR_1(VAR_24, "jquery-typeahead", "dist", "jquery.typeahead.min.js"),
VAR_1(VAR_24, "jquery-typeahead", "dist", "jquery.typeahead.min.css"),
VAR_1(VAR_24, "jquery-ui", "jquery-ui.min.js"),
VAR_1(VAR_24, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
VAR_1(VAR_24, "jquery-ui", "themes", "smoothness", "images", "*"),
VAR_1(VAR_24, "marked", "lib", "marked.js"),
VAR_1(VAR_24, "react", "react.production.min.js"),
VAR_1(VAR_24, "react", "react-dom.production.min.js"),
VAR_1(VAR_24, "requirejs", "require.js"),
VAR_1(VAR_24, "requirejs-plugins", "src", "json.js"),
VAR_1(VAR_24, "requirejs-text", "text.js"),
VAR_1(VAR_24, "underscore", "underscore-min.js"),
VAR_1(VAR_24, "moment", "moment.js"),
VAR_1(VAR_24, "moment", "min", "*.js"),
VAR_1(VAR_24, "xterm.js", "index.js"),
VAR_1(VAR_24, "xterm.js-css", "index.css"),
VAR_1(VAR_24, "xterm.js-fit", "index.js"),
VAR_1(VAR_24, "text-encoding", "lib", "encoding.js"),
])
for parent, VAR_54, VAR_65 in os.walk(VAR_1(VAR_24, 'codemirror')):
for f in VAR_65:
if f.endswith(('.js', '.css')):
VAR_23.append(VAR_1(parent, f))
VAR_25 = lambda *VAR_14: VAR_1(VAR_24, 'MathJax', *VAR_14)
VAR_23.extend([
VAR_25('MathJax.js'),
VAR_25('config', 'TeX-AMS-MML_HTMLorMML-full.js'),
VAR_25('config', 'Safe.js'),
])
VAR_26 = []
VAR_27 = VAR_25('jax', 'output')
if os.path.exists(VAR_27):
for output in os.listdir(VAR_27):
VAR_14 = VAR_1(VAR_27, output)
VAR_23.append(VAR_1(VAR_14, '*.js'))
VAR_55 = VAR_1(VAR_14, 'autoload')
if os.path.isdir(VAR_55):
VAR_26.append(VAR_55)
for tree in VAR_26 + [
VAR_25('localization'), # limit to en?
VAR_25('fonts', 'HTML-CSS', 'STIX-Web', 'woff'),
VAR_25('extensions'),
VAR_25('jax', 'input', 'TeX'),
VAR_25('jax', 'output', 'HTML-CSS', 'fonts', 'STIX-Web'),
VAR_25('jax', 'output', 'SVG', 'fonts', 'STIX-Web'),
VAR_25('jax', 'element', 'mml'),
]:
for parent, VAR_54, VAR_65 in os.walk(tree):
for f in VAR_65:
VAR_23.append(VAR_1(parent, f))
os.chdir(os.path.join('tests',))
VAR_28 = glob('*.js') + glob('*/*.js')
os.chdir(VAR_22)
VAR_9 = {
'notebook' : ['templates/*'] + VAR_23,
'notebook.tests' : VAR_28,
'notebook.bundler.tests': ['resources/*', 'resources/*/*', 'resources/*/*/.*'],
'notebook.services.api': ['api.yaml'],
'notebook.i18n': ['*/LC_MESSAGES/*.*'],
}
return VAR_9
def FUNC_3(VAR_9):
print("checking VAR_44 data")
for pkg, data in VAR_9.items():
VAR_45 = VAR_1(*pkg.split('.'))
for d in data:
VAR_14 = VAR_1(VAR_45, d)
if '*' in VAR_14:
assert len(glob(VAR_14)) > 0, "No VAR_65 match pattern %s" % VAR_14
else:
assert os.path.exists(VAR_14), "Missing VAR_44 data: %s" % VAR_14
def FUNC_4(VAR_10):
class CLASS_5(VAR_10):
def FUNC_7(self):
FUNC_3(self.package_data)
VAR_10.run(self)
return CLASS_5
def FUNC_5(VAR_11):
VAR_29 = VAR_11.get_command_obj('build_py')
VAR_11.package_data = FUNC_2()
VAR_29.finalize_options()
try:
from shutil import .which
except ImportError:
def FUNC_19(VAR_15, VAR_46=os.F_OK | os.X_OK, VAR_14=None):
def FUNC_20(VAR_56, VAR_46):
return (os.path.exists(VAR_56) and os.access(VAR_56, VAR_46)
and not os.path.isdir(VAR_56))
if os.path.dirname(VAR_15):
if FUNC_20(VAR_15, VAR_46):
return VAR_15
return None
if VAR_14 is None:
VAR_14 = os.environ.get("PATH", os.defpath)
if not VAR_14:
return None
VAR_14 = path.split(os.pathsep)
if sys.platform == "win32":
if not os.curdir in VAR_14:
path.insert(0, os.curdir)
VAR_64 = os.environ.get("PATHEXT", "").split(os.pathsep)
if any(VAR_15.lower().endswith(ext.lower()) for ext in VAR_64):
VAR_65 = [VAR_15]
else:
VAR_65 = [VAR_15 + ext for ext in VAR_64]
else:
VAR_65 = [VAR_15]
VAR_57 = set()
for dir in VAR_14:
VAR_66 = os.path.normcase(dir)
if not VAR_66 in VAR_57:
seen.add(VAR_66)
for thefile in VAR_65:
VAR_5 = os.path.join(dir, thefile)
if FUNC_20(VAR_5, VAR_46):
return VAR_5
return None
VAR_12 = VAR_1(VAR_2, 'notebook', 'static')
VAR_13 = os.pathsep.join([
VAR_1(VAR_2, 'node_modules', '.bin'),
os.environ.get("PATH", os.defpath),
])
def FUNC_6(VAR_14):
return os.stat(VAR_14).st_mtime
def FUNC_7(VAR_15, *VAR_16, **VAR_17):
log.info('> ' + FUNC_10(VAR_15))
VAR_17['shell'] = (sys.platform == 'win32')
return check_call(VAR_15, *VAR_16, **VAR_17)
class CLASS_0(Command):
VAR_30 = "compile the .po VAR_65 into .mo VAR_65, that contain the translations."
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
def FUNC_7(self):
VAR_47 = glob('notebook/i18n/??_??')
for p in VAR_47:
VAR_58 = p[-5:]
for component in ['notebook', 'nbui']:
FUNC_7(['pybabel', 'compile',
'-D', component,
'-f',
'-l', VAR_58,
'-i', VAR_1('notebook', 'i18n', VAR_58, 'LC_MESSAGES', component+'.po'),
'-o', VAR_1('notebook', 'i18n', VAR_58, 'LC_MESSAGES', component+'.mo')
])
class CLASS_1(Command):
VAR_30 = "fetch VAR_12 client-side VAR_24 with bower"
VAR_31 = [
('force', 'f', "force fetching of bower dependencies"),
]
def FUNC_11(self):
self.force = False
def FUNC_12(self):
self.force = bool(self.force)
VAR_32 = VAR_1(VAR_12, 'components')
VAR_33 = VAR_1(VAR_2, 'node_modules')
def FUNC_13(self):
if self.force:
return True
if not os.path.exists(self.bower_dir):
return True
return FUNC_6(self.bower_dir) < FUNC_6(VAR_1(VAR_2, 'bower.json'))
def FUNC_14(self):
if not FUNC_19('npm'):
print("npm unavailable", file=sys.stderr)
return False
if not os.path.exists(self.node_modules):
return True
return FUNC_6(self.node_modules) < FUNC_6(VAR_1(VAR_2, 'package.json'))
def FUNC_7(self):
if not self.should_run():
print("bower dependencies up to date")
return
if self.should_run_npm():
print("installing build dependencies with npm")
FUNC_7(['npm', 'install'], VAR_22=VAR_2)
os.utime(self.node_modules, None)
VAR_48 = os.environ.copy()
VAR_48['PATH'] = VAR_13
try:
FUNC_7(
['bower', 'install', '--allow-root', '--config.interactive=false'],
VAR_22=VAR_2,
VAR_48=env
)
except OSError as e:
print("Failed to FUNC_7 bower: %s" % e, file=sys.stderr)
print("You can install VAR_61 dependencies with `npm install`", file=sys.stderr)
raise
os.utime(self.bower_dir, None)
FUNC_5(self.distribution)
def FUNC_8():
VAR_34 = VAR_1(VAR_12, 'components', 'bootstrap', 'less', 'print.less')
with open(VAR_34) as f:
VAR_49 = f.readlines()
for ix, VAR_68 in enumerate(VAR_49):
if 'Black prints faster' in VAR_68:
break
else:
return # Already patched out, nothing to do.
VAR_35 = VAR_49.pop(ix)
print("Removed line", ix, "from bootstrap print.less:")
print("-", VAR_35)
print()
with open(VAR_34, 'w') as f:
f.writelines(VAR_49)
class CLASS_2(Command):
VAR_30 = "Recompile Notebook CSS"
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
VAR_36 = []
VAR_37 = []
for VAR_5 in ('ipython', 'style'):
VAR_36.append(VAR_1(VAR_12, 'style', '%VAR_4.less' % VAR_5))
VAR_37.append(VAR_1(VAR_12, 'style', '%VAR_4.min.css' % VAR_5))
def FUNC_7(self):
self.run_command('jsdeps')
VAR_48 = os.environ.copy()
VAR_48['PATH'] = VAR_13
FUNC_8()
for src, dst in zip(self.sources, self.targets):
try:
FUNC_7(['lessc',
'--source-map',
'--include-VAR_14=%s' % pipes.quote(VAR_12),
src,
dst,
], VAR_22=VAR_2, VAR_48=env)
except OSError as e:
print("Failed to build VAR_62: %s" % e, file=sys.stderr)
print("You can install VAR_61 dependencies with `npm install`", file=sys.stderr)
raise
FUNC_5(self.distribution)
class CLASS_3(Command):
VAR_30 = "Rebuild Notebook Javascript main.min.js files"
VAR_31 = [
('force', 'f', "force rebuilding VAR_61 targets"),
]
def FUNC_11(self):
self.force = False
def FUNC_12(self):
self.force = bool(self.force)
VAR_38 = ['notebook', 'tree', 'edit', 'terminal', 'auth']
VAR_37 = [ VAR_1(VAR_12, app, 'js', 'main.min.js') for app in VAR_38 ]
def VAR_36(self, VAR_5):
yield VAR_1(VAR_2, 'tools', 'build-main.js')
yield VAR_1(VAR_12, VAR_5, 'js', 'main.js')
for sec in [VAR_5, 'base', 'auth']:
for f in glob(VAR_1(VAR_12, sec, 'js', '*.js')):
if not f.endswith('.min.js'):
yield f
yield VAR_1(VAR_12, 'services', 'config.js')
if VAR_5 == 'notebook':
for f in glob(VAR_1(VAR_12, 'services', '*', '*.js')):
yield f
for parent, VAR_54, VAR_65 in os.walk(VAR_1(VAR_12, 'components')):
if os.path.basename(parent) == 'MathJax':
VAR_54[:] = []
continue
for f in VAR_65:
yield VAR_1(parent, f)
def FUNC_13(self, VAR_5, VAR_39):
if self.force or not os.path.exists(VAR_39):
return True
VAR_50 = FUNC_6(VAR_39)
for source in self.sources(VAR_5):
if FUNC_6(source) > VAR_50:
print(source, VAR_39)
return True
return False
def FUNC_16(self, VAR_5):
VAR_39 = VAR_1(VAR_12, VAR_5, 'js', 'main.min.js')
if not self.should_run(VAR_5, VAR_39):
log.info("%VAR_4 up to date" % VAR_39)
return
log.info("Rebuilding %s" % VAR_39)
FUNC_7(['node', 'tools/build-main.js', VAR_5])
def FUNC_17(self, VAR_40):
VAR_51 = VAR_40[-5:]
FUNC_7([
VAR_1('node_modules', '.bin', 'po2json'),
'-p', '-F',
'-f', 'jed1.x',
'-d', 'nbjs',
VAR_1('notebook', 'i18n', VAR_51, 'LC_MESSAGES', 'nbjs.po'),
VAR_1('notebook', 'i18n', VAR_51, 'LC_MESSAGES', 'nbjs.json'),
])
def FUNC_7(self):
self.run_command('jsdeps')
VAR_48 = os.environ.copy()
VAR_48['PATH'] = VAR_13
VAR_52 = ThreadPool()
VAR_52.map(self.build_main, self.apps)
VAR_52.map(self.build_jstranslation, glob('notebook/i18n/??_??'))
FUNC_5(self.distribution)
class CLASS_4(Command):
VAR_30 = "Write Jupyter VAR_7 to javascript"
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
def FUNC_7(self):
VAR_53 = VAR_1(VAR_2, "notebook", "static", "base", "js", "namespace.js")
with open(VAR_53) as f:
VAR_49 = f.readlines()
with open(VAR_53, 'w') as f:
VAR_59 = False
for VAR_68 in VAR_49:
if VAR_68.strip().startswith("Jupyter.version"):
VAR_68 = ' Jupyter.version = "{0}";\n'.format(VAR_7)
VAR_59 = True
f.write(VAR_68)
if not VAR_59:
raise RuntimeError("Didn't find Jupyter.version VAR_68 in %s" % VAR_53)
def FUNC_9(VAR_10, VAR_18=False):
class CLASS_5(VAR_10):
def FUNC_7(self):
self.distribution.run_command('jsversion')
VAR_60 = self.distribution.get_command_obj('jsdeps')
VAR_61 = self.distribution.get_command_obj('js')
VAR_62 = self.distribution.get_command_obj('css')
VAR_60.force = VAR_61.force = VAR_18
VAR_37 = [ VAR_60.bower_dir ]
VAR_37.extend(VAR_61.targets)
VAR_37.extend(VAR_62.targets)
VAR_63 = [ t for t in VAR_37 if not os.path.exists(t) ]
if not VAR_3 and not VAR_63:
VAR_10.run(self)
return
try:
self.distribution.run_command('js')
self.distribution.run_command('css')
self.distribution.run_command('backendtranslations')
except Exception as e:
VAR_63 = [ t for t in VAR_37 if not os.path.exists(t) ]
if VAR_18 or VAR_63:
VAR_67 = os.path.commonprefix([VAR_2 + os.sep] + VAR_63)
VAR_63 = [ m[len(VAR_67):] for m in VAR_63 ]
log.warn("rebuilding VAR_61 and VAR_62 failed. The following required VAR_65 are VAR_63: %s" % VAR_63)
raise e
else:
log.warn("rebuilding VAR_61 and VAR_62 failed (not a problem)")
log.warn(str(e))
VAR_63 = [ t for t in VAR_37 if not os.path.exists(t) ]
if VAR_63:
VAR_67 = os.path.commonprefix([VAR_2 + os.sep] + VAR_63)
VAR_63 = [ m[len(VAR_67):] for m in VAR_63 ]
raise ValueError("The following required VAR_65 are VAR_63: %s" % VAR_63)
VAR_10.run(self)
return CLASS_5
|
import os
import re
import pipes
import .shutil
import .sys
from distutils import log
from distutils.cmd import Command
from fnmatch import .fnmatch
from glob import glob
from multiprocessing.pool import ThreadPool
from subprocess import check_call
if sys.platform == 'win32':
from subprocess import .list2cmdline
else:
def FUNC_10(VAR_19):
return ' '.join(map(pipes.quote, VAR_19))
VAR_0 = os.path.isfile
VAR_1 = os.path.join
VAR_2 = os.path.dirname(os.path.abspath(__file__))
VAR_3 = os.path.isdir(VAR_1(VAR_2, '.git'))
def FUNC_0(VAR_4):
print(">", VAR_4)
os.system(VAR_4)
try:
FUNC_18
except NameError:
def FUNC_18(VAR_42, VAR_43, VAR_44=None):
VAR_44 = locs or VAR_43
exec(compile(open(VAR_42).read(), VAR_42, "exec"), VAR_43, VAR_44)
VAR_5 = 'notebook'
VAR_6 = {}
FUNC_18(VAR_1(VAR_2, VAR_5, '_version.py'), VAR_6)
VAR_7 = VAR_6['__version__']
VAR_8 = re.compile(r'^([1-9]\d*!)?(0|[1-9]\d*)(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*)?)?$')
if not VAR_8.match(VAR_7):
raise ValueError('Invalid VAR_7 number `%VAR_4`, please follow pep440 convention or pip will get confused about FUNC_19 VAR_45 is more recent.' % VAR_7)
def FUNC_1():
VAR_20 = []
for dir,subdirs,VAR_67 in os.walk(VAR_5):
VAR_45 = dir.replace(os.path.sep, '.')
if '__init__.py' not in VAR_67:
continue
VAR_20.append(VAR_45)
return VAR_20
def FUNC_2():
VAR_21 = [
VAR_1('static', 'components'),
VAR_1('static', '*', 'less'),
VAR_1('static', '*', 'node_modules')
]
VAR_22 = os.getcwd()
os.chdir('notebook')
VAR_23 = []
for parent, VAR_56, VAR_67 in os.walk('static'):
if any(fnmatch(parent, pat) for pat in VAR_21):
VAR_56[:] = []
continue
for f in VAR_67:
VAR_23.append(VAR_1(parent, f))
for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']:
VAR_23.append(VAR_1('static', app, 'js', 'main.min.js'))
VAR_24 = VAR_1("static", "components")
VAR_23.extend([
VAR_1(VAR_24, "backbone", "backbone-min.js"),
VAR_1(VAR_24, "bootstrap", "dist", "js", "bootstrap.min.js"),
VAR_1(VAR_24, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
VAR_1(VAR_24, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
VAR_1(VAR_24, "create-react-class", "index.js"),
VAR_1(VAR_24, "font-awesome", "css", "*.css"),
VAR_1(VAR_24, "es6-promise", "*.js"),
VAR_1(VAR_24, "font-awesome", "fonts", "*.*"),
VAR_1(VAR_24, "jed", "jed.js"),
VAR_1(VAR_24, "jquery", "jquery.min.js"),
VAR_1(VAR_24, "jquery-typeahead", "dist", "jquery.typeahead.min.js"),
VAR_1(VAR_24, "jquery-typeahead", "dist", "jquery.typeahead.min.css"),
VAR_1(VAR_24, "jquery-ui", "jquery-ui.min.js"),
VAR_1(VAR_24, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
VAR_1(VAR_24, "jquery-ui", "themes", "smoothness", "images", "*"),
VAR_1(VAR_24, "marked", "lib", "marked.js"),
VAR_1(VAR_24, "react", "react.production.min.js"),
VAR_1(VAR_24, "react", "react-dom.production.min.js"),
VAR_1(VAR_24, "requirejs", "require.js"),
VAR_1(VAR_24, "requirejs-plugins", "src", "json.js"),
VAR_1(VAR_24, "requirejs-text", "text.js"),
VAR_1(VAR_24, "sanitizer", "index.js"),
VAR_1(VAR_24, "underscore", "underscore-min.js"),
VAR_1(VAR_24, "moment", "moment.js"),
VAR_1(VAR_24, "moment", "min", "*.js"),
VAR_1(VAR_24, "xterm.js", "index.js"),
VAR_1(VAR_24, "xterm.js-css", "index.css"),
VAR_1(VAR_24, "xterm.js-fit", "index.js"),
VAR_1(VAR_24, "text-encoding", "lib", "encoding.js"),
])
for parent, VAR_56, VAR_67 in os.walk(VAR_1(VAR_24, 'codemirror')):
for f in VAR_67:
if f.endswith(('.js', '.css')):
VAR_23.append(VAR_1(parent, f))
VAR_25 = lambda *VAR_14: VAR_1(VAR_24, 'MathJax', *VAR_14)
VAR_23.extend([
VAR_25('MathJax.js'),
VAR_25('config', 'TeX-AMS-MML_HTMLorMML-full.js'),
VAR_25('config', 'Safe.js'),
])
VAR_26 = []
VAR_27 = VAR_25('jax', 'output')
if os.path.exists(VAR_27):
for output in os.listdir(VAR_27):
VAR_14 = VAR_1(VAR_27, output)
VAR_23.append(VAR_1(VAR_14, '*.js'))
VAR_57 = VAR_1(VAR_14, 'autoload')
if os.path.isdir(VAR_57):
VAR_26.append(VAR_57)
for tree in VAR_26 + [
VAR_25('localization'), # limit to en?
VAR_25('fonts', 'HTML-CSS', 'STIX-Web', 'woff'),
VAR_25('extensions'),
VAR_25('jax', 'input', 'TeX'),
VAR_25('jax', 'output', 'HTML-CSS', 'fonts', 'STIX-Web'),
VAR_25('jax', 'output', 'SVG', 'fonts', 'STIX-Web'),
VAR_25('jax', 'element', 'mml'),
]:
for parent, VAR_56, VAR_67 in os.walk(tree):
for f in VAR_67:
VAR_23.append(VAR_1(parent, f))
os.chdir(os.path.join('tests',))
VAR_28 = glob('*.js') + glob('*/*.js')
os.chdir(VAR_22)
VAR_9 = {
'notebook' : ['templates/*'] + VAR_23,
'notebook.tests' : VAR_28,
'notebook.bundler.tests': ['resources/*', 'resources/*/*', 'resources/*/*/.*'],
'notebook.services.api': ['api.yaml'],
'notebook.i18n': ['*/LC_MESSAGES/*.*'],
}
return VAR_9
def FUNC_3(VAR_9):
print("checking VAR_45 data")
for pkg, data in VAR_9.items():
VAR_46 = VAR_1(*pkg.split('.'))
for d in data:
VAR_14 = VAR_1(VAR_46, d)
if '*' in VAR_14:
assert len(glob(VAR_14)) > 0, "No VAR_67 match pattern %s" % VAR_14
else:
assert os.path.exists(VAR_14), "Missing VAR_45 data: %s" % VAR_14
def FUNC_4(VAR_10):
class CLASS_5(VAR_10):
def FUNC_7(self):
FUNC_3(self.package_data)
VAR_10.run(self)
return CLASS_5
def FUNC_5(VAR_11):
VAR_29 = VAR_11.get_command_obj('build_py')
VAR_11.package_data = FUNC_2()
VAR_29.finalize_options()
try:
from shutil import .which
except ImportError:
def FUNC_19(VAR_15, VAR_47=os.F_OK | os.X_OK, VAR_14=None):
def FUNC_20(VAR_58, VAR_47):
return (os.path.exists(VAR_58) and os.access(VAR_58, VAR_47)
and not os.path.isdir(VAR_58))
if os.path.dirname(VAR_15):
if FUNC_20(VAR_15, VAR_47):
return VAR_15
return None
if VAR_14 is None:
VAR_14 = os.environ.get("PATH", os.defpath)
if not VAR_14:
return None
VAR_14 = path.split(os.pathsep)
if sys.platform == "win32":
if not os.curdir in VAR_14:
path.insert(0, os.curdir)
VAR_66 = os.environ.get("PATHEXT", "").split(os.pathsep)
if any(VAR_15.lower().endswith(ext.lower()) for ext in VAR_66):
VAR_67 = [VAR_15]
else:
VAR_67 = [VAR_15 + ext for ext in VAR_66]
else:
VAR_67 = [VAR_15]
VAR_59 = set()
for dir in VAR_14:
VAR_68 = os.path.normcase(dir)
if not VAR_68 in VAR_59:
seen.add(VAR_68)
for thefile in VAR_67:
VAR_5 = os.path.join(dir, thefile)
if FUNC_20(VAR_5, VAR_47):
return VAR_5
return None
VAR_12 = VAR_1(VAR_2, 'notebook', 'static')
VAR_13 = os.pathsep.join([
VAR_1(VAR_2, 'node_modules', '.bin'),
os.environ.get("PATH", os.defpath),
])
def FUNC_6(VAR_14):
return os.stat(VAR_14).st_mtime
def FUNC_7(VAR_15, *VAR_16, **VAR_17):
log.info('> ' + FUNC_10(VAR_15))
VAR_17['shell'] = (sys.platform == 'win32')
return check_call(VAR_15, *VAR_16, **VAR_17)
class CLASS_0(Command):
VAR_30 = "compile the .po VAR_67 into .mo VAR_67, that contain the translations."
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
def FUNC_7(self):
VAR_48 = glob('notebook/i18n/??_??')
for p in VAR_48:
VAR_60 = p[-5:]
for component in ['notebook', 'nbui']:
FUNC_7(['pybabel', 'compile',
'-D', component,
'-f',
'-l', VAR_60,
'-i', VAR_1('notebook', 'i18n', VAR_60, 'LC_MESSAGES', component+'.po'),
'-o', VAR_1('notebook', 'i18n', VAR_60, 'LC_MESSAGES', component+'.mo')
])
class CLASS_1(Command):
VAR_30 = "fetch VAR_12 client-side VAR_24 with bower"
VAR_31 = [
('force', 'f', "force fetching of bower dependencies"),
]
def FUNC_11(self):
self.force = False
def FUNC_12(self):
self.force = bool(self.force)
VAR_32 = VAR_1(VAR_12, 'components')
VAR_33 = VAR_1(VAR_2, 'node_modules')
VAR_34 = VAR_1(VAR_32, 'sanitizer')
def FUNC_13(self):
if self.force:
return True
if not os.path.exists(self.bower_dir):
return True
if not os.path.exists(self.sanitizer_dir):
return True
VAR_49 = FUNC_6(self.bower_dir) < FUNC_6(VAR_1(VAR_2, 'bower.json'))
if VAR_49:
return True
return FUNC_6(self.sanitizer_dir) < FUNC_6(VAR_1(VAR_2, 'webpack.config.js'))
def FUNC_14(self):
if not FUNC_19('npm'):
print("npm unavailable", file=sys.stderr)
return False
if not os.path.exists(self.node_modules):
return True
return FUNC_6(self.node_modules) < FUNC_6(VAR_1(VAR_2, 'package.json'))
def FUNC_7(self):
if not self.should_run():
print("bower dependencies up to date")
return
if self.should_run_npm():
print("installing build dependencies with npm")
FUNC_7(['npm', 'install'], VAR_22=VAR_2)
os.utime(self.node_modules, None)
VAR_50 = os.environ.copy()
VAR_50['PATH'] = VAR_13
try:
FUNC_7(
['bower', 'install', '--allow-root', '--config.interactive=false'],
VAR_22=VAR_2,
VAR_50=env
)
except OSError as e:
print("Failed to FUNC_7 bower: %s" % e, file=sys.stderr)
print("You can install VAR_63 dependencies with `npm install`", file=sys.stderr)
raise
if not os.path.exists(self.sanitizer_dir):
FUNC_7(['npm', 'run', 'build:webpack'], VAR_22=VAR_2, VAR_50=env)
os.utime(self.bower_dir, None)
FUNC_5(self.distribution)
def FUNC_8():
VAR_35 = VAR_1(VAR_12, 'components', 'bootstrap', 'less', 'print.less')
with open(VAR_35) as f:
VAR_51 = f.readlines()
for ix, VAR_70 in enumerate(VAR_51):
if 'Black prints faster' in VAR_70:
break
else:
return # Already patched out, nothing to do.
VAR_36 = VAR_51.pop(ix)
print("Removed line", ix, "from bootstrap print.less:")
print("-", VAR_36)
print()
with open(VAR_35, 'w') as f:
f.writelines(VAR_51)
class CLASS_2(Command):
VAR_30 = "Recompile Notebook CSS"
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
VAR_37 = []
VAR_38 = []
for VAR_5 in ('ipython', 'style'):
VAR_37.append(VAR_1(VAR_12, 'style', '%VAR_4.less' % VAR_5))
VAR_38.append(VAR_1(VAR_12, 'style', '%VAR_4.min.css' % VAR_5))
def FUNC_7(self):
self.run_command('jsdeps')
VAR_50 = os.environ.copy()
VAR_50['PATH'] = VAR_13
FUNC_8()
for src, dst in zip(self.sources, self.targets):
try:
FUNC_7(['lessc',
'--source-map',
'--include-VAR_14=%s' % pipes.quote(VAR_12),
src,
dst,
], VAR_22=VAR_2, VAR_50=env)
except OSError as e:
print("Failed to build VAR_64: %s" % e, file=sys.stderr)
print("You can install VAR_63 dependencies with `npm install`", file=sys.stderr)
raise
FUNC_5(self.distribution)
class CLASS_3(Command):
VAR_30 = "Rebuild Notebook Javascript main.min.js files"
VAR_31 = [
('force', 'f', "force rebuilding VAR_63 targets"),
]
def FUNC_11(self):
self.force = False
def FUNC_12(self):
self.force = bool(self.force)
VAR_39 = ['notebook', 'tree', 'edit', 'terminal', 'auth']
VAR_38 = [ VAR_1(VAR_12, app, 'js', 'main.min.js') for app in VAR_39 ]
def VAR_37(self, VAR_5):
yield VAR_1(VAR_2, 'tools', 'build-main.js')
yield VAR_1(VAR_12, VAR_5, 'js', 'main.js')
for sec in [VAR_5, 'base', 'auth']:
for f in glob(VAR_1(VAR_12, sec, 'js', '*.js')):
if not f.endswith('.min.js'):
yield f
yield VAR_1(VAR_12, 'services', 'config.js')
if VAR_5 == 'notebook':
for f in glob(VAR_1(VAR_12, 'services', '*', '*.js')):
yield f
for parent, VAR_56, VAR_67 in os.walk(VAR_1(VAR_12, 'components')):
if os.path.basename(parent) == 'MathJax':
VAR_56[:] = []
continue
for f in VAR_67:
yield VAR_1(parent, f)
def FUNC_13(self, VAR_5, VAR_40):
if self.force or not os.path.exists(VAR_40):
return True
VAR_52 = FUNC_6(VAR_40)
for source in self.sources(VAR_5):
if FUNC_6(source) > VAR_52:
print(source, VAR_40)
return True
return False
def FUNC_16(self, VAR_5):
VAR_40 = VAR_1(VAR_12, VAR_5, 'js', 'main.min.js')
if not self.should_run(VAR_5, VAR_40):
log.info("%VAR_4 up to date" % VAR_40)
return
log.info("Rebuilding %s" % VAR_40)
FUNC_7(['node', 'tools/build-main.js', VAR_5])
def FUNC_17(self, VAR_41):
VAR_53 = VAR_41[-5:]
FUNC_7([
VAR_1('node_modules', '.bin', 'po2json'),
'-p', '-F',
'-f', 'jed1.x',
'-d', 'nbjs',
VAR_1('notebook', 'i18n', VAR_53, 'LC_MESSAGES', 'nbjs.po'),
VAR_1('notebook', 'i18n', VAR_53, 'LC_MESSAGES', 'nbjs.json'),
])
def FUNC_7(self):
self.run_command('jsdeps')
VAR_50 = os.environ.copy()
VAR_50['PATH'] = VAR_13
VAR_54 = ThreadPool()
VAR_54.map(self.build_main, self.apps)
VAR_54.map(self.build_jstranslation, glob('notebook/i18n/??_??'))
FUNC_5(self.distribution)
class CLASS_4(Command):
VAR_30 = "Write Jupyter VAR_7 to javascript"
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
def FUNC_7(self):
VAR_55 = VAR_1(VAR_2, "notebook", "static", "base", "js", "namespace.js")
with open(VAR_55) as f:
VAR_51 = f.readlines()
with open(VAR_55, 'w') as f:
VAR_61 = False
for VAR_70 in VAR_51:
if VAR_70.strip().startswith("Jupyter.version"):
VAR_70 = ' Jupyter.version = "{0}";\n'.format(VAR_7)
VAR_61 = True
f.write(VAR_70)
if not VAR_61:
raise RuntimeError("Didn't find Jupyter.version VAR_70 in %s" % VAR_55)
def FUNC_9(VAR_10, VAR_18=False):
class CLASS_5(VAR_10):
def FUNC_7(self):
self.distribution.run_command('jsversion')
VAR_62 = self.distribution.get_command_obj('jsdeps')
VAR_63 = self.distribution.get_command_obj('js')
VAR_64 = self.distribution.get_command_obj('css')
VAR_62.force = VAR_63.force = VAR_18
VAR_38 = [ VAR_62.bower_dir ]
VAR_38.extend(VAR_63.targets)
VAR_38.extend(VAR_64.targets)
VAR_65 = [ t for t in VAR_38 if not os.path.exists(t) ]
if not VAR_3 and not VAR_65:
VAR_10.run(self)
return
try:
self.distribution.run_command('js')
self.distribution.run_command('css')
self.distribution.run_command('backendtranslations')
except Exception as e:
VAR_65 = [ t for t in VAR_38 if not os.path.exists(t) ]
if VAR_18 or VAR_65:
VAR_69 = os.path.commonprefix([VAR_2 + os.sep] + VAR_65)
VAR_65 = [ m[len(VAR_69):] for m in VAR_65 ]
log.warn("rebuilding VAR_63 and VAR_64 failed. The following required VAR_67 are VAR_65: %s" % VAR_65)
raise e
else:
log.warn("rebuilding VAR_63 and VAR_64 failed (not a problem)")
log.warn(str(e))
VAR_65 = [ t for t in VAR_38 if not os.path.exists(t) ]
if VAR_65:
VAR_69 = os.path.commonprefix([VAR_2 + os.sep] + VAR_65)
VAR_65 = [ m[len(VAR_69):] for m in VAR_65 ]
raise ValueError("The following required VAR_67 are VAR_65: %s" % VAR_65)
VAR_10.run(self)
return CLASS_5
| [
3,
5,
9,
10,
11,
12,
18,
25,
31,
32,
33,
34,
35,
36,
41,
45,
46,
47,
48,
55,
56,
57,
58,
59,
60,
62,
63,
66,
68,
69,
70,
74,
75,
76,
77,
78,
87,
91,
92,
93,
94,
95,
100,
101,
102,
103,
104,
110,
111,
117,
122,
123,
124,
127,
129,
130,
162,
163,
168,
169,
176,
179,
187,
200,
203,
205,
213,
215,
216,
228,
229,
232,
240,
245,
247,
248,
249,
250,
251,
255,
260,
264,
266,
267,
268,
272,
273,
274,
275,
280,
286,
288,
291,
292,
294,
295,
296,
297,
303,
304,
306,
317,
318,
320,
325,
329,
330,
336,
339,
341,
344,
347,
348,
361,
364,
368,
371,
374,
377,
383,
385,
393,
398,
403,
406,
417,
419,
421,
422,
425,
431,
437,
444,
447,
449,
454,
457,
460,
466,
471,
473,
486,
488,
489,
492,
499,
502,
505,
508,
513,
524,
529,
539,
543,
549,
560,
568,
570,
571,
576,
579,
582,
596,
597,
607,
612,
614,
615,
618,
624,
627,
635,
636,
639,
643,
646,
1,
2,
3,
4,
5,
6,
7,
8,
80,
81,
82,
97,
98,
99,
218,
231,
232,
233,
234,
242,
327,
332,
424,
425,
426,
427,
446,
447,
448,
449,
450,
451,
491,
492,
493,
494,
573,
599,
510,
541,
257,
258,
259,
260,
261,
262,
263,
264,
265
] | [
3,
5,
9,
10,
11,
12,
18,
25,
31,
32,
33,
34,
35,
36,
41,
45,
46,
47,
48,
55,
56,
57,
58,
59,
60,
62,
63,
66,
68,
69,
70,
74,
75,
76,
77,
78,
87,
91,
92,
93,
94,
95,
100,
101,
102,
103,
104,
110,
111,
117,
122,
123,
124,
127,
129,
130,
162,
163,
168,
169,
176,
179,
187,
200,
203,
205,
213,
215,
216,
228,
229,
232,
240,
245,
247,
248,
249,
250,
251,
255,
260,
264,
266,
267,
268,
272,
273,
274,
275,
280,
286,
288,
291,
292,
294,
295,
296,
297,
303,
304,
306,
317,
318,
320,
325,
329,
330,
336,
339,
341,
344,
347,
348,
361,
364,
368,
371,
374,
378,
386,
390,
392,
400,
405,
410,
413,
424,
428,
430,
431,
434,
440,
446,
453,
456,
458,
463,
466,
469,
475,
480,
482,
495,
497,
498,
501,
508,
511,
514,
517,
522,
533,
538,
548,
552,
558,
569,
577,
579,
580,
585,
588,
591,
605,
606,
616,
621,
623,
624,
627,
633,
636,
644,
645,
648,
652,
655,
1,
2,
3,
4,
5,
6,
7,
8,
80,
81,
82,
97,
98,
99,
218,
231,
232,
233,
234,
242,
327,
332,
433,
434,
435,
436,
455,
456,
457,
458,
459,
460,
500,
501,
502,
503,
582,
608,
519,
550,
257,
258,
259,
260,
261,
262,
263,
264,
265
] |
2CWE-601
| #!/bin/python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Auth, Mail, PluginManager and various utilities
------------------------------------------------
"""
import base64
from functools import reduce
from gluon._compat import pickle, thread, urllib2, Cookie, StringIO, urlencode
from gluon._compat import configparser, MIMEBase, MIMEMultipart, MIMEText, Header
from gluon._compat import Encoders, Charset, long, urllib_quote, iteritems
from gluon._compat import to_bytes, to_native, add_charset, string_types
from gluon._compat import charset_QP, basestring, unicodeT, to_unicode
from gluon._compat import urllib2, urlopen
import datetime
import logging
import sys
import glob
import os
import re
import time
import fnmatch
import traceback
import smtplib
import email.utils
import random
import hmac
import hashlib
import json
from email import message_from_string
from gluon.authapi import AuthAPI
from gluon.contenttype import contenttype
from gluon.storage import Storage, StorageList, Settings, Messages
from gluon.utils import web2py_uuid, compare
from gluon.fileutils import read_file, check_credentials
from gluon import *
from gluon.contrib.autolinks import expand_one
from gluon.contrib.markmin.markmin2html import replace_at_urls
from gluon.contrib.markmin.markmin2html import replace_autolinks
from gluon.contrib.markmin.markmin2html import replace_components
from pydal.objects import Row, Set, Query
import gluon.serializers as serializers
Table = DAL.Table
Field = DAL.Field
__all__ = ['Mail', 'Auth', 'Recaptcha2', 'Crud', 'Service', 'Wiki',
'PluginManager', 'fetch', 'geocode', 'reverse_geocode', 'prettydate']
# mind there are two loggers here (logger and crud.settings.logger)!
logger = logging.getLogger("web2py")
DEFAULT = lambda: None
def getarg(position, default=None):
args = current.request.args
if position < 0 and len(args) >= -position:
return args[position]
elif position >= 0 and len(args) > position:
return args[position]
else:
return default
def callback(actions, form, tablename=None):
if actions:
if tablename and isinstance(actions, dict):
actions = actions.get(tablename, [])
if not isinstance(actions, (list, tuple)):
actions = [actions]
[action(form) for action in actions]
def validators(*a):
b = []
for item in a:
if isinstance(item, (list, tuple)):
b = b + list(item)
else:
b.append(item)
return b
def call_or_redirect(f, *args):
if callable(f):
redirect(f(*args))
else:
redirect(f)
def replace_id(url, form):
if url:
url = url.replace('[id]', str(form.vars.id))
if url[0] == '/' or url[:4] == 'http':
return url
return URL(url)
class Mail(object):
"""
Class for configuring and sending emails with alternative text / html
body, multiple attachments and encryption support
Works with SMTP and Google App Engine.
Args:
server: SMTP server address in address:port notation
sender: sender email address
login: sender login name and password in login:password notation
or None if no authentication is required
tls: enables/disables encryption (True by default)
In Google App Engine use ::
server='gae'
For sake of backward compatibility all fields are optional and default
to None, however, to be able to send emails at least server and sender
must be specified. They are available under following fields::
mail.settings.server
mail.settings.sender
mail.settings.login
mail.settings.timeout = 60 # seconds (default)
When server is 'logging', email is logged but not sent (debug mode)
Optionally you can use PGP encryption or X509::
mail.settings.cipher_type = None
mail.settings.gpg_home = None
mail.settings.sign = True
mail.settings.sign_passphrase = None
mail.settings.encrypt = True
mail.settings.x509_sign_keyfile = None
mail.settings.x509_sign_certfile = None
mail.settings.x509_sign_chainfile = None
mail.settings.x509_nocerts = False
mail.settings.x509_crypt_certfiles = None
cipher_type : None
gpg - need a python-pyme package and gpgme lib
x509 - smime
gpg_home : you can set a GNUPGHOME environment variable
to specify home of gnupg
sign : sign the message (True or False)
sign_passphrase : passphrase for key signing
encrypt : encrypt the message (True or False). It defaults
to True
... x509 only ...
x509_sign_keyfile : the signers private key filename or
string containing the key. (PEM format)
x509_sign_certfile: the signers certificate filename or
string containing the cert. (PEM format)
x509_sign_chainfile: sets the optional all-in-one file where you
can assemble the certificates of Certification
Authorities (CA) which form the certificate
chain of email certificate. It can be a
string containing the certs to. (PEM format)
x509_nocerts : if True then no attached certificate in mail
x509_crypt_certfiles: the certificates file or strings to encrypt
the messages with can be a file name /
string or a list of file names /
strings (PEM format)
Examples:
Create Mail object with authentication data for remote server::
mail = Mail('example.com:25', 'me@example.com', 'me:password')
Notice for GAE users:
attachments have an automatic content_id='attachment-i' where i is progressive number
in this way the can be referenced from the HTML as <img src="cid:attachment-0" /> etc.
"""
class Attachment(MIMEBase):
"""
Email attachment
Args:
payload: path to file or file-like object with read() method
filename: name of the attachment stored in message; if set to
None, it will be fetched from payload path; file-like
object payload must have explicit filename specified
content_id: id of the attachment; automatically contained within
`<` and `>`
content_type: content type of the attachment; if set to None,
it will be fetched from filename using gluon.contenttype
module
encoding: encoding of all strings passed to this function (except
attachment body)
Content ID is used to identify attachments within the html body;
in example, attached image with content ID 'photo' may be used in
html message as a source of img tag `<img src="cid:photo" />`.
Example::
Create attachment from text file::
attachment = Mail.Attachment('/path/to/file.txt')
Content-Type: text/plain
MIME-Version: 1.0
Content-Disposition: attachment; filename="file.txt"
Content-Transfer-Encoding: base64
SOMEBASE64CONTENT=
Create attachment from image file with custom filename and cid::
attachment = Mail.Attachment('/path/to/file.png',
filename='photo.png',
content_id='photo')
Content-Type: image/png
MIME-Version: 1.0
Content-Disposition: attachment; filename="photo.png"
Content-Id: <photo>
Content-Transfer-Encoding: base64
SOMEOTHERBASE64CONTENT=
"""
def __init__(
self,
payload,
filename=None,
content_id=None,
content_type=None,
encoding='utf-8'):
if isinstance(payload, str):
if filename is None:
filename = os.path.basename(payload)
payload = read_file(payload, 'rb')
else:
if filename is None:
raise Exception('Missing attachment name')
payload = payload.read()
# FIXME PY3 can be used to_native?
filename = filename.encode(encoding)
if content_type is None:
content_type = contenttype(filename)
self.my_filename = filename
self.my_payload = payload
MIMEBase.__init__(self, *content_type.split('/', 1))
self.set_payload(payload)
self['Content-Disposition'] = Header('attachment; filename="%s"' % to_native(filename, encoding), 'utf-8')
if content_id is not None:
self['Content-Id'] = '<%s>' % to_native(content_id, encoding)
Encoders.encode_base64(self)
def __init__(self, server=None, sender=None, login=None, tls=True):
settings = self.settings = Settings()
settings.server = server
settings.sender = sender
settings.login = login
settings.tls = tls
settings.timeout = 5 # seconds
settings.hostname = None
settings.ssl = False
settings.cipher_type = None
settings.gpg_home = None
settings.sign = True
settings.sign_passphrase = None
settings.encrypt = True
settings.x509_sign_keyfile = None
settings.x509_sign_certfile = None
settings.x509_sign_chainfile = None
settings.x509_nocerts = False
settings.x509_crypt_certfiles = None
settings.debug = False
settings.lock_keys = True
self.result = {}
self.error = None
def send(self,
to,
subject='[no subject]',
message='[no message]',
attachments=None,
cc=None,
bcc=None,
reply_to=None,
sender=None,
encoding='utf-8',
raw=False,
headers={},
from_address=None,
cipher_type=None,
sign=None,
sign_passphrase=None,
encrypt=None,
x509_sign_keyfile=None,
x509_sign_chainfile=None,
x509_sign_certfile=None,
x509_crypt_certfiles=None,
x509_nocerts=None
):
"""
Sends an email using data specified in constructor
Args:
to: list or tuple of receiver addresses; will also accept single
object
subject: subject of the email
message: email body text; depends on type of passed object:
- if 2-list or 2-tuple is passed: first element will be
source of plain text while second of html text;
- otherwise: object will be the only source of plain text
and html source will be set to None
If text or html source is:
- None: content part will be ignored,
- string: content part will be set to it,
- file-like object: content part will be fetched from it using
it's read() method
attachments: list or tuple of Mail.Attachment objects; will also
accept single object
cc: list or tuple of carbon copy receiver addresses; will also
accept single object
bcc: list or tuple of blind carbon copy receiver addresses; will
also accept single object
reply_to: address to which reply should be composed
encoding: encoding of all strings passed to this method (including
message bodies)
headers: dictionary of headers to refine the headers just before
sending mail, e.g. `{'X-Mailer' : 'web2py mailer'}`
from_address: address to appear in the 'From:' header, this is not
the envelope sender. If not specified the sender will be used
cipher_type :
gpg - need a python-pyme package and gpgme lib
x509 - smime
gpg_home : you can set a GNUPGHOME environment variable
to specify home of gnupg
sign : sign the message (True or False)
sign_passphrase : passphrase for key signing
encrypt : encrypt the message (True or False). It defaults to True.
... x509 only ...
x509_sign_keyfile : the signers private key filename or
string containing the key. (PEM format)
x509_sign_certfile: the signers certificate filename or
string containing the cert. (PEM format)
x509_sign_chainfile: sets the optional all-in-one file where you
can assemble the certificates of Certification
Authorities (CA) which form the certificate
chain of email certificate. It can be a
string containing the certs to. (PEM format)
x509_nocerts : if True then no attached certificate in mail
x509_crypt_certfiles: the certificates file or strings to encrypt
the messages with can be a file name / string or
a list of file names / strings (PEM format)
Examples:
Send plain text message to single address::
mail.send('you@example.com',
'Message subject',
'Plain text body of the message')
Send html message to single address::
mail.send('you@example.com',
'Message subject',
'<html>Plain text body of the message</html>')
Send text and html message to three addresses (two in cc)::
mail.send('you@example.com',
'Message subject',
('Plain text body', '<html>html body</html>'),
cc=['other1@example.com', 'other2@example.com'])
Send html only message with image attachment available from the
message by 'photo' content id::
mail.send('you@example.com',
'Message subject',
(None, '<html><img src="cid:photo" /></html>'),
Mail.Attachment('/path/to/photo.jpg'
content_id='photo'))
Send email with two attachments and no body text::
mail.send('you@example.com,
'Message subject',
None,
[Mail.Attachment('/path/to/fist.file'),
Mail.Attachment('/path/to/second.file')])
Returns:
True on success, False on failure.
Before return, method updates two object's fields:
- self.result: return value of smtplib.SMTP.sendmail() or GAE's
mail.send_mail() method
- self.error: Exception message or None if above was successful
"""
# We don't want to use base64 encoding for unicode mail
add_charset('utf-8', charset_QP, charset_QP, 'utf-8')
def encode_header(key):
if [c for c in key if 32 > ord(c) or ord(c) > 127]:
return Header(key.encode('utf-8'), 'utf-8')
else:
return key
# encoded or raw text
def encoded_or_raw(text):
if raw:
text = encode_header(text)
return text
sender = sender or self.settings.sender
if not isinstance(self.settings.server, str):
raise Exception('Server address not specified')
if not isinstance(sender, str):
raise Exception('Sender address not specified')
if not raw and attachments:
# Use multipart/mixed if there is attachments
payload_in = MIMEMultipart('mixed')
elif raw:
# no encoding configuration for raw messages
if not isinstance(message, basestring):
message = message.read()
if isinstance(message, unicodeT):
text = message.encode('utf-8')
elif not encoding == 'utf-8':
text = message.decode(encoding).encode('utf-8')
else:
text = message
# No charset passed to avoid transport encoding
# NOTE: some unicode encoded strings will produce
# unreadable mail contents.
payload_in = MIMEText(text)
if to:
if not isinstance(to, (list, tuple)):
to = [to]
else:
raise Exception('Target receiver address not specified')
if reply_to:
if not isinstance(reply_to, (list, tuple)):
reply_to = [reply_to]
if cc:
if not isinstance(cc, (list, tuple)):
cc = [cc]
if bcc:
if not isinstance(bcc, (list, tuple)):
bcc = [bcc]
if message is None:
text = html = None
elif isinstance(message, (list, tuple)):
text, html = message
elif message.strip().startswith('<html') and \
message.strip().endswith('</html>'):
text = self.settings.server == 'gae' and message or None
html = message
else:
text = message
html = None
if (text is not None or html is not None) and (not raw):
if text is not None:
if not isinstance(text, basestring):
text = text.read()
if isinstance(text, unicodeT):
text = text.encode('utf-8')
elif not encoding == 'utf-8':
text = text.decode(encoding).encode('utf-8')
if html is not None:
if not isinstance(html, basestring):
html = html.read()
if isinstance(html, unicodeT):
html = html.encode('utf-8')
elif not encoding == 'utf-8':
html = html.decode(encoding).encode('utf-8')
# Construct mime part only if needed
if text is not None and html:
# We have text and html we need multipart/alternative
attachment = MIMEMultipart('alternative')
attachment.attach(MIMEText(text, _charset='utf-8'))
attachment.attach(MIMEText(html, 'html', _charset='utf-8'))
elif text is not None:
attachment = MIMEText(text, _charset='utf-8')
elif html:
attachment = MIMEText(html, 'html', _charset='utf-8')
if attachments:
# If there is attachments put text and html into
# multipart/mixed
payload_in.attach(attachment)
else:
# No attachments no multipart/mixed
payload_in = attachment
if (attachments is None) or raw:
pass
elif isinstance(attachments, (list, tuple)):
for attachment in attachments:
payload_in.attach(attachment)
else:
payload_in.attach(attachments)
attachments = [attachments]
#######################################################
# CIPHER #
#######################################################
cipher_type = cipher_type or self.settings.cipher_type
sign = sign if sign is not None else self.settings.sign
sign_passphrase = sign_passphrase or self.settings.sign_passphrase
encrypt = encrypt if encrypt is not None else self.settings.encrypt
#######################################################
# GPGME #
#######################################################
if cipher_type == 'gpg':
if self.settings.gpg_home:
# Set GNUPGHOME environment variable to set home of gnupg
import os
os.environ['GNUPGHOME'] = self.settings.gpg_home
if not sign and not encrypt:
self.error = "No sign and no encrypt is set but cipher type to gpg"
return False
# need a python-pyme package and gpgme lib
from pyme import core, errors
from pyme.constants.sig import mode
############################################
# sign #
############################################
if sign:
import string
core.check_version(None)
pin = string.replace(payload_in.as_string(), '\n', '\r\n')
plain = core.Data(pin)
sig = core.Data()
c = core.Context()
c.set_armor(1)
c.signers_clear()
# search for signing key for From:
for sigkey in c.op_keylist_all(sender, 1):
if sigkey.can_sign:
c.signers_add(sigkey)
if not c.signers_enum(0):
self.error = 'No key for signing [%s]' % sender
return False
c.set_passphrase_cb(lambda x, y, z: sign_passphrase)
try:
# make a signature
c.op_sign(plain, sig, mode.DETACH)
sig.seek(0, 0)
# make it part of the email
payload = MIMEMultipart('signed',
boundary=None,
_subparts=None,
**dict(micalg="pgp-sha1",
protocol="application/pgp-signature"))
# insert the origin payload
payload.attach(payload_in)
# insert the detached signature
p = MIMEBase("application", 'pgp-signature')
p.set_payload(sig.read())
payload.attach(p)
# it's just a trick to handle the no encryption case
payload_in = payload
except errors.GPGMEError as ex:
self.error = "GPG error: %s" % ex.getstring()
return False
############################################
# encrypt #
############################################
if encrypt:
core.check_version(None)
plain = core.Data(payload_in.as_string())
cipher = core.Data()
c = core.Context()
c.set_armor(1)
# collect the public keys for encryption
recipients = []
rec = to[:]
if cc:
rec.extend(cc)
if bcc:
rec.extend(bcc)
for addr in rec:
c.op_keylist_start(addr, 0)
r = c.op_keylist_next()
if r is None:
self.error = 'No key for [%s]' % addr
return False
recipients.append(r)
try:
# make the encryption
c.op_encrypt(recipients, 1, plain, cipher)
cipher.seek(0, 0)
# make it a part of the email
payload = MIMEMultipart('encrypted',
boundary=None,
_subparts=None,
**dict(protocol="application/pgp-encrypted"))
p = MIMEBase("application", 'pgp-encrypted')
p.set_payload("Version: 1\r\n")
payload.attach(p)
p = MIMEBase("application", 'octet-stream')
p.set_payload(cipher.read())
payload.attach(p)
except errors.GPGMEError as ex:
self.error = "GPG error: %s" % ex.getstring()
return False
#######################################################
# X.509 #
#######################################################
elif cipher_type == 'x509':
if not sign and not encrypt:
self.error = "No sign and no encrypt is set but cipher type to x509"
return False
import os
x509_sign_keyfile = x509_sign_keyfile or self.settings.x509_sign_keyfile
x509_sign_chainfile = x509_sign_chainfile or self.settings.x509_sign_chainfile
x509_sign_certfile = x509_sign_certfile or self.settings.x509_sign_certfile or \
x509_sign_keyfile or self.settings.x509_sign_certfile
# crypt certfiles could be a string or a list
x509_crypt_certfiles = x509_crypt_certfiles or self.settings.x509_crypt_certfiles
x509_nocerts = x509_nocerts or\
self.settings.x509_nocerts
# need m2crypto
try:
from M2Crypto import BIO, SMIME, X509
except Exception as e:
self.error = "Can't load M2Crypto module"
return False
msg_bio = BIO.MemoryBuffer(payload_in.as_string())
s = SMIME.SMIME()
# SIGN
if sign:
# key for signing
try:
keyfile_bio = BIO.openfile(x509_sign_keyfile)\
if os.path.isfile(x509_sign_keyfile)\
else BIO.MemoryBuffer(x509_sign_keyfile)
sign_certfile_bio = BIO.openfile(x509_sign_certfile)\
if os.path.isfile(x509_sign_certfile)\
else BIO.MemoryBuffer(x509_sign_certfile)
s.load_key_bio(keyfile_bio, sign_certfile_bio,
callback=lambda x: sign_passphrase)
if x509_sign_chainfile:
sk = X509.X509_Stack()
chain = X509.load_cert(x509_sign_chainfile)\
if os.path.isfile(x509_sign_chainfile)\
else X509.load_cert_string(x509_sign_chainfile)
sk.push(chain)
s.set_x509_stack(sk)
except Exception as e:
self.error = "Something went wrong on certificate / private key loading: <%s>" % str(e)
return False
try:
if x509_nocerts:
flags = SMIME.PKCS7_NOCERTS
else:
flags = 0
if not encrypt:
flags += SMIME.PKCS7_DETACHED
p7 = s.sign(msg_bio, flags=flags)
msg_bio = BIO.MemoryBuffer(payload_in.as_string(
)) # Recreate coz sign() has consumed it.
except Exception as e:
self.error = "Something went wrong on signing: <%s> %s" % (
str(e), str(flags))
return False
# ENCRYPT
if encrypt:
try:
sk = X509.X509_Stack()
if not isinstance(x509_crypt_certfiles, (list, tuple)):
x509_crypt_certfiles = [x509_crypt_certfiles]
# make an encryption cert's stack
for crypt_certfile in x509_crypt_certfiles:
certfile = X509.load_cert(crypt_certfile)\
if os.path.isfile(crypt_certfile)\
else X509.load_cert_string(crypt_certfile)
sk.push(certfile)
s.set_x509_stack(sk)
s.set_cipher(SMIME.Cipher('des_ede3_cbc'))
tmp_bio = BIO.MemoryBuffer()
if sign:
s.write(tmp_bio, p7)
else:
tmp_bio.write(payload_in.as_string())
p7 = s.encrypt(tmp_bio)
except Exception as e:
self.error = "Something went wrong on encrypting: <%s>" % str(e)
return False
# Final stage in sign and encryption
out = BIO.MemoryBuffer()
if encrypt:
s.write(out, p7)
else:
if sign:
s.write(out, p7, msg_bio, SMIME.PKCS7_DETACHED)
else:
out.write('\r\n')
out.write(payload_in.as_string())
out.close()
st = str(out.read())
payload = message_from_string(st)
else:
# no cryptography process as usual
payload = payload_in
if from_address:
payload['From'] = encoded_or_raw(to_unicode(from_address, encoding))
else:
payload['From'] = encoded_or_raw(to_unicode(sender, encoding))
origTo = to[:]
if to:
payload['To'] = encoded_or_raw(to_unicode(', '.join(to), encoding))
if reply_to:
payload['Reply-To'] = encoded_or_raw(to_unicode(', '.join(reply_to), encoding))
if cc:
payload['Cc'] = encoded_or_raw(to_unicode(', '.join(cc), encoding))
to.extend(cc)
if bcc:
to.extend(bcc)
payload['Subject'] = encoded_or_raw(to_unicode(subject, encoding))
payload['Date'] = email.utils.formatdate()
for k, v in iteritems(headers):
payload[k] = encoded_or_raw(to_unicode(v, encoding))
result = {}
try:
if self.settings.server == 'logging':
entry = 'email not sent\n%s\nFrom: %s\nTo: %s\nSubject: %s\n\n%s\n%s\n' % \
('-' * 40, sender, ', '.join(to), subject, text or html, '-' * 40)
logger.warning(entry)
elif self.settings.server.startswith('logging:'):
entry = 'email not sent\n%s\nFrom: %s\nTo: %s\nSubject: %s\n\n%s\n%s\n' % \
('-' * 40, sender, ', '.join(to), subject, text or html, '-' * 40)
open(self.settings.server[8:], 'a').write(entry)
elif self.settings.server == 'gae':
xcc = dict()
if cc:
xcc['cc'] = cc
if bcc:
xcc['bcc'] = bcc
if reply_to:
xcc['reply_to'] = reply_to
from google.appengine.api import mail
attachments = attachments and [mail.Attachment(
a.my_filename,
a.my_payload,
content_id='<attachment-%s>' % k
) for k, a in enumerate(attachments) if not raw]
if attachments:
result = mail.send_mail(
sender=sender, to=origTo,
subject=to_unicode(subject, encoding),
body=to_unicode(text or '', encoding),
html=html,
attachments=attachments, **xcc)
elif html and (not raw):
result = mail.send_mail(
sender=sender, to=origTo,
subject=to_unicode(subject, encoding), body=to_unicode(text or '', encoding), html=html, **xcc)
else:
result = mail.send_mail(
sender=sender, to=origTo,
subject=to_unicode(subject, encoding), body=to_unicode(text or '', encoding), **xcc)
elif self.settings.server == 'aws':
import boto3
from botocore.exceptions import ClientError
client = boto3.client('ses')
try:
raw = {'Data': payload.as_string()}
response = client.send_raw_email(RawMessage=raw,
Source=sender,
Destinations=to)
return True
except ClientError as e:
# we should log this error:
# print e.response['Error']['Message']
return False
else:
smtp_args = self.settings.server.split(':')
kwargs = dict(timeout=self.settings.timeout)
func = smtplib.SMTP_SSL if self.settings.ssl else smtplib.SMTP
server = func(*smtp_args, **kwargs)
try:
if self.settings.tls and not self.settings.ssl:
server.ehlo(self.settings.hostname)
server.starttls()
server.ehlo(self.settings.hostname)
if self.settings.login:
server.login(*self.settings.login.split(':', 1))
result = server.sendmail(sender, to, payload.as_string())
finally:
# do not want to hide errors raising some exception here
try:
server.quit()
except smtplib.SMTPException:
# ensure to close any socket with SMTP server
try:
server.close()
except Exception:
pass
except Exception as e:
logger.warning('Mail.send failure:%s' % e)
self.result = result
self.error = e
return False
self.result = result
self.error = None
return True
class Recaptcha2(DIV):
    """
    Experimental:
    Creates a DIV holding the newer Recaptcha from Google (v2)
    Args:
        request : the request. If not passed, uses current request
        public_key : the public key Google gave you
        private_key : the private key Google gave you
        error_message : the error message to show if verification fails
        label : the label to use
        options (dict) : takes these parameters
            - hl
            - theme
            - type
            - tabindex
            - callback
            - expired-callback
            see https://developers.google.com/recaptcha/docs/display for docs about those
        comment : the comment
    Examples:
        Use as::
            form = FORM(Recaptcha2(public_key='...', private_key='...'))
        or::
            form = SQLFORM(...)
            form.append(Recaptcha2(public_key='...', private_key='...'))
        to protect the login page instead, use::
            from gluon.tools import Recaptcha2
            auth.settings.captcha = Recaptcha2(request, public_key='...', private_key='...')
    """

    API_URI = 'https://www.google.com/recaptcha/api.js'
    VERIFY_SERVER = 'https://www.google.com/recaptcha/api/siteverify'

    def __init__(self,
                 request=None,
                 public_key='',
                 private_key='',
                 error_message='invalid',
                 label='Verify:',
                 options=None,
                 comment='',
                 ):
        request = request or current.request
        # request.vars is used both to read the response token and, on
        # success, to blank out the 'captcha' pseudo-field
        self.request_vars = request and request.vars or current.request.vars
        self.remote_addr = request.env.remote_addr
        self.public_key = public_key
        self.private_key = private_key
        self.errors = Storage()
        self.error_message = error_message
        self.components = []
        self.attributes = {}
        self.label = label
        self.options = options or {}
        self.comment = comment

    def _validate(self):
        """Verify the submitted response token against Google's siteverify API.

        Returns True on success. On any failure, stores ``error_message``
        under ``self.errors['captcha']`` and returns False.
        """
        recaptcha_response_field = self.request_vars.pop('g-recaptcha-response', None)
        remoteip = self.remote_addr
        if not recaptcha_response_field:
            self.errors['captcha'] = self.error_message
            return False
        params = urlencode({
            'secret': self.private_key,
            'remoteip': remoteip,
            'response': recaptcha_response_field,
        }).encode('utf-8')
        request = urllib2.Request(
            url=self.VERIFY_SERVER,
            data=to_bytes(params),
            headers={'Content-type': 'application/x-www-form-urlencoded',
                     'User-agent': 'reCAPTCHA Python'})
        httpresp = urlopen(request)
        # ensure the response object is closed even if read() raises
        # (previously it leaked on a read error)
        try:
            content = httpresp.read()
        finally:
            httpresp.close()
        try:
            response_dict = json.loads(to_native(content))
        except ValueError:
            # was a bare `except:`; json.loads raises ValueError (or its
            # subclass JSONDecodeError) on malformed payloads — catching
            # only that avoids masking unrelated bugs
            self.errors['captcha'] = self.error_message
            return False
        if response_dict.get('success', False):
            self.request_vars.captcha = ''
            return True
        else:
            self.errors['captcha'] = self.error_message
            return False

    def xml(self):
        """Render the reCAPTCHA widget (script tag, placeholder DIV and a
        <noscript> iframe fallback), appending an error DIV when validation
        previously failed."""
        api_uri = self.API_URI
        hl = self.options.pop('hl', None)
        if hl:
            api_uri = self.API_URI + '?hl=%s' % hl
        public_key = self.public_key
        self.options['sitekey'] = public_key
        captcha = DIV(
            SCRIPT(_src=api_uri, _async='', _defer=''),
            DIV(_class="g-recaptcha", data=self.options),
            TAG.noscript(XML("""
<div style="width: 302px; height: 352px;">
  <div style="width: 302px; height: 352px; position: relative;">
    <div style="width: 302px; height: 352px; position: absolute;">
      <iframe src="https://www.google.com/recaptcha/api/fallback?k=%(public_key)s"
              frameborder="0" scrolling="no"
              style="width: 302px; height:352px; border-style: none;">
      </iframe>
    </div>
    <div style="width: 250px; height: 80px; position: absolute; border-style: none;
                bottom: 21px; left: 25px; margin: 0px; padding: 0px; right: 25px;">
      <textarea id="g-recaptcha-response" name="g-recaptcha-response"
                class="g-recaptcha-response"
                style="width: 250px; height: 80px; border: 1px solid #c1c1c1;
                       margin: 0px; padding: 0px; resize: none;" value="">
      </textarea>
    </div>
  </div>
</div>""" % dict(public_key=public_key))
            )
        )
        if not self.errors.captcha:
            return XML(captcha).xml()
        else:
            captcha.append(DIV(self.errors['captcha'], _class='error'))
            return XML(captcha).xml()
# this should only be used for captcha and perhaps not even for that
def addrow(form, a, b, c, style, _id, position=-1):
    """Insert a label/widget/comment row into *form* using the given formstyle.

    Args:
        form: the FORM helper; the row is inserted into ``form[0]``
        a: label content
        b: widget content
        c: comment content
        style: formstyle name selecting the markup produced
        _id: HTML id for the inserted row
        position: insertion index within the container (default: append at -1)
    """
    container = form[0]
    if style == "table2cols":
        # this style produces TWO table rows: label+comment, then the
        # widget spanning both columns
        container.insert(position, TR(TD(LABEL(a), _class='w2p_fl'),
                                      TD(c, _class='w2p_fc')))
        container.insert(position + 1, TR(TD(b, _class='w2p_fw'),
                                          _colspan=2, _id=_id))
        return
    # every other style builds a single row element, inserted once below
    if style == "divs":
        row = DIV(DIV(LABEL(a), _class='w2p_fl'),
                  DIV(b, _class='w2p_fw'),
                  DIV(c, _class='w2p_fc'),
                  _id=_id)
    elif style == "ul":
        row = LI(DIV(LABEL(a), _class='w2p_fl'),
                 DIV(b, _class='w2p_fw'),
                 DIV(c, _class='w2p_fc'),
                 _id=_id)
    elif style == "bootstrap":
        row = DIV(LABEL(a, _class='control-label'),
                  DIV(b, SPAN(c, _class='inline-help'),
                      _class='controls'),
                  _class='control-group', _id=_id)
    elif style in ("bootstrap3_inline", "bootstrap4_inline"):
        row = DIV(LABEL(a, _class='control-label col-sm-3'),
                  DIV(b, SPAN(c, _class='help-block'),
                      _class='col-sm-9'),
                  _class='form-group row', _id=_id)
    elif style in ("bootstrap3_stacked", "bootstrap4_stacked"):
        row = DIV(LABEL(a, _class='control-label'),
                  b, SPAN(c, _class='help-block'),
                  _class='form-group row', _id=_id)
    else:
        # default: a plain three-cell table row
        row = TR(TD(LABEL(a), _class='w2p_fl'),
                 TD(b, _class='w2p_fw'),
                 TD(c, _class='w2p_fc'), _id=_id)
    container.insert(position, row)
class AuthJWT(object):
    """
    Experimental!
    Args:
     - secret_key: the secret. Without salting, an attacker knowing this can impersonate
                   any user
     - algorithm : uses as they are in the JWT specs, HS256, HS384 or HS512 basically means
                   signing with HMAC with a 256, 284 or 512bit hash
     - verify_expiration : verifies the expiration checking the exp claim
     - leeway: allow n seconds of skew when checking for token expiration
     - expiration : how many seconds a token may be valid
     - allow_refresh: enable the machinery to get a refreshed token passing a not-already-expired
                      token
     - refresh_expiration_delta: to avoid continous refresh of the token
     - header_prefix : self-explanatory. "JWT" and "Bearer" seems to be the emerging standards
     - jwt_add_header: a dict holding additional mappings to the header. by default only alg and typ are filled
     - user_param: the name of the parameter holding the username when requesting a token. Can be useful, e.g, for
                   email-based authentication, with "email" as a parameter
     - pass_param: same as above, but for the password
     - realm: self-explanatory
     - salt: can be static or a function that takes the payload as an argument.
             Example:
             def mysalt(payload):
                return payload['hmac_key'].split('-')[0]
     - additional_payload: can be a dict to merge with the payload or a function that takes
                           the payload as input and returns the modified payload
                           Example:
                           def myadditional_payload(payload):
                               payload['my_name_is'] = 'bond,james bond'
                               return payload
     - before_authorization: can be a callable that takes the deserialized token (a dict) as input.
                             Gets called right after signature verification but before the actual
                             authorization takes place. It may be use to cast
                             the extra auth_user fields to their actual types.
                             You can raise with HTTP a proper error message
                             Example:
                             def mybefore_authorization(tokend):
                                 if not tokend['my_name_is'] == 'bond,james bond':
                                     raise HTTP(400, 'Invalid JWT my_name_is claim')
     - max_header_length: check max length to avoid load()ing unusually large tokens (could mean crafted, e.g. in a DDoS.)
    Basic Usage:
    in models (or the controller needing it)
        myjwt = AuthJWT(auth, secret_key='secret')
    in the controller issuing tokens
        def login_and_take_token():
            return myjwt.jwt_token_manager()
    A call then to /app/controller/login_and_take_token with username and password returns the token
    A call to /app/controller/login_and_take_token with the original token returns the refreshed token
    To protect a function with JWT
        @myjwt.allows_jwt()
        @auth.requires_login()
        def protected():
            return '%s$%s' % (request.now, auth.user_id)
    To inject optional auth info into the action with JWT
        @myjwt.allows_jwt()
        def unprotected():
            if auth.user:
                return '%s$%s' % (request.now, auth.user_id)
            return "No auth info!"
    """

    def __init__(self,
                 auth,
                 secret_key,
                 algorithm='HS256',
                 verify_expiration=True,
                 leeway=30,
                 expiration=60 * 5,
                 allow_refresh=True,
                 refresh_expiration_delta=60 * 60,
                 header_prefix='Bearer',
                 jwt_add_header=None,
                 user_param='username',
                 pass_param='password',
                 realm='Login required',
                 salt=None,
                 additional_payload=None,
                 before_authorization=None,
                 max_header_length=4 * 1024,
                 ):
        self.secret_key = secret_key
        self.auth = auth
        self.algorithm = algorithm
        # only the HMAC-SHA2 family is supported (no RSA/EC algorithms)
        if self.algorithm not in ('HS256', 'HS384', 'HS512'):
            raise NotImplementedError('Algorithm %s not allowed' % algorithm)
        self.verify_expiration = verify_expiration
        self.leeway = leeway
        self.expiration = expiration
        self.allow_refresh = allow_refresh
        self.refresh_expiration_delta = refresh_expiration_delta
        self.header_prefix = header_prefix
        self.jwt_add_header = jwt_add_header or {}
        base_header = {'alg': self.algorithm, 'typ': 'JWT'}
        for k, v in iteritems(self.jwt_add_header):
            base_header[k] = v
        # the JWT header never changes for this instance, so its base64
        # encoding is computed once and reused for every token
        self.cached_b64h = self.jwt_b64e(json.dumps(base_header))
        digestmod_mapping = {
            'HS256': hashlib.sha256,
            'HS384': hashlib.sha384,
            'HS512': hashlib.sha512
        }
        self.digestmod = digestmod_mapping[algorithm]
        self.user_param = user_param
        self.pass_param = pass_param
        self.realm = realm
        self.salt = salt
        self.additional_payload = additional_payload
        self.before_authorization = before_authorization
        self.max_header_length = max_header_length
        # last token received from a request, cached by
        # get_jwt_token_from_request()
        self.recvd_token = None

    @staticmethod
    def jwt_b64e(string):
        # URL-safe base64 without the '=' padding, as required by the JWT spec
        string = to_bytes(string)
        return base64.urlsafe_b64encode(string).strip(b'=')

    @staticmethod
    def jwt_b64d(string):
        """base64 decodes a single bytestring (and is tolerant to getting
        called with a unicode string).
        The result is also a bytestring.
        """
        string = to_bytes(string, 'ascii', 'ignore')
        # re-add the stripped '=' padding so urlsafe_b64decode accepts it
        return base64.urlsafe_b64decode(string + b'=' * (-len(string) % 4))

    def generate_token(self, payload):
        """Serialize and sign *payload*, returning the complete
        header.payload.signature token as a native string."""
        secret = to_bytes(self.secret_key)
        if self.salt:
            # salt may be a callable (derives the salt from the payload)
            # or a static value
            if callable(self.salt):
                secret = "%s$%s" % (secret, self.salt(payload))
            else:
                secret = "%s$%s" % (secret, self.salt)
            if isinstance(secret, unicodeT):
                secret = secret.encode('ascii', 'ignore')
        b64h = self.cached_b64h
        b64p = self.jwt_b64e(serializers.json(payload))
        jbody = b64h + b'.' + b64p
        # HMAC over "header.payload" with the (possibly salted) secret
        mauth = hmac.new(key=secret, msg=jbody, digestmod=self.digestmod)
        jsign = self.jwt_b64e(mauth.digest())
        return to_native(jbody + b'.' + jsign)

    def verify_signature(self, body, signature, secret):
        # constant-time comparison (compare) to avoid timing attacks
        mauth = hmac.new(key=secret, msg=body, digestmod=self.digestmod)
        return compare(self.jwt_b64e(mauth.digest()), signature)

    def load_token(self, token):
        """Parse and verify a token; returns the deserialized payload dict.

        Raises HTTP(400) on header mismatch, bad signature or (when enabled)
        expiration.
        """
        token = to_bytes(token, 'utf-8', 'strict')
        body, sig = token.rsplit(b'.', 1)
        b64h, b64b = body.split(b'.', 1)
        if b64h != self.cached_b64h:
            # header not the same
            raise HTTP(400, 'Invalid JWT Header')
        secret = self.secret_key
        tokend = serializers.loads_json(to_native(self.jwt_b64d(b64b)))
        if self.salt:
            if callable(self.salt):
                secret = "%s$%s" % (secret, self.salt(tokend))
            else:
                secret = "%s$%s" % (secret, self.salt)
        secret = to_bytes(secret, 'ascii', 'ignore')
        if not self.verify_signature(body, sig, secret):
            # signature verification failed
            raise HTTP(400, 'Token signature is invalid')
        if self.verify_expiration:
            now = time.mktime(datetime.datetime.utcnow().timetuple())
            # leeway allows for small clock skew between issuer and verifier
            if tokend['exp'] + self.leeway < now:
                raise HTTP(400, 'Token is expired')
        if callable(self.before_authorization):
            self.before_authorization(tokend)
        return tokend

    def serialize_auth_session(self, session_auth):
        """
        As bad as it sounds, as long as this is rarely used (vs using the token)
        this is the faster method, even if we ditch session in jwt_token_manager().
        We (mis)use the heavy default auth mechanism to avoid any further computation,
        while sticking to a somewhat-stable Auth API.
        """
        # TODO: Check the following comment
        # is the following safe or should we use
        # calendar.timegm(datetime.datetime.utcnow().timetuple())
        # result seem to be the same (seconds since epoch, in UTC)
        now = time.mktime(datetime.datetime.utcnow().timetuple())
        expires = now + self.expiration
        payload = dict(
            hmac_key=session_auth['hmac_key'],
            user_groups=session_auth['user_groups'],
            user=session_auth['user'].as_dict(),
            iat=now,
            exp=expires
        )
        return payload

    def refresh_token(self, orig_payload):
        """Issue a refreshed payload from a still-valid one, keeping the
        original issue time (orig_iat) to bound the total refresh window."""
        now = time.mktime(datetime.datetime.utcnow().timetuple())
        if self.verify_expiration:
            orig_exp = orig_payload['exp']
            if orig_exp + self.leeway < now:
                # token already expired, can't be used for refresh
                raise HTTP(400, 'Token already expired')
        orig_iat = orig_payload.get('orig_iat') or orig_payload['iat']
        if orig_iat + self.refresh_expiration_delta < now:
            # refreshed too long ago
            raise HTTP(400, 'Token issued too long ago')
        expires = now + self.expiration
        orig_payload.update(
            orig_iat=orig_iat,
            iat=now,
            exp=expires,
            hmac_key=web2py_uuid()
        )
        self.alter_payload(orig_payload)
        return orig_payload

    def alter_payload(self, payload):
        # merge or transform the payload via the user-supplied hook, if any
        if self.additional_payload:
            if callable(self.additional_payload):
                payload = self.additional_payload(payload)
            elif isinstance(self.additional_payload, dict):
                payload.update(self.additional_payload)
        return payload

    def jwt_token_manager(self, token_param='_token'):
        """
        The part that issues (and refreshes) tokens.
        Used in a controller, given myjwt is the istantiated class, as
            @myjwt.allow_jwt(required=False, verify_expiration=False)
            def api_auth():
                return myjwt.jwt_token_manager()
        Then, a call to /app/c/api_auth with username and password
        returns a token, while /app/c/api_auth with the current token
        issues another token (expired, but within grace time)
        """
        request = current.request
        response = current.response
        session = current.session
        # forget and unlock response
        session.forget(response)
        valid_user = None
        ret = None
        token = None
        try:
            token = self.recvd_token or self.get_jwt_token_from_request(token_param)
        except HTTP:
            # no token in the request: fall through to credential-based login
            pass
        if token:
            if not self.allow_refresh:
                raise HTTP(403, 'Refreshing token is not allowed')
            tokend = self.load_token(token)
            # verification can fail here
            refreshed = self.refresh_token(tokend)
            ret = {'token': self.generate_token(refreshed)}
        elif self.user_param in request.vars and self.pass_param in request.vars:
            username = request.vars[self.user_param]
            password = request.vars[self.pass_param]
            valid_user = self.auth.login_bare(username, password)
        else:
            # no token and no credentials: reuse an already logged-in user
            valid_user = self.auth.user
            self.auth.login_user(valid_user)
        if valid_user:
            payload = self.serialize_auth_session(session.auth)
            self.alter_payload(payload)
            ret = {'token': self.generate_token(payload)}
        elif ret is None:
            raise HTTP(401,
                       'Not Authorized - need to be logged in, to pass a token '
                       'for refresh or username and password for login',
                       **{'WWW-Authenticate': 'JWT realm="%s"' % self.realm})
        response.headers['Content-Type'] = 'application/json'
        return serializers.json(ret)

    def inject_token(self, tokend):
        """
        The real deal, not touching the db but still logging-in the user
        """
        self.auth.user = Storage(tokend['user'])
        self.auth.user_groups = tokend['user_groups']
        self.auth.hmac_key = tokend['hmac_key']

    def get_jwt_token_from_request(self, token_param='_token'):
        """
        The method that extracts and validates the token, either
        from the header or the _token var
        token_param: request.vars attribute with the token used only if the http authorization header is not present.
        """
        token = None
        token_in_header = current.request.env.http_authorization
        if token_in_header:
            # expected form: "<header_prefix> <token>", e.g. "Bearer xxx.yyy.zzz"
            parts = token_in_header.split()
            if parts[0].lower() != self.header_prefix.lower():
                raise HTTP(400, 'Invalid JWT header')
            elif len(parts) == 1:
                raise HTTP(400, 'Invalid JWT header, missing token')
            elif len(parts) > 2:
                raise HTTP(400, 'Invalid JWT header, token contains spaces')
            token = parts[1]
        else:
            token = current.request.vars.get(token_param)
            if token is None:
                raise HTTP(400, 'JWT header not found and JWT parameter {} missing in request'.format(token_param))
        self.recvd_token = token
        return token

    def allows_jwt(self, otherwise=None, required=True, verify_expiration=True, token_param='_token'):
        """
        The decorator that takes care of injecting auth info in the decorated action.
        Works w/o resorting to session.
        Args:
            required: the token is mandatory (either in request.var._token or in the HTTP hearder Authorization Bearer)
            verify_expiration: allows to bypass expiration check. Useful to manage token renewal.
            token_param: request.vars attribute with the token used only if the http authorization header is not present (default: "_token").
        """
        def decorator(action):
            def f(*args, **kwargs):
                try:
                    token = self.get_jwt_token_from_request(token_param=token_param)
                except HTTP as e:
                    if required:
                        raise e
                    token = None
                # max_header_length guards against loading abnormally large
                # (possibly crafted) tokens
                if token and len(token) < self.max_header_length:
                    # temporarily override the instance-wide expiration
                    # setting for this single load, restoring it afterwards
                    old_verify_expiration = self.verify_expiration
                    try:
                        self.verify_expiration = verify_expiration
                        tokend = self.load_token(token)
                    except ValueError:
                        raise HTTP(400, 'Invalid JWT header, wrong token format')
                    finally:
                        self.verify_expiration = old_verify_expiration
                    self.inject_token(tokend)
                return action(*args, **kwargs)
            f.__doc__ = action.__doc__
            f.__name__ = action.__name__
            f.__dict__.update(action.__dict__)
            return f
        return decorator
class Auth(AuthAPI):
default_settings = dict(AuthAPI.default_settings,
allow_basic_login=False,
allow_basic_login_only=False,
allow_delete_accounts=False,
alternate_requires_registration=False,
auth_manager_role=None,
auth_two_factor_enabled=False,
auth_two_factor_tries_left=3,
bulk_register_enabled=False,
captcha=None,
cas_maps=None,
client_side=True,
formstyle=None,
hideerror=False,
label_separator=None,
login_after_password_change=True,
login_after_registration=False,
login_captcha=None,
login_specify_error=False,
long_expiration=3600 * 30 * 24, # one month
mailer=None,
manager_actions={},
multi_login=False,
on_failed_authentication=lambda x: redirect(x),
pre_registration_div=None,
prevent_open_redirect_attacks=True,
prevent_password_reset_attacks=True,
profile_fields=None,
register_captcha=None,
register_fields=None,
register_verify_password=True,
remember_me_form=True,
reset_password_requires_verification=False,
retrieve_password_captcha=None,
retrieve_username_captcha=None,
showid=False,
table_cas=None,
table_cas_name='auth_cas',
table_event=None,
table_group=None,
table_membership=None,
table_permission=None,
table_token_name='auth_token',
table_user=None,
two_factor_authentication_group=None,
update_fields=['email'],
wiki=Settings()
)
# ## these are messages that can be customized
default_messages = dict(AuthAPI.default_messages,
access_denied='Insufficient privileges',
bulk_invite_body='You have been invited to join %(site)s, click %(link)s to complete '
'the process',
bulk_invite_subject='Invitation to join %(site)s',
delete_label='Check to delete',
email_sent='Email sent',
email_verified='Email verified',
function_disabled='Function disabled',
impersonate_log='User %(id)s is impersonating %(other_id)s',
invalid_reset_password='Invalid reset password',
invalid_two_factor_code='Incorrect code. {0} more attempt(s) remaining.',
is_empty="Cannot be empty",
label_client_ip='Client IP',
label_description='Description',
label_email='E-mail',
label_first_name='First name',
label_group_id='Group ID',
label_last_name='Last name',
label_name='Name',
label_origin='Origin',
label_password='Password',
label_record_id='Record ID',
label_registration_id='Registration identifier',
label_registration_key='Registration key',
label_remember_me="Remember me (for 30 days)",
label_reset_password_key='Reset Password key',
label_role='Role',
label_table_name='Object or table name',
label_time_stamp='Timestamp',
label_two_factor='Authentication code',
label_user_id='User ID',
label_username='Username',
login_button='Log In',
login_disabled='Login disabled by administrator',
new_password='New password',
new_password_sent='A new password was emailed to you',
old_password='Old password',
password_change_button='Change password',
password_reset_button='Request reset password',
profile_save_button='Apply changes',
register_button='Sign Up',
reset_password='Click on the link %(link)s to reset your password',
reset_password_log='User %(id)s Password reset',
reset_password_subject='Password reset',
retrieve_password='Your password is: %(password)s',
retrieve_password_log='User %(id)s Password retrieved',
retrieve_password_subject='Password retrieve',
retrieve_two_factor_code='Your temporary login code is {0}',
retrieve_two_factor_code_subject='Two-step Login Authentication Code',
retrieve_username='Your username is: %(username)s',
retrieve_username_log='User %(id)s Username retrieved',
retrieve_username_subject='Username retrieve',
submit_button='Submit',
two_factor_comment='This code was emailed to you and is required for login.',
unable_send_email='Unable to send email',
username_sent='Your username was emailed to you',
verify_email='Welcome %(username)s! Click on the link %(link)s to verify your email',
verify_email_log='User %(id)s Verification email sent',
verify_email_subject='Email verification',
verify_password='Verify Password',
verify_password_comment='please input your password again'
)
"""
Class for authentication, authorization, role based access control.
Includes:
- registration and profile
- login and logout
- username and password retrieval
- event logging
- role creation and assignment
- user defined group/role based permission
Args:
environment: is there for legacy but unused (awful)
db: has to be the database where to create tables for authentication
mailer: `Mail(...)` or None (no mailer) or True (make a mailer)
hmac_key: can be a hmac_key or hmac_key=Auth.get_or_create_key()
controller: (where is the user action?)
cas_provider: (delegate authentication to the URL, CAS2)
Authentication Example::
from gluon.contrib.utils import *
mail=Mail()
mail.settings.server='smtp.gmail.com:587'
mail.settings.sender='you@somewhere.com'
mail.settings.login='username:password'
auth=Auth(db)
auth.settings.mailer=mail
# auth.settings....=...
auth.define_tables()
def authentication():
return dict(form=auth())
Exposes:
- `http://.../{application}/{controller}/authentication/login`
- `http://.../{application}/{controller}/authentication/logout`
- `http://.../{application}/{controller}/authentication/register`
- `http://.../{application}/{controller}/authentication/verify_email`
- `http://.../{application}/{controller}/authentication/retrieve_username`
- `http://.../{application}/{controller}/authentication/retrieve_password`
- `http://.../{application}/{controller}/authentication/reset_password`
- `http://.../{application}/{controller}/authentication/profile`
- `http://.../{application}/{controller}/authentication/change_password`
On registration a group with role=new_user.id is created
and user is given membership of this group.
You can create a group with::
group_id=auth.add_group('Manager', 'can access the manage action')
auth.add_permission(group_id, 'access to manage')
Here "access to manage" is just a user defined string.
You can give access to a user::
auth.add_membership(group_id, user_id)
If user id is omitted, the logged in user is assumed
Then you can decorate any action::
@auth.requires_permission('access to manage')
def manage():
return dict()
You can restrict a permission to a specific table::
auth.add_permission(group_id, 'edit', db.sometable)
@auth.requires_permission('edit', db.sometable)
Or to a specific record::
auth.add_permission(group_id, 'edit', db.sometable, 45)
@auth.requires_permission('edit', db.sometable, 45)
If authorization is not granted calls::
auth.settings.on_failed_authorization
Other options::
auth.settings.mailer=None
auth.settings.expiration=3600 # seconds
...
### these are messages that can be customized
...
"""
@staticmethod
def get_or_create_key(filename=None, alg='sha512'):
request = current.request
if not filename:
filename = os.path.join(request.folder, 'private', 'auth.key')
if os.path.exists(filename):
key = open(filename, 'r').read().strip()
else:
key = alg + ':' + web2py_uuid()
open(filename, 'w').write(key)
return key
def url(self, f=None, args=None, vars=None, scheme=False):
if args is None:
args = []
if vars is None:
vars = {}
host = scheme and self.settings.host
return URL(c=self.settings.controller,
f=f, args=args, vars=vars, scheme=scheme, host=host)
def here(self):
return URL(args=current.request.args, vars=current.request.get_vars)
def select_host(self, host, host_names=None):
"""
checks that host is valid, i.e. in the list of glob host_names
if the host is missing, then is it selects the first entry from host_names
read more here: https://github.com/web2py/web2py/issues/1196
"""
if host:
if host_names:
for item in host_names:
if fnmatch.fnmatch(host, item):
break
else:
raise HTTP(403, "Invalid Hostname")
elif host_names:
host = host_names[0]
else:
host = 'localhost'
return host
    def __init__(self, environment=None, db=None, mailer=True,
                 hmac_key=None, controller='default', function='user',
                 cas_provider=None, signature=True, secure=False,
                 csrf_prevention=True, propagate_extension=None,
                 url_index=None, jwt=None, host_names=None):
        """
        Wire this Auth instance to the current request/session and build
        its default settings and messages.

        Args:
            environment: legacy positional slot; if it is a DAL and db is
                None it is used as the database (backward compatibility)
            db: the DAL instance holding the auth tables
            mailer: True to create a default Mail(), or a mailer instance
            hmac_key: key used for password hashing
            controller/function: where the auth action is exposed
            cas_provider: external CAS server URL, if any
            signature: when True, define the auth signature fields
            secure: when True, force HTTPS for this request
            csrf_prevention: enable CSRF protection on auth forms
            propagate_extension: extension appended to the login URL
            url_index: URL to go to after login/logout (defaults to
                controller/index)
            jwt: dict of AuthJWT options; enables JWT support when set
            host_names: glob patterns of allowed Host headers
        """
        # next two lines for backward compatibility
        if not db and environment and isinstance(environment, DAL):
            db = environment
        self.db = db
        self.environment = current
        self.csrf_prevention = csrf_prevention
        request = current.request
        session = current.session
        auth = session.auth
        self.user_groups = auth and auth.user_groups or {}
        if secure:
            request.requires_https()
        now = request.now
        # if we have auth info in the session:
        #   if not expired, use it (and refresh last_visit lazily)
        #   if expired, clear the whole session
        # else, only clear stale auth info in the session
        if auth:
            delta = datetime.timedelta(days=0, seconds=auth.expiration)
            if auth.last_visit and auth.last_visit + delta > now:
                self.user = auth.user
                # this is a trick to speed up sessions to avoid many writes
                if (now - auth.last_visit).seconds > (auth.expiration // 10):
                    auth.last_visit = now
            else:
                self.user = None
                if session.auth:
                    del session.auth
                session.renew(clear_session=True)
        else:
            self.user = None
            if session.auth:
                del session.auth
        # ## what happens after login?
        url_index = url_index or URL(controller, 'index')
        url_login = URL(controller, function, args='login',
                        extension=propagate_extension)
        # ## what happens after registration?
        settings = self.settings = Settings()
        settings.update(Auth.default_settings)
        # validate the Host header before trusting it in generated URLs
        host = self.select_host(request.env.http_host, host_names)
        settings.update(
            cas_domains=[host],
            enable_tokens=False,
            cas_provider=cas_provider,
            cas_actions=dict(login='login',
                             validate='validate',
                             servicevalidate='serviceValidate',
                             proxyvalidate='proxyValidate',
                             logout='logout'),
            cas_create_user=True,
            extra_fields={},
            actions_disabled=[],
            controller=controller,
            function=function,
            login_url=url_login,
            logged_url=URL(controller, function, args='profile'),
            download_url=URL(controller, 'download'),
            mailer=(mailer is True) and Mail() or mailer,
            on_failed_authorization=URL(controller, function, args='not_authorized'),
            login_next=url_index,
            login_onvalidation=[],
            login_onaccept=[],
            login_onfail=[],
            login_methods=[self],
            login_form=self,
            logout_next=url_index,
            logout_onlogout=None,
            register_next=url_index,
            register_onvalidation=[],
            register_onaccept=[],
            verify_email_next=url_login,
            verify_email_onaccept=[],
            profile_next=url_index,
            profile_onvalidation=[],
            profile_onaccept=[],
            retrieve_username_next=url_index,
            retrieve_password_next=url_index,
            request_reset_password_next=url_login,
            reset_password_next=url_index,
            change_password_next=url_index,
            change_password_onvalidation=[],
            change_password_onaccept=[],
            retrieve_password_onvalidation=[],
            request_reset_password_onvalidation=[],
            request_reset_password_onaccept=[],
            reset_password_onvalidation=[],
            reset_password_onaccept=[],
            hmac_key=hmac_key,
            formstyle=current.response.formstyle,
            label_separator=current.response.form_label_separator,
            two_factor_methods=[],
            two_factor_onvalidation=[],
            host=host,
        )
        settings.lock_keys = True
        # ## these are messages that can be customized
        messages = self.messages = Messages(current.T)
        messages.update(Auth.default_messages)
        messages.update(ajax_failed_authentication=
                        DIV(H4('NOT AUTHORIZED'),
                            'Please ',
                            A('login',
                              _href=self.settings.login_url +
                                    ('?_next=' + urllib_quote(current.request.env.http_web2py_component_location))
                              if current.request.env.http_web2py_component_location else ''),
                            ' to view this content.',
                            _class='not-authorized alert alert-block'))
        messages.lock_keys = True
        # for "remember me" option
        response = current.response
        if auth and auth.remember_me:
            # when user wants to be logged in for longer
            response.session_cookie_expires = auth.expiration
        if signature:
            self.define_signature()
        else:
            self.signature = None
        # JWT support is only active when a jwt options dict was supplied
        self.jwt_handler = jwt and AuthJWT(self, **jwt)
def get_vars_next(self):
next = current.request.vars._next
host = current.request.env.http_host
if isinstance(next, (list, tuple)):
next = next[0]
if next and self.settings.prevent_open_redirect_attacks:
return self.prevent_open_redirect(next, host)
return next or None
@staticmethod
def prevent_open_redirect(next, host):
# Prevent an attacker from adding an arbitrary url after the
# _next variable in the request.
if next:
parts = next.split('/')
if ':' not in parts[0] and parts[:2] != ['', '']:
return next
elif len(parts) > 2 and parts[0].endswith(':') and parts[1:3] == ['', host]:
return next
return None
def table_cas(self):
return self.db[self.settings.table_cas_name]
def table_token(self):
return self.db[self.settings.table_token_name]
def _HTTP(self, *a, **b):
"""
only used in lambda: self._HTTP(404)
"""
raise HTTP(*a, **b)
    def __call__(self):
        """
        Dispatch the request to the Auth action named in request.args(0).

        Example:
            Use as::

                def authentication():
                    return dict(form=auth())

        Redirects to the login action when no args are given; raises
        HTTP(404) for disabled or unknown actions.
        """
        request = current.request
        args = request.args
        if not args:
            redirect(self.url(args='login', vars=request.vars))
        elif args[0] in self.settings.actions_disabled:
            raise HTTP(404)
        if args[0] in ('login', 'logout', 'register', 'verify_email',
                       'retrieve_username', 'retrieve_password',
                       'reset_password', 'request_reset_password',
                       'change_password', 'profile', 'groups',
                       'impersonate', 'not_authorized', 'confirm_registration',
                       'bulk_register', 'manage_tokens', 'jwt'):
            # impersonate is the only action that takes a positional
            # argument (the user id to impersonate)
            if len(request.args) >= 2 and args[0] == 'impersonate':
                return getattr(self, args[0])(request.args[1])
            else:
                return getattr(self, args[0])()
        elif args[0] == 'cas' and not self.settings.cas_provider:
            # act as a CAS server; args(1) returns None when missing
            if args(1) == self.settings.cas_actions['login']:
                return self.cas_login(version=2)
            elif args(1) == self.settings.cas_actions['validate']:
                return self.cas_validate(version=1)
            elif args(1) == self.settings.cas_actions['servicevalidate']:
                return self.cas_validate(version=2, proxy=False)
            elif args(1) == self.settings.cas_actions['proxyvalidate']:
                return self.cas_validate(version=2, proxy=True)
            elif (args(1) == 'p3'
                  and args(2) == self.settings.cas_actions['servicevalidate']):
                return self.cas_validate(version=3, proxy=False)
            elif (args(1) == 'p3'
                  and args(2) == self.settings.cas_actions['proxyvalidate']):
                return self.cas_validate(version=3, proxy=True)
            elif args(1) == self.settings.cas_actions['logout']:
                return self.logout(next=request.vars.service or DEFAULT)
        else:
            raise HTTP(404)
    def navbar(self, prefix='Welcome', action=None,
               separators=(' [ ', ' | ', ' ] '), user_identifier=DEFAULT,
               referrer_actions=DEFAULT, mode='default'):
        """ Navbar with support for more templates
        This uses some code from the old navbar.

        Args:
            prefix: greeting shown before the user identifier
            action: base URL of the auth function; defaults to
                ``self.url(self.settings.function)``
            separators: (open, divider, close) strings for default mode
            user_identifier: template string, callable, or DEFAULT
                ('%(first_name)s') used to render the logged-in user
            referrer_actions: actions that get a '?_next=' back-link
            mode: 'default', 'asmenu', 'dropdown' (bootstrap3) or 'bare'
        """
        items = []  # Hold all menu items in a list
        self.bar = ''  # The final rendered navbar
        T = current.T
        referrer_actions = [] if not referrer_actions else referrer_actions
        if not action:
            action = self.url(self.settings.function)
        request = current.request
        if URL() == action:
            next = ''
        else:
            next = '?_next=' + urllib_quote(URL(args=request.args,
                                                vars=request.get_vars))
        # build '<action>/<function>' optionally carrying the back-link
        href = lambda function: \
            '%s/%s%s' % (action, function, next if referrer_actions is DEFAULT or function in referrer_actions else '')
        if isinstance(prefix, str):
            prefix = T(prefix)
        if prefix:
            prefix = prefix.strip() + ' '

        def Anr(*a, **b):
            # anchor with rel="nofollow" so crawlers skip auth links
            b['_rel'] = 'nofollow'
            return A(*a, **b)

        if self.user_id:  # User is logged in
            logout_next = self.settings.logout_next
            items.append({'name': T('Log Out'),
                          'href': '%s/logout?_next=%s' % (action, urllib_quote(logout_next)),
                          'icon': 'icon-off'})
            if 'profile' not in self.settings.actions_disabled:
                items.append({'name': T('Profile'), 'href': href('profile'),
                              'icon': 'icon-user'})
            if 'change_password' not in self.settings.actions_disabled:
                items.append({'name': T('Password'),
                              'href': href('change_password'),
                              'icon': 'icon-lock'})
            if user_identifier is DEFAULT:
                user_identifier = '%(first_name)s'
            if callable(user_identifier):
                user_identifier = user_identifier(self.user)
            elif ((isinstance(user_identifier, str) or
                   type(user_identifier).__name__ == 'lazyT') and
                  re.search(r'%\(.+\)s', user_identifier)):
                # template with %(field)s placeholders filled from the user row
                user_identifier = user_identifier % self.user
            if not user_identifier:
                user_identifier = ''
        else:  # User is not logged in
            items.append({'name': T('Log In'), 'href': href('login'),
                          'icon': 'icon-off'})
            if 'register' not in self.settings.actions_disabled:
                items.append({'name': T('Sign Up'), 'href': href('register'),
                              'icon': 'icon-user'})
            if 'request_reset_password' not in self.settings.actions_disabled:
                items.append({'name': T('Lost password?'),
                              'href': href('request_reset_password'),
                              'icon': 'icon-lock'})
            if self.settings.use_username and 'retrieve_username' not in self.settings.actions_disabled:
                items.append({'name': T('Forgot username?'),
                              'href': href('retrieve_username'),
                              'icon': 'icon-edit'})

        def menu():  # For inclusion in MENU; mutates self.bar and items
            self.bar = [(items[0]['name'], False, items[0]['href'], [])]
            del items[0]
            for item in items:
                self.bar[0][3].append((item['name'], False, item['href']))

        def bootstrap3():  # Default web2py scaffolding (dropdown menu)
            def rename(icon): return icon + ' ' + icon.replace('icon', 'glyphicon')
            self.bar = UL(LI(Anr(I(_class=rename('icon ' + items[0]['icon'])),
                                 ' ' + items[0]['name'],
                                 _href=items[0]['href'])), _class='dropdown-menu')
            del items[0]
            for item in items:
                self.bar.insert(-1, LI(Anr(I(_class=rename('icon ' + item['icon'])),
                                           ' ' + item['name'],
                                           _href=item['href'])))
            self.bar.insert(-1, LI('', _class='divider'))
            if self.user_id:
                self.bar = LI(Anr(prefix, user_identifier,
                                  _href='#', _class="dropdown-toggle",
                                  data={'toggle': 'dropdown'}),
                              self.bar, _class='dropdown')
            else:
                self.bar = LI(Anr(T('Log In'),
                                  _href='#', _class="dropdown-toggle",
                                  data={'toggle': 'dropdown'}), self.bar,
                              _class='dropdown')

        def bare():
            """ In order to do advanced customization we only need the
            prefix, the user_identifier and the href attribute of items

            Examples:
                Use as::

                    # in module custom_layout.py
                    from gluon import *

                    def navbar(auth_navbar):
                        bar = auth_navbar
                        user = bar["user"]

                        if not user:
                            btn_login = A(current.T("Login"),
                                          _href=bar["login"],
                                          _class="btn btn-success",
                                          _rel="nofollow")
                            btn_register = A(current.T("Sign up"),
                                             _href=bar["register"],
                                             _class="btn btn-primary",
                                             _rel="nofollow")
                            return DIV(btn_register, btn_login, _class="btn-group")
                        else:
                            toggletext = "%s back %s" % (bar["prefix"], user)
                            toggle = A(toggletext,
                                       _href="#",
                                       _class="dropdown-toggle",
                                       _rel="nofollow",
                                       **{"_data-toggle": "dropdown"})
                            li_profile = LI(A(I(_class="icon-user"), ' ',
                                              current.T("Account details"),
                                              _href=bar["profile"], _rel="nofollow"))
                            li_custom = LI(A(I(_class="icon-book"), ' ',
                                             current.T("My Agenda"),
                                             _href="#", rel="nofollow"))
                            li_logout = LI(A(I(_class="icon-off"), ' ',
                                             current.T("logout"),
                                             _href=bar["logout"], _rel="nofollow"))
                            dropdown = UL(li_profile,
                                          li_custom,
                                          LI('', _class="divider"),
                                          li_logout,
                                          _class="dropdown-menu", _role="menu")
                            return LI(toggle, dropdown, _class="dropdown")

                    # in models db.py
                    import custom_layout as custom

                    # in layout.html
                    <ul id="navbar" class="nav pull-right">
                        {{='auth' in globals() and \
                        custom.navbar(auth.navbar(mode='bare')) or ''}}</ul>
            """
            # map translated item names back to their canonical action keys
            bare = {'prefix': prefix, 'user': user_identifier if self.user_id else None}

            for i in items:
                if i['name'] == T('Log In'):
                    k = 'login'
                elif i['name'] == T('Sign Up'):
                    k = 'register'
                elif i['name'] == T('Lost password?'):
                    k = 'request_reset_password'
                elif i['name'] == T('Forgot username?'):
                    k = 'retrieve_username'
                elif i['name'] == T('Log Out'):
                    k = 'logout'
                elif i['name'] == T('Profile'):
                    k = 'profile'
                elif i['name'] == T('Password'):
                    k = 'change_password'
                bare[k] = i['href']

            self.bar = bare

        options = {'asmenu': menu,
                   'dropdown': bootstrap3,
                   'bare': bare
                   }  # Define custom modes.

        if mode in options and callable(options[mode]):
            options[mode]()
        else:
            # default mode: flat SPAN with separators
            s1, s2, s3 = separators
            if self.user_id:
                self.bar = SPAN(prefix, user_identifier, s1,
                                Anr(items[0]['name'],
                                    _href=items[0]['href']), s3,
                                _class='auth_navbar')
            else:
                self.bar = SPAN(s1, Anr(items[0]['name'],
                                        _href=items[0]['href']), s3,
                                _class='auth_navbar')
            for item in items[1:]:
                self.bar.insert(-1, s2)
                self.bar.insert(-1, Anr(item['name'], _href=item['href']))

        return self.bar
def enable_record_versioning(self,
tables,
archive_db=None,
archive_names='%(tablename)s_archive',
current_record='current_record',
current_record_label=None):
"""
Used to enable full record versioning (including auth tables)::
auth = Auth(db)
auth.define_tables(signature=True)
# define our own tables
db.define_table('mything',Field('name'),auth.signature)
auth.enable_record_versioning(tables=db)
tables can be the db (all table) or a list of tables.
only tables with modified_by and modified_on fiels (as created
by auth.signature) will have versioning. Old record versions will be
in table 'mything_archive' automatically defined.
when you enable enable_record_versioning, records are never
deleted but marked with is_active=False.
enable_record_versioning enables a common_filter for
every table that filters out records with is_active = False
Note:
If you use auth.enable_record_versioning,
do not use auth.archive or you will end up with duplicates.
auth.archive does explicitly what enable_record_versioning
does automatically.
"""
current_record_label = current_record_label or current.T(
current_record.replace('_', ' ').title())
for table in tables:
fieldnames = table.fields()
if 'id' in fieldnames and 'modified_on' in fieldnames and current_record not in fieldnames:
table._enable_record_versioning(archive_db=archive_db,
archive_name=archive_names,
current_record=current_record,
current_record_label=current_record_label)
    def define_tables(self, username=None, signature=None, enable_tokens=False,
                      migrate=None, fake_migrate=None):
        """
        To be called unless tables are defined manually

        Examples:
            Use as::

                # defines all needed tables and table files
                # 'myprefix_auth_user.table', ...
                auth.define_tables(migrate='myprefix_')

                # defines all needed tables without migration/table files
                auth.define_tables(migrate=False)
        """
        db = self.db
        # fall back to the DAL-wide migration flags when not given
        if migrate is None:
            migrate = db._migrate
        if fake_migrate is None:
            fake_migrate = db._fake_migrate
        settings = self.settings
        settings.enable_tokens = enable_tokens
        # the base class defines user/group/membership/permission/event tables
        signature_list = \
            super(Auth, self).define_tables(username, signature, migrate, fake_migrate)._table_signature_list
        now = current.request.now
        reference_table_user = 'reference %s' % settings.table_user_name
        if settings.cas_domains:
            # table holding CAS service tickets issued by this server
            if settings.table_cas_name not in db.tables:
                db.define_table(
                    settings.table_cas_name,
                    Field('user_id', reference_table_user, default=None,
                          label=self.messages.label_user_id),
                    Field('created_on', 'datetime', default=now),
                    Field('service', requires=IS_URL()),
                    Field('ticket'),
                    Field('renew', 'boolean', default=False),
                    *settings.extra_fields.get(settings.table_cas_name, []),
                    **dict(
                        migrate=self._get_migrate(
                            settings.table_cas_name, migrate),
                        fake_migrate=fake_migrate))
        if settings.enable_tokens:
            # table holding per-user API tokens
            extra_fields = settings.extra_fields.get(
                settings.table_token_name, []) + signature_list
            if settings.table_token_name not in db.tables:
                db.define_table(
                    settings.table_token_name,
                    Field('user_id', reference_table_user, default=None,
                          label=self.messages.label_user_id),
                    Field('expires_on', 'datetime', default=datetime.datetime(2999, 12, 31)),
                    Field('token', writable=False, default=web2py_uuid, unique=True),
                    *extra_fields,
                    **dict(migrate=self._get_migrate(settings.table_token_name, migrate),
                           fake_migrate=fake_migrate))
        if not db._lazy_tables:
            # resolve table objects eagerly when lazy tables are disabled
            settings.table_user = db[settings.table_user_name]
            settings.table_group = db[settings.table_group_name]
            settings.table_membership = db[settings.table_membership_name]
            settings.table_permission = db[settings.table_permission_name]
            settings.table_event = db[settings.table_event_name]
            if settings.cas_domains:
                settings.table_cas = db[settings.table_cas_name]

        if settings.cas_provider:  # THIS IS NOT LAZY
            # acting as a CAS client: local account actions are disabled
            settings.actions_disabled = \
                ['profile', 'register', 'change_password',
                 'request_reset_password', 'retrieve_username']
            from gluon.contrib.login_methods.cas_auth import CasAuth
            maps = settings.cas_maps
            if not maps:
                table_user = self.table_user()
                maps = dict((name, lambda v, n=name: v.get(n, None)) for name in
                            table_user.fields if name != 'id'
                            and table_user[name].readable)
                maps['registration_id'] = \
                    lambda v, p=settings.cas_provider: '%s/%s' % (p, v['user'])
            actions = [settings.cas_actions['login'],
                       settings.cas_actions['servicevalidate'],
                       settings.cas_actions['logout']]
            settings.login_form = CasAuth(
                casversion=2,
                urlbase=settings.cas_provider,
                actions=actions,
                maps=maps)
        return self
def get_or_create_user(self, keys, update_fields=['email'],
login=True, get=True):
"""
Used for alternate login methods:
If the user exists already then password is updated.
If the user doesn't yet exist, then they are created.
"""
table_user = self.table_user()
create_user = self.settings.cas_create_user
user = None
checks = []
# make a guess about who this user is
guess_fields = ['registration_id', 'username', 'email']
if self.settings.login_userfield:
guess_fields.append(self.settings.login_userfield)
for fieldname in guess_fields:
if fieldname in table_user.fields() and \
keys.get(fieldname, None):
checks.append(fieldname)
value = keys[fieldname]
user = table_user(**{fieldname: value})
if user:
break
if not checks:
return None
if 'registration_id' not in keys:
keys['registration_id'] = keys[checks[0]]
# if we think we found the user but registration_id does not match,
# make new user
if 'registration_id' in checks \
and user \
and user.registration_id \
and ('registration_id' not in keys or user.registration_id != str(keys['registration_id'])):
user = None # THINK MORE ABOUT THIS? DO WE TRUST OPENID PROVIDER?
if user:
if not get:
# added for register_bare to avoid overwriting users
return None
update_keys = dict(registration_id=keys['registration_id'])
for key in update_fields:
if key in keys:
update_keys[key] = keys[key]
user.update_record(**update_keys)
elif checks:
if create_user is False:
# Remove current open session a send message
self.logout(next=None, onlogout=None, log=None)
raise HTTP(403, "Forbidden. User need to be created first.")
if 'first_name' not in keys and 'first_name' in table_user.fields:
guess = keys.get('email', 'anonymous').split('@')[0]
keys['first_name'] = keys.get('username', guess)
vars = table_user._filter_fields(keys)
user_id = table_user.insert(**vars)
user = table_user[user_id]
if self.settings.create_user_groups:
group_id = self.add_group(self.settings.create_user_groups % user)
self.add_membership(group_id, user_id)
if self.settings.everybody_group_id:
self.add_membership(self.settings.everybody_group_id, user_id)
if login:
self.user = user
if self.settings.register_onaccept:
callback(self.settings.register_onaccept, Storage(vars=user))
return user
def basic(self, basic_auth_realm=False):
"""
Performs basic login.
Args:
basic_auth_realm: optional basic http authentication realm. Can take
str or unicode or function or callable or boolean.
reads current.request.env.http_authorization
and returns basic_allowed,basic_accepted,user.
if basic_auth_realm is defined is a callable it's return value
is used to set the basic authentication realm, if it's a string
its content is used instead. Otherwise basic authentication realm
is set to the application name.
If basic_auth_realm is None or False (the default) the behavior
is to skip sending any challenge.
"""
if not self.settings.allow_basic_login:
return (False, False, False)
basic = current.request.env.http_authorization
if basic_auth_realm:
if callable(basic_auth_realm):
basic_auth_realm = basic_auth_realm()
elif isinstance(basic_auth_realm, string_types):
basic_realm = to_unicode(basic_auth_realm)
elif basic_auth_realm is True:
basic_realm = '' + current.request.application
http_401 = HTTP(401, 'Not Authorized', **{'WWW-Authenticate': 'Basic realm="' + basic_realm + '"'})
if not basic or not basic[:6].lower() == 'basic ':
if basic_auth_realm:
raise http_401
return (True, False, False)
(username, sep, password) = base64.b64decode(basic[6:]).partition(b':')
is_valid_user = sep and self.login_bare(username, password)
if not is_valid_user and basic_auth_realm:
raise http_401
return (True, True, is_valid_user)
def _get_login_settings(self):
table_user = self.table_user()
userfield = self.settings.login_userfield or ('username' \
if self.settings.login_userfield or 'username' \
in table_user.fields else 'email')
passfield = self.settings.password_field
return Storage({'table_user': table_user,
'userfield': userfield,
'passfield': passfield})
    def login_bare(self, username, password):
        """
        Logins user as specified by username (or email) and password.

        Returns the user row on success, False on failure. On success the
        user is actually logged in via ``login_user``.
        """
        settings = self._get_login_settings()
        user = settings.table_user(**{settings.userfield: username})
        if user and user.get(settings.passfield, False):
            # hash the candidate password with the field validator, then
            # compare against the stored hash
            password = settings.table_user[
                settings.passfield].validate(password)[0]
            # only allow login when the registration key is empty
            # (i.e. not pending/disabled/blocked)
            if ((user.registration_key is None or
                 not user.registration_key.strip()) and
                password == user[settings.passfield]):
                self.login_user(user)
                return user
        else:
            # user not in database try other login methods
            for login_method in self.settings.login_methods:
                if login_method != self and login_method(username, password):
                    self.user = user
                    return user
        return False
    def register_bare(self, **fields):
        """
        Registers a user as specified by username (or email)
        and a raw password.

        Returns the new user row, or False when the user already exists.
        Raises ValueError when the login field is missing or empty.
        """
        settings = self._get_login_settings()
        # users can register_bare even if no password is provided,
        # in this case they will have to reset their password to login
        if fields.get(settings.passfield):
            # hash the raw password through the field validator
            fields[settings.passfield] = \
                settings.table_user[settings.passfield].validate(fields[settings.passfield], None)[0]
        if not fields.get(settings.userfield):
            raise ValueError('register_bare: userfield not provided or invalid')
        user = self.get_or_create_user(fields, login=False, get=False,
                                       update_fields=self.settings.update_fields)
        if not user:
            # get or create did not create a user (it ignores duplicate records)
            return False
        return user
    def cas_login(self,
                  next=DEFAULT,
                  onvalidation=DEFAULT,
                  onaccept=DEFAULT,
                  log=DEFAULT,
                  version=2,
                  ):
        """
        CAS server login endpoint: issue a service ticket for the
        requesting service and redirect (or link) back to it.

        Requires a valid ``service`` request variable and that the
        request host is one of settings.cas_domains; otherwise HTTP(403).
        Falls back to the regular login form when the user is not yet
        logged in (or when 'renew' is requested).
        """
        request = current.request
        response = current.response
        session = current.session
        db, table = self.db, self.table_cas()
        # remember the requesting service across the login round-trip
        session._cas_service = request.vars.service or session._cas_service
        if request.env.http_host not in self.settings.cas_domains or \
                not session._cas_service:
            raise HTTP(403, 'not authorized')

        def allow_access(interactivelogin=False):
            # reuse an existing ticket for this (service, user) pair,
            # otherwise mint a new 'ST-' service ticket
            row = table(service=session._cas_service, user_id=self.user.id)
            if row:
                ticket = row.ticket
            else:
                ticket = 'ST-' + web2py_uuid()
                table.insert(service=session._cas_service,
                             user_id=self.user.id,
                             ticket=ticket,
                             created_on=request.now,
                             renew=interactivelogin)
            service = session._cas_service
            query_sep = '&' if '?' in service else '?'
            del session._cas_service
            if 'warn' in request.vars and not interactivelogin:
                # CAS 'warn' mode: show a link plus a delayed redirect
                response.headers[
                    'refresh'] = "5;URL=%s" % service + query_sep + "ticket=" + ticket
                return A("Continue to %s" % service,
                         _href=service + query_sep + "ticket=" + ticket)
            else:
                redirect(service + query_sep + "ticket=" + ticket)

        if self.is_logged_in() and 'renew' not in request.vars:
            return allow_access()
        elif not self.is_logged_in() and 'gateway' in request.vars:
            # gateway mode: never challenge; bounce straight back
            redirect(session._cas_service)

        def cas_onaccept(form, onaccept=onaccept):
            if onaccept is not DEFAULT:
                onaccept(form)
            return allow_access(interactivelogin=True)
        return self.login(next, onvalidation, cas_onaccept, log)
    def cas_validate(self, version=2, proxy=False):
        """
        CAS server ticket-validation endpoint.

        Looks up the 'ST-' service ticket from request.vars.ticket,
        consumes it (single use) and replies with the protocol response:
        plain 'yes'/'no' for version 1, a cas:serviceResponse XML
        document for versions 2 and 3. Always raises HTTP(200) with the
        response body.
        """
        request = current.request
        db, table = self.db, self.table_cas()
        current.response.headers['Content-Type'] = 'text'
        ticket = request.vars.ticket
        renew = 'renew' in request.vars
        row = table(ticket=ticket)
        success = False
        if row:
            userfield = self.settings.login_userfield or 'username' \
                if 'username' in table.fields else 'email'
            # If ticket is a service Ticket and RENEW flag respected
            if ticket[0:3] == 'ST-' and \
                    not ((row.renew and renew) ^ renew):
                user = self.table_user()(row.user_id)
                # tickets are single-use: consume it now
                row.delete_record()
                success = True

        def build_response(body):
            # wrap the payload in a cas:serviceResponse XML envelope
            xml_body = to_native(TAG['cas:serviceResponse'](
                body, **{'_xmlns:cas': 'http://www.yale.edu/tp/cas'}).xml())
            return '<?xml version="1.0" encoding="UTF-8"?>\n' + xml_body
        if success:
            if version == 1:
                message = 'yes\n%s' % user[userfield]
            elif version == 3:
                # v3 nests the user attributes in cas:attributes
                username = user.get('username', user[userfield])
                message = build_response(
                    TAG['cas:authenticationSuccess'](
                        TAG['cas:user'](username),
                        TAG['cas:attributes'](
                            *[TAG['cas:' + field.name](user[field.name])
                              for field in self.table_user()
                              if field.readable])))
            else:  # assume version 2
                username = user.get('username', user[userfield])
                message = build_response(
                    TAG['cas:authenticationSuccess'](
                        TAG['cas:user'](username),
                        *[TAG['cas:' + field.name](user[field.name])
                          for field in self.table_user()
                          if field.readable]))
        else:
            if version == 1:
                message = 'no\n'
            elif row:
                message = build_response(TAG['cas:authenticationFailure']())
            else:
                message = build_response(
                    TAG['cas:authenticationFailure'](
                        'Ticket %s not recognized' % ticket,
                        _code='INVALID TICKET'))
        raise HTTP(200, message)
def _reset_two_factor_auth(self, session):
"""
When two-step authentication is enabled, this function is used to
clear the session after successfully completing second challenge
or when the maximum number of tries allowed has expired.
"""
session.auth_two_factor_user = None
session.auth_two_factor = None
session.auth_two_factor_enabled = False
# Set the number of attempts. It should be more than 1.
session.auth_two_factor_tries_left = self.settings.auth_two_factor_tries_left
def when_is_logged_in_bypass_next_in_url(self, next, session):
"""
This function should be use when someone want to avoid asking for user
credentials when loaded page contains "user/login?_next=NEXT_COMPONENT"
in the URL is refresh but user is already authenticated.
"""
if self.is_logged_in():
if next == session._auth_next:
del session._auth_next
redirect(next, client_side=self.settings.client_side)
def login(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a login form
"""
settings = self.settings
request = current.request
response = current.response
session = current.session
# use session for federated login
snext = self.get_vars_next()
if snext:
session._auth_next = snext
elif session._auth_next:
snext = session._auth_next
# pass
if next is DEFAULT:
# important for security
next = settings.login_next
if callable(next):
next = next()
user_next = snext
if user_next:
external = user_next.split('://')
if external[0].lower() in ['http', 'https', 'ftp']:
host_next = user_next.split('//', 1)[-1].split('/')[0]
if host_next in settings.cas_domains:
next = user_next
else:
next = user_next
# Avoid asking unnecessary user credentials when user is logged in
self.when_is_logged_in_bypass_next_in_url(next=next, session=session)
# Moved here to avoid unnecessary execution in case of redirection to next in case of logged in user
table_user = self.table_user()
if 'username' in table_user.fields or \
not settings.login_email_validate:
tmpvalidator = IS_NOT_EMPTY(error_message=self.messages.is_empty)
if not settings.username_case_sensitive:
tmpvalidator = [IS_LOWER(), tmpvalidator]
else:
tmpvalidator = IS_EMAIL(error_message=self.messages.invalid_email)
if not settings.email_case_sensitive:
tmpvalidator = [IS_LOWER(), tmpvalidator]
passfield = settings.password_field
try:
table_user[passfield].requires[-1].min_length = 0
except:
pass
if onvalidation is DEFAULT:
onvalidation = settings.login_onvalidation
if onaccept is DEFAULT:
onaccept = settings.login_onaccept
if log is DEFAULT:
log = self.messages['login_log']
onfail = settings.login_onfail
user = None # default
# Setup the default field used for the form
multi_login = False
if self.settings.login_userfield:
username = self.settings.login_userfield
else:
if 'username' in table_user.fields:
username = 'username'
else:
username = 'email'
if self.settings.multi_login:
multi_login = True
old_requires = table_user[username].requires
table_user[username].requires = tmpvalidator
# If two-factor authentication is enabled, and the maximum
# number of tries allowed is used up, reset the session to
# pre-login state with two-factor auth
if session.auth_two_factor_enabled and session.auth_two_factor_tries_left < 1:
# Exceeded maximum allowed tries for this code. Require user to enter
# username and password again.
user = None
accepted_form = False
self._reset_two_factor_auth(session)
# Redirect to the default 'next' page without logging
# in. If that page requires login, user will be redirected
# back to the main login form
redirect(next, client_side=settings.client_side)
# Before showing the default login form, check whether
# we are already on the second step of two-step authentication.
# If we are, then skip this login form and use the form for the
# second challenge instead.
# Note to devs: The code inside the if-block is unchanged from the
# previous version of this file, other than for indentation inside
# to put it inside the if-block
if session.auth_two_factor_user is None:
if settings.remember_me_form:
extra_fields = [
Field('remember_me', 'boolean', default=False,
label=self.messages.label_remember_me)]
else:
extra_fields = []
# do we use our own login form, or from a central source?
if settings.login_form == self:
form = SQLFORM(table_user,
fields=[username, passfield],
hidden=dict(_next=next),
showid=settings.showid,
submit_button=self.messages.login_button,
delete_label=self.messages.delete_label,
formstyle=settings.formstyle,
separator=settings.label_separator,
extra_fields=extra_fields,
)
captcha = settings.login_captcha or \
(settings.login_captcha is not False and settings.captcha)
if captcha:
addrow(form, captcha.label, captcha, captcha.comment,
settings.formstyle, 'captcha__row')
accepted_form = False
specific_error = self.messages.invalid_user
if form.accepts(request, session if self.csrf_prevention else None,
formname='login', dbio=False,
onvalidation=onvalidation,
hideerror=settings.hideerror):
accepted_form = True
# check for username in db
entered_username = form.vars[username]
if multi_login and '@' in entered_username:
# if '@' in username check for email, not username
user = table_user(email=entered_username)
else:
user = table_user(**{username: entered_username})
if user:
# user in db, check if registration pending or disabled
specific_error = self.messages.invalid_password
temp_user = user
if (temp_user.registration_key or '').startswith('pending'):
response.flash = self.messages.registration_pending
return form
elif temp_user.registration_key in ('disabled', 'blocked'):
response.flash = self.messages.login_disabled
return form
elif (temp_user.registration_key is not None and temp_user.registration_key.strip()):
response.flash = \
self.messages.registration_verifying
return form
# try alternate logins 1st as these have the
# current version of the password
user = None
for login_method in settings.login_methods:
if login_method != self and \
login_method(request.vars[username],
request.vars[passfield]):
if self not in settings.login_methods:
# do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(
form.vars, settings.update_fields)
break
if not user:
# alternates have failed, maybe because service inaccessible
if settings.login_methods[0] == self:
# try logging in locally using cached credentials
if form.vars.get(passfield, '') == temp_user[passfield]:
# success
user = temp_user
else:
# user not in db
if not settings.alternate_requires_registration:
# we're allowed to auto-register users from external systems
for login_method in settings.login_methods:
if login_method != self and \
login_method(request.vars[username],
request.vars[passfield]):
if self not in settings.login_methods:
# do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(
form.vars, settings.update_fields)
break
if not user:
self.log_event(self.messages['login_failed_log'],
request.post_vars)
# invalid login
session.flash = specific_error if self.settings.login_specify_error else self.messages.invalid_login
callback(onfail, None)
redirect(self.url(args=request.args, vars=request.get_vars),client_side=settings.client_side)
else: # use a central authentication server
cas = settings.login_form
cas_user = cas.get_user()
if cas_user:
cas_user[passfield] = None
user = self.get_or_create_user(
table_user._filter_fields(cas_user),
settings.update_fields)
elif hasattr(cas, 'login_form'):
return cas.login_form()
else:
# we need to pass through login again before going on
next = self.url(settings.function, args='login')
redirect(cas.login_url(next),
client_side=settings.client_side)
# Extra login logic for two-factor authentication
#################################################
# If the 'user' variable has a value, this means that the first
# authentication step was successful (i.e. user provided correct
# username and password at the first challenge).
# Check if this user is signed up for two-factor authentication
# If auth.settings.auth_two_factor_enabled it will enable two factor
# for all the app. Another way to anble two factor is that the user
# must be part of a group that is called auth.settings.two_factor_authentication_group
if user and self.settings.auth_two_factor_enabled is True:
session.auth_two_factor_enabled = True
elif user and self.settings.two_factor_authentication_group:
role = self.settings.two_factor_authentication_group
session.auth_two_factor_enabled = self.has_membership(user_id=user.id, role=role)
# challenge
if session.auth_two_factor_enabled:
form = SQLFORM.factory(
Field('authentication_code',
label=self.messages.label_two_factor,
required=True,
comment=self.messages.two_factor_comment),
hidden=dict(_next=next),
formstyle=settings.formstyle,
separator=settings.label_separator
)
# accepted_form is used by some default web2py code later in the
# function that handles running specified functions before redirect
# Set it to False until the challenge form is accepted.
accepted_form = False
# Handle the case when a user has submitted the login/password
# form successfully, and the password has been validated, but
# the two-factor form has not been displayed or validated yet.
if session.auth_two_factor_user is None and user is not None:
session.auth_two_factor_user = user # store the validated user and associate with this session
session.auth_two_factor = random.randint(100000, 999999)
session.auth_two_factor_tries_left = self.settings.auth_two_factor_tries_left
# Set the way we generate the code or we send the code. For example using SMS...
two_factor_methods = self.settings.two_factor_methods
if not two_factor_methods:
# TODO: Add some error checking to handle cases where email cannot be sent
self.settings.mailer.send(
to=user.email,
subject=self.messages.retrieve_two_factor_code_subject,
message=self.messages.retrieve_two_factor_code.format(session.auth_two_factor))
else:
# Check for all method. It is possible to have multiples
for two_factor_method in two_factor_methods:
try:
# By default we use session.auth_two_factor generated before.
session.auth_two_factor = two_factor_method(user, session.auth_two_factor)
except:
pass
else:
break
if form.accepts(request, session if self.csrf_prevention else None,
formname='login', dbio=False,
onvalidation=onvalidation,
hideerror=settings.hideerror):
accepted_form = True
"""
The lists is executed after form validation for each of the corresponding action.
For example, in your model:
In your models copy and paste:
# Before define tables, we add some extra field to auth_user
auth.settings.extra_fields['auth_user'] = [
Field('motp_secret', 'password', length=512, default='', label='MOTP Secret'),
Field('motp_pin', 'string', length=128, default='', label='MOTP PIN')]
OFFSET = 60 # Be sure is the same in your OTP Client
# Set session.auth_two_factor to None. Because the code is generated by external app.
# This will avoid to use the default setting and send a code by email.
def _set_two_factor(user, auth_two_factor):
return None
def verify_otp(user, otp):
import time
from hashlib import md5
epoch_time = int(time.time())
time_start = int(str(epoch_time - OFFSET)[:-1])
time_end = int(str(epoch_time + OFFSET)[:-1])
for t in range(time_start - 1, time_end + 1):
to_hash = str(t) + user.motp_secret + user.motp_pin
hash = md5(to_hash).hexdigest()[:6]
if otp == hash:
return hash
auth.settings.auth_two_factor_enabled = True
auth.messages.two_factor_comment = "Verify your OTP Client for the code."
auth.settings.two_factor_methods = [lambda user,
auth_two_factor: _set_two_factor(user, auth_two_factor)]
auth.settings.two_factor_onvalidation = [lambda user, otp: verify_otp(user, otp)]
"""
if self.settings.two_factor_onvalidation:
for two_factor_onvalidation in self.settings.two_factor_onvalidation:
try:
session.auth_two_factor = \
two_factor_onvalidation(session.auth_two_factor_user, form.vars['authentication_code'])
except:
pass
else:
break
if form.vars['authentication_code'] == str(session.auth_two_factor):
# Handle the case when the two-factor form has been successfully validated
# and the user was previously stored (the current user should be None because
# in this case, the previous username/password login form should not be displayed.
# This will allow the code after the 2-factor authentication block to proceed as
# normal.
if user is None or user == session.auth_two_factor_user:
user = session.auth_two_factor_user
# For security, because the username stored in the
# session somehow does not match the just validated
# user. Should not be possible without session stealing
# which is hard with SSL.
elif user != session.auth_two_factor_user:
user = None
# Either way, the user and code associated with this session should
# be removed. This handles cases where the session login may have
# expired but browser window is open, so the old session key and
# session usernamem will still exist
self._reset_two_factor_auth(session)
else:
session.auth_two_factor_tries_left -= 1
# If the number of retries are higher than auth_two_factor_tries_left
# Require user to enter username and password again.
if session.auth_two_factor_enabled and session.auth_two_factor_tries_left < 1:
# Exceeded maximum allowed tries for this code. Require user to enter
# username and password again.
user = None
accepted_form = False
self._reset_two_factor_auth(session)
# Redirect to the default 'next' page without logging
# in. If that page requires login, user will be redirected
# back to the main login form
redirect(next, client_side=settings.client_side)
response.flash = self.messages.invalid_two_factor_code.format(session.auth_two_factor_tries_left)
return form
else:
return form
# End login logic for two-factor authentication
# process authenticated users
if user:
user = Row(table_user._filter_fields(user, id=True))
# process authenticated users
# user wants to be logged in for longer
self.login_user(user)
session.auth.expiration = \
request.post_vars.remember_me and \
settings.long_expiration or \
settings.expiration
session.auth.remember_me = 'remember_me' in request.post_vars
self.log_event(log, user)
session.flash = self.messages.logged_in
# how to continue
if settings.login_form == self:
if accepted_form:
callback(onaccept, form)
if next == session._auth_next:
session._auth_next = None
next = replace_id(next, form)
redirect(next, client_side=settings.client_side)
table_user[username].requires = old_requires
return form
elif user:
callback(onaccept, None)
if next == session._auth_next:
del session._auth_next
redirect(next, client_side=settings.client_side)
    def logout(self, next=DEFAULT, onlogout=DEFAULT, log=DEFAULT):
        """
        Logs the user out and redirects.

        Args:
            next: URL to redirect to afterwards; defaults to
                ``settings.logout_next``. Pass ``None`` to suppress the
                redirect entirely (see ``logout_bare``).
            onlogout: callback invoked with the current user before the
                session is cleared; defaults to ``settings.logout_onlogout``
            log: event-log message template; defaults to
                ``messages['logout_log']``. Pass ``None`` to skip logging.
        """
        # Clear out 2-step authentication information if user logs
        # out. This information is also cleared on successful login.
        self._reset_two_factor_auth(current.session)
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.logout_next
        if onlogout is DEFAULT:
            onlogout = self.settings.logout_onlogout
        if onlogout:
            onlogout(self.user)
        if log is DEFAULT:
            log = self.messages['logout_log']
        if self.user:
            self.log_event(log, self.user)
        # when an external (e.g. CAS) login form is in use, let it rewrite
        # the redirect target so the central server is logged out too
        if self.settings.login_form != self:
            cas = self.settings.login_form
            cas_user = cas.get_user()
            if cas_user:
                next = cas.logout_url(next)
        # drop the authenticated session state
        current.session.auth = None
        self.user = None
        if self.settings.renew_session_onlogout:
            current.session.renew(clear_session=not self.settings.keep_session_onlogout)
        current.session.flash = self.messages.logged_out
        if next is not None:
            redirect(next)
def logout_bare(self):
self.logout(next=None, onlogout=None, log=None)
    def register(self,
                 next=DEFAULT,
                 onvalidation=DEFAULT,
                 onaccept=DEFAULT,
                 log=DEFAULT,
                 ):
        """
        Returns a registration form.

        Builds an SQLFORM over the user table, enforces uniqueness of the
        login field, optionally adds a password-confirmation field and a
        captcha, and on acceptance creates the user, its groups/memberships,
        and sends the verification email when required.

        Args:
            next: URL to redirect to after successful registration
            onvalidation: form validation callback(s)
            onaccept: callback(s) run after the user record is created
            log: event-log message template
        """
        table_user = self.table_user()
        request = current.request
        response = current.response
        session = current.session
        # an already-authenticated user has nothing to register
        if self.is_logged_in():
            redirect(self.settings.logged_url,
                     client_side=self.settings.client_side)
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.register_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.register_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.register_onaccept
        if log is DEFAULT:
            log = self.messages['register_log']
        table_user = self.table_user()
        # the login identifier: explicit setting wins, then 'username'
        # when the table has such a field, otherwise 'email'
        if self.settings.login_userfield:
            username = self.settings.login_userfield
        elif 'username' in table_user.fields:
            username = 'username'
        else:
            username = 'email'
        # Ensure the username field is unique.
        unique_validator = IS_NOT_IN_DB(self.db, table_user[username])
        if not table_user[username].requires:
            table_user[username].requires = unique_validator
        elif isinstance(table_user[username].requires, (list, tuple)):
            # only append when no IS_NOT_IN_DB validator is present yet
            if not any([isinstance(validator, IS_NOT_IN_DB) for validator in
                        table_user[username].requires]):
                if isinstance(table_user[username].requires, list):
                    table_user[username].requires.append(unique_validator)
                else:
                    table_user[username].requires += (unique_validator, )
        elif not isinstance(table_user[username].requires, IS_NOT_IN_DB):
            table_user[username].requires = [table_user[username].requires,
                                             unique_validator]
        passfield = self.settings.password_field
        formstyle = self.settings.formstyle
        try: # Make sure we have our original minimum length as other auth forms change it
            table_user[passfield].requires[-1].min_length = self.settings.password_min_length
        except:
            pass
        if self.settings.register_verify_password:
            if self.settings.register_fields is None:
                self.settings.register_fields = [f.name for f in table_user if f.writable and not f.compute]
                k = self.settings.register_fields.index(passfield)
                self.settings.register_fields.insert(k + 1, "password_two")
            # second password field must equal the submitted password
            extra_fields = [
                Field("password_two", "password",
                      requires=IS_EQUAL_TO(request.post_vars.get(passfield, None),
                                           error_message=self.messages.mismatched_password),
                      label=current.T("Confirm Password"))]
        else:
            extra_fields = []
        form = SQLFORM(table_user,
                       fields=self.settings.register_fields,
                       hidden=dict(_next=next),
                       showid=self.settings.showid,
                       submit_button=self.messages.register_button,
                       delete_label=self.messages.delete_label,
                       formstyle=formstyle,
                       separator=self.settings.label_separator,
                       extra_fields=extra_fields
                       )
        captcha = self.settings.register_captcha or self.settings.captcha
        if captcha:
            addrow(form, captcha.label, captcha,
                   captcha.comment, self.settings.formstyle, 'captcha__row')
        # Add a message if specified
        if self.settings.pre_registration_div:
            addrow(form, '',
                   DIV(_id="pre-reg", *self.settings.pre_registration_div),
                   '', formstyle, '')
        # registration_key doubles as the email-verification token; a
        # 'pending-' prefix marks accounts awaiting admin approval
        key = web2py_uuid()
        if self.settings.registration_requires_approval:
            key = 'pending-' + key
        table_user.registration_key.default = key
        if form.accepts(request, session if self.csrf_prevention else None,
                        formname='register',
                        onvalidation=onvalidation,
                        hideerror=self.settings.hideerror):
            description = self.messages.group_description % form.vars
            if self.settings.create_user_groups:
                group_id = self.add_group(self.settings.create_user_groups % form.vars, description)
                self.add_membership(group_id, form.vars.id)
            if self.settings.everybody_group_id:
                self.add_membership(self.settings.everybody_group_id, form.vars.id)
            if self.settings.registration_requires_verification:
                link = self.url(
                    self.settings.function, args=('verify_email', key), scheme=True)
                d = dict(form.vars)
                d.update(dict(key=key, link=link, username=form.vars[username],
                              firstname=form.vars['firstname'],
                              lastname=form.vars['lastname']))
                # roll back the new user record when the email cannot be sent,
                # so the registration can be retried
                if not (self.settings.mailer and self.settings.mailer.send(
                        to=form.vars.email,
                        subject=self.messages.verify_email_subject,
                        message=self.messages.verify_email % d)):
                    self.db.rollback()
                    response.flash = self.messages.unable_send_email
                    return form
                session.flash = self.messages.email_sent
            if self.settings.registration_requires_approval and \
               not self.settings.registration_requires_verification:
                table_user[form.vars.id] = dict(registration_key='pending')
                session.flash = self.messages.registration_pending
            elif (not self.settings.registration_requires_verification or self.settings.login_after_registration):
                if not self.settings.registration_requires_verification:
                    table_user[form.vars.id] = dict(registration_key='')
                session.flash = self.messages.registration_successful
                user = table_user(**{username: form.vars[username]})
                self.login_user(user)
                session.flash = self.messages.logged_in
            self.log_event(log, form.vars)
            callback(onaccept, form)
            if not next:
                next = self.url(args=request.args)
            else:
                next = replace_id(next, form)
            redirect(next, client_side=self.settings.client_side)
        return form
    def verify_email(self,
                     next=DEFAULT,
                     onaccept=DEFAULT,
                     log=DEFAULT,
                     ):
        """
        Action used to verify the registration email.

        The verification key is taken from the last URL argument and looked
        up against ``registration_key``; on success the key is either cleared
        or set to 'pending' (when admin approval is also required).

        Args:
            next: URL to redirect to afterwards
            onaccept: callback(s) run with the verified user record
            log: event-log message template
        """
        key = getarg(-1)
        table_user = self.table_user()
        user = table_user(registration_key=key)
        if not user:
            redirect(self.settings.login_url)
        if self.settings.registration_requires_approval:
            user.update_record(registration_key='pending')
            current.session.flash = self.messages.registration_pending
        else:
            user.update_record(registration_key='')
            current.session.flash = self.messages.email_verified
        # make sure session has same user.registration_key as db record
        if current.session.auth and current.session.auth.user:
            current.session.auth.user.registration_key = user.registration_key
        if log is DEFAULT:
            log = self.messages['verify_email_log']
        if next is DEFAULT:
            next = self.settings.verify_email_next
        if onaccept is DEFAULT:
            onaccept = self.settings.verify_email_onaccept
        self.log_event(log, user)
        callback(onaccept, user)
        redirect(next)
    def retrieve_username(self,
                          next=DEFAULT,
                          onvalidation=DEFAULT,
                          onaccept=DEFAULT,
                          log=DEFAULT,
                          ):
        """
        Returns a form to retrieve the user username
        (only if there is a username field).

        The user enters an email address and every username registered under
        it is emailed back. Raises HTTP 404 when the user table has no
        'username' field; returns '' when no mailer is configured.

        Args:
            next: URL to redirect to after the form is accepted
            onvalidation: form validation callback(s)
            onaccept: callback(s) run after the email is sent
            log: event-log message template (logged once per matching user)
        """
        table_user = self.table_user()
        if 'username' not in table_user.fields:
            raise HTTP(404)
        request = current.request
        response = current.response
        session = current.session
        captcha = self.settings.retrieve_username_captcha or \
                (self.settings.retrieve_username_captcha is not False and self.settings.captcha)
        if not self.settings.mailer:
            response.flash = self.messages.function_disabled
            return ''
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.retrieve_username_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.retrieve_username_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.retrieve_username_onaccept
        if log is DEFAULT:
            log = self.messages['retrieve_username_log']
        # temporarily require the email to exist in the db; restored below
        old_requires = table_user.email.requires
        table_user.email.requires = [IS_IN_DB(self.db, table_user.email,
                                              error_message=self.messages.invalid_email)]
        form = SQLFORM(table_user,
                       fields=['email'],
                       hidden=dict(_next=next),
                       showid=self.settings.showid,
                       submit_button=self.messages.submit_button,
                       delete_label=self.messages.delete_label,
                       formstyle=self.settings.formstyle,
                       separator=self.settings.label_separator
                       )
        if captcha:
            addrow(form, captcha.label, captcha,
                   captcha.comment, self.settings.formstyle, 'captcha__row')
        if form.accepts(request, session if self.csrf_prevention else None,
                        formname='retrieve_username', dbio=False,
                        onvalidation=onvalidation, hideerror=self.settings.hideerror):
            users = table_user._db(table_user.email == form.vars.email).select()
            if not users:
                current.session.flash = \
                    self.messages.invalid_email
                redirect(self.url(args=request.args))
            # one email may own several accounts: send them all
            username = ', '.join(u.username for u in users)
            self.settings.mailer.send(to=form.vars.email,
                                      subject=self.messages.retrieve_username_subject,
                                      message=self.messages.retrieve_username % dict(username=username))
            session.flash = self.messages.email_sent
            for user in users:
                self.log_event(log, user)
            callback(onaccept, form)
            if not next:
                next = self.url(args=request.args)
            else:
                next = replace_id(next, form)
            redirect(next)
        table_user.email.requires = old_requires
        return form
def random_password(self):
import string
import random
password = ''
specials = r'!#$*'
for i in range(0, 3):
password += random.choice(string.ascii_lowercase)
password += random.choice(string.ascii_uppercase)
password += random.choice(string.digits)
password += random.choice(specials)
return ''.join(random.sample(password, len(password)))
    def reset_password_deprecated(self,
                                  next=DEFAULT,
                                  onvalidation=DEFAULT,
                                  onaccept=DEFAULT,
                                  log=DEFAULT,
                                  ):
        """
        Returns a form to reset the user password (deprecated).

        The user enters an email address; a freshly generated random password
        is stored on the account and emailed back in clear text. Prefer
        ``request_reset_password`` (link-based reset) over this flow.

        Args:
            next: URL to redirect to after the form is accepted
            onvalidation: form validation callback(s)
            onaccept: callback(s) run after the password is emailed
            log: event-log message template
        """
        table_user = self.table_user()
        request = current.request
        response = current.response
        session = current.session
        if not self.settings.mailer:
            response.flash = self.messages.function_disabled
            return ''
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.retrieve_password_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.retrieve_password_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.retrieve_password_onaccept
        if log is DEFAULT:
            log = self.messages['retrieve_password_log']
        # temporarily require the email to exist in the db; restored below
        old_requires = table_user.email.requires
        table_user.email.requires = [IS_IN_DB(self.db, table_user.email,
                                              error_message=self.messages.invalid_email)]
        form = SQLFORM(table_user,
                       fields=['email'],
                       hidden=dict(_next=next),
                       showid=self.settings.showid,
                       submit_button=self.messages.submit_button,
                       delete_label=self.messages.delete_label,
                       formstyle=self.settings.formstyle,
                       separator=self.settings.label_separator
                       )
        if form.accepts(request, session if self.csrf_prevention else None,
                        formname='retrieve_password', dbio=False,
                        onvalidation=onvalidation, hideerror=self.settings.hideerror):
            user = table_user(email=form.vars.email)
            if not user:
                current.session.flash = \
                    self.messages.invalid_email
                redirect(self.url(args=request.args))
            # accounts pending verification/approval or disabled cannot reset
            key = user.registration_key
            if key in ('pending', 'disabled', 'blocked') or (key or '').startswith('pending'):
                current.session.flash = \
                    self.messages.registration_pending
                redirect(self.url(args=request.args))
            password = self.random_password()
            passfield = self.settings.password_field
            # run the new password through the field validators (e.g. CRYPT)
            # before storing; also clears any outstanding registration key
            d = {
                passfield: str(table_user[passfield].validate(password)[0]),
                'registration_key': ''
            }
            user.update_record(**d)
            if self.settings.mailer and \
               self.settings.mailer.send(to=form.vars.email,
                                         subject=self.messages.retrieve_password_subject,
                                         message=self.messages.retrieve_password % dict(password=password)):
                session.flash = self.messages.email_sent
            else:
                session.flash = self.messages.unable_send_email
            self.log_event(log, user)
            callback(onaccept, form)
            if not next:
                next = self.url(args=request.args)
            else:
                next = replace_id(next, form)
            redirect(next)
        table_user.email.requires = old_requires
        return form
    def confirm_registration(self,
                             next=DEFAULT,
                             onvalidation=DEFAULT,
                             onaccept=DEFAULT,
                             log=DEFAULT,
                             ):
        """
        Returns a form to confirm user registration.

        Reached through the link emailed by ``email_registration``. The key
        embeds its creation timestamp ('<epoch>-<uuid>') and expires after
        24 hours. On success the user sets a password and name, and the
        registration/reset keys are cleared.

        Args:
            next: URL to redirect to afterwards
            onvalidation: form validation callback(s) (currently resolved by
                the caller defaults only)
            onaccept: unused here; kept for signature parity with the other
                auth actions
            log: unused here; kept for signature parity
        """
        table_user = self.table_user()
        request = current.request
        # response = current.response
        session = current.session
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.reset_password_next
        if self.settings.prevent_password_reset_attacks:
            # hide the key from the URL: stash it in the session and redirect
            # to a clean URL, so the key cannot leak via Referer headers
            key = request.vars.key
            if not key and len(request.args) > 1:
                key = request.args[-1]
            if key:
                session._reset_password_key = key
                if next:
                    redirect_vars = {'_next': next}
                else:
                    redirect_vars = {}
                redirect(self.url(args='confirm_registration',
                                  vars=redirect_vars))
            else:
                key = session._reset_password_key
        else:
            key = request.vars.key or getarg(-1)
        try:
            # key format is '<unix-timestamp>-<uuid>'; reject after 24h
            t0 = int(key.split('-')[0])
            if time.time() - t0 > 60 * 60 * 24:
                raise Exception
            user = table_user(reset_password_key=key)
            if not user:
                raise Exception
        except Exception as e:
            session.flash = self.messages.invalid_reset_password
            redirect(next, client_side=self.settings.client_side)
        passfield = self.settings.password_field
        form = SQLFORM.factory(
            Field('first_name',
                  label='First Name',
                  required=True),
            Field('last_name',
                  label='Last Name',
                  required=True),
            Field('new_password', 'password',
                  label=self.messages.new_password,
                  requires=self.table_user()[passfield].requires),
            Field('new_password2', 'password',
                  label=self.messages.verify_password,
                  requires=[IS_EXPR('value==%s' % repr(request.vars.new_password),
                                    self.messages.mismatched_password)]),
            submit_button='Confirm Registration',
            hidden=dict(_next=next),
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        if form.process().accepted:
            # clearing both keys activates the account
            user.update_record(
                **{passfield: str(form.vars.new_password),
                   'first_name': str(form.vars.first_name),
                   'last_name': str(form.vars.last_name),
                   'registration_key': '',
                   'reset_password_key': ''})
            session.flash = self.messages.password_changed
            if self.settings.login_after_password_change:
                self.login_user(user)
            redirect(next, client_side=self.settings.client_side)
        return form
    def email_registration(self, subject, body, user):
        """
        Send an email invitation to a user informing they have been
        registered with the application.

        A timestamped reset key ('<epoch>-<uuid>') is generated and embedded
        in a 'confirm_registration' link; the key is stored on the user
        record only after the email is sent successfully.

        Args:
            subject: subject template, %-formatted with the user fields
                plus ``key``, ``link`` and ``site``
            body: message body template, same substitutions as ``subject``
            user: the user record to invite

        Returns:
            bool: True when the email was sent, False otherwise
        """
        reset_password_key = str(int(time.time())) + '-' + web2py_uuid()
        link = self.url(self.settings.function,
                        args=('confirm_registration',), vars={'key': reset_password_key},
                        scheme=True)
        d = dict(user)
        d.update(dict(key=reset_password_key, link=link, site=current.request.env.http_host))
        if self.settings.mailer and self.settings.mailer.send(
                to=user.email,
                subject=subject % d,
                message=body % d):
            user.update_record(reset_password_key=reset_password_key)
            return True
        return False
def bulk_register(self, max_emails=100):
"""
Creates a form for ther user to send invites to other users to join
"""
if not self.user:
redirect(self.settings.login_url)
if not self.settings.bulk_register_enabled:
return HTTP(404)
form = SQLFORM.factory(
Field('subject', 'string', default=self.messages.bulk_invite_subject, requires=IS_NOT_EMPTY()),
Field('emails', 'text', requires=IS_NOT_EMPTY()),
Field('message', 'text', default=self.messages.bulk_invite_body, requires=IS_NOT_EMPTY()),
formstyle=self.settings.formstyle)
if form.process().accepted:
emails = re.compile('[^\s\'"@<>,;:]+\@[^\s\'"@<>,;:]+').findall(form.vars.emails)
# send the invitations
emails_sent = []
emails_fail = []
emails_exist = []
for email in emails[:max_emails]:
if self.table_user()(email=email):
emails_exist.append(email)
else:
user = self.register_bare(email=email)
if self.email_registration(form.vars.subject, form.vars.message, user):
emails_sent.append(email)
else:
emails_fail.append(email)
emails_fail += emails[max_emails:]
form = DIV(H4('Emails sent'), UL(*[A(x, _href='mailto:' + x) for x in emails_sent]),
H4('Emails failed'), UL(*[A(x, _href='mailto:' + x) for x in emails_fail]),
H4('Emails existing'), UL(*[A(x, _href='mailto:' + x) for x in emails_exist]))
return form
def manage_tokens(self):
if not self.user:
redirect(self.settings.login_url)
table_token = self.table_token()
table_token.user_id.writable = False
table_token.user_id.default = self.user.id
table_token.token.writable = False
if current.request.args(1) == 'new':
table_token.token.readable = False
form = SQLFORM.grid(table_token, args=['manage_tokens'])
return form
    def reset_password(self,
                       next=DEFAULT,
                       onvalidation=DEFAULT,
                       onaccept=DEFAULT,
                       log=DEFAULT,
                       ):
        """
        Returns a form to reset the user password.

        Reached through the link emailed by ``email_reset_password``. The
        key embeds its creation timestamp ('<epoch>-<uuid>') and expires
        after 24 hours.

        Args:
            next: URL to redirect to afterwards
            onvalidation: form validation callback(s)
            onaccept: callback(s) run after the password is changed
            log: unused here; kept for signature parity with the other
                auth actions
        """
        table_user = self.table_user()
        request = current.request
        # response = current.response
        session = current.session
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.reset_password_next
        if self.settings.prevent_password_reset_attacks:
            # hide the key from the URL: stash it in the session and redirect
            # to a clean URL, so the key cannot leak via Referer headers
            key = request.vars.key
            if key:
                session._reset_password_key = key
                redirect(self.url(args='reset_password'))
            else:
                key = session._reset_password_key
        else:
            key = request.vars.key
        try:
            # key format is '<unix-timestamp>-<uuid>'; reject after 24h
            t0 = int(key.split('-')[0])
            if time.time() - t0 > 60 * 60 * 24:
                raise Exception
            user = table_user(reset_password_key=key)
            if not user:
                raise Exception
        except Exception:
            session.flash = self.messages.invalid_reset_password
            redirect(next, client_side=self.settings.client_side)
        # accounts pending verification/approval or disabled cannot reset
        key = user.registration_key
        if key in ('pending', 'disabled', 'blocked') or (key or '').startswith('pending'):
            session.flash = self.messages.registration_pending
            redirect(next, client_side=self.settings.client_side)
        if onvalidation is DEFAULT:
            onvalidation = self.settings.reset_password_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.reset_password_onaccept
        passfield = self.settings.password_field
        form = SQLFORM.factory(
            Field('new_password', 'password',
                  label=self.messages.new_password,
                  requires=self.table_user()[passfield].requires),
            Field('new_password2', 'password',
                  label=self.messages.verify_password,
                  requires=[IS_EXPR('value==%s' % repr(request.vars.new_password),
                                    self.messages.mismatched_password)]),
            submit_button=self.messages.password_reset_button,
            hidden=dict(_next=next),
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        if form.accepts(request, session, onvalidation=onvalidation,
                        hideerror=self.settings.hideerror):
            # clearing both keys invalidates the reset link and activates
            # the account
            user.update_record(
                **{passfield: str(form.vars.new_password),
                   'registration_key': '',
                   'reset_password_key': ''})
            session.flash = self.messages.password_changed
            if self.settings.login_after_password_change:
                self.login_user(user)
            callback(onaccept, form)
            redirect(next, client_side=self.settings.client_side)
        return form
def request_reset_password(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a form to reset the user password
"""
table_user = self.table_user()
request = current.request
response = current.response
session = current.session
captcha = self.settings.retrieve_password_captcha or \
(self.settings.retrieve_password_captcha is not False and self.settings.captcha)
if next is DEFAULT:
next = self.get_vars_next() or self.settings.request_reset_password_next
if not self.settings.mailer:
response.flash = self.messages.function_disabled
return ''
if onvalidation is DEFAULT:
onvalidation = self.settings.request_reset_password_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.request_reset_password_onaccept
if log is DEFAULT:
log = self.messages['reset_password_log']
userfield = self.settings.login_userfield or 'username' \
if self.settings.login_userfield or 'username' \
in table_user.fields else 'email'
if userfield == 'email':
table_user.email.requires = [
IS_EMAIL(error_message=self.messages.invalid_email),
IS_IN_DB(self.db, table_user.email,
error_message=self.messages.invalid_email)]
if not self.settings.email_case_sensitive:
table_user.email.requires.insert(0, IS_LOWER())
elif userfield == 'username':
table_user.username.requires = [
IS_IN_DB(self.db, table_user.username,
error_message=self.messages.invalid_username)]
if not self.settings.username_case_sensitive:
table_user.username.requires.insert(0, IS_LOWER())
form = SQLFORM(table_user,
fields=[userfield],
hidden=dict(_next=next),
showid=self.settings.showid,
submit_button=self.messages.password_reset_button,
delete_label=self.messages.delete_label,
formstyle=self.settings.formstyle,
separator=self.settings.label_separator
)
if captcha:
addrow(form, captcha.label, captcha,
captcha.comment, self.settings.formstyle, 'captcha__row')
if form.accepts(request, session if self.csrf_prevention else None,
formname='reset_password', dbio=False,
onvalidation=onvalidation,
hideerror=self.settings.hideerror):
user = table_user(**{userfield: form.vars.get(userfield)})
key = user.registration_key
if not user:
session.flash = self.messages['invalid_%s' % userfield]
redirect(self.url(args=request.args),
client_side=self.settings.client_side)
elif key in ('pending', 'disabled', 'blocked') or (key or '').startswith('pending'):
session.flash = self.messages.registration_pending
redirect(self.url(args=request.args),
client_side=self.settings.client_side)
if self.email_reset_password(user):
session.flash = self.messages.email_sent
else:
session.flash = self.messages.unable_send_email
self.log_event(log, user)
callback(onaccept, form)
if not next:
next = self.url(args=request.args)
else:
next = replace_id(next, form)
redirect(next, client_side=self.settings.client_side)
# old_requires = table_user.email.requires
return form
    def email_reset_password(self, user):
        """
        Email a password-reset link to the given user.

        A timestamped reset key ('<epoch>-<uuid>') is generated and embedded
        in a 'reset_password' link; the key is stored on the user record
        only after the email is sent successfully.

        Args:
            user: the user record to email

        Returns:
            bool: True when the email was sent, False otherwise
        """
        reset_password_key = str(int(time.time())) + '-' + web2py_uuid()
        link = self.url(self.settings.function,
                        args=('reset_password',), vars={'key': reset_password_key},
                        scheme=True)
        d = dict(user)
        d.update(dict(key=reset_password_key, link=link))
        if self.settings.mailer and self.settings.mailer.send(
                to=user.email,
                subject=self.messages.reset_password_subject,
                message=self.messages.reset_password % d):
            user.update_record(reset_password_key=reset_password_key)
            return True
        return False
def retrieve_password(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
if self.settings.reset_password_requires_verification:
return self.request_reset_password(next, onvalidation, onaccept, log)
else:
return self.reset_password_deprecated(next, onvalidation, onaccept, log)
    def change_password(self,
                        next=DEFAULT,
                        onvalidation=DEFAULT,
                        onaccept=DEFAULT,
                        log=DEFAULT,
                        ):
        """
        Returns a form that lets the user change password.

        Requires a logged-in user. When an external login form (e.g. CAS)
        is configured and exposes ``change_password_url``, the user is
        redirected there instead.

        Args:
            next: URL to redirect to after the password is changed
            onvalidation: form validation callback(s)
            onaccept: callback(s) run after the change is saved
            log: event-log message template
        """
        if not self.is_logged_in():
            redirect(self.settings.login_url,
                     client_side=self.settings.client_side)
        # Go to external link to change the password
        if self.settings.login_form != self:
            cas = self.settings.login_form
            # To prevent error if change_password_url function is not defined in alternate login
            if hasattr(cas, 'change_password_url'):
                next = cas.change_password_url(next)
                if next is not None:
                    redirect(next)
        db = self.db
        table_user = self.table_user()
        s = db(table_user.id == self.user.id)
        request = current.request
        session = current.session
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.change_password_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.change_password_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.change_password_onaccept
        if log is DEFAULT:
            log = self.messages['change_password_log']
        passfield = self.settings.password_field
        requires = table_user[passfield].requires
        if not isinstance(requires, (list, tuple)):
            requires = [requires]
        # reuse only the CRYPT validator for the old-password field so the
        # submitted value is hashed the same way as the stored one
        requires = [t for t in requires if isinstance(t, CRYPT)]
        if requires:
            requires[0] = CRYPT(**requires[0].__dict__)  # Copy the existing CRYPT attributes
            requires[0].min_length = 0  # But do not enforce minimum length for the old password
        form = SQLFORM.factory(
            Field('old_password', 'password', requires=requires,
                  label=self.messages.old_password),
            Field('new_password', 'password',
                  label=self.messages.new_password,
                  requires=table_user[passfield].requires),
            Field('new_password2', 'password',
                  label=self.messages.verify_password,
                  requires=[IS_EXPR('value==%s' % repr(request.vars.new_password),
                                    self.messages.mismatched_password)]),
            submit_button=self.messages.password_change_button,
            hidden=dict(_next=next),
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        if form.accepts(request, session,
                        formname='change_password',
                        onvalidation=onvalidation,
                        hideerror=self.settings.hideerror):
            current_user = s.select(limitby=(0, 1), orderby_on_limitby=False).first()
            # both sides are CRYPT-processed, so this compares hashes
            if not form.vars['old_password'] == current_user[passfield]:
                form.errors['old_password'] = self.messages.invalid_password
            else:
                d = {passfield: str(form.vars.new_password)}
                s.update(**d)
                session.flash = self.messages.password_changed
                self.log_event(log, self.user)
                callback(onaccept, form)
                if not next:
                    next = self.url(args=request.args)
                else:
                    next = replace_id(next, form)
                redirect(next, client_side=self.settings.client_side)
        return form
    def profile(self,
                next=DEFAULT,
                onvalidation=DEFAULT,
                onaccept=DEFAULT,
                log=DEFAULT,
                ):
        """
        Returns a form that lets the user change his/her profile.

        Anonymous visitors are redirected to the login page.  The password
        and email columns are made read-only; when account deletion is
        allowed and the record is deleted, the user is logged out.

        Args:
            next: URL to redirect to after a successful update
                (DEFAULT resolves to settings.profile_next)
            onvalidation: form validation callback(s)
            onaccept: callback(s) run after the record is updated
            log: message template for the event log
        """
        table_user = self.table_user()
        if not self.is_logged_in():
            redirect(self.settings.login_url,
                     client_side=self.settings.client_side)
        passfield = self.settings.password_field
        # password and email cannot be edited through the profile form
        table_user[passfield].writable = False
        table_user['email'].writable = False
        request = current.request
        session = current.session
        # resolve DEFAULT placeholders from settings/messages
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.profile_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.profile_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.profile_onaccept
        if log is DEFAULT:
            log = self.messages['profile_log']
        form = SQLFORM(
            table_user,
            self.user.id,
            fields=self.settings.profile_fields,
            hidden=dict(_next=next),
            showid=self.settings.showid,
            submit_button=self.messages.profile_save_button,
            delete_label=self.messages.delete_label,
            upload=self.settings.download_url,
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator,
            deletable=self.settings.allow_delete_accounts,
            )
        if form.accepts(request, session,
                        formname='profile',
                        onvalidation=onvalidation,
                        hideerror=self.settings.hideerror):
            extra_fields = self.settings.extra_fields.get(self.settings.table_user_name, [])
            if not form.deleted:
                if any(f.compute for f in extra_fields):
                    # a computed extra field means the stored record may
                    # differ from form.vars: re-read it from the db so the
                    # session copy reflects the computed values
                    user = table_user[self.user.id]
                    self._update_session_user(user)
                    self.update_groups()
                else:
                    self.user.update(table_user._filter_fields(form.vars))
                session.flash = self.messages.profile_updated
                self.log_event(log, self.user)
                callback(onaccept, form)
            if form.deleted:
                # account removed via the form's delete checkbox
                return self.logout()
            if not next:
                next = self.url(args=request.args)
            else:
                next = replace_id(next, form)
            redirect(next, client_side=self.settings.client_side)
        return form
def run_login_onaccept(self):
onaccept = self.settings.login_onaccept
if onaccept:
form = Storage(dict(vars=self.user))
if not isinstance(onaccept, (list, tuple)):
onaccept = [onaccept]
for callback in onaccept:
callback(form)
    def jwt(self):
        """
        JWT token endpoint.

        To use JWT authentication:

        1) instantiate auth with::

               auth = Auth(db, jwt = {'secret_key':'secret'})

           where 'secret' is your own secret string.

        2) Decorate functions that require login but should accept the JWT
           token credentials::

               @auth.allows_jwt()
               @auth.requires_login()
               def myapi(): return 'hello %s' % auth.user.email

           Notice jwt is allowed but not required. If the user is logged
           in, myapi is accessible anyway.

        3) Use it!  API users can obtain a token with::

               http://.../app/default/user/jwt?username=...&password=....

           (returns a json object with a token attribute) and can refresh
           an existing token with::

               http://.../app/default/user/jwt?token=...

           They authenticate calls to http://.../myapi by injecting a
           header::

               Authorization: Bearer <the jwt token>

        Any additional attributes in the jwt argument of Auth()::

            auth = Auth(db, jwt = {...})

        are passed to the constructor of class AuthJWT; see that class for
        documentation.

        Raises:
            HTTP: 401 when Auth was built without jwt support, otherwise
                HTTP 200 carrying the token payload.
        """
        if not self.jwt_handler:
            raise HTTP(401, "Not authorized")
        else:
            # delegate token issuing/refreshing to the configured handler
            rtn = self.jwt_handler.jwt_token_manager()
            # reply immediately, preserving any headers already set
            raise HTTP(200, rtn, cookies=None, **current.response.headers)
def is_impersonating(self):
return self.is_logged_in() and 'impersonator' in current.session.auth
    def impersonate(self, user_id=DEFAULT):
        """
        To use this make a POST to
        `http://..../impersonate request.post_vars.user_id=<id>`

        Set request.post_vars.user_id to 0 to restore the original user.

        Requires that the impersonator is logged in and::

            has_permission('impersonate', 'auth_user', user_id)

        Returns:
            - a bare user_id form when called with no id and no POST data
            - None when restoring the original user or when no target
              user was resolved
            - a read-only SQLFORM of the impersonated user otherwise
        """
        request = current.request
        session = current.session
        auth = session.auth
        table_user = self.table_user()
        if not self.is_logged_in():
            raise HTTP(401, "Not Authorized")
        current_id = auth.user.id
        requested_id = user_id
        user = None
        if user_id is DEFAULT:
            user_id = current.request.post_vars.user_id
        if user_id and user_id != self.user.id and user_id != '0':
            if not self.has_permission('impersonate',
                                       self.table_user(),
                                       user_id):
                raise HTTP(403, "Forbidden")
            user = table_user(user_id)
            if not user:
                raise HTTP(401, "Not Authorized")
            # snapshot the impersonator's whole session so it can be
            # restored later; NOTE(review): pickled blob lives in the
            # session itself and is unpickled below on restore
            auth.impersonator = pickle.dumps(session, pickle.HIGHEST_PROTOCOL)
            auth.user.update(
                table_user._filter_fields(user, True))
            self.user = auth.user
            self.update_groups()
            log = self.messages['impersonate_log']
            self.log_event(log, dict(id=current_id, other_id=auth.user.id))
            self.run_login_onaccept()
        elif user_id in (0, '0'):
            # user_id == 0 means: drop impersonation, restore snapshot
            if self.is_impersonating():
                session.clear()
                session.update(pickle.loads(auth.impersonator))
                self.user = session.auth.user
                self.update_groups()
                self.run_login_onaccept()
            return None
        if requested_id is DEFAULT and not request.post_vars:
            # no id given and nothing posted: show the selection form
            return SQLFORM.factory(Field('user_id', 'integer'))
        elif not user:
            return None
        else:
            return SQLFORM(table_user, user.id, readonly=True)
def groups(self):
"""
Displays the groups and their roles for the logged in user
"""
if not self.is_logged_in():
redirect(self.settings.login_url)
table_membership = self.table_membership()
memberships = self.db(
table_membership.user_id == self.user.id).select()
table = TABLE()
for membership in memberships:
table_group = self.table_group()
groups = self.db(table_group.id == membership.group_id).select()
if groups:
group = groups[0]
table.append(TR(H3(group.role, '(%s)' % group.id)))
table.append(TR(P(group.description)))
if not memberships:
return None
return table
def not_authorized(self):
"""
You can change the view for this page to make it look as you like
"""
if current.request.ajax:
raise HTTP(403, 'ACCESS DENIED')
return self.messages.access_denied
def allows_jwt(self, otherwise=None):
if not self.jwt_handler:
raise HTTP(401, "Not authorized")
else:
return self.jwt_handler.allows_jwt(otherwise=otherwise)
    def requires(self, condition, requires_login=True, otherwise=None):
        """
        Decorator that prevents access to action if not logged in.

        Args:
            condition: value or callable evaluated per-request; falsy
                means "not authorized"
            requires_login: bool or callable; when truthy, an anonymous
                request is treated as failed authentication
            otherwise: callable or URL used instead of the default
                failed-authentication flow
        """
        def decorator(action):

            def f(*a, **b):
                # basic() attempts HTTP basic auth; fall back to session user
                basic_allowed, basic_accepted, user = self.basic()
                user = user or self.user
                login_required = requires_login
                if callable(login_required):
                    login_required = login_required()
                if login_required:
                    if not user:
                        if current.request.ajax:
                            raise HTTP(401, self.messages.ajax_failed_authentication)
                        elif otherwise is not None:
                            if callable(otherwise):
                                return otherwise()
                            redirect(otherwise)
                        elif self.settings.allow_basic_login_only or \
                                basic_accepted or current.request.is_restful:
                            # non-interactive clients get a plain 403
                            raise HTTP(403, "Not authorized")
                        else:
                            # interactive flow: bounce to the login page,
                            # preserving the intended destination
                            next = self.here()
                            current.session.flash = current.response.flash
                            return call_or_redirect(self.settings.on_failed_authentication,
                                                    self.settings.login_url + '?_next=' + urllib_quote(next))
                if callable(condition):
                    flag = condition()
                else:
                    flag = condition
                if not flag:
                    current.session.flash = self.messages.access_denied
                    return call_or_redirect(
                        self.settings.on_failed_authorization)
                return action(*a, **b)
            # preserve the wrapped action's metadata for routing/introspection
            f.__doc__ = action.__doc__
            f.__name__ = action.__name__
            f.__dict__.update(action.__dict__)
            return f
        return decorator
def requires_login(self, otherwise=None):
"""
Decorator that prevents access to action if not logged in
"""
return self.requires(True, otherwise=otherwise)
    def requires_login_or_token(self, otherwise=None):
        """
        Like requires_login(), but when settings.enable_tokens is True a
        valid user token (header ``web2py-user-token`` or ``_token`` var)
        logs the matching user in before the login check runs.
        """
        if self.settings.enable_tokens is True:
            user = None
            request = current.request
            # token may arrive as a header or as a request variable
            token = request.env.http_web2py_user_token or request.vars._token
            table_token = self.table_token()
            table_user = self.table_user()
            from gluon.settings import global_settings
            if global_settings.web2py_runtime_gae:
                # GAE datastore: no joins, look the token up directly
                row = table_token(token=token)
                if row:
                    user = table_user(row.user_id)
            else:
                row = self.db(table_token.token == token)(table_user.id == table_token.user_id).select().first()
                if row:
                    user = row[table_user._tablename]
            if user:
                self.login_user(user)
        return self.requires(True, otherwise=otherwise)
def requires_membership(self, role=None, group_id=None, otherwise=None):
"""
Decorator that prevents access to action if not logged in or
if user logged in is not a member of group_id.
If role is provided instead of group_id then the
group_id is calculated.
"""
def has_membership(self=self, group_id=group_id, role=role):
return self.has_membership(group_id=group_id, role=role)
return self.requires(has_membership, otherwise=otherwise)
def requires_permission(self, name, table_name='', record_id=0,
otherwise=None):
"""
Decorator that prevents access to action if not logged in or
if user logged in is not a member of any group (role) that
has 'name' access to 'table_name', 'record_id'.
"""
def has_permission(self=self, name=name, table_name=table_name, record_id=record_id):
return self.has_permission(name, table_name, record_id)
return self.requires(has_permission, otherwise=otherwise)
def requires_signature(self, otherwise=None, hash_vars=True, hash_extension=True):
"""
Decorator that prevents access to action if not logged in or
if user logged in is not a member of group_id.
If role is provided instead of group_id then the
group_id is calculated.
"""
def verify():
return URL.verify(current.request, user_signature=True, hash_vars=hash_vars, hash_extension=True)
return self.requires(verify, otherwise)
    def accessible_query(self, name, table, user_id=None):
        """
        Returns a query with all accessible records for user_id or
        the current logged in user.

        This method does not work on GAE because it uses JOIN and IN.

        Example:
            Use as::

                db(auth.accessible_query('read', db.mytable)).select(db.mytable.ALL)

        Args:
            name: permission name (e.g. 'read')
            table: a Table, a tablename, or a Set/Query (experimental)
            user_id: defaults to the current logged-in user
        """
        if not user_id:
            user_id = self.user_id
        db = self.db
        if isinstance(table, str) and table in self.db.tables():
            table = self.db[table]
        elif isinstance(table, (Set, Query)):
            # experimental: build a chained query for all tables
            if isinstance(table, Set):
                cquery = table.query
            else:
                cquery = table
            tablenames = db._adapter.tables(cquery)
            for tablename in tablenames:
                # AND together the per-table accessibility queries
                cquery &= self.accessible_query(name, tablename, user_id=user_id)
            return cquery
        if not isinstance(table, str) and \
                self.has_permission(name, table, 0, user_id):
            # table-wide permission (record 0): everything is accessible
            return table.id > 0
        membership = self.table_membership()
        permission = self.table_permission()
        # records whose id appears in a permission row reachable through
        # one of the user's group memberships
        query = table.id.belongs(
            db(membership.user_id == user_id)
            (membership.group_id == permission.group_id)
            (permission.name == name)
            (permission.table_name == table)
            ._select(permission.record_id))
        if self.settings.everybody_group_id:
            # OR in records granted to the "everybody" group
            query |= table.id.belongs(
                db(permission.group_id == self.settings.everybody_group_id)
                (permission.name == name)
                (permission.table_name == table)
                ._select(permission.record_id))
        return query
    @staticmethod
    def archive(form,
                archive_table=None,
                current_record='current_record',
                archive_current=False,
                fields=None):
        """
        If you have a table (db.mytable) that needs full revision history
        you can just do::

            form = crud.update(db.mytable, myrecord, onaccept=auth.archive)

        or::

            form = SQLFORM(db.mytable, myrecord).process(onaccept=auth.archive)

        crud.archive will define a new table "mytable_archive" and store
        a copy of the current record (if archive_current=True)
        or a copy of the previous record (if archive_current=False)
        in the newly created table including a reference
        to the current record.

        fields allows to specify extra fields that need to be archived.

        If you want to access such table you need to define it yourself
        in a model::

            db.define_table('mytable_archive',
                            Field('current_record', db.mytable),
                            db.mytable)

        Notice such table includes all fields of db.mytable plus one:
        current_record.  crud.archive does not timestamp the stored record
        unless your original table has fields like::

            db.define_table(...,
                Field('saved_on', 'datetime',
                      default=request.now, update=request.now, writable=False),
                Field('saved_by', auth.user,
                      default=auth.user_id, update=auth.user_id, writable=False),

        there is nothing special about these fields since they are filled
        before the record is archived.

        If you want to change the archive table name and the name of the
        reference field you can do, for example::

            db.define_table('myhistory',
                            Field('parent_record', db.mytable), db.mytable)

        and use it as::

            form = crud.update(db.mytable, myrecord,
                               onaccept=lambda form:crud.archive(form,
                                   archive_table=db.myhistory,
                                   current_record='parent_record'))

        Returns:
            the id of the inserted archive row, or None when there is
            nothing to archive.
        """
        if not archive_current and not form.record:
            # nothing to archive: no previous record and not archiving current
            return None
        table = form.table
        if not archive_table:
            # lazily define the default <table>_archive companion table
            archive_table_name = '%s_archive' % table
            if archive_table_name not in table._db:
                table._db.define_table(
                    archive_table_name,
                    Field(current_record, table),
                    *[field.clone(unique=False) for field in table])
            archive_table = table._db[archive_table_name]
        new_record = {current_record: form.vars.id}
        for fieldname in archive_table.fields:
            if fieldname not in ['id', current_record]:
                if archive_current and fieldname in form.vars:
                    # archive the newly submitted values
                    new_record[fieldname] = form.vars[fieldname]
                elif form.record and fieldname in form.record:
                    # archive the previous (pre-update) values
                    new_record[fieldname] = form.record[fieldname]
        if fields:
            new_record.update(fields)
        id = archive_table.insert(**new_record)
        return id
    def wiki(self,
             slug=None,
             env=None,
             render='markmin',
             manage_permissions=False,
             force_prefix='',
             restrict_search=False,
             resolve=True,
             extra=None,
             menu_groups=None,
             templates=None,
             migrate=True,
             controller=None,
             function=None,
             force_render=False,
             groups=None):
        """
        Lazily instantiate the Wiki helper (stored as self._wiki) and,
        when resolve=True, serve the requested wiki page.

        Returns:
            the rendered page (XML-wrapped when the renderer returns a
            string), or None when resolve is False / disabled.
        """
        if controller and function:
            # explicit dispatch target given: only set up, do not resolve
            resolve = False
        if not hasattr(self, '_wiki'):
            # first call: build and cache the Wiki controller
            self._wiki = Wiki(self, render=render,
                              manage_permissions=manage_permissions,
                              force_prefix=force_prefix,
                              restrict_search=restrict_search,
                              env=env, extra=extra or {},
                              menu_groups=menu_groups,
                              templates=templates,
                              migrate=migrate,
                              controller=controller,
                              function=function,
                              groups=groups)
        else:
            # subsequent calls only refresh extra/env
            self._wiki.settings.extra = extra or {}
            self._wiki.env.update(env or {})
        # if resolve is set to True, process request as wiki call
        # resolve=False allows initial setup without wiki redirection
        wiki = None
        if resolve:
            if slug:
                wiki = self._wiki.read(slug, force_render)
                if isinstance(wiki, dict) and 'content' in wiki:
                    # We don't want to return a dict object, just the wiki
                    wiki = wiki['content']
            else:
                wiki = self._wiki()
            if isinstance(wiki, basestring):
                wiki = XML(wiki)
            return wiki
def wikimenu(self):
"""To be used in menu.py for app wide wiki menus"""
if (hasattr(self, "_wiki") and
self._wiki.settings.controller and
self._wiki.settings.function):
self._wiki.automenu()
class Crud(object): # pragma: no cover
    """
    Generic CRUD controller: exposes create/read/update/delete/select/
    search actions for DAL tables, with optional permission checks
    delegated to settings.auth.  Deprecated in favor of SQLFORM methods.
    """
    default_messages = dict(
        submit_button='Submit',
        delete_label='Check to delete',
        record_created='Record Created',
        record_updated='Record Updated',
        record_deleted='Record Deleted',
        update_log='Record %(id)s updated',
        create_log='Record %(id)s created',
        read_log='Record %(id)s read',
        delete_log='Record %(id)s deleted',
    )
    def url(self, f=None, args=None, vars=None):
        """
        This should point to the controller that exposes
        download and crud
        """
        if args is None:
            args = []
        if vars is None:
            vars = {}
        return URL(c=self.settings.controller, f=f, args=args, vars=vars)
    def __init__(self, environment, db=None, controller='default'):
        # accept either (environment, db) or (db,) for backward compatibility
        self.db = db
        if not db and environment and isinstance(environment, DAL):
            self.db = environment
        elif not db:
            raise SyntaxError("must pass db as first or second argument")
        self.environment = current
        settings = self.settings = Settings()
        settings.auth = None
        settings.logger = None
        settings.create_next = None
        settings.update_next = None
        settings.controller = controller
        settings.delete_next = self.url()
        settings.download_url = self.url('download')
        settings.create_onvalidation = StorageList()
        settings.update_onvalidation = StorageList()
        settings.delete_onvalidation = StorageList()
        settings.create_onaccept = StorageList()
        settings.update_onaccept = StorageList()
        settings.update_ondelete = StorageList()
        settings.delete_onaccept = StorageList()
        settings.update_deletable = True
        settings.showid = False
        settings.keepvalues = False
        settings.create_captcha = None
        settings.update_captcha = None
        settings.captcha = None
        settings.formstyle = 'table3cols'
        settings.label_separator = ': '
        settings.hideerror = False
        settings.detect_record_change = True
        settings.hmac_key = None
        settings.lock_keys = True
        messages = self.messages = Messages(current.T)
        messages.update(Crud.default_messages)
        messages.lock_keys = True
    def __call__(self):
        """Dispatch request.args to the matching CRUD action."""
        args = current.request.args
        if len(args) < 1:
            raise HTTP(404)
        elif args[0] == 'tables':
            return self.tables()
        elif len(args) > 1 and not args(1) in self.db.tables:
            raise HTTP(404)
        table = self.db[args(1)]
        if args[0] == 'create':
            return self.create(table)
        elif args[0] == 'select':
            return self.select(table, linkto=self.url(args='read'))
        elif args[0] == 'search':
            form, rows = self.search(table, linkto=self.url(args='read'))
            return DIV(form, SQLTABLE(rows))
        elif args[0] == 'read':
            return self.read(table, args(2))
        elif args[0] == 'update':
            return self.update(table, args(2))
        elif args[0] == 'delete':
            return self.delete(table, args(2))
        else:
            raise HTTP(404)
    def log_event(self, message, vars):
        """Forward to the configured logger, if any."""
        if self.settings.logger:
            self.settings.logger.log_event(message, vars, origin='crud')
    def has_permission(self, name, table, record=0):
        """True when no auth is configured or auth grants the permission."""
        if not self.settings.auth:
            return True
        try:
            record_id = record.id
        # NOTE(review): bare except; accepts both a record object and a raw id
        except:
            record_id = record
        return self.settings.auth.has_permission(name, str(table), record_id)
    def tables(self):
        """Return a TABLE of links to the select view of every table."""
        return TABLE(*[TR(A(name,
                            _href=self.url(args=('select', name))))
                       for name in self.db.tables])
    @staticmethod
    def archive(form, archive_table=None, current_record='current_record'):
        """Shortcut to Auth.archive (record revision history)."""
        return Auth.archive(form, archive_table=archive_table,
                            current_record=current_record)
    def update(self,
               table,
               record,
               next=DEFAULT,
               onvalidation=DEFAULT,
               onaccept=DEFAULT,
               ondelete=DEFAULT,
               log=DEFAULT,
               message=DEFAULT,
               deletable=DEFAULT,
               formname=DEFAULT,
               **attributes
               ):
        """
        Return (and process) an update form for `record` of `table`;
        with record=None it acts as a create form (see create()).
        """
        if not (isinstance(table, Table) or table in self.db.tables) \
                or (isinstance(record, str) and not str(record).isdigit()):
            raise HTTP(404)
        if not isinstance(table, Table):
            table = self.db[table]
        try:
            record_id = record.id
        # NOTE(review): bare except; accepts record object, raw id, or None
        except:
            record_id = record or 0
        if record_id and not self.has_permission('update', table, record_id):
            redirect(self.settings.auth.settings.on_failed_authorization)
        if not record_id and not self.has_permission('create', table, record_id):
            redirect(self.settings.auth.settings.on_failed_authorization)
        request = current.request
        response = current.response
        session = current.session
        if request.extension == 'json' and request.vars.json:
            # JSON payloads arrive in a single 'json' variable
            request.vars.update(json.loads(request.vars.json))
        if next is DEFAULT:
            next = request.get_vars._next \
                or request.post_vars._next \
                or self.settings.update_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.update_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.update_onaccept
        if ondelete is DEFAULT:
            ondelete = self.settings.update_ondelete
        if log is DEFAULT:
            log = self.messages['update_log']
        if deletable is DEFAULT:
            deletable = self.settings.update_deletable
        if message is DEFAULT:
            message = self.messages.record_updated
        if 'hidden' not in attributes:
            attributes['hidden'] = {}
        attributes['hidden']['_next'] = next
        form = SQLFORM(
            table,
            record,
            showid=self.settings.showid,
            submit_button=self.messages.submit_button,
            delete_label=self.messages.delete_label,
            deletable=deletable,
            upload=self.settings.download_url,
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator,
            **attributes  # contains hidden
        )
        self.accepted = False
        self.deleted = False
        # captcha rows: update_captcha for existing records,
        # create_captcha for new ones, falling back to the generic captcha
        captcha = self.settings.update_captcha or self.settings.captcha
        if record and captcha:
            addrow(form, captcha.label, captcha, captcha.comment, self.settings.formstyle, 'captcha__row')
        captcha = self.settings.create_captcha or self.settings.captcha
        if not record and captcha:
            addrow(form, captcha.label, captcha, captcha.comment, self.settings.formstyle, 'captcha__row')
        if request.extension not in ('html', 'load'):
            # non-HTML (API) calls: stateless form, no CSRF session check
            (_session, _formname) = (None, None)
        else:
            (_session, _formname) = (
                session, '%s/%s' % (table._tablename, form.record_id))
        if formname is not DEFAULT:
            _formname = formname
        keepvalues = self.settings.keepvalues
        if request.vars.delete_this_record:
            keepvalues = False
        if isinstance(onvalidation, StorageList):
            onvalidation = onvalidation.get(table._tablename, [])
        if form.accepts(request, _session, formname=_formname,
                        onvalidation=onvalidation, keepvalues=keepvalues,
                        hideerror=self.settings.hideerror,
                        detect_record_change=self.settings.detect_record_change):
            self.accepted = True
            response.flash = message
            if log:
                self.log_event(log, form.vars)
            if request.vars.delete_this_record:
                self.deleted = True
                message = self.messages.record_deleted
                callback(ondelete, form, table._tablename)
            response.flash = message
            callback(onaccept, form, table._tablename)
            if request.extension not in ('html', 'load'):
                raise HTTP(200, 'RECORD CREATED/UPDATED')
            if isinstance(next, (list, tuple)):  # fix issue with 2.6
                next = next[0]
            if next:  # Only redirect when explicit
                next = replace_id(next, form)
                session.flash = response.flash
                redirect(next)
        elif request.extension not in ('html', 'load'):
            raise HTTP(401, serializers.json(dict(errors=form.errors)))
        return form
    def create(self,
               table,
               next=DEFAULT,
               onvalidation=DEFAULT,
               onaccept=DEFAULT,
               log=DEFAULT,
               message=DEFAULT,
               formname=DEFAULT,
               **attributes
               ):
        """Return a create form for `table` (update() with record=None)."""
        if next is DEFAULT:
            next = self.settings.create_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.create_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.create_onaccept
        if log is DEFAULT:
            log = self.messages['create_log']
        if message is DEFAULT:
            message = self.messages.record_created
        return self.update(table,
                           None,
                           next=next,
                           onvalidation=onvalidation,
                           onaccept=onaccept,
                           log=log,
                           message=message,
                           deletable=False,
                           formname=formname,
                           **attributes
                           )
    def read(self, table, record):
        """Return a read-only form of `record`; a plain dict for API calls."""
        if not (isinstance(table, Table) or table in self.db.tables) \
                or (isinstance(record, str) and not str(record).isdigit()):
            raise HTTP(404)
        if not isinstance(table, Table):
            table = self.db[table]
        if not self.has_permission('read', table, record):
            redirect(self.settings.auth.settings.on_failed_authorization)
        form = SQLFORM(
            table,
            record,
            readonly=True,
            comments=False,
            upload=self.settings.download_url,
            showid=self.settings.showid,
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        if current.request.extension not in ('html', 'load'):
            return table._filter_fields(form.record, id=True)
        return form
    def delete(self,
               table,
               record_id,
               next=DEFAULT,
               message=DEFAULT,
               ):
        """Delete `record_id` from `table` (with callbacks), then redirect."""
        if not (isinstance(table, Table) or table in self.db.tables):
            raise HTTP(404)
        if not isinstance(table, Table):
            table = self.db[table]
        if not self.has_permission('delete', table, record_id):
            redirect(self.settings.auth.settings.on_failed_authorization)
        request = current.request
        session = current.session
        if next is DEFAULT:
            next = request.get_vars._next \
                or request.post_vars._next \
                or self.settings.delete_next
        if message is DEFAULT:
            message = self.messages.record_deleted
        record = table[record_id]
        if record:
            callback(self.settings.delete_onvalidation, record)
            del table[record_id]
            callback(self.settings.delete_onaccept, record, table._tablename)
            session.flash = message
        redirect(next)
    def rows(self,
             table,
             query=None,
             fields=None,
             orderby=None,
             limitby=None,
             ):
        """Return the Rows matching `query` on `table` (readable fields only
        when `fields` is not given); requires 'select' permission."""
        if not (isinstance(table, Table) or table in self.db.tables):
            raise HTTP(404)
        if not self.has_permission('select', table):
            redirect(self.settings.auth.settings.on_failed_authorization)
        # if record_id and not self.has_permission('select', table):
        #     redirect(self.settings.auth.settings.on_failed_authorization)
        if not isinstance(table, Table):
            table = self.db[table]
        if not query:
            query = table.id > 0
        if not fields:
            fields = [field for field in table if field.readable]
        else:
            fields = [table[f] if isinstance(f, str) else f for f in fields]
        rows = self.db(query).select(*fields, **dict(orderby=orderby,
                                                     limitby=limitby))
        return rows
    def select(self,
               table,
               query=None,
               fields=None,
               orderby=None,
               limitby=None,
               headers=None,
               **attr
               ):
        """Return an SQLTABLE of the matching rows (a list for API calls),
        or None when there are no rows."""
        headers = headers or {}
        rows = self.rows(table, query, fields, orderby, limitby)
        if not rows:
            return None  # Nicer than an empty table.
        if 'upload' not in attr:
            attr['upload'] = self.url('download')
        if current.request.extension not in ('html', 'load'):
            return rows.as_list()
        if not headers:
            if isinstance(table, str):
                table = self.db[table]
            headers = dict((str(k), k.label) for k in table)
        return SQLTABLE(rows, headers=headers, **attr)
    def get_format(self, field):
        """Return the referenced table's _format field name for a reference
        field (stripping the '%(...)s' wrapper), or the field name."""
        rtable = field._db[field.type[10:]]
        format = rtable.get('_format', None)
        if format and isinstance(format, str):
            return format[2:-2]
        return field.name
    def get_query(self, field, op, value, refsearch=False):
        """Translate a (field, op, value) search triple into a DAL query,
        or into a row-filter lambda when refsearch is True.
        Returns None on any error (bare except below is deliberate)."""
        try:
            if refsearch:
                format = self.get_format(field)
            if op == 'equals':
                if not refsearch:
                    return field == value
                else:
                    return lambda row: row[field.name][format] == value
            elif op == 'not equal':
                if not refsearch:
                    return field != value
                else:
                    return lambda row: row[field.name][format] != value
            elif op == 'greater than':
                if not refsearch:
                    return field > value
                else:
                    return lambda row: row[field.name][format] > value
            elif op == 'less than':
                if not refsearch:
                    return field < value
                else:
                    return lambda row: row[field.name][format] < value
            elif op == 'starts with':
                if not refsearch:
                    return field.like(value + '%')
                else:
                    return lambda row: str(row[field.name][format]).startswith(value)
            elif op == 'ends with':
                if not refsearch:
                    return field.like('%' + value)
                else:
                    return lambda row: str(row[field.name][format]).endswith(value)
            elif op == 'contains':
                if not refsearch:
                    return field.like('%' + value + '%')
                else:
                    return lambda row: value in row[field.name][format]
        # NOTE(review): bare except turns any bad op/value into "no query"
        except:
            return None
    def search(self, *tables, **args):
        """
        Creates a search form and its results for a table

        Examples:
            Use as::

                form, results = crud.search(db.test,
                    queries = ['equals', 'not equal', 'contains'],
                    query_labels={'equals':'Equals',
                                  'not equal':'Not equal'},
                    fields = ['id','children'],
                    field_labels = {
                        'id':'ID','children':'Children'},
                    zero='Please choose',
                    query = (db.test.id > 0)&(db.test.id != 3) )
        """
        table = tables[0]
        fields = args.get('fields', table.fields)
        validate = args.get('validate', True)
        request = current.request
        db = self.db
        if not (isinstance(table, Table) or table in db.tables):
            raise HTTP(404)
        attributes = {}
        for key in ('orderby', 'groupby', 'left', 'distinct', 'limitby', 'cache'):
            if key in args:
                attributes[key] = args[key]
        tbl = TABLE()
        selected = []
        refsearch = []
        results = []
        showall = args.get('showall', False)
        if showall:
            selected = fields
        chkall = args.get('chkall', False)
        if chkall:
            # pre-check every field's checkbox
            for f in fields:
                request.vars['chk%s' % f] = 'on'
        ops = args.get('queries', [])
        zero = args.get('zero', '')
        if not ops:
            ops = ['equals', 'not equal', 'greater than',
                   'less than', 'starts with',
                   'ends with', 'contains']
        ops.insert(0, zero)
        query_labels = args.get('query_labels', {})
        query = args.get('query', table.id > 0)
        field_labels = args.get('field_labels', {})
        for field in fields:
            field = table[field]
            if not field.readable:
                continue
            fieldname = field.name
            # per-field widgets: chk<name> (include), op<name>, txt<name>
            chkval = request.vars.get('chk' + fieldname, None)
            txtval = request.vars.get('txt' + fieldname, None)
            opval = request.vars.get('op' + fieldname, None)
            row = TR(TD(INPUT(_type="checkbox", _name="chk" + fieldname,
                              _disabled=(field.type == 'id'),
                              value=(field.type == 'id' or chkval == 'on'))),
                     TD(field_labels.get(fieldname, field.label)),
                     TD(SELECT([OPTION(query_labels.get(op, op),
                                       _value=op) for op in ops],
                               _name="op" + fieldname,
                               value=opval)),
                     TD(INPUT(_type="text", _name="txt" + fieldname,
                              _value=txtval, _id='txt' + fieldname,
                              _class=str(field.type))))
            tbl.append(row)
            if request.post_vars and (chkval or field.type == 'id'):
                if txtval and opval != '':
                    if field.type[0:10] == 'reference ':
                        # reference fields are filtered post-select
                        refsearch.append(self.get_query(field, opval, txtval, refsearch=True))
                    elif validate:
                        value, error = field.validate(txtval)
                        if not error:
                            # TODO deal with 'starts with', 'ends with', 'contains' on GAE
                            query &= self.get_query(field, opval, value)
                        else:
                            row[3].append(DIV(error, _class='error'))
                    else:
                        query &= self.get_query(field, opval, txtval)
                selected.append(field)
        form = FORM(tbl, INPUT(_type="submit"))
        if selected:
            try:
                results = db(query).select(*selected, **attributes)
                for r in refsearch:
                    results = results.find(r)
            except: # TODO: hmmm, we should do better here
                results = None
        return form, results
# Install a process-wide urllib2 opener that keeps cookies across
# requests (used by the non-GAE branch of fetch() below).
urllib2.install_opener(urllib2.build_opener(urllib2.HTTPCookieProcessor()))
def fetch(url, data=None, headers=None,
          cookie=Cookie.SimpleCookie(),
          user_agent='Mozilla/5.0'):
    """
    Fetch `url` and return the response body, POSTing urlencoded `data`
    when given.  Uses google.appengine.api.urlfetch on GAE (following
    redirects manually and carrying cookies), urllib2 elsewhere.

    NOTE(review): `cookie` has a mutable default (one SimpleCookie shared
    by all default-argument calls), so cookies persist between calls —
    presumably intentional for session continuity; confirm before changing.
    """
    headers = headers or {}
    if data is not None:
        data = urlencode(data)
    if user_agent:
        headers['User-agent'] = user_agent
    headers['Cookie'] = ' '.join(
        ['%s=%s;' % (c.key, c.value) for c in cookie.values()])
    try:
        from google.appengine.api import urlfetch
    except ImportError:
        # plain Python: one request via the cookie-aware global opener
        req = urllib2.Request(url, data, headers)
        html = urlopen(req).read()
    else:
        # GAE: follow redirects by hand so cookies can be re-sent
        method = ((data is None) and urlfetch.GET) or urlfetch.POST
        while url is not None:
            response = urlfetch.fetch(url=url, payload=data,
                                      method=method, headers=headers,
                                      allow_truncated=False, follow_redirects=False,
                                      deadline=10)
            # next request will be a get, so no need to send the data again
            data = None
            method = urlfetch.GET
            # load cookies from the response
            cookie.load(response.headers.get('set-cookie', ''))
            url = response.headers.get('location')
        html = response.content
    return html
# Extracts the first <lat>/<lng> pair from a Google geocoding XML reply.
regex_geocode = \
    re.compile(r"""<geometry>[\W]*?<location>[\W]*?<lat>(?P<la>[^<]*)</lat>[\W]*?<lng>(?P<lo>[^<]*)</lng>[\W]*?</location>""")
def geocode(address):
    """
    Geocode `address` via the Google Maps XML API.

    Returns:
        (latitude, longitude) floats, or (0.0, 0.0) on any failure
        (network error, no match, parse error) — best-effort by design,
        hence the bare except.
    """
    try:
        a = urllib_quote(address)
        txt = fetch('http://maps.googleapis.com/maps/api/geocode/xml?sensor=false&address=%s' % a)
        item = regex_geocode.search(txt)
        (la, lo) = (float(item.group('la')), float(item.group('lo')))
        return (la, lo)
    except:
        return (0.0, 0.0)
def reverse_geocode(lat, lng, lang=None):
    """ Try to get an approximate address for a given latitude, longitude.

    Uses the Google Maps JSON API; `lang` defaults to the request's
    accepted language.  Returns '' on any failure (best-effort by
    design, hence the bare except).
    """
    if not lang:
        lang = current.T.accepted_language
    try:
        return json.loads(fetch('http://maps.googleapis.com/maps/api/geocode/json?latlng=%(lat)s,%(lng)s&language=%(lang)s' % locals()))['results'][0]['formatted_address']
    except:
        return ''
def universal_caller(f, *a, **b):
    """
    Call f with whatever subset of *a / **b matches its signature,
    silently discarding surplus positional values and unknown keywords.

    Positional values are matched to f's parameters left to right;
    remaining required parameters may be supplied by name in `b`.

    Raises:
        HTTP: 404 when a required parameter cannot be satisfied.
    """
    c = f.__code__.co_argcount
    n = f.__code__.co_varnames[:c]
    defaults = f.__defaults__ or []
    # Bug fix: when f has no defaults, the old slice n[0:-len(defaults)]
    # was n[0:-0] == () — every parameter looked optional, so a missing
    # argument surfaced as a TypeError instead of the intended HTTP 404.
    split = c - len(defaults)
    pos_args = n[:split]    # parameters without a default (required)
    named_args = n[split:]  # parameters with a default (optional)
    arg_dict = {}
    # Fill the arg_dict with name and value for the submitted, positional values
    for pos_index, pos_val in enumerate(a[:c]):
        arg_dict[n[pos_index]] = pos_val  # n[pos_index] is the name of the argument
    # There might be pos_args left, that are sent as named_values. Gather them as well.
    # If an argument already is populated with values we simply replace them.
    for arg_name in pos_args[len(arg_dict):]:
        if arg_name in b:
            arg_dict[arg_name] = b[arg_name]
    if len(arg_dict) >= len(pos_args):
        # All the positional arguments are found. The function may now be called.
        # However, we need to update the arg_dict with the values from the named arguments as well.
        for arg_name in named_args:
            if arg_name in b:
                arg_dict[arg_name] = b[arg_name]
        return f(**arg_dict)
    # Raise an error, the function cannot be called.
    raise HTTP(404, "Object does not exist")
class Service(object):
    """
    Registry of controller functions exposed over multiple RPC and
    serialization protocols (run/csv/xml/rss/json/jsonrpc/jsonrpc2/
    xmlrpc/amfrpc/amfrpc3/soap).  Decorate a function with the matching
    method to register it under that protocol.
    """
def __init__(self, environment=None, check_args=False):
self.check_args = check_args
self.run_procedures = {}
self.csv_procedures = {}
self.xml_procedures = {}
self.rss_procedures = {}
self.json_procedures = {}
self.jsonrpc_procedures = {}
self.jsonrpc2_procedures = {}
self.xmlrpc_procedures = {}
self.amfrpc_procedures = {}
self.amfrpc3_procedures = {}
self.soap_procedures = {}
def run(self, f):
"""
Example:
Use as::
service = Service()
@service.run
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/run/myfunction?a=3&b=4
"""
self.run_procedures[f.__name__] = f
return f
def csv(self, f):
"""
Example:
Use as::
service = Service()
@service.csv
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/csv/myfunction?a=3&b=4
"""
self.csv_procedures[f.__name__] = f
return f
def xml(self, f):
"""
Example:
Use as::
service = Service()
@service.xml
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/xml/myfunction?a=3&b=4
"""
self.xml_procedures[f.__name__] = f
return f
def rss(self, f):
"""
Example:
Use as::
service = Service()
@service.rss
def myfunction():
return dict(title=..., link=..., description=...,
created_on=..., entries=[dict(title=..., link=...,
description=..., created_on=...])
def call():
return service()
Then call it with:
wget http://..../app/default/call/rss/myfunction
"""
self.rss_procedures[f.__name__] = f
return f
def json(self, f):
"""
Example:
Use as::
service = Service()
@service.json
def myfunction(a, b):
return [{a: b}]
def call():
return service()
Then call it with:;
wget http://..../app/default/call/json/myfunction?a=hello&b=world
"""
self.json_procedures[f.__name__] = f
return f
def jsonrpc(self, f):
"""
Example:
Use as::
service = Service()
@service.jsonrpc
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with:
wget http://..../app/default/call/jsonrpc/myfunction?a=hello&b=world
"""
self.jsonrpc_procedures[f.__name__] = f
return f
def jsonrpc2(self, f):
"""
Example:
Use as::
service = Service()
@service.jsonrpc2
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with:
wget --post-data '{"jsonrpc": "2.0",
"id": 1,
"method": "myfunction",
"params": {"a": 1, "b": 2}}' http://..../app/default/call/jsonrpc2
"""
self.jsonrpc2_procedures[f.__name__] = f
return f
def xmlrpc(self, f):
"""
Example:
Use as::
service = Service()
@service.xmlrpc
def myfunction(a, b):
return a + b
def call():
return service()
The call it with:
wget http://..../app/default/call/xmlrpc/myfunction?a=hello&b=world
"""
self.xmlrpc_procedures[f.__name__] = f
return f
def amfrpc(self, f):
"""
Example:
Use as::
service = Service()
@service.amfrpc
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/amfrpc/myfunction?a=hello&b=world
"""
self.amfrpc_procedures[f.__name__] = f
return f
def amfrpc3(self, domain='default'):
"""
Example:
Use as::
service = Service()
@service.amfrpc3('domain')
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with:
wget http://..../app/default/call/amfrpc3/myfunction?a=hello&b=world
"""
if not isinstance(domain, str):
raise SyntaxError("AMF3 requires a domain for function")
def _amfrpc3(f):
if domain:
self.amfrpc3_procedures[domain + '.' + f.__name__] = f
else:
self.amfrpc3_procedures[f.__name__] = f
return f
return _amfrpc3
def soap(self, name=None, returns=None, args=None, doc=None, response_element_name=None):
"""
Example:
Use as::
service = Service()
@service.soap('MyFunction',returns={'result':int},args={'a':int,'b':int,})
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
from gluon.contrib.pysimplesoap.client import SoapClient
client = SoapClient(wsdl="http://..../app/default/call/soap?WSDL")
response = client.MyFunction(a=1,b=2)
return response['result']
It also exposes online generated documentation and xml example messages
at `http://..../app/default/call/soap`
"""
def _soap(f):
self.soap_procedures[name or f.__name__] = f, returns, args, doc, response_element_name
return f
return _soap
def serve_run(self, args=None):
request = current.request
if not args:
args = request.args
if args and args[0] in self.run_procedures:
return str(self.call_service_function(self.run_procedures[args[0]],
*args[1:], **dict(request.vars)))
self.error()
def serve_csv(self, args=None):
request = current.request
response = current.response
response.headers['Content-Type'] = 'text/x-csv'
if not args:
args = request.args
def none_exception(value):
if isinstance(value, unicodeT):
return value.encode('utf8')
if hasattr(value, 'isoformat'):
return value.isoformat()[:19].replace('T', ' ')
if value is None:
return '<NULL>'
return value
if args and args[0] in self.csv_procedures:
import types
r = self.call_service_function(self.csv_procedures[args[0]],
*args[1:], **dict(request.vars))
s = StringIO()
if hasattr(r, 'export_to_csv_file'):
r.export_to_csv_file(s)
elif r and not isinstance(r, types.GeneratorType) and isinstance(r[0], (dict, Storage)):
import csv
writer = csv.writer(s)
writer.writerow(list(r[0].keys()))
for line in r:
writer.writerow([none_exception(v)
for v in line.values()])
else:
import csv
writer = csv.writer(s)
for line in r:
writer.writerow(line)
return s.getvalue()
self.error()
def serve_xml(self, args=None):
request = current.request
response = current.response
response.headers['Content-Type'] = 'text/xml'
if not args:
args = request.args
if args and args[0] in self.xml_procedures:
s = self.call_service_function(self.xml_procedures[args[0]],
*args[1:], **dict(request.vars))
if hasattr(s, 'as_list'):
s = s.as_list()
return serializers.xml(s, quote=False)
self.error()
def serve_rss(self, args=None):
request = current.request
response = current.response
if not args:
args = request.args
if args and args[0] in self.rss_procedures:
feed = self.call_service_function(self.rss_procedures[args[0]],
*args[1:], **dict(request.vars))
else:
self.error()
response.headers['Content-Type'] = 'application/rss+xml'
return serializers.rss(feed)
def serve_json(self, args=None):
request = current.request
response = current.response
response.headers['Content-Type'] = 'application/json; charset=utf-8'
if not args:
args = request.args
d = dict(request.vars)
if args and args[0] in self.json_procedures:
s = self.call_service_function(self.json_procedures[args[0]], *args[1:], **d)
if hasattr(s, 'as_list'):
s = s.as_list()
return response.json(s)
self.error()
class JsonRpcException(Exception):
def __init__(self, code, info):
jrpc_error = Service.jsonrpc_errors.get(code)
if jrpc_error:
self.message, self.description = jrpc_error
self.code, self.info = code, info
# jsonrpc 2.0 error types. records the following structure {code: (message,meaning)}
jsonrpc_errors = {
-32700: ("Parse error. Invalid JSON was received by the server.",
"An error occurred on the server while parsing the JSON text."),
-32600: ("Invalid Request", "The JSON sent is not a valid Request object."),
-32601: ("Method not found", "The method does not exist / is not available."),
-32602: ("Invalid params", "Invalid method parameter(s)."),
-32603: ("Internal error", "Internal JSON-RPC error."),
-32099: ("Server error", "Reserved for implementation-defined server-errors.")}
def serve_jsonrpc(self):
def return_response(id, result):
return serializers.json({'version': '1.1', 'id': id, 'result': result, 'error': None})
def return_error(id, code, message, data=None):
error = {'name': 'JSONRPCError',
'code': code, 'message': message}
if data is not None:
error['data'] = data
return serializers.json({'id': id,
'version': '1.1',
'error': error,
})
request = current.request
response = current.response
response.headers['Content-Type'] = 'application/json; charset=utf-8'
methods = self.jsonrpc_procedures
data = json.loads(request.body.read())
jsonrpc_2 = data.get('jsonrpc')
if jsonrpc_2: # hand over to version 2 of the protocol
return self.serve_jsonrpc2(data)
id, method, params = data.get('id'), data.get('method'), data.get('params', [])
if id is None:
return return_error(0, 100, 'missing id')
if method not in methods:
return return_error(id, 100, 'method "%s" does not exist' % method)
try:
if isinstance(params, dict):
s = methods[method](**params)
else:
s = methods[method](*params)
if hasattr(s, 'as_list'):
s = s.as_list()
return return_response(id, s)
except Service.JsonRpcException as e:
return return_error(id, e.code, e.info)
except:
etype, eval, etb = sys.exc_info()
message = '%s: %s' % (etype.__name__, eval)
data = request.is_local and traceback.format_tb(etb)
logger.warning('jsonrpc exception %s\n%s' % (message, traceback.format_tb(etb)))
return return_error(id, 100, message, data)
def serve_jsonrpc2(self, data=None, batch_element=False):
def return_response(id, result):
if not must_respond:
return None
return serializers.json({'jsonrpc': '2.0', 'id': id, 'result': result})
def return_error(id, code, message=None, data=None):
error = {'code': code}
if code in Service.jsonrpc_errors:
error['message'] = Service.jsonrpc_errors[code][0]
error['data'] = Service.jsonrpc_errors[code][1]
if message is not None:
error['message'] = message
if data is not None:
error['data'] = data
return serializers.json({'jsonrpc': '2.0', 'id': id, 'error': error})
def validate(data):
"""
Validate request as defined in: http://www.jsonrpc.org/specification#request_object.
Args:
data(str): The json object.
Returns:
- True -- if successful
- False -- if no error should be reported (i.e. data is missing 'id' member)
Raises:
JsonRPCException
"""
iparms = set(data.keys())
mandatory_args = set(['jsonrpc', 'method'])
missing_args = mandatory_args - iparms
if missing_args:
raise Service.JsonRpcException(-32600, 'Missing arguments %s.' % list(missing_args))
if data['jsonrpc'] != '2.0':
raise Service.JsonRpcException(-32603, 'Unsupported jsonrpc version "%s"' % data['jsonrpc'])
if 'id' not in iparms:
return False
return True
request = current.request
response = current.response
if not data:
response.headers['Content-Type'] = 'application/json; charset=utf-8'
try:
data = json.loads(request.body.read())
except ValueError: # decoding error in json lib
return return_error(None, -32700)
# Batch handling
if isinstance(data, list) and not batch_element:
retlist = []
for c in data:
retstr = self.serve_jsonrpc2(c, batch_element=True)
if retstr: # do not add empty responses
retlist.append(retstr)
if len(retlist) == 0: # return nothing
return ''
else:
return "[" + ','.join(retlist) + "]"
methods = self.jsonrpc2_procedures
methods.update(self.jsonrpc_procedures)
try:
must_respond = validate(data)
except Service.JsonRpcException as e:
return return_error(None, e.code, e.info)
id, method, params = data.get('id'), data['method'], data.get('params', '')
if method not in methods:
return return_error(id, -32601, data='Method "%s" does not exist' % method)
try:
if isinstance(params, dict):
s = methods[method](**params)
else:
s = methods[method](*params)
if hasattr(s, 'as_list'):
s = s.as_list()
if must_respond:
return return_response(id, s)
else:
return ''
except HTTP as e:
raise e
except Service.JsonRpcException as e:
return return_error(id, e.code, e.info)
except:
etype, eval, etb = sys.exc_info()
data = '%s: %s\n' % (etype.__name__, eval) + str(request.is_local and traceback.format_tb(etb))
logger.warning('%s: %s\n%s' % (etype.__name__, eval, traceback.format_tb(etb)))
return return_error(id, -32099, data=data)
def serve_xmlrpc(self):
request = current.request
response = current.response
services = list(self.xmlrpc_procedures.values())
return response.xmlrpc(request, services)
def serve_amfrpc(self, version=0):
try:
import pyamf
import pyamf.remoting.gateway
except:
return "pyamf not installed or not in Python sys.path"
request = current.request
response = current.response
if version == 3:
services = self.amfrpc3_procedures
base_gateway = pyamf.remoting.gateway.BaseGateway(services)
pyamf_request = pyamf.remoting.decode(request.body)
else:
services = self.amfrpc_procedures
base_gateway = pyamf.remoting.gateway.BaseGateway(services)
context = pyamf.get_context(pyamf.AMF0)
pyamf_request = pyamf.remoting.decode(request.body, context)
pyamf_response = pyamf.remoting.Envelope(pyamf_request.amfVersion)
for name, message in pyamf_request:
pyamf_response[name] = base_gateway.getProcessor(message)(message)
response.headers['Content-Type'] = pyamf.remoting.CONTENT_TYPE
if version == 3:
return pyamf.remoting.encode(pyamf_response).getvalue()
else:
return pyamf.remoting.encode(pyamf_response, context).getvalue()
def serve_soap(self, version="1.1"):
try:
from gluon.contrib.pysimplesoap.server import SoapDispatcher
except:
return "pysimplesoap not installed in contrib"
request = current.request
response = current.response
procedures = self.soap_procedures
location = "%s://%s%s" % (request.env.wsgi_url_scheme,
request.env.http_host,
URL(r=request, f="call/soap", vars={}))
namespace = 'namespace' in response and response.namespace or location
documentation = response.description or ''
dispatcher = SoapDispatcher(
name=response.title,
location=location,
action=location, # SOAPAction
namespace=namespace,
prefix='pys',
documentation=documentation,
ns=True)
for method, (function, returns, args, doc, resp_elem_name) in iteritems(procedures):
dispatcher.register_function(method, function, returns, args, doc, resp_elem_name)
if request.env.request_method == 'POST':
fault = {}
# Process normal Soap Operation
response.headers['Content-Type'] = 'text/xml'
xml = dispatcher.dispatch(request.body.read(), fault=fault)
if fault:
# May want to consider populating a ticket here...
response.status = 500
# return the soap response
return xml
elif 'WSDL' in request.vars:
# Return Web Service Description
response.headers['Content-Type'] = 'text/xml'
return dispatcher.wsdl()
elif 'op' in request.vars:
# Return method help webpage
response.headers['Content-Type'] = 'text/html'
method = request.vars['op']
sample_req_xml, sample_res_xml, doc = dispatcher.help(method)
body = [H1("Welcome to Web2Py SOAP webservice gateway"),
A("See all webservice operations",
_href=URL(r=request, f="call/soap", vars={})),
H2(method),
P(doc),
UL(LI("Location: %s" % dispatcher.location),
LI("Namespace: %s" % dispatcher.namespace),
LI("SoapAction: %s" % dispatcher.action),
),
H3("Sample SOAP XML Request Message:"),
CODE(sample_req_xml, language="xml"),
H3("Sample SOAP XML Response Message:"),
CODE(sample_res_xml, language="xml"),
]
return {'body': body}
else:
# Return general help and method list webpage
response.headers['Content-Type'] = 'text/html'
body = [H1("Welcome to Web2Py SOAP webservice gateway"),
P(response.description),
P("The following operations are available"),
A("See WSDL for webservice description",
_href=URL(r=request, f="call/soap", vars={"WSDL": None})),
UL([LI(A("%s: %s" % (method, doc or ''),
_href=URL(r=request, f="call/soap", vars={'op': method})))
for method, doc in dispatcher.list_methods()]),
]
return {'body': body}
def __call__(self):
"""
Registers services with::
service = Service()
@service.run
@service.rss
@service.json
@service.jsonrpc
@service.xmlrpc
@service.amfrpc
@service.amfrpc3('domain')
@service.soap('Method', returns={'Result':int}, args={'a':int,'b':int,})
Exposes services with::
def call():
return service()
You can call services with::
http://..../app/default/call/run?[parameters]
http://..../app/default/call/rss?[parameters]
http://..../app/default/call/json?[parameters]
http://..../app/default/call/jsonrpc
http://..../app/default/call/xmlrpc
http://..../app/default/call/amfrpc
http://..../app/default/call/amfrpc3
http://..../app/default/call/soap
"""
request = current.request
if len(request.args) < 1:
raise HTTP(404, "Not Found")
arg0 = request.args(0)
if arg0 == 'run':
return self.serve_run(request.args[1:])
elif arg0 == 'rss':
return self.serve_rss(request.args[1:])
elif arg0 == 'csv':
return self.serve_csv(request.args[1:])
elif arg0 == 'xml':
return self.serve_xml(request.args[1:])
elif arg0 == 'json':
return self.serve_json(request.args[1:])
elif arg0 == 'jsonrpc':
return self.serve_jsonrpc()
elif arg0 == 'jsonrpc2':
return self.serve_jsonrpc2()
elif arg0 == 'xmlrpc':
return self.serve_xmlrpc()
elif arg0 == 'amfrpc':
return self.serve_amfrpc()
elif arg0 == 'amfrpc3':
return self.serve_amfrpc(3)
elif arg0 == 'soap':
return self.serve_soap()
else:
self.error()
def error(self):
raise HTTP(404, "Object does not exist")
# we make this a method so that subclasses can override it if they want to do more specific argument-checking
# but the default implmentation is the simplest: just pass the arguments we got, with no checking
def call_service_function(self, f, *a, **b):
if self.check_args:
return universal_caller(f, *a, **b)
else:
return f(*a, **b)
def completion(callback):
    """
    Executes a task on completion of the called action.

    Example:
        Use as::

            from gluon.tools import completion
            @completion(lambda d: logging.info(repr(d)))
            def index():
                return dict(message='hello')

    It logs the output of the function every time input is called.
    The argument of completion is executed in a new thread.
    """
    def decorator(func):
        def wrapper(*args, **kwargs):
            # `result` stays None if func raises; the callback always runs
            # (in its own thread) thanks to the finally clause.
            result = None
            try:
                result = func(*args, **kwargs)
                return result
            finally:
                thread.start_new_thread(callback, (result,))
        return wrapper
    return decorator
def prettydate(d, T=lambda x: x, utc=False):
    """Return a human-friendly relative description of *d*.

    Args:
        d: a ``datetime.datetime`` or ``datetime.date``; falsy values give
            ``''`` and any other type gives ``'[invalid date]'``.
        T: translation callable applied to the message template.
        utc: compare against UTC now instead of local now.
    """
    reference = datetime.datetime.utcnow() if utc else datetime.datetime.now()
    if isinstance(d, datetime.datetime):
        delta = reference - d
    elif isinstance(d, datetime.date):
        delta = reference.date() - d
    elif not d:
        return ''
    else:
        return '[invalid date]'
    if delta.days < 0:
        suffix = ' from now'
        delta = -delta
    else:
        suffix = ' ago'
    days, seconds = delta.days, delta.seconds
    # Guard clauses, largest unit first; thresholds deliberately coarse
    # (e.g. a "month" starts at 27 days, i.e. roughly four weeks).
    if days >= 2 * 365:
        return T('%d years' + suffix) % int(days // 365)
    if days >= 365:
        return T('1 year' + suffix)
    if days >= 60:
        return T('%d months' + suffix) % int(days // 30)
    if days >= 27:  # 4 weeks ugly
        return T('1 month' + suffix)
    if days >= 14:
        return T('%d weeks' + suffix) % int(days // 7)
    if days >= 7:
        return T('1 week' + suffix)
    if days > 1:
        return T('%d days' + suffix) % days
    if days == 1:
        return T('1 day' + suffix)
    if seconds >= 2 * 60 * 60:
        return T('%d hours' + suffix) % int(seconds // 3600)
    if seconds >= 60 * 60:
        return T('1 hour' + suffix)
    if seconds >= 2 * 60:
        return T('%d minutes' + suffix) % int(seconds // 60)
    if seconds >= 60:
        return T('1 minute' + suffix)
    if seconds > 1:
        return T('%d seconds' + suffix) % seconds
    if seconds == 1:
        return T('1 second' + suffix)
    return T('now')
def test_thread_separation():
    """Doctest helper: show that PluginManager state is per-thread.

    A worker thread stores ``x = 7`` in its own PluginManager while the
    main thread stores ``x = 5``; the function returns the main thread's
    value, proving the two threads did not share an instance.
    """
    def worker():
        manager = PluginManager()
        start_gate.acquire()
        done_gate.acquire()
        manager.x = 7
        start_gate.release()
        done_gate.release()
    start_gate = thread.allocate_lock()
    done_gate = thread.allocate_lock()
    # Hold the first lock so the worker cannot write before the main
    # thread has stored its own value.
    start_gate.acquire()
    thread.start_new_thread(worker, ())
    main_manager = PluginManager()
    main_manager.x = 5
    start_gate.release()
    done_gate.acquire()
    return main_manager.x
class PluginManager(object):
    """
    Plugin Manager is similar to a storage object but it is a single level
    singleton. This means that multiple instances within the same thread share
    the same attributes.
    Its constructor is also special. The first argument is the name of the
    plugin you are defining.
    The named arguments are parameters needed by the plugin with default values.
    If the parameters were previous defined, the old values are used.

    Example:
        in some general configuration file::

            plugins = PluginManager()
            plugins.me.param1=3

        within the plugin model::

            _ = PluginManager('me',param1=5,param2=6,param3=7)

        where the plugin is used::

            >>> print(plugins.me.param1)
            3
            >>> print(plugins.me.param2)
            6
            >>> plugins.me.param3 = 8
            >>> print(plugins.me.param3)
            8

    Here are some tests::

        >>> a=PluginManager()
        >>> a.x=6
        >>> b=PluginManager('check')
        >>> print(b.x)
        6
        >>> b=PluginManager() # reset settings
        >>> print(b.x)
        <Storage {}>
        >>> b.x=7
        >>> print(a.x)
        7
        >>> a.y.z=8
        >>> print(b.y.z)
        8
        >>> test_thread_separation()
        5
        >>> plugins=PluginManager('me',db='mydb')
        >>> print(plugins.me.db)
        mydb
        >>> print('me' in plugins)
        True
        >>> print(plugins.me.installed)
        True

    """
    # Shared map of thread id -> the singleton instance for that thread.
    instances = {}

    def __new__(cls, *a, **b):
        # One instance per thread, keyed by the thread identifier; the
        # lock guards the check-then-insert on cls.instances.
        id = thread.get_ident()
        lock = thread.allocate_lock()
        try:
            lock.acquire()
            try:
                return cls.instances[id]
            except KeyError:
                instance = object.__new__(cls, *a, **b)
                cls.instances[id] = instance
                return instance
        finally:
            lock.release()

    def __init__(self, plugin=None, **defaults):
        # Calling with no plugin name resets all per-thread settings.
        if not plugin:
            self.__dict__.clear()
        settings = self.__getattr__(plugin)
        settings.installed = True
        # Previously-set values win over the defaults supplied here.
        settings.update(
            (k, v) for k, v in defaults.items() if k not in settings)

    def __getattr__(self, key):
        # Auto-create an empty Storage for any plugin name on first access.
        if key not in self.__dict__:
            self.__dict__[key] = Storage()
        return self.__dict__[key]

    def keys(self):
        return list(self.__dict__.keys())

    def __contains__(self, key):
        return key in self.__dict__
class Expose(object):
    def __init__(self, base=None, basename=None, extensions=None,
                 allow_download=True, follow_symlink_out=False):
        """
        Examples:
            Use as::

                def static():
                    return dict(files=Expose())

            or::

                def static():
                    path = os.path.join(request.folder,'static','public')
                    return dict(files=Expose(path,basename='public'))

        Args:
            extensions: an optional list of file extensions for filtering
                displayed files: e.g. `['.py', '.jpg']`
            allow_download: whether to allow downloading selected files
            follow_symlink_out: whether to follow symbolic links that points
                points outside of `base`.
                Warning: setting this to `True` might pose a security risk
                    if you don't also have complete control over writing
                    and file creation under `base`.

        Raises:
            HTTP: 404 when the requested path does not exist; 401 when the
                resolved path falls outside `base` (path traversal or
                disallowed symlink escape).
        """
        self.follow_symlink_out = follow_symlink_out
        # Canonicalize `base` exactly once via normalize_path(), which
        # honours follow_symlink_out (normpath vs realpath).  in_base()
        # normalizes candidate paths the same way, so both sides of the
        # containment check stay consistent.  (Previously a second, plain
        # os.path.realpath() assignment clobbered this value, defeating
        # follow_symlink_out=True and duplicating the basename setup.)
        self.base = self.normalize_path(
            base or os.path.join(current.request.folder, 'static'))
        self.basename = basename or current.request.function
        if current.request.raw_args:
            self.args = [arg for arg in current.request.raw_args.split('/') if arg]
        else:
            self.args = [arg for arg in current.request.args if arg]
        filename = os.path.join(self.base, *self.args)
        if not os.path.exists(filename):
            raise HTTP(404, "FILE NOT FOUND")
        # Reject requests that escape the base folder (e.g. '..' segments,
        # or symlinks pointing outside when follow_symlink_out is off).
        if not self.in_base(filename):
            raise HTTP(401, "NOT AUTHORIZED")
        if allow_download and not os.path.isdir(filename):
            current.response.headers['Content-Type'] = contenttype(filename)
            raise HTTP(200, open(filename, 'rb'), **current.response.headers)
        self.path = path = os.path.join(filename, '*')
        dirname_len = len(path) - 1
        # Hide private entries and symlinks that point outside base.
        allowed = [f for f in sorted(glob.glob(path))
                   if not any([self.isprivate(f), self.issymlink_out(f)])]
        self.folders = [f[dirname_len:]
                        for f in allowed if os.path.isdir(f)]
        self.filenames = [f[dirname_len:]
                          for f in allowed if not os.path.isdir(f)]
        if 'README' in self.filenames:
            with open(os.path.join(filename, 'README')) as f:
                readme = f.read()
            self.paragraph = MARKMIN(readme)
        else:
            self.paragraph = None
        if extensions:
            self.filenames = [f for f in self.filenames
                              if os.path.splitext(f)[-1] in extensions]

    def breadcrumbs(self, basename):
        """Build a SPAN of links, one per path segment, rooted at basename."""
        path = []
        span = SPAN()
        span.append(A(basename, _href=URL()))
        for arg in self.args:
            span.append('/')
            path.append(arg)
            span.append(A(arg, _href=URL(args='/'.join(path))))
        return span

    def table_folders(self):
        """Return an HTML table of sub-folders, or '' when there are none."""
        if self.folders:
            return SPAN(H3('Folders'),
                        TABLE(*[TR(TD(A(folder, _href=URL(args=self.args + [folder]))))
                                for folder in self.folders], **dict(_class="table")))
        return ''

    @staticmethod
    def __in_base(subdir, basedir, sep=os.path.sep):
        """True if subdir/ is under basedir/"""
        s = lambda f: '%s%s' % (f.rstrip(sep), sep)  # f -> f/
        # The trailing '/' is for the case of '/foobar' in_base of '/foo':
        # - because '/foobar' starts with '/foo'
        # - but '/foobar/' doesn't start with '/foo/'
        return s(subdir).startswith(s(basedir))

    def in_base(self, f):
        """True if f/ is under self.base/

        where f and self.base are normalized paths
        """
        return self.__in_base(self.normalize_path(f), self.base)

    def normalize_path(self, f):
        # realpath resolves symlinks (the safe default); normpath only
        # collapses '..'/'.' so symlinks may still escape base when
        # follow_symlink_out is enabled.
        if self.follow_symlink_out:
            return os.path.normpath(f)
        else:
            return os.path.realpath(f)

    def issymlink_out(self, f):
        """True if f is a symlink and is pointing outside of self.base"""
        return os.path.islink(f) and not self.in_base(f)

    @staticmethod
    def isprivate(f):
        """True for paths that must never be listed or served."""
        # remove '/private' prefix to deal with symbolic links on OSX
        if f.startswith('/private/'):
            f = f[8:]
        return 'private' in f or f.startswith('.') or f.endswith('~')

    @staticmethod
    def isimage(f):
        """True when the file extension is a known image type."""
        return os.path.splitext(f)[-1].lower() in (
            '.png', '.jpg', '.jpeg', '.gif', '.tiff')

    def table_files(self, width=160):
        """Return an HTML table of files (with inline image previews)."""
        if self.filenames:
            return SPAN(H3('Files'),
                        TABLE(*[TR(TD(A(f, _href=URL(args=self.args + [f]))),
                                   TD(IMG(_src=URL(args=self.args + [f]),
                                          _style='max-width:%spx' % width)
                                      if width and self.isimage(f) else ''))
                                for f in self.filenames], **dict(_class="table")))
        return ''

    def xml(self):
        """Render the whole listing (breadcrumbs, README, folders, files)."""
        return DIV(
            H2(self.breadcrumbs(self.basename)),
            self.paragraph or '',
            self.table_folders(),
            self.table_files()).xml()
class Wiki(object):
everybody = 'everybody'
rows_page = 25
def markmin_base(self, body):
return MARKMIN(body, extra=self.settings.extra,
url=True, environment=self.env,
autolinks=lambda link: expand_one(link, {})).xml()
def render_tags(self, tags):
return DIV(
_class='w2p_wiki_tags',
*[A(t.strip(), _href=URL(args='_search', vars=dict(q=t)))
for t in tags or [] if t.strip()])
def markmin_render(self, page):
return self.markmin_base(page.body) + self.render_tags(page.tags).xml()
def html_render(self, page):
html = page.body
# @///function -> http://..../function
html = replace_at_urls(html, URL)
# http://...jpg -> <img src="http://...jpg/> or embed
html = replace_autolinks(html, lambda link: expand_one(link, {}))
# @{component:name} -> <script>embed component name</script>
html = replace_components(html, self.env)
html = html + self.render_tags(page.tags).xml()
return html
@staticmethod
def component(text):
"""
In wiki docs allows `@{component:controller/function/args}`
which renders as a `LOAD(..., ajax=True)`
"""
items = text.split('/')
controller, function, args = items[0], items[1], items[2:]
return LOAD(controller, function, args=args, ajax=True).xml()
def get_renderer(self):
if isinstance(self.settings.render, basestring):
r = getattr(self, "%s_render" % self.settings.render)
elif callable(self.settings.render):
r = self.settings.render
elif isinstance(self.settings.render, dict):
def custom_render(page):
if page.render:
if page.render in self.settings.render.keys():
my_render = self.settings.render[page.render]
else:
my_render = getattr(self, "%s_render" % page.render)
else:
my_render = self.markmin_render
return my_render(page)
r = custom_render
else:
raise ValueError(
"Invalid render type %s" % type(self.settings.render))
return r
    def __init__(self, auth, env=None, render='markmin',
                 manage_permissions=False, force_prefix='',
                 restrict_search=False, extra=None,
                 menu_groups=None, templates=None, migrate=True,
                 controller=None, function=None, groups=None):
        settings = self.settings = auth.settings.wiki
        """
        Args:
            render:
                - "markmin"
                - "html"
                - `<function>` : Sets a custom render function
                - `dict(html=<function>, markmin=...)`: dict(...) allows
                  multiple custom render functions
                - "multiple" : Is the same as `{}`. It enables per-record
                  formats using builtins
        """
        # NOTE: the string above is a plain expression statement (not a
        # docstring) kept in place as documentation of `render`.
        engines = set(['markmin', 'html'])
        show_engine = False
        if render == "multiple":
            render = {}
        if isinstance(render, dict):
            # Custom engines become selectable per record; expose the
            # 'render' field in forms in that case.
            [engines.add(key) for key in render]
            show_engine = True
        settings.render = render
        perms = settings.manage_permissions = manage_permissions

        settings.force_prefix = force_prefix
        settings.restrict_search = restrict_search
        settings.extra = extra or {}
        settings.menu_groups = menu_groups
        settings.templates = templates
        settings.controller = controller
        settings.function = function
        settings.groups = list(auth.user_groups.values()) \
            if groups is None else groups

        db = auth.db
        self.env = env or {}
        self.env['component'] = Wiki.component
        self.auth = auth
        self.wiki_menu_items = None

        if self.auth.user:
            # force_prefix may contain %-placeholders filled from the user
            # record (e.g. '%(id)s-').
            self.settings.force_prefix = force_prefix % self.auth.user
        else:
            self.settings.force_prefix = force_prefix

        self.host = current.request.env.http_host

        table_definitions = [
            ('wiki_page', {
                'args': [
                    Field('slug',
                          requires=[IS_SLUG(),
                                    IS_NOT_IN_DB(db, 'wiki_page.slug')],
                          writable=False),
                    Field('title', length=255, unique=True),
                    Field('body', 'text', notnull=True),
                    Field('tags', 'list:string'),
                    Field('can_read', 'list:string',
                          writable=perms,
                          readable=perms,
                          default=[Wiki.everybody]),
                    Field('can_edit', 'list:string',
                          writable=perms, readable=perms,
                          default=[Wiki.everybody]),
                    Field('changelog'),
                    Field('html', 'text',
                          compute=self.get_renderer(),
                          readable=False, writable=False),
                    Field('render', default="markmin",
                          readable=show_engine,
                          writable=show_engine,
                          requires=IS_EMPTY_OR(
                              IS_IN_SET(engines))),
                    auth.signature],
                'vars': {'format': '%(title)s', 'migrate': migrate}}),
            ('wiki_tag', {
                'args': [
                    Field('name'),
                    Field('wiki_page', 'reference wiki_page'),
                    auth.signature],
                'vars':{'format': '%(title)s', 'migrate': migrate}}),
            ('wiki_media', {
                'args': [
                    Field('wiki_page', 'reference wiki_page'),
                    Field('title', required=True),
                    Field('filename', 'upload', required=True),
                    auth.signature],
                'vars': {'format': '%(title)s', 'migrate': migrate}}),
        ]

        # define only non-existent tables
        for key, value in table_definitions:
            args = []
            if key not in db.tables():
                # look for wiki_ extra fields in auth.settings
                extra_fields = auth.settings.extra_fields
                if extra_fields:
                    if key in extra_fields:
                        if extra_fields[key]:
                            for field in extra_fields[key]:
                                args.append(field)
                args += value['args']
                db.define_table(key, *args, **value['vars'])

        if self.settings.templates is None and not self.settings.manage_permissions:
            self.settings.templates = \
                db.wiki_page.tags.contains('template') & db.wiki_page.can_read.contains('everybody')

        def update_tags_insert(page, id, db=db):
            # After-insert hook: mirror page.tags into wiki_tag rows.
            for tag in page.tags or []:
                tag = tag.strip().lower()
                if tag:
                    db.wiki_tag.insert(name=tag, wiki_page=id)

        def update_tags_update(dbset, page, db=db):
            # After-update hook: drop and rebuild the page's wiki_tag rows.
            page = dbset.select(limitby=(0, 1)).first()
            db(db.wiki_tag.wiki_page == page.id).delete()
            for tag in page.tags or []:
                tag = tag.strip().lower()
                if tag:
                    db.wiki_tag.insert(name=tag, wiki_page=page.id)
        db.wiki_page._after_insert.append(update_tags_insert)
        db.wiki_page._after_update.append(update_tags_update)

        # Auto-enroll the current (non-GAE-anonymous) user as wiki_editor
        # when no custom groups configuration was supplied.
        if (auth.user and
            check_credentials(current.request, gae_login=False) and
            'wiki_editor' not in auth.user_groups.values() and
            self.settings.groups == list(auth.user_groups.values())):
            group = db.auth_group(role='wiki_editor')
            gid = group.id if group else db.auth_group.insert(
                role='wiki_editor')
            auth.add_membership(gid)

        settings.lock_keys = True
# WIKI ACCESS POLICY
def not_authorized(self, page=None):
raise HTTP(401)
def can_read(self, page):
if 'everybody' in page.can_read or not self.settings.manage_permissions:
return True
elif self.auth.user:
groups = self.settings.groups
if ('wiki_editor' in groups or
set(groups).intersection(set(page.can_read + page.can_edit)) or
page.created_by == self.auth.user.id):
return True
return False
def can_edit(self, page=None):
if not self.auth.user:
redirect(self.auth.settings.login_url)
groups = self.settings.groups
return ('wiki_editor' in groups or
(page is None and 'wiki_author' in groups) or
page is not None and (set(groups).intersection(set(page.can_edit)) or
page.created_by == self.auth.user.id))
def can_manage(self):
if not self.auth.user:
return False
groups = self.settings.groups
return 'wiki_editor' in groups
def can_search(self):
return True
def can_see_menu(self):
if self.auth.user:
if self.settings.menu_groups is None:
return True
else:
groups = self.settings.groups
if any(t in self.settings.menu_groups for t in groups):
return True
return False
# END POLICY
def automenu(self):
"""adds the menu if not present"""
if (not self.wiki_menu_items and self.settings.controller and self.settings.function):
self.wiki_menu_items = self.menu(self.settings.controller,
self.settings.function)
current.response.menu += self.wiki_menu_items
    def __call__(self):
        # Entry point for the wiki controller action: dispatches on the
        # first URL arg.  A numeric arg serves media; '_'-prefixed args are
        # special actions; anything else is read as a page slug.
        request = current.request
        settings = self.settings
        settings.controller = settings.controller or request.controller
        settings.function = settings.function or request.function
        self.automenu()

        zero = request.args(0) or 'index'
        if zero and zero.isdigit():
            return self.media(int(zero))
        elif not zero or not zero.startswith('_'):
            return self.read(zero)
        elif zero == '_edit':
            return self.edit(request.args(1) or 'index', request.args(2) or 0)
        elif zero == '_editmedia':
            return self.editmedia(request.args(1) or 'index')
        elif zero == '_create':
            return self.create()
        elif zero == '_pages':
            return self.pages()
        elif zero == '_search':
            return self.search()
        elif zero == '_recent':
            # Pages created by a given author, newest first, paginated.
            ipage = int(request.vars.page or 0)
            query = self.auth.db.wiki_page.created_by == request.args(
                1, cast=int)
            return self.search(query=query,
                               orderby=~self.auth.db.wiki_page.created_on,
                               limitby=(ipage * self.rows_page,
                                        (ipage + 1) * self.rows_page),
                               )
        elif zero == '_cloud':
            return self.cloud()
        elif zero == '_preview':
            return self.preview(self.get_renderer())
    def first_paragraph(self, page):
        # Return the first non-heading, non-blank paragraph of the page
        # body (paragraphs are separated by blank lines), or ''.
        # NOTE(review): the guard reads `if not self.can_read(page)`, i.e.
        # the preview is produced only for pages the user can NOT read —
        # this looks inverted; confirm intent before changing behavior.
        if not self.can_read(page):
            mm = (page.body or '').replace('\r', '')
            ps = [p for p in mm.split('\n\n') if not p.startswith('#') and p.strip()]
            if ps:
                return ps[0]
        return ''
def fix_hostname(self, body):
return (body or '').replace('://HOSTNAME', '://%s' % self.host)
def read(self, slug, force_render=False):
if slug in '_cloud':
return self.cloud()
elif slug in '_search':
return self.search()
page = self.auth.db.wiki_page(slug=slug)
if page and (not self.can_read(page)):
return self.not_authorized(page)
if current.request.extension == 'html':
if not page:
url = URL(args=('_create', slug))
return dict(content=A('Create page "%s"' % slug, _href=url, _class="btn"))
else:
html = page.html if not force_render else self.get_renderer()(page)
content = XML(self.fix_hostname(html))
return dict(title=page.title,
slug=page.slug,
page=page,
content=content,
tags=page.tags,
created_on=page.created_on,
modified_on=page.modified_on)
elif current.request.extension == 'load':
return self.fix_hostname(page.html) if page else ''
else:
if not page:
raise HTTP(404)
else:
return dict(title=page.title,
slug=page.slug,
page=page,
content=page.body,
tags=page.tags,
created_on=page.created_on,
modified_on=page.modified_on)
    def edit(self, slug, from_template=0):
        """Edit (or create) the page *slug*; *from_template* is an
        optional wiki_page id whose body seeds a brand new page.

        Returns a dict with the SQLFORM plus the inline javascript that
        implements the Preview / Media toggle buttons.
        """
        auth = self.auth
        db = auth.db
        page = db.wiki_page(slug=slug)
        if not self.can_edit(page):
            return self.not_authorized(page)
        title_guess = ' '.join(c.capitalize() for c in slug.split('-'))
        if not page:
            # creating a new page: non-managers must use the forced prefix
            if not (self.can_manage() or
                    slug.startswith(self.settings.force_prefix)):
                current.session.flash = 'slug must have "%s" prefix' \
                    % self.settings.force_prefix
                redirect(URL(args=('_create')))
            db.wiki_page.can_read.default = [Wiki.everybody]
            db.wiki_page.can_edit.default = [auth.user_group_role()]
            db.wiki_page.title.default = title_guess
            db.wiki_page.slug.default = slug
            if slug == 'wiki-menu':
                db.wiki_page.body.default = \
                    '- Menu Item > @////index\n- - Submenu > http://web2py.com'
            else:
                # seed the body from the chosen template page, if any
                db.wiki_page.body.default = db(db.wiki_page.id == from_template).select(db.wiki_page.body)[0].body \
                    if int(from_template) > 0 else '## %s\n\npage content' % title_guess
        vars = current.request.post_vars
        if vars.body:
            # store links host-independently; read() restores the host
            vars.body = vars.body.replace('://%s' % self.host, '://HOSTNAME')
        form = SQLFORM(db.wiki_page, page, deletable=True,
                       formstyle='table2cols', showid=False).process()
        if form.deleted:
            current.session.flash = 'page deleted'
            redirect(URL())
        elif form.accepted:
            current.session.flash = 'page created'
            redirect(URL(args=slug))
        # client-side toggle between source editing, rendered preview and
        # (for existing pages) the attached-media grid
        script = """
        jQuery(function() {
            if (!jQuery('#wiki_page_body').length) return;
            var pagecontent = jQuery('#wiki_page_body');
            pagecontent.css('font-family',
                            'Monaco,Menlo,Consolas,"Courier New",monospace');
            var prevbutton = jQuery('<button class="btn nopreview">Preview</button>');
            var preview = jQuery('<div id="preview"></div>').hide();
            var previewmedia = jQuery('<div id="previewmedia"></div>');
            var form = pagecontent.closest('form');
            preview.insertBefore(form);
            prevbutton.insertBefore(form);
            if(%(link_media)s) {
              var mediabutton = jQuery('<button class="btn nopreview">Media</button>');
              mediabutton.insertBefore(form);
              previewmedia.insertBefore(form);
              mediabutton.click(function() {
                if (mediabutton.hasClass('nopreview')) {
                  web2py_component('%(urlmedia)s', 'previewmedia');
                } else {
                  previewmedia.empty();
                }
                mediabutton.toggleClass('nopreview');
              });
            }
            prevbutton.click(function(e) {
                e.preventDefault();
                if (prevbutton.hasClass('nopreview')) {
                    prevbutton.addClass('preview').removeClass(
                        'nopreview').html('Edit Source');
                    try{var wiki_render = jQuery('#wiki_page_render').val()}
                    catch(e){var wiki_render = null;}
                    web2py_ajax_page('post', \
                        '%(url)s', {body: jQuery('#wiki_page_body').val(), \
                            render: wiki_render}, 'preview');
                    form.fadeOut('fast', function() {preview.fadeIn()});
                } else {
                    prevbutton.addClass(
                        'nopreview').removeClass('preview').html('Preview');
                    preview.fadeOut('fast', function() {form.fadeIn()});
                }
            })
        })
        """ % dict(url=URL(args=('_preview', slug)), link_media=('true' if page else 'false'),
                   urlmedia=URL(extension='load',
                                args=('_editmedia', slug),
                                vars=dict(embedded=1)))
        return dict(content=TAG[''](form, SCRIPT(script)))
    def editmedia(self, slug):
        """Grid for managing the media records attached to page *slug*."""
        auth = self.auth
        db = auth.db
        page = db.wiki_page(slug=slug)
        if not (page and self.can_edit(page)):
            return self.not_authorized(page)
        # represent each media id as the markmin markup that embeds it
        self.auth.db.wiki_media.id.represent = lambda id, row: \
            id if not row.filename else \
            SPAN('@////%i/%s.%s' % (id, IS_SLUG.urlify(row.title.split('.')[0]), row.filename.split('.')[-1]))
        self.auth.db.wiki_media.wiki_page.default = page.id
        self.auth.db.wiki_media.wiki_page.writable = False
        links = []
        csv = True
        create = True
        if current.request.vars.embedded:
            # embedded inside the edit view: replace csv/create with a
            # "copy into source" link that pastes the embed markup
            script = "var c = jQuery('#wiki_page_body'); c.val(c.val() + jQuery('%s').text()); return false;"
            fragment = self.auth.db.wiki_media.id.represent
            csv = False
            create = False
            links = [lambda row: A('copy into source', _href='#', _onclick=script % (fragment(row.id, row)))]
        content = SQLFORM.grid(
            self.auth.db.wiki_media.wiki_page == page.id,
            orderby=self.auth.db.wiki_media.title,
            links=links,
            csv=csv,
            create=create,
            args=['_editmedia', slug],
            user_signature=False)
        return dict(content=content)
    def create(self):
        """Form asking for the slug (and optional template) of a new page.

        On acceptance, redirects to the _edit action for the new slug.
        """
        if not self.can_edit():
            return self.not_authorized()
        db = self.auth.db
        slugs = db(db.wiki_page.id > 0).select(db.wiki_page.id, db.wiki_page.slug)
        options = [OPTION(row.slug, _value=row.id) for row in slugs]
        options.insert(0, OPTION('', _value=''))
        fields = [Field("slug", default=current.request.args(1) or
                        self.settings.force_prefix,
                        requires=(IS_SLUG(), IS_NOT_IN_DB(db, db.wiki_page.slug))), ]
        if self.settings.templates:
            # let the user pick an existing page as the initial content
            fields.append(
                Field("from_template", "reference wiki_page",
                      requires=IS_EMPTY_OR(IS_IN_DB(db(self.settings.templates), db.wiki_page._id, '%(slug)s')),
                      comment=current.T("Choose Template or empty for new Page")))
        form = SQLFORM.factory(*fields, **dict(_class="well"))
        form.element("[type=submit]").attributes["_value"] = \
            current.T("Create Page from Slug")
        if form.process().accepted:
            form.vars.from_template = 0 if not form.vars.from_template else form.vars.from_template
            redirect(URL(args=('_edit', form.vars.slug, form.vars.from_template or 0)))  # added param
        return dict(content=form)
    def pages(self):
        """Administrative grid listing all wiki pages (managers only)."""
        if not self.can_manage():
            return self.not_authorized()
        # show slugs as markmin links and titles as anchors to the page
        self.auth.db.wiki_page.slug.represent = lambda slug, row: SPAN(
            '@////%s' % slug)
        self.auth.db.wiki_page.title.represent = lambda title, row: \
            A(title, _href=URL(args=row.slug))
        wiki_table = self.auth.db.wiki_page
        content = SQLFORM.grid(
            wiki_table,
            fields=[wiki_table.slug,
                    wiki_table.title, wiki_table.tags,
                    wiki_table.can_read, wiki_table.can_edit],
            links=[
                lambda row:
                A('edit', _href=URL(args=('_edit', row.slug)), _class='btn'),
                lambda row:
                A('media', _href=URL(args=('_editmedia', row.slug)), _class='btn')],
            details=False, editable=False, deletable=False, create=False,
            orderby=self.auth.db.wiki_page.title,
            args=['_pages'],
            user_signature=False)
        return dict(content=content)
    def media(self, id):
        """Stream the wiki_media file with the given record *id*.

        Honors page read permissions when manage_permissions is enabled.
        Raises HTTP(404) when the record does not exist.
        """
        request, response, db = current.request, current.response, self.auth.db
        media = db.wiki_media(id)
        if media:
            if self.settings.manage_permissions:
                page = db.wiki_page(media.wiki_page)
                if not self.can_read(page):
                    return self.not_authorized(page)
            # delegate the actual streaming to response.download()
            request.args = [media.filename]
            m = response.download(request, db)
            current.session.forget()  # get rid of the cookie
            response.headers['Last-Modified'] = \
                request.utcnow.strftime("%a, %d %b %Y %H:%M:%S GMT")
            if 'Content-Disposition' in response.headers:
                del response.headers['Content-Disposition']
            # allow the browser to cache the media privately
            response.headers['Pragma'] = 'cache'
            response.headers['Cache-Control'] = 'private'
            return m
        else:
            raise HTTP(404)
    def menu(self, controller='default', function='index'):
        """Build the wiki navigation menu.

        Parses the special 'wiki-menu' page (lines of the form
        '- Title > link', with extra dashes for nesting) into the web2py
        menu structure, then appends the contextual [Wiki] submenu for
        users allowed to see it.
        """
        db = self.auth.db
        request = current.request
        menu_page = db.wiki_page(slug='wiki-menu')
        menu = []
        if menu_page:
            tree = {'': menu}
            # <base> = run of dashes (depth), <title> > <link>
            regex = re.compile('[\r\n\t]*(?P<base>(\s*\-\s*)+)(?P<title>\w.*?)\s+\>\s+(?P<link>\S+)')
            for match in regex.finditer(self.fix_hostname(menu_page.body)):
                base = match.group('base').replace(' ', '')
                title = match.group('title')
                link = match.group('link')
                title_page = None
                if link.startswith('@'):
                    # internal '@/app/controller/function/...' link
                    items = link[2:].split('/')
                    if len(items) > 3:
                        title_page = items[3]
                    link = URL(a=items[0] or None, c=items[1] or controller,
                               f=items[2] or function, args=items[3:])
                # attach under the parent with one fewer leading dash
                parent = tree.get(base[1:], tree[''])
                subtree = []
                tree[base] = subtree
                parent.append((current.T(title),
                               request.args(0) == title_page,
                               link, subtree))
        if self.can_see_menu():
            submenu = []
            menu.append((current.T('[Wiki]'), None, None, submenu))
            if URL() == URL(controller, function):
                # figure out what is currently shown so the submenu can
                # offer the complementary actions (view/edit/editmedia)
                if not str(request.args(0)).startswith('_'):
                    slug = request.args(0) or 'index'
                    mode = 1
                elif request.args(0) == '_edit':
                    slug = request.args(1) or 'index'
                    mode = 2
                elif request.args(0) == '_editmedia':
                    slug = request.args(1) or 'index'
                    mode = 3
                else:
                    mode = 0
                if mode in (2, 3):
                    submenu.append((current.T('View Page'), None,
                                    URL(controller, function, args=slug)))
                if mode in (1, 3):
                    submenu.append((current.T('Edit Page'), None,
                                    URL(controller, function, args=('_edit', slug))))
                if mode in (1, 2):
                    submenu.append((current.T('Edit Page Media'), None,
                                    URL(controller, function, args=('_editmedia', slug))))
            submenu.append((current.T('Create New Page'), None,
                            URL(controller, function, args=('_create'))))
            # Moved next if to inside self.auth.user check
            if self.can_manage():
                submenu.append((current.T('Manage Pages'), None,
                                URL(controller, function, args=('_pages'))))
                submenu.append((current.T('Edit Menu'), None,
                                URL(controller, function, args=('_edit', 'wiki-menu'))))
            # Also moved inside self.auth.user check
            submenu.append((current.T('Search Pages'), None,
                            URL(controller, function, args=('_search'))))
        return menu
    def search(self, tags=None, query=None, cloud=True, preview=True,
               limitby=(0, 100), orderby=None):
        """Search wiki pages by tags and/or an explicit DAL query.

        Args:
            tags: list of tag names; defaults to request.vars.q split on
                commas when neither tags nor query are given
            query: extra DAL query; built from *tags* when None
            cloud: append the tag cloud when nothing was searched
            preview: include the first paragraph of each hit
            limitby, orderby: passed through to the select
        """
        if not self.can_search():
            return self.not_authorized()
        request = current.request
        content = CAT()
        if tags is None and query is None:
            form = FORM(INPUT(_name='q', requires=IS_NOT_EMPTY(),
                              value=request.vars.q),
                        INPUT(_type="submit", _value=current.T('Search')),
                        _method='GET')
            content.append(DIV(form, _class='w2p_wiki_form'))
            if request.vars.q:
                # normalize the comma-separated tag list
                tags = [v.strip() for v in request.vars.q.split(',')]
                tags = [v.lower() for v in tags if v]
        if tags or query is not None:
            db = self.auth.db
            count = db.wiki_tag.wiki_page.count()
            fields = [db.wiki_page.id, db.wiki_page.slug,
                      db.wiki_page.title, db.wiki_page.tags,
                      db.wiki_page.can_read, db.wiki_page.can_edit]
            if preview:
                fields.append(db.wiki_page.body)
            if query is None:
                # match by tag or by title substring
                query = (db.wiki_page.id == db.wiki_tag.wiki_page) &\
                    (db.wiki_tag.name.belongs(tags))
                query = query | db.wiki_page.title.contains(request.vars.q)
            if self.settings.restrict_search and not self.can_manage():
                # non-managers only see their own pages
                query = query & (db.wiki_page.created_by == self.auth.user_id)
            pages = db(query).select(count,
                                     *fields, **dict(orderby=orderby or ~count,
                                                     groupby=reduce(lambda a, b: a | b, fields),
                                                     distinct=True,
                                                     limitby=limitby))
            if request.extension in ('html', 'load'):
                if not pages:
                    content.append(DIV(current.T("No results"),
                                       _class='w2p_wiki_form'))

                def link(t):
                    return A(t, _href=URL(args='_search', vars=dict(q=t)))
                items = [DIV(H3(A(p.wiki_page.title, _href=URL(
                    args=p.wiki_page.slug))),
                    MARKMIN(self.first_paragraph(p.wiki_page))
                    if preview else '',
                    DIV(_class='w2p_wiki_tags',
                        *[link(t.strip()) for t in
                          p.wiki_page.tags or [] if t.strip()]),
                    _class='w2p_wiki_search_item')
                    for p in pages]
                content.append(DIV(_class='w2p_wiki_pages', *items))
            else:
                # non-html extensions get plain dicts, no cloud
                cloud = False
                content = [p.wiki_page.as_dict() for p in pages]
        elif cloud:
            content.append(self.cloud()['content'])
        if request.extension == 'load':
            return content
        return dict(content=content)
    def cloud(self):
        """Return a tag-cloud DIV of the 20 most used tags.

        Font size of each tag scales linearly between the most (a) and
        least (b) frequent tag in the selection.
        """
        db = self.auth.db
        count = db.wiki_tag.wiki_page.count(distinct=True)
        ids = db(db.wiki_tag).select(
            db.wiki_tag.name, count,
            distinct=True,
            groupby=db.wiki_tag.name,
            orderby=~count, limitby=(0, 20))
        if ids:
            # a = highest count, b = lowest count in the top-20 slice
            a, b = ids[0](count), ids[-1](count)

        def style(c):
            STYLE = 'padding:0 0.2em;line-height:%.2fem;font-size:%.2fem'
            size = (1.5 * (c - b) / max(a - b, 1) + 1.3)
            return STYLE % (1.3, size)
        items = []
        for item in ids:
            items.append(A(item.wiki_tag.name,
                           _style=style(item(count)),
                           _href=URL(args='_search',
                                     vars=dict(q=item.wiki_tag.name))))
            items.append(' ')
        return dict(content=DIV(_class='w2p_cloud', *items))
def preview(self, render):
request = current.request
# FIXME: This is an ugly hack to ensure a default render
# engine if not specified (with multiple render engines)
if 'render' not in request.post_vars:
request.post_vars.render = None
return render(request.post_vars)
class Config(object):
    """Read/write a single section of an INI configuration file.

    ``read()`` prefers values cached in the web2py session under
    ``session['settings_<section>']``; ``save()`` writes the options back
    to disk, falling back to caching them in the session on failure.
    """

    def __init__(
        self,
        filename,
        section,
        default_values=None
    ):
        """Load *filename* and ensure *section* exists.

        Args:
            filename: path of the INI file
            section: section name to operate on
            default_values: optional dict of parser defaults
        """
        # None instead of a mutable {} default; behavior is unchanged
        self.config = configparser.ConfigParser(default_values or {})
        self.config.read(filename)
        if not self.config.has_section(section):
            self.config.add_section(section)
        self.section = section
        self.filename = filename

    def read(self):
        """Return the section options as a dict (session cache wins)."""
        if not (isinstance(current.session['settings_%s' % self.section], dict)):
            settings = dict(self.config.items(self.section))
        else:
            settings = current.session['settings_%s' % self.section]
        return settings

    def save(self, options):
        """Persist *options* (iterable of (name, value) pairs).

        Returns True on success; on failure the merged options are cached
        in the session instead and False is returned.
        """
        for option, value in options:
            self.config.set(self.section, option, value)
        try:
            # context manager guarantees the handle is closed; the old
            # code leaked the file object returned by open()
            with open(self.filename, 'w') as fp:
                self.config.write(fp)
            result = True
        except Exception:
            # narrowed from a bare except so SystemExit/KeyboardInterrupt
            # propagate; cache the values in the session as a fallback
            current.session['settings_%s' % self.section] = dict(self.config.items(self.section))
            result = False
        return result
if __name__ == '__main__':
    # run the doctests embedded in this module when executed directly
    import doctest
    doctest.testmod()
| #!/bin/python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Auth, Mail, PluginManager and various utilities
------------------------------------------------
"""
import base64
from functools import reduce
from gluon._compat import pickle, thread, urllib2, Cookie, StringIO, urlencode
from gluon._compat import configparser, MIMEBase, MIMEMultipart, MIMEText, Header
from gluon._compat import Encoders, Charset, long, urllib_quote, iteritems
from gluon._compat import to_bytes, to_native, add_charset, string_types
from gluon._compat import charset_QP, basestring, unicodeT, to_unicode
from gluon._compat import urllib2, urlopen
import datetime
import logging
import sys
import glob
import os
import re
import time
import fnmatch
import traceback
import smtplib
import email.utils
import random
import hmac
import hashlib
import json
from email import message_from_string
from gluon.authapi import AuthAPI
from gluon.contenttype import contenttype
from gluon.storage import Storage, StorageList, Settings, Messages
from gluon.utils import web2py_uuid, compare
from gluon.fileutils import read_file, check_credentials
from gluon import *
from gluon.contrib.autolinks import expand_one
from gluon.contrib.markmin.markmin2html import replace_at_urls
from gluon.contrib.markmin.markmin2html import replace_autolinks
from gluon.contrib.markmin.markmin2html import replace_components
from pydal.objects import Row, Set, Query
import gluon.serializers as serializers
# shortcuts so this module can reference the DAL classes directly
Table = DAL.Table
Field = DAL.Field
# public API of gluon.tools
__all__ = ['Mail', 'Auth', 'Recaptcha2', 'Crud', 'Service', 'Wiki',
           'PluginManager', 'fetch', 'geocode', 'reverse_geocode', 'prettydate']
# mind there are two loggers here (logger and crud.settings.logger)!
logger = logging.getLogger("web2py")
# sentinel default value: lets functions detect "argument not supplied"
DEFAULT = lambda: None
def getarg(position, default=None):
    """Return current.request.args[position], or *default* when the
    index (positive or negative) is out of range."""
    args = current.request.args
    if -len(args) <= position < len(args):
        return args[position]
    return default
def callback(actions, form, tablename=None):
    """Invoke the callback(s) in *actions* with *form*.

    *actions* may be a single callable, a list/tuple of callables, or
    (when *tablename* is given) a dict mapping table names to either.
    """
    if not actions:
        return
    if tablename and isinstance(actions, dict):
        actions = actions.get(tablename, [])
    if not isinstance(actions, (list, tuple)):
        actions = [actions]
    for action in actions:
        action(form)
def validators(*a):
    """Flatten the arguments (scalars, lists or tuples) into one list."""
    flat = []
    for item in a:
        if isinstance(item, (list, tuple)):
            flat.extend(item)
        else:
            flat.append(item)
    return flat
def call_or_redirect(f, *args):
    """Redirect to f(*args) when *f* is callable, otherwise to *f* itself."""
    target = f(*args) if callable(f) else f
    redirect(target)
def replace_id(url, form):
    """Substitute '[id]' in *url* with the form record id.

    Absolute paths and http(s) URLs are returned as-is after the
    substitution; anything else is routed through URL().
    """
    if url:
        url = url.replace('[id]', str(form.vars.id))
        if url.startswith('/') or url.startswith('http'):
            return url
    return URL(url)
REGEX_OPEN_REDIRECT = re.compile(r"^(\w+)?[:]?(/$|//.*|/\\.*|[~]/.*)")


def prevent_open_redirect(url):
    """Return *url* only when it is safe to redirect to, else None.

    Prevents an attacker from supplying an arbitrary absolute URL (for
    example via the _next variable) that would bounce the user off-site.
    Relative URLs pass through untouched; absolute-looking URLs are only
    allowed when they point back at the current request host.
    """
    if not url:
        return None
    if REGEX_OPEN_REDIRECT.match(url):
        # only consult the request host when the URL actually looks
        # absolute (also removes the stray debug print of the host)
        host = current.request.env.http_host
        parts = url.split('/')
        if len(parts) > 2 and parts[2] == host:
            return url
        return None
    return url
class Mail(object):
"""
Class for configuring and sending emails with alternative text / html
body, multiple attachments and encryption support
Works with SMTP and Google App Engine.
Args:
server: SMTP server address in address:port notation
sender: sender email address
login: sender login name and password in login:password notation
or None if no authentication is required
tls: enables/disables encryption (True by default)
In Google App Engine use ::
server='gae'
For sake of backward compatibility all fields are optional and default
to None, however, to be able to send emails at least server and sender
must be specified. They are available under following fields::
mail.settings.server
mail.settings.sender
mail.settings.login
mail.settings.timeout = 60 # seconds (default)
When server is 'logging', email is logged but not sent (debug mode)
Optionally you can use PGP encryption or X509::
mail.settings.cipher_type = None
mail.settings.gpg_home = None
mail.settings.sign = True
mail.settings.sign_passphrase = None
mail.settings.encrypt = True
mail.settings.x509_sign_keyfile = None
mail.settings.x509_sign_certfile = None
mail.settings.x509_sign_chainfile = None
mail.settings.x509_nocerts = False
mail.settings.x509_crypt_certfiles = None
cipher_type : None
gpg - need a python-pyme package and gpgme lib
x509 - smime
gpg_home : you can set a GNUPGHOME environment variable
to specify home of gnupg
sign : sign the message (True or False)
sign_passphrase : passphrase for key signing
encrypt : encrypt the message (True or False). It defaults
to True
... x509 only ...
x509_sign_keyfile : the signers private key filename or
string containing the key. (PEM format)
x509_sign_certfile: the signers certificate filename or
string containing the cert. (PEM format)
x509_sign_chainfile: sets the optional all-in-one file where you
can assemble the certificates of Certification
Authorities (CA) which form the certificate
chain of email certificate. It can be a
string containing the certs to. (PEM format)
x509_nocerts : if True then no attached certificate in mail
x509_crypt_certfiles: the certificates file or strings to encrypt
the messages with can be a file name /
string or a list of file names /
strings (PEM format)
Examples:
Create Mail object with authentication data for remote server::
mail = Mail('example.com:25', 'me@example.com', 'me:password')
Notice for GAE users:
attachments have an automatic content_id='attachment-i' where i is progressive number
in this way the can be referenced from the HTML as <img src="cid:attachment-0" /> etc.
"""
    class Attachment(MIMEBase):
        """
        Email attachment

        Args:
            payload: path to file or file-like object with read() method
            filename: name of the attachment stored in message; if set to
                None, it will be fetched from payload path; file-like
                object payload must have explicit filename specified
            content_id: id of the attachment; automatically contained within
                `<` and `>`
            content_type: content type of the attachment; if set to None,
                it will be fetched from filename using gluon.contenttype
                module
            encoding: encoding of all strings passed to this function (except
                attachment body)

        Content ID is used to identify attachments within the html body;
        in example, attached image with content ID 'photo' may be used in
        html message as a source of img tag `<img src="cid:photo" />`.

        Example::
            Create attachment from text file::

                attachment = Mail.Attachment('/path/to/file.txt')

                Content-Type: text/plain
                MIME-Version: 1.0
                Content-Disposition: attachment; filename="file.txt"
                Content-Transfer-Encoding: base64

                SOMEBASE64CONTENT=

            Create attachment from image file with custom filename and cid::

                attachment = Mail.Attachment('/path/to/file.png',
                                             filename='photo.png',
                                             content_id='photo')

                Content-Type: image/png
                MIME-Version: 1.0
                Content-Disposition: attachment; filename="photo.png"
                Content-Id: <photo>
                Content-Transfer-Encoding: base64

                SOMEOTHERBASE64CONTENT=
        """

        def __init__(
            self,
            payload,
            filename=None,
            content_id=None,
            content_type=None,
            encoding='utf-8'):
            # a string payload is a path; otherwise it must be file-like
            if isinstance(payload, str):
                if filename is None:
                    filename = os.path.basename(payload)
                payload = read_file(payload, 'rb')
            else:
                if filename is None:
                    raise Exception('Missing attachment name')
                payload = payload.read()
            # FIXME PY3 can be used to_native?
            filename = filename.encode(encoding)
            if content_type is None:
                content_type = contenttype(filename)
            # keep the originals around for callers (e.g. the GAE path)
            self.my_filename = filename
            self.my_payload = payload
            MIMEBase.__init__(self, *content_type.split('/', 1))
            self.set_payload(payload)
            self['Content-Disposition'] = Header('attachment; filename="%s"' % to_native(filename, encoding), 'utf-8')
            if content_id is not None:
                self['Content-Id'] = '<%s>' % to_native(content_id, encoding)
            Encoders.encode_base64(self)
    def __init__(self, server=None, sender=None, login=None, tls=True):
        """Initialize mail settings with defaults.

        Args:
            server: SMTP server in address:port notation ('gae' and
                'logging' are special values; see class docstring)
            sender: default sender email address
            login: 'login:password' or None when no auth is required
            tls: enable TLS encryption (default True)
        """
        settings = self.settings = Settings()
        settings.server = server
        settings.sender = sender
        settings.login = login
        settings.tls = tls
        settings.timeout = 5  # seconds
        settings.hostname = None
        settings.ssl = False
        # encryption/signing settings (gpg or x509); see class docstring
        settings.cipher_type = None
        settings.gpg_home = None
        settings.sign = True
        settings.sign_passphrase = None
        settings.encrypt = True
        settings.x509_sign_keyfile = None
        settings.x509_sign_certfile = None
        settings.x509_sign_chainfile = None
        settings.x509_nocerts = False
        settings.x509_crypt_certfiles = None
        settings.debug = False
        settings.lock_keys = True
        # outcome of the last send(): raw backend result and error message
        self.result = {}
        self.error = None
def send(self,
to,
subject='[no subject]',
message='[no message]',
attachments=None,
cc=None,
bcc=None,
reply_to=None,
sender=None,
encoding='utf-8',
raw=False,
headers={},
from_address=None,
cipher_type=None,
sign=None,
sign_passphrase=None,
encrypt=None,
x509_sign_keyfile=None,
x509_sign_chainfile=None,
x509_sign_certfile=None,
x509_crypt_certfiles=None,
x509_nocerts=None
):
"""
Sends an email using data specified in constructor
Args:
to: list or tuple of receiver addresses; will also accept single
object
subject: subject of the email
message: email body text; depends on type of passed object:
- if 2-list or 2-tuple is passed: first element will be
source of plain text while second of html text;
- otherwise: object will be the only source of plain text
and html source will be set to None
If text or html source is:
- None: content part will be ignored,
- string: content part will be set to it,
- file-like object: content part will be fetched from it using
it's read() method
attachments: list or tuple of Mail.Attachment objects; will also
accept single object
cc: list or tuple of carbon copy receiver addresses; will also
accept single object
bcc: list or tuple of blind carbon copy receiver addresses; will
also accept single object
reply_to: address to which reply should be composed
encoding: encoding of all strings passed to this method (including
message bodies)
headers: dictionary of headers to refine the headers just before
sending mail, e.g. `{'X-Mailer' : 'web2py mailer'}`
from_address: address to appear in the 'From:' header, this is not
the envelope sender. If not specified the sender will be used
cipher_type :
gpg - need a python-pyme package and gpgme lib
x509 - smime
gpg_home : you can set a GNUPGHOME environment variable
to specify home of gnupg
sign : sign the message (True or False)
sign_passphrase : passphrase for key signing
encrypt : encrypt the message (True or False). It defaults to True.
... x509 only ...
x509_sign_keyfile : the signers private key filename or
string containing the key. (PEM format)
x509_sign_certfile: the signers certificate filename or
string containing the cert. (PEM format)
x509_sign_chainfile: sets the optional all-in-one file where you
can assemble the certificates of Certification
Authorities (CA) which form the certificate
chain of email certificate. It can be a
string containing the certs to. (PEM format)
x509_nocerts : if True then no attached certificate in mail
x509_crypt_certfiles: the certificates file or strings to encrypt
the messages with can be a file name / string or
a list of file names / strings (PEM format)
Examples:
Send plain text message to single address::
mail.send('you@example.com',
'Message subject',
'Plain text body of the message')
Send html message to single address::
mail.send('you@example.com',
'Message subject',
'<html>Plain text body of the message</html>')
Send text and html message to three addresses (two in cc)::
mail.send('you@example.com',
'Message subject',
('Plain text body', '<html>html body</html>'),
cc=['other1@example.com', 'other2@example.com'])
Send html only message with image attachment available from the
message by 'photo' content id::
mail.send('you@example.com',
'Message subject',
(None, '<html><img src="cid:photo" /></html>'),
Mail.Attachment('/path/to/photo.jpg'
content_id='photo'))
Send email with two attachments and no body text::
mail.send('you@example.com,
'Message subject',
None,
[Mail.Attachment('/path/to/fist.file'),
Mail.Attachment('/path/to/second.file')])
Returns:
True on success, False on failure.
Before return, method updates two object's fields:
- self.result: return value of smtplib.SMTP.sendmail() or GAE's
mail.send_mail() method
- self.error: Exception message or None if above was successful
"""
# We don't want to use base64 encoding for unicode mail
add_charset('utf-8', charset_QP, charset_QP, 'utf-8')
def encode_header(key):
if [c for c in key if 32 > ord(c) or ord(c) > 127]:
return Header(key.encode('utf-8'), 'utf-8')
else:
return key
# encoded or raw text
def encoded_or_raw(text):
if raw:
text = encode_header(text)
return text
sender = sender or self.settings.sender
if not isinstance(self.settings.server, str):
raise Exception('Server address not specified')
if not isinstance(sender, str):
raise Exception('Sender address not specified')
if not raw and attachments:
# Use multipart/mixed if there is attachments
payload_in = MIMEMultipart('mixed')
elif raw:
# no encoding configuration for raw messages
if not isinstance(message, basestring):
message = message.read()
if isinstance(message, unicodeT):
text = message.encode('utf-8')
elif not encoding == 'utf-8':
text = message.decode(encoding).encode('utf-8')
else:
text = message
# No charset passed to avoid transport encoding
# NOTE: some unicode encoded strings will produce
# unreadable mail contents.
payload_in = MIMEText(text)
if to:
if not isinstance(to, (list, tuple)):
to = [to]
else:
raise Exception('Target receiver address not specified')
if reply_to:
if not isinstance(reply_to, (list, tuple)):
reply_to = [reply_to]
if cc:
if not isinstance(cc, (list, tuple)):
cc = [cc]
if bcc:
if not isinstance(bcc, (list, tuple)):
bcc = [bcc]
if message is None:
text = html = None
elif isinstance(message, (list, tuple)):
text, html = message
elif message.strip().startswith('<html') and \
message.strip().endswith('</html>'):
text = self.settings.server == 'gae' and message or None
html = message
else:
text = message
html = None
if (text is not None or html is not None) and (not raw):
if text is not None:
if not isinstance(text, basestring):
text = text.read()
if isinstance(text, unicodeT):
text = text.encode('utf-8')
elif not encoding == 'utf-8':
text = text.decode(encoding).encode('utf-8')
if html is not None:
if not isinstance(html, basestring):
html = html.read()
if isinstance(html, unicodeT):
html = html.encode('utf-8')
elif not encoding == 'utf-8':
html = html.decode(encoding).encode('utf-8')
# Construct mime part only if needed
if text is not None and html:
# We have text and html we need multipart/alternative
attachment = MIMEMultipart('alternative')
attachment.attach(MIMEText(text, _charset='utf-8'))
attachment.attach(MIMEText(html, 'html', _charset='utf-8'))
elif text is not None:
attachment = MIMEText(text, _charset='utf-8')
elif html:
attachment = MIMEText(html, 'html', _charset='utf-8')
if attachments:
# If there is attachments put text and html into
# multipart/mixed
payload_in.attach(attachment)
else:
# No attachments no multipart/mixed
payload_in = attachment
if (attachments is None) or raw:
pass
elif isinstance(attachments, (list, tuple)):
for attachment in attachments:
payload_in.attach(attachment)
else:
payload_in.attach(attachments)
attachments = [attachments]
#######################################################
# CIPHER #
#######################################################
cipher_type = cipher_type or self.settings.cipher_type
sign = sign if sign is not None else self.settings.sign
sign_passphrase = sign_passphrase or self.settings.sign_passphrase
encrypt = encrypt if encrypt is not None else self.settings.encrypt
#######################################################
# GPGME #
#######################################################
if cipher_type == 'gpg':
if self.settings.gpg_home:
# Set GNUPGHOME environment variable to set home of gnupg
import os
os.environ['GNUPGHOME'] = self.settings.gpg_home
if not sign and not encrypt:
self.error = "No sign and no encrypt is set but cipher type to gpg"
return False
# need a python-pyme package and gpgme lib
from pyme import core, errors
from pyme.constants.sig import mode
############################################
# sign #
############################################
if sign:
import string
core.check_version(None)
pin = string.replace(payload_in.as_string(), '\n', '\r\n')
plain = core.Data(pin)
sig = core.Data()
c = core.Context()
c.set_armor(1)
c.signers_clear()
# search for signing key for From:
for sigkey in c.op_keylist_all(sender, 1):
if sigkey.can_sign:
c.signers_add(sigkey)
if not c.signers_enum(0):
self.error = 'No key for signing [%s]' % sender
return False
c.set_passphrase_cb(lambda x, y, z: sign_passphrase)
try:
# make a signature
c.op_sign(plain, sig, mode.DETACH)
sig.seek(0, 0)
# make it part of the email
payload = MIMEMultipart('signed',
boundary=None,
_subparts=None,
**dict(micalg="pgp-sha1",
protocol="application/pgp-signature"))
# insert the origin payload
payload.attach(payload_in)
# insert the detached signature
p = MIMEBase("application", 'pgp-signature')
p.set_payload(sig.read())
payload.attach(p)
# it's just a trick to handle the no encryption case
payload_in = payload
except errors.GPGMEError as ex:
self.error = "GPG error: %s" % ex.getstring()
return False
############################################
# encrypt #
############################################
if encrypt:
core.check_version(None)
plain = core.Data(payload_in.as_string())
cipher = core.Data()
c = core.Context()
c.set_armor(1)
# collect the public keys for encryption
recipients = []
rec = to[:]
if cc:
rec.extend(cc)
if bcc:
rec.extend(bcc)
for addr in rec:
c.op_keylist_start(addr, 0)
r = c.op_keylist_next()
if r is None:
self.error = 'No key for [%s]' % addr
return False
recipients.append(r)
try:
# make the encryption
c.op_encrypt(recipients, 1, plain, cipher)
cipher.seek(0, 0)
# make it a part of the email
payload = MIMEMultipart('encrypted',
boundary=None,
_subparts=None,
**dict(protocol="application/pgp-encrypted"))
p = MIMEBase("application", 'pgp-encrypted')
p.set_payload("Version: 1\r\n")
payload.attach(p)
p = MIMEBase("application", 'octet-stream')
p.set_payload(cipher.read())
payload.attach(p)
except errors.GPGMEError as ex:
self.error = "GPG error: %s" % ex.getstring()
return False
#######################################################
# X.509 #
#######################################################
elif cipher_type == 'x509':
if not sign and not encrypt:
self.error = "No sign and no encrypt is set but cipher type to x509"
return False
import os
x509_sign_keyfile = x509_sign_keyfile or self.settings.x509_sign_keyfile
x509_sign_chainfile = x509_sign_chainfile or self.settings.x509_sign_chainfile
x509_sign_certfile = x509_sign_certfile or self.settings.x509_sign_certfile or \
x509_sign_keyfile or self.settings.x509_sign_certfile
# crypt certfiles could be a string or a list
x509_crypt_certfiles = x509_crypt_certfiles or self.settings.x509_crypt_certfiles
x509_nocerts = x509_nocerts or\
self.settings.x509_nocerts
# need m2crypto
try:
from M2Crypto import BIO, SMIME, X509
except Exception as e:
self.error = "Can't load M2Crypto module"
return False
msg_bio = BIO.MemoryBuffer(payload_in.as_string())
s = SMIME.SMIME()
# SIGN
if sign:
# key for signing
try:
keyfile_bio = BIO.openfile(x509_sign_keyfile)\
if os.path.isfile(x509_sign_keyfile)\
else BIO.MemoryBuffer(x509_sign_keyfile)
sign_certfile_bio = BIO.openfile(x509_sign_certfile)\
if os.path.isfile(x509_sign_certfile)\
else BIO.MemoryBuffer(x509_sign_certfile)
s.load_key_bio(keyfile_bio, sign_certfile_bio,
callback=lambda x: sign_passphrase)
if x509_sign_chainfile:
sk = X509.X509_Stack()
chain = X509.load_cert(x509_sign_chainfile)\
if os.path.isfile(x509_sign_chainfile)\
else X509.load_cert_string(x509_sign_chainfile)
sk.push(chain)
s.set_x509_stack(sk)
except Exception as e:
self.error = "Something went wrong on certificate / private key loading: <%s>" % str(e)
return False
try:
if x509_nocerts:
flags = SMIME.PKCS7_NOCERTS
else:
flags = 0
if not encrypt:
flags += SMIME.PKCS7_DETACHED
p7 = s.sign(msg_bio, flags=flags)
msg_bio = BIO.MemoryBuffer(payload_in.as_string(
)) # Recreate coz sign() has consumed it.
except Exception as e:
self.error = "Something went wrong on signing: <%s> %s" % (
str(e), str(flags))
return False
# ENCRYPT
if encrypt:
try:
sk = X509.X509_Stack()
if not isinstance(x509_crypt_certfiles, (list, tuple)):
x509_crypt_certfiles = [x509_crypt_certfiles]
# make an encryption cert's stack
for crypt_certfile in x509_crypt_certfiles:
certfile = X509.load_cert(crypt_certfile)\
if os.path.isfile(crypt_certfile)\
else X509.load_cert_string(crypt_certfile)
sk.push(certfile)
s.set_x509_stack(sk)
s.set_cipher(SMIME.Cipher('des_ede3_cbc'))
tmp_bio = BIO.MemoryBuffer()
if sign:
s.write(tmp_bio, p7)
else:
tmp_bio.write(payload_in.as_string())
p7 = s.encrypt(tmp_bio)
except Exception as e:
self.error = "Something went wrong on encrypting: <%s>" % str(e)
return False
# Final stage in sign and encryption
out = BIO.MemoryBuffer()
if encrypt:
s.write(out, p7)
else:
if sign:
s.write(out, p7, msg_bio, SMIME.PKCS7_DETACHED)
else:
out.write('\r\n')
out.write(payload_in.as_string())
out.close()
st = str(out.read())
payload = message_from_string(st)
else:
# no cryptography process as usual
payload = payload_in
if from_address:
payload['From'] = encoded_or_raw(to_unicode(from_address, encoding))
else:
payload['From'] = encoded_or_raw(to_unicode(sender, encoding))
origTo = to[:]
if to:
payload['To'] = encoded_or_raw(to_unicode(', '.join(to), encoding))
if reply_to:
payload['Reply-To'] = encoded_or_raw(to_unicode(', '.join(reply_to), encoding))
if cc:
payload['Cc'] = encoded_or_raw(to_unicode(', '.join(cc), encoding))
to.extend(cc)
if bcc:
to.extend(bcc)
payload['Subject'] = encoded_or_raw(to_unicode(subject, encoding))
payload['Date'] = email.utils.formatdate()
for k, v in iteritems(headers):
payload[k] = encoded_or_raw(to_unicode(v, encoding))
result = {}
try:
if self.settings.server == 'logging':
entry = 'email not sent\n%s\nFrom: %s\nTo: %s\nSubject: %s\n\n%s\n%s\n' % \
('-' * 40, sender, ', '.join(to), subject, text or html, '-' * 40)
logger.warning(entry)
elif self.settings.server.startswith('logging:'):
entry = 'email not sent\n%s\nFrom: %s\nTo: %s\nSubject: %s\n\n%s\n%s\n' % \
('-' * 40, sender, ', '.join(to), subject, text or html, '-' * 40)
open(self.settings.server[8:], 'a').write(entry)
elif self.settings.server == 'gae':
xcc = dict()
if cc:
xcc['cc'] = cc
if bcc:
xcc['bcc'] = bcc
if reply_to:
xcc['reply_to'] = reply_to
from google.appengine.api import mail
attachments = attachments and [mail.Attachment(
a.my_filename,
a.my_payload,
content_id='<attachment-%s>' % k
) for k, a in enumerate(attachments) if not raw]
if attachments:
result = mail.send_mail(
sender=sender, to=origTo,
subject=to_unicode(subject, encoding),
body=to_unicode(text or '', encoding),
html=html,
attachments=attachments, **xcc)
elif html and (not raw):
result = mail.send_mail(
sender=sender, to=origTo,
subject=to_unicode(subject, encoding), body=to_unicode(text or '', encoding), html=html, **xcc)
else:
result = mail.send_mail(
sender=sender, to=origTo,
subject=to_unicode(subject, encoding), body=to_unicode(text or '', encoding), **xcc)
elif self.settings.server == 'aws':
import boto3
from botocore.exceptions import ClientError
client = boto3.client('ses')
try:
raw = {'Data': payload.as_string()}
response = client.send_raw_email(RawMessage=raw,
Source=sender,
Destinations=to)
return True
except ClientError as e:
# we should log this error:
# print e.response['Error']['Message']
return False
else:
smtp_args = self.settings.server.split(':')
kwargs = dict(timeout=self.settings.timeout)
func = smtplib.SMTP_SSL if self.settings.ssl else smtplib.SMTP
server = func(*smtp_args, **kwargs)
try:
if self.settings.tls and not self.settings.ssl:
server.ehlo(self.settings.hostname)
server.starttls()
server.ehlo(self.settings.hostname)
if self.settings.login:
server.login(*self.settings.login.split(':', 1))
result = server.sendmail(sender, to, payload.as_string())
finally:
# do not want to hide errors raising some exception here
try:
server.quit()
except smtplib.SMTPException:
# ensure to close any socket with SMTP server
try:
server.close()
except Exception:
pass
except Exception as e:
logger.warning('Mail.send failure:%s' % e)
self.result = result
self.error = e
return False
self.result = result
self.error = None
return True
class Recaptcha2(DIV):
    """
    Experimental:
    Creates a DIV holding the newer Recaptcha from Google (v2)

    Args:
        request : the request. If not passed, uses current request
        public_key : the public key Google gave you
        private_key : the private key Google gave you
        error_message : the error message to show if verification fails
        label : the label to use
        options (dict) : takes these parameters
            - hl
            - theme
            - type
            - tabindex
            - callback
            - expired-callback
            see https://developers.google.com/recaptcha/docs/display for docs about those
        comment : the comment

    Examples:
        Use as::
            form = FORM(Recaptcha2(public_key='...', private_key='...'))
        or::
            form = SQLFORM(...)
            form.append(Recaptcha2(public_key='...', private_key='...'))
        to protect the login page instead, use::
            from gluon.tools import Recaptcha2
            auth.settings.captcha = Recaptcha2(request, public_key='...', private_key='...')
    """
    # Client-side widget script and server-side verification endpoint.
    API_URI = 'https://www.google.com/recaptcha/api.js'
    VERIFY_SERVER = 'https://www.google.com/recaptcha/api/siteverify'

    def __init__(self,
                 request=None,
                 public_key='',
                 private_key='',
                 error_message='invalid',
                 label='Verify:',
                 options=None,
                 comment='',
                 ):
        request = request or current.request
        # Fall back to current.request.vars when a bare request object is passed.
        self.request_vars = request and request.vars or current.request.vars
        self.remote_addr = request.env.remote_addr
        self.public_key = public_key
        self.private_key = private_key
        self.errors = Storage()
        self.error_message = error_message
        # components/attributes make this object quack like a DIV for form rendering
        self.components = []
        self.attributes = {}
        self.label = label
        self.options = options or {}
        self.comment = comment

    def _validate(self):
        """Verify the submitted captcha response against Google's siteverify API.

        Returns True on success; on failure stores ``error_message`` under
        ``self.errors['captcha']`` and returns False.
        """
        # pop() so a consumed token cannot be re-validated on a later pass
        recaptcha_response_field = self.request_vars.pop('g-recaptcha-response', None)
        remoteip = self.remote_addr
        if not recaptcha_response_field:
            self.errors['captcha'] = self.error_message
            return False
        params = urlencode({
            'secret': self.private_key,
            'remoteip': remoteip,
            'response': recaptcha_response_field,
        }).encode('utf-8')
        request = urllib2.Request(
            url=self.VERIFY_SERVER,
            data=to_bytes(params),
            headers={'Content-type': 'application/x-www-form-urlencoded',
                     'User-agent': 'reCAPTCHA Python'})
        # NOTE(review): no timeout is passed to urlopen, so a stalled Google
        # endpoint blocks the request indefinitely — consider adding one.
        httpresp = urlopen(request)
        try:
            content = httpresp.read()
        finally:
            # close the response even if read() raises (was leaked before)
            httpresp.close()
        try:
            response_dict = json.loads(to_native(content))
        except ValueError:
            # was a bare `except:`; json.loads signals bad payloads
            # with ValueError (JSONDecodeError), catch only that
            self.errors['captcha'] = self.error_message
            return False
        if response_dict.get('success', False):
            self.request_vars.captcha = ''
            return True
        else:
            self.errors['captcha'] = self.error_message
            return False

    def xml(self):
        """Render the widget: script tag, g-recaptcha DIV and a <noscript> fallback."""
        api_uri = self.API_URI
        hl = self.options.pop('hl', None)
        if hl:
            api_uri = self.API_URI + '?hl=%s' % hl
        public_key = self.public_key
        self.options['sitekey'] = public_key
        captcha = DIV(
            SCRIPT(_src=api_uri, _async='', _defer=''),
            DIV(_class="g-recaptcha", data=self.options),
            TAG.noscript(XML("""
<div style="width: 302px; height: 352px;">
<div style="width: 302px; height: 352px; position: relative;">
<div style="width: 302px; height: 352px; position: absolute;">
<iframe src="https://www.google.com/recaptcha/api/fallback?k=%(public_key)s"
frameborder="0" scrolling="no"
style="width: 302px; height:352px; border-style: none;">
</iframe>
</div>
<div style="width: 250px; height: 80px; position: absolute; border-style: none;
bottom: 21px; left: 25px; margin: 0px; padding: 0px; right: 25px;">
<textarea id="g-recaptcha-response" name="g-recaptcha-response"
class="g-recaptcha-response"
style="width: 250px; height: 80px; border: 1px solid #c1c1c1;
margin: 0px; padding: 0px; resize: none;" value="">
</textarea>
</div>
</div>
</div>""" % dict(public_key=public_key))
            )
        )
        if not self.errors.captcha:
            return XML(captcha).xml()
        else:
            # re-render with the error message appended below the widget
            captcha.append(DIV(self.errors['captcha'], _class='error'))
            return XML(captcha).xml()
# this should only be used for captcha and perhaps not even for that
def addrow(form, a, b, c, style, _id, position=-1):
    """Insert a (label, widget, comment) row into ``form[0]`` per formstyle.

    Args:
        form: the FORM; rows are inserted into its first child.
        a: label content, b: widget content, c: comment content.
        style: one of the known formstyle names (anything else falls back
            to the classic 3-column table row).
        _id: HTML id for the inserted row.
        position: insertion index within the container (default: append-like -1).
    """
    container = form[0]
    extra_row = None
    if style == "divs":
        row = DIV(DIV(LABEL(a), _class='w2p_fl'),
                  DIV(b, _class='w2p_fw'),
                  DIV(c, _class='w2p_fc'),
                  _id=_id)
    elif style == "table2cols":
        # label + comment on one row, widget spanning both columns on the next
        row = TR(TD(LABEL(a), _class='w2p_fl'),
                 TD(c, _class='w2p_fc'))
        extra_row = TR(TD(b, _class='w2p_fw'), _colspan=2, _id=_id)
    elif style == "ul":
        row = LI(DIV(LABEL(a), _class='w2p_fl'),
                 DIV(b, _class='w2p_fw'),
                 DIV(c, _class='w2p_fc'),
                 _id=_id)
    elif style == "bootstrap":
        row = DIV(LABEL(a, _class='control-label'),
                  DIV(b, SPAN(c, _class='inline-help'), _class='controls'),
                  _class='control-group', _id=_id)
    elif style in ("bootstrap3_inline", "bootstrap4_inline"):
        row = DIV(LABEL(a, _class='control-label col-sm-3'),
                  DIV(b, SPAN(c, _class='help-block'), _class='col-sm-9'),
                  _class='form-group row', _id=_id)
    elif style in ("bootstrap3_stacked", "bootstrap4_stacked"):
        row = DIV(LABEL(a, _class='control-label'),
                  b, SPAN(c, _class='help-block'),
                  _class='form-group row', _id=_id)
    else:
        row = TR(TD(LABEL(a), _class='w2p_fl'),
                 TD(b, _class='w2p_fw'),
                 TD(c, _class='w2p_fc'), _id=_id)
    container.insert(position, row)
    if extra_row is not None:
        container.insert(position + 1, extra_row)
class AuthJWT(object):
    """
    Experimental!

    Args:
     - secret_key: the secret. Without salting, an attacker knowing this can impersonate
                   any user
     - algorithm : uses as they are in the JWT specs, HS256, HS384 or HS512 basically means
                   signing with HMAC with a 256, 284 or 512bit hash
     - verify_expiration : verifies the expiration checking the exp claim
     - leeway: allow n seconds of skew when checking for token expiration
     - expiration : how many seconds a token may be valid
     - allow_refresh: enable the machinery to get a refreshed token passing a not-already-expired
                      token
     - refresh_expiration_delta: to avoid continous refresh of the token
     - header_prefix : self-explanatory. "JWT" and "Bearer" seems to be the emerging standards
     - jwt_add_header: a dict holding additional mappings to the header. by default only alg and typ are filled
     - user_param: the name of the parameter holding the username when requesting a token. Can be useful, e.g, for
                   email-based authentication, with "email" as a parameter
     - pass_param: same as above, but for the password
     - realm: self-explanatory
     - salt: can be static or a function that takes the payload as an argument.
             Example:
                 def mysalt(payload):
                     return payload['hmac_key'].split('-')[0]
     - additional_payload: can be a dict to merge with the payload or a function that takes
                           the payload as input and returns the modified payload
                           Example:
                               def myadditional_payload(payload):
                                   payload['my_name_is'] = 'bond,james bond'
                                   return payload
     - before_authorization: can be a callable that takes the deserialized token (a dict) as input.
                             Gets called right after signature verification but before the actual
                             authorization takes place. It may be use to cast
                             the extra auth_user fields to their actual types.
                             You can raise with HTTP a proper error message
                             Example:
                                 def mybefore_authorization(tokend):
                                     if not tokend['my_name_is'] == 'bond,james bond':
                                         raise HTTP(400, 'Invalid JWT my_name_is claim')
     - max_header_length: check max length to avoid load()ing unusually large tokens (could mean crafted, e.g. in a DDoS.)

    Basic Usage:
        in models (or the controller needing it)
            myjwt = AuthJWT(auth, secret_key='secret')
        in the controller issuing tokens
            def login_and_take_token():
                return myjwt.jwt_token_manager()
        A call then to /app/controller/login_and_take_token with username and password returns the token
        A call to /app/controller/login_and_take_token with the original token returns the refreshed token
        To protect a function with JWT
            @myjwt.allows_jwt()
            @auth.requires_login()
            def protected():
                return '%s$%s' % (request.now, auth.user_id)
        To inject optional auth info into the action with JWT
            @myjwt.allows_jwt()
            def unprotected():
                if auth.user:
                    return '%s$%s' % (request.now, auth.user_id)
                return "No auth info!"
    """
    def __init__(self,
                 auth,
                 secret_key,
                 algorithm='HS256',
                 verify_expiration=True,
                 leeway=30,
                 expiration=60 * 5,
                 allow_refresh=True,
                 refresh_expiration_delta=60 * 60,
                 header_prefix='Bearer',
                 jwt_add_header=None,
                 user_param='username',
                 pass_param='password',
                 realm='Login required',
                 salt=None,
                 additional_payload=None,
                 before_authorization=None,
                 max_header_length=4 * 1024,
                 ):
        self.secret_key = secret_key
        self.auth = auth
        self.algorithm = algorithm
        # Only HMAC-SHA2 family algorithms are supported (symmetric signing).
        if self.algorithm not in ('HS256', 'HS384', 'HS512'):
            raise NotImplementedError('Algorithm %s not allowed' % algorithm)
        self.verify_expiration = verify_expiration
        self.leeway = leeway
        self.expiration = expiration
        self.allow_refresh = allow_refresh
        self.refresh_expiration_delta = refresh_expiration_delta
        self.header_prefix = header_prefix
        self.jwt_add_header = jwt_add_header or {}
        base_header = {'alg': self.algorithm, 'typ': 'JWT'}
        for k, v in iteritems(self.jwt_add_header):
            base_header[k] = v
        # The JOSE header never changes per token, so encode it once and cache;
        # load_token() also uses this cached value to reject foreign headers.
        self.cached_b64h = self.jwt_b64e(json.dumps(base_header))
        digestmod_mapping = {
            'HS256': hashlib.sha256,
            'HS384': hashlib.sha384,
            'HS512': hashlib.sha512
        }
        self.digestmod = digestmod_mapping[algorithm]
        self.user_param = user_param
        self.pass_param = pass_param
        self.realm = realm
        self.salt = salt
        self.additional_payload = additional_payload
        self.before_authorization = before_authorization
        self.max_header_length = max_header_length
        # last raw token seen by get_jwt_token_from_request(), if any
        self.recvd_token = None
    @staticmethod
    def jwt_b64e(string):
        # URL-safe base64 with the '=' padding stripped, per the JWT spec.
        string = to_bytes(string)
        return base64.urlsafe_b64encode(string).strip(b'=')
    @staticmethod
    def jwt_b64d(string):
        """base64 decodes a single bytestring (and is tolerant to getting
        called with a unicode string).
        The result is also a bytestring.
        """
        string = to_bytes(string, 'ascii', 'ignore')
        # re-add the padding that jwt_b64e stripped
        return base64.urlsafe_b64decode(string + b'=' * (-len(string) % 4))
    def generate_token(self, payload):
        """Serialize and HMAC-sign `payload`, returning 'header.payload.sig'."""
        secret = to_bytes(self.secret_key)
        if self.salt:
            if callable(self.salt):
                secret = "%s$%s" % (secret, self.salt(payload))
            else:
                secret = "%s$%s" % (secret, self.salt)
            # NOTE(review): here the key is converted with to_bytes() BEFORE
            # salting, while load_token() salts the raw secret_key first and
            # converts afterwards — when a salt is set the two paths may not
            # produce identical key bytes on Python 3. Verify before changing:
            # any fix invalidates previously issued salted tokens.
            if isinstance(secret, unicodeT):
                secret = secret.encode('ascii', 'ignore')
        b64h = self.cached_b64h
        b64p = self.jwt_b64e(serializers.json(payload))
        jbody = b64h + b'.' + b64p
        mauth = hmac.new(key=secret, msg=jbody, digestmod=self.digestmod)
        jsign = self.jwt_b64e(mauth.digest())
        return to_native(jbody + b'.' + jsign)
    def verify_signature(self, body, signature, secret):
        """Recompute the HMAC over `body` and compare with `signature`."""
        mauth = hmac.new(key=secret, msg=body, digestmod=self.digestmod)
        # `compare` is the module's comparison helper — presumably
        # constant-time to resist timing attacks; confirm its definition.
        return compare(self.jwt_b64e(mauth.digest()), signature)
    def load_token(self, token):
        """Parse, verify and deserialize a token; raises HTTP(400) on any failure."""
        token = to_bytes(token, 'utf-8', 'strict')
        # split from the right: signature is after the last dot
        body, sig = token.rsplit(b'.', 1)
        b64h, b64b = body.split(b'.', 1)
        if b64h != self.cached_b64h:
            # header not the same
            raise HTTP(400, 'Invalid JWT Header')
        secret = self.secret_key
        tokend = serializers.loads_json(to_native(self.jwt_b64d(b64b)))
        if self.salt:
            if callable(self.salt):
                secret = "%s$%s" % (secret, self.salt(tokend))
            else:
                secret = "%s$%s" % (secret, self.salt)
        secret = to_bytes(secret, 'ascii', 'ignore')
        if not self.verify_signature(body, sig, secret):
            # signature verification failed
            raise HTTP(400, 'Token signature is invalid')
        if self.verify_expiration:
            now = time.mktime(datetime.datetime.utcnow().timetuple())
            # leeway tolerates clock skew between issuer and verifier
            if tokend['exp'] + self.leeway < now:
                raise HTTP(400, 'Token is expired')
        if callable(self.before_authorization):
            self.before_authorization(tokend)
        return tokend
    def serialize_auth_session(self, session_auth):
        """
        As bad as it sounds, as long as this is rarely used (vs using the token)
        this is the faster method, even if we ditch session in jwt_token_manager().
        We (mis)use the heavy default auth mechanism to avoid any further computation,
        while sticking to a somewhat-stable Auth API.
        """
        # TODO: Check the following comment
        # is the following safe or should we use
        # calendar.timegm(datetime.datetime.utcnow().timetuple())
        # result seem to be the same (seconds since epoch, in UTC)
        now = time.mktime(datetime.datetime.utcnow().timetuple())
        expires = now + self.expiration
        payload = dict(
            hmac_key=session_auth['hmac_key'],
            user_groups=session_auth['user_groups'],
            user=session_auth['user'].as_dict(),
            iat=now,
            exp=expires
        )
        return payload
    def refresh_token(self, orig_payload):
        """Return `orig_payload` with a new exp/iat/hmac_key, enforcing refresh limits."""
        now = time.mktime(datetime.datetime.utcnow().timetuple())
        if self.verify_expiration:
            orig_exp = orig_payload['exp']
            if orig_exp + self.leeway < now:
                # token already expired, can't be used for refresh
                raise HTTP(400, 'Token already expired')
        # orig_iat anchors the chain of refreshes to the first issuance
        orig_iat = orig_payload.get('orig_iat') or orig_payload['iat']
        if orig_iat + self.refresh_expiration_delta < now:
            # refreshed too long ago
            raise HTTP(400, 'Token issued too long ago')
        expires = now + self.expiration
        orig_payload.update(
            orig_iat=orig_iat,
            iat=now,
            exp=expires,
            hmac_key=web2py_uuid()
        )
        self.alter_payload(orig_payload)
        return orig_payload
    def alter_payload(self, payload):
        """Apply `additional_payload` (callable or dict merge) to `payload`."""
        if self.additional_payload:
            if callable(self.additional_payload):
                payload = self.additional_payload(payload)
            elif isinstance(self.additional_payload, dict):
                payload.update(self.additional_payload)
        return payload
    def jwt_token_manager(self, token_param='_token'):
        """
        The part that issues (and refreshes) tokens.
        Used in a controller, given myjwt is the istantiated class, as

            @myjwt.allow_jwt(required=False, verify_expiration=False)
            def api_auth():
                return myjwt.jwt_token_manager()

        Then, a call to /app/c/api_auth with username and password
        returns a token, while /app/c/api_auth with the current token
        issues another token (expired, but within grace time)
        """
        request = current.request
        response = current.response
        session = current.session
        # forget and unlock response
        session.forget(response)
        valid_user = None
        ret = None
        token = None
        try:
            token = self.recvd_token or self.get_jwt_token_from_request(token_param)
        except HTTP:
            # no token present: fall through to credential-based issuance
            pass
        if token:
            if not self.allow_refresh:
                raise HTTP(403, 'Refreshing token is not allowed')
            tokend = self.load_token(token)
            # verification can fail here
            refreshed = self.refresh_token(tokend)
            ret = {'token': self.generate_token(refreshed)}
        elif self.user_param in request.vars and self.pass_param in request.vars:
            username = request.vars[self.user_param]
            password = request.vars[self.pass_param]
            valid_user = self.auth.login_bare(username, password)
        else:
            valid_user = self.auth.user
            # NOTE(review): login_user is called even when self.auth.user is
            # None here — confirm Auth.login_user tolerates a None argument.
            self.auth.login_user(valid_user)
        if valid_user:
            payload = self.serialize_auth_session(session.auth)
            self.alter_payload(payload)
            ret = {'token': self.generate_token(payload)}
        elif ret is None:
            raise HTTP(401,
                       'Not Authorized - need to be logged in, to pass a token '
                       'for refresh or username and password for login',
                       **{'WWW-Authenticate': 'JWT realm="%s"' % self.realm})
        response.headers['Content-Type'] = 'application/json'
        return serializers.json(ret)
    def inject_token(self, tokend):
        """
        The real deal, not touching the db but still logging-in the user
        """
        self.auth.user = Storage(tokend['user'])
        self.auth.user_groups = tokend['user_groups']
        self.auth.hmac_key = tokend['hmac_key']
    def get_jwt_token_from_request(self, token_param='_token'):
        """
        The method that extracts and validates the token, either
        from the header or the _token var

        token_param: request.vars attribute with the token used only if the http authorization header is not present.
        """
        token = None
        token_in_header = current.request.env.http_authorization
        if token_in_header:
            # expected shape: "<header_prefix> <token>", e.g. "Bearer eyJ..."
            parts = token_in_header.split()
            if parts[0].lower() != self.header_prefix.lower():
                raise HTTP(400, 'Invalid JWT header')
            elif len(parts) == 1:
                raise HTTP(400, 'Invalid JWT header, missing token')
            elif len(parts) > 2:
                raise HTTP(400, 'Invalid JWT header, token contains spaces')
            token = parts[1]
        else:
            token = current.request.vars.get(token_param)
            if token is None:
                raise HTTP(400, 'JWT header not found and JWT parameter {} missing in request'.format(token_param))
        self.recvd_token = token
        return token
    def allows_jwt(self, otherwise=None, required=True, verify_expiration=True, token_param='_token'):
        """
        The decorator that takes care of injecting auth info in the decorated action.
        Works w/o resorting to session.

        Args:
            required: the token is mandatory (either in request.var._token or in the HTTP hearder Authorization Bearer)
            verify_expiration: allows to bypass expiration check. Useful to manage token renewal.
            token_param: request.vars attribute with the token used only if the http authorization header is not present (default: "_token").
        """
        def decorator(action):
            def f(*args, **kwargs):
                try:
                    token = self.get_jwt_token_from_request(token_param=token_param)
                except HTTP as e:
                    if required:
                        raise e
                    token = None
                # length cap guards against load()ing maliciously large tokens
                if token and len(token) < self.max_header_length:
                    old_verify_expiration = self.verify_expiration
                    try:
                        # temporarily honor the decorator's expiration policy
                        self.verify_expiration = verify_expiration
                        tokend = self.load_token(token)
                    except ValueError:
                        raise HTTP(400, 'Invalid JWT header, wrong token format')
                    finally:
                        self.verify_expiration = old_verify_expiration
                    self.inject_token(tokend)
                return action(*args, **kwargs)
            f.__doc__ = action.__doc__
            f.__name__ = action.__name__
            f.__dict__.update(action.__dict__)
            return f
        return decorator
class Auth(AuthAPI):
default_settings = dict(AuthAPI.default_settings,
allow_basic_login=False,
allow_basic_login_only=False,
allow_delete_accounts=False,
alternate_requires_registration=False,
auth_manager_role=None,
auth_two_factor_enabled=False,
auth_two_factor_tries_left=3,
bulk_register_enabled=False,
captcha=None,
cas_maps=None,
client_side=True,
formstyle=None,
hideerror=False,
label_separator=None,
login_after_password_change=True,
login_after_registration=False,
login_captcha=None,
login_specify_error=False,
long_expiration=3600 * 30 * 24, # one month
mailer=None,
manager_actions={},
multi_login=False,
on_failed_authentication=lambda x: redirect(x),
pre_registration_div=None,
prevent_open_redirect_attacks=True,
prevent_password_reset_attacks=True,
profile_fields=None,
register_captcha=None,
register_fields=None,
register_verify_password=True,
remember_me_form=True,
reset_password_requires_verification=False,
retrieve_password_captcha=None,
retrieve_username_captcha=None,
showid=False,
table_cas=None,
table_cas_name='auth_cas',
table_event=None,
table_group=None,
table_membership=None,
table_permission=None,
table_token_name='auth_token',
table_user=None,
two_factor_authentication_group=None,
update_fields=['email'],
wiki=Settings()
)
# ## these are messages that can be customized
default_messages = dict(AuthAPI.default_messages,
access_denied='Insufficient privileges',
bulk_invite_body='You have been invited to join %(site)s, click %(link)s to complete '
'the process',
bulk_invite_subject='Invitation to join %(site)s',
delete_label='Check to delete',
email_sent='Email sent',
email_verified='Email verified',
function_disabled='Function disabled',
impersonate_log='User %(id)s is impersonating %(other_id)s',
invalid_reset_password='Invalid reset password',
invalid_two_factor_code='Incorrect code. {0} more attempt(s) remaining.',
is_empty="Cannot be empty",
label_client_ip='Client IP',
label_description='Description',
label_email='E-mail',
label_first_name='First name',
label_group_id='Group ID',
label_last_name='Last name',
label_name='Name',
label_origin='Origin',
label_password='Password',
label_record_id='Record ID',
label_registration_id='Registration identifier',
label_registration_key='Registration key',
label_remember_me="Remember me (for 30 days)",
label_reset_password_key='Reset Password key',
label_role='Role',
label_table_name='Object or table name',
label_time_stamp='Timestamp',
label_two_factor='Authentication code',
label_user_id='User ID',
label_username='Username',
login_button='Log In',
login_disabled='Login disabled by administrator',
new_password='New password',
new_password_sent='A new password was emailed to you',
old_password='Old password',
password_change_button='Change password',
password_reset_button='Request reset password',
profile_save_button='Apply changes',
register_button='Sign Up',
reset_password='Click on the link %(link)s to reset your password',
reset_password_log='User %(id)s Password reset',
reset_password_subject='Password reset',
retrieve_password='Your password is: %(password)s',
retrieve_password_log='User %(id)s Password retrieved',
retrieve_password_subject='Password retrieve',
retrieve_two_factor_code='Your temporary login code is {0}',
retrieve_two_factor_code_subject='Two-step Login Authentication Code',
retrieve_username='Your username is: %(username)s',
retrieve_username_log='User %(id)s Username retrieved',
retrieve_username_subject='Username retrieve',
submit_button='Submit',
two_factor_comment='This code was emailed to you and is required for login.',
unable_send_email='Unable to send email',
username_sent='Your username was emailed to you',
verify_email='Welcome %(username)s! Click on the link %(link)s to verify your email',
verify_email_log='User %(id)s Verification email sent',
verify_email_subject='Email verification',
verify_password='Verify Password',
verify_password_comment='please input your password again'
)
"""
Class for authentication, authorization, role based access control.
Includes:
- registration and profile
- login and logout
- username and password retrieval
- event logging
- role creation and assignment
- user defined group/role based permission
Args:
environment: is there for legacy but unused (awful)
db: has to be the database where to create tables for authentication
mailer: `Mail(...)` or None (no mailer) or True (make a mailer)
hmac_key: can be a hmac_key or hmac_key=Auth.get_or_create_key()
controller: (where is the user action?)
cas_provider: (delegate authentication to the URL, CAS2)
Authentication Example::
from gluon.contrib.utils import *
mail=Mail()
mail.settings.server='smtp.gmail.com:587'
mail.settings.sender='you@somewhere.com'
mail.settings.login='username:password'
auth=Auth(db)
auth.settings.mailer=mail
# auth.settings....=...
auth.define_tables()
def authentication():
return dict(form=auth())
Exposes:
- `http://.../{application}/{controller}/authentication/login`
- `http://.../{application}/{controller}/authentication/logout`
- `http://.../{application}/{controller}/authentication/register`
- `http://.../{application}/{controller}/authentication/verify_email`
- `http://.../{application}/{controller}/authentication/retrieve_username`
- `http://.../{application}/{controller}/authentication/retrieve_password`
- `http://.../{application}/{controller}/authentication/reset_password`
- `http://.../{application}/{controller}/authentication/profile`
- `http://.../{application}/{controller}/authentication/change_password`
On registration a group with role=new_user.id is created
and user is given membership of this group.
You can create a group with::
group_id=auth.add_group('Manager', 'can access the manage action')
auth.add_permission(group_id, 'access to manage')
Here "access to manage" is just a user defined string.
You can give access to a user::
auth.add_membership(group_id, user_id)
If user id is omitted, the logged in user is assumed
Then you can decorate any action::
@auth.requires_permission('access to manage')
def manage():
return dict()
You can restrict a permission to a specific table::
auth.add_permission(group_id, 'edit', db.sometable)
@auth.requires_permission('edit', db.sometable)
Or to a specific record::
auth.add_permission(group_id, 'edit', db.sometable, 45)
@auth.requires_permission('edit', db.sometable, 45)
If authorization is not granted calls::
auth.settings.on_failed_authorization
Other options::
auth.settings.mailer=None
auth.settings.expiration=3600 # seconds
...
### these are messages that can be customized
...
"""
@staticmethod
def get_or_create_key(filename=None, alg='sha512'):
request = current.request
if not filename:
filename = os.path.join(request.folder, 'private', 'auth.key')
if os.path.exists(filename):
key = open(filename, 'r').read().strip()
else:
key = alg + ':' + web2py_uuid()
open(filename, 'w').write(key)
return key
def url(self, f=None, args=None, vars=None, scheme=False):
if args is None:
args = []
if vars is None:
vars = {}
host = scheme and self.settings.host
return URL(c=self.settings.controller,
f=f, args=args, vars=vars, scheme=scheme, host=host)
def here(self):
return URL(args=current.request.args, vars=current.request.get_vars)
def select_host(self, host, host_names=None):
"""
checks that host is valid, i.e. in the list of glob host_names
if the host is missing, then is it selects the first entry from host_names
read more here: https://github.com/web2py/web2py/issues/1196
"""
if host:
if host_names:
for item in host_names:
if fnmatch.fnmatch(host, item):
break
else:
raise HTTP(403, "Invalid Hostname")
elif host_names:
host = host_names[0]
else:
host = 'localhost'
return host
def __init__(self, environment=None, db=None, mailer=True,
             hmac_key=None, controller='default', function='user',
             cas_provider=None, signature=True, secure=False,
             csrf_prevention=True, propagate_extension=None,
             url_index=None, jwt=None, host_names=None):
    """
    Initializes the Auth object: binds it to the current request/session,
    restores a previously logged-in user from the session (expiring it if
    stale), and populates the default settings and messages.

    Args:
        environment: legacy positional slot; may carry the DAL instance
        db: the DAL instance holding the auth tables
        mailer: True for a default Mail(), or a preconfigured mailer
        hmac_key: key used for password hashing
        controller: controller exposing the auth actions
        function: function (action) name exposing the auth forms
        cas_provider: URL base of an external CAS server, if any
        signature: when True, defines auth.signature for record stamping
        secure: when True, require HTTPS on every request
        csrf_prevention: enable CSRF protection on auth forms
        propagate_extension: extension propagated to the login URL
        url_index: redirect target after login/logout (default: index)
        jwt: dict of AuthJWT options enabling JWT support
        host_names: allowed host glob patterns (see select_host)
    """
    # next two lines for backward compatibility
    if not db and environment and isinstance(environment, DAL):
        db = environment
    self.db = db
    self.environment = current
    self.csrf_prevention = csrf_prevention
    request = current.request
    session = current.session
    auth = session.auth
    self.user_groups = auth and auth.user_groups or {}
    if secure:
        request.requires_https()
    now = request.now
    # if we have auth info
    #     if not expired it, used it
    #     if expired, clear the session
    # else, only clear auth info in the session
    if auth:
        delta = datetime.timedelta(days=0, seconds=auth.expiration)
        if auth.last_visit and auth.last_visit + delta > now:
            self.user = auth.user
            # this is a trick to speed up sessions to avoid many writes
            if (now - auth.last_visit).seconds > (auth.expiration // 10):
                auth.last_visit = now
        else:
            # session expired: drop auth info and renew the session id
            self.user = None
            if session.auth:
                del session.auth
            session.renew(clear_session=True)
    else:
        self.user = None
        if session.auth:
            del session.auth
    # ## what happens after login?
    url_index = url_index or URL(controller, 'index')
    url_login = URL(controller, function, args='login',
                    extension=propagate_extension)
    # ## what happens after registration?
    settings = self.settings = Settings()
    settings.update(Auth.default_settings)
    host = self.select_host(request.env.http_host, host_names)
    settings.update(
        cas_domains=[host],
        enable_tokens=False,
        cas_provider=cas_provider,
        cas_actions=dict(login='login',
                         validate='validate',
                         servicevalidate='serviceValidate',
                         proxyvalidate='proxyValidate',
                         logout='logout'),
        cas_create_user=True,
        extra_fields={},
        actions_disabled=[],
        controller=controller,
        function=function,
        login_url=url_login,
        logged_url=URL(controller, function, args='profile'),
        download_url=URL(controller, 'download'),
        mailer=(mailer is True) and Mail() or mailer,
        on_failed_authorization=URL(controller, function, args='not_authorized'),
        login_next=url_index,
        login_onvalidation=[],
        login_onaccept=[],
        login_onfail=[],
        login_methods=[self],
        login_form=self,
        logout_next=url_index,
        logout_onlogout=None,
        register_next=url_index,
        register_onvalidation=[],
        register_onaccept=[],
        verify_email_next=url_login,
        verify_email_onaccept=[],
        profile_next=url_index,
        profile_onvalidation=[],
        profile_onaccept=[],
        retrieve_username_next=url_index,
        retrieve_password_next=url_index,
        request_reset_password_next=url_login,
        reset_password_next=url_index,
        change_password_next=url_index,
        change_password_onvalidation=[],
        change_password_onaccept=[],
        retrieve_password_onvalidation=[],
        request_reset_password_onvalidation=[],
        request_reset_password_onaccept=[],
        reset_password_onvalidation=[],
        reset_password_onaccept=[],
        hmac_key=hmac_key,
        formstyle=current.response.formstyle,
        label_separator=current.response.form_label_separator,
        two_factor_methods=[],
        two_factor_onvalidation=[],
        host=host,
    )
    settings.lock_keys = True
    # ## these are messages that can be customized
    messages = self.messages = Messages(current.T)
    messages.update(Auth.default_messages)
    messages.update(ajax_failed_authentication=
                    DIV(H4('NOT AUTHORIZED'),
                        'Please ',
                        A('login',
                          _href=self.settings.login_url +
                          ('?_next=' + urllib_quote(current.request.env.http_web2py_component_location))
                          if current.request.env.http_web2py_component_location else ''),
                        ' to view this content.',
                        _class='not-authorized alert alert-block'))
    messages.lock_keys = True
    # for "remember me" option
    response = current.response
    if auth and auth.remember_me:
        # when user wants to be logged in for longer
        response.session_cookie_expires = auth.expiration
    if signature:
        self.define_signature()
    else:
        self.signature = None
    self.jwt_handler = jwt and AuthJWT(self, **jwt)
def get_vars_next(self):
    """Return the sanitized ``_next`` request variable, or None."""
    target = current.request.vars._next
    if isinstance(target, (list, tuple)):
        # repeated ?_next=... parameters arrive as a sequence; keep the first
        target = target[0]
    if target and self.settings.prevent_open_redirect_attacks:
        # reject values that would redirect the browser off-site
        return prevent_open_redirect(target)
    return target or None
def table_cas(self):
    """Return the CAS ticket table, resolved lazily by its configured name."""
    name = self.settings.table_cas_name
    return self.db[name]
def table_token(self):
    """Return the API-token table, resolved lazily by its configured name."""
    name = self.settings.table_token_name
    return self.db[name]
def _HTTP(self, *a, **b):
    """
    only used in lambda: self._HTTP(404)
    """
    # raising (rather than returning) lets HTTP abort the request pipeline
    raise HTTP(*a, **b)
def __call__(self):
    """
    Dispatches the current request to the matching auth action
    (login, logout, register, ...), based on the first URL arg.

    Example:
        Use as::

            def authentication():
                return dict(form=auth())

    """
    request = current.request
    args = request.args
    if not args:
        # no action given: send the visitor to the login form
        redirect(self.url(args='login', vars=request.vars))
    elif args[0] in self.settings.actions_disabled:
        raise HTTP(404)
    if args[0] in ('login', 'logout', 'register', 'verify_email',
                   'retrieve_username', 'retrieve_password',
                   'reset_password', 'request_reset_password',
                   'change_password', 'profile', 'groups',
                   'impersonate', 'not_authorized', 'confirm_registration',
                   'bulk_register', 'manage_tokens', 'jwt'):
        # named actions map 1:1 onto methods of this object
        if len(request.args) >= 2 and args[0] == 'impersonate':
            return getattr(self, args[0])(request.args[1])
        else:
            return getattr(self, args[0])()
    elif args[0] == 'cas' and not self.settings.cas_provider:
        # acting as a CAS provider: dispatch on the second URL arg
        if args(1) == self.settings.cas_actions['login']:
            return self.cas_login(version=2)
        elif args(1) == self.settings.cas_actions['validate']:
            return self.cas_validate(version=1)
        elif args(1) == self.settings.cas_actions['servicevalidate']:
            return self.cas_validate(version=2, proxy=False)
        elif args(1) == self.settings.cas_actions['proxyvalidate']:
            return self.cas_validate(version=2, proxy=True)
        elif (args(1) == 'p3'
              and args(2) == self.settings.cas_actions['servicevalidate']):
            return self.cas_validate(version=3, proxy=False)
        elif (args(1) == 'p3'
              and args(2) == self.settings.cas_actions['proxyvalidate']):
            return self.cas_validate(version=3, proxy=True)
        elif args(1) == self.settings.cas_actions['logout']:
            return self.logout(next=request.vars.service or DEFAULT)
    else:
        raise HTTP(404)
def navbar(self, prefix='Welcome', action=None,
           separators=(' [ ', ' | ', ' ] '), user_identifier=DEFAULT,
           referrer_actions=DEFAULT, mode='default'):
    """ Navbar with support for more templates
    This uses some code from the old navbar.

    Args:
        mode: rendering mode; one of the keys of ``options`` below
            ('asmenu', 'dropdown', 'bare') or anything else for the
            default SPAN-based navbar
    """
    items = []  # Hold all menu items in a list
    self.bar = ''  # The final
    T = current.T
    referrer_actions = [] if not referrer_actions else referrer_actions
    if not action:
        action = self.url(self.settings.function)

    request = current.request
    if URL() == action:
        next = ''
    else:
        next = '?_next=' + urllib_quote(URL(args=request.args,
                                            vars=request.get_vars))
    # build each item's target URL; ``_next`` is appended only for
    # referrer actions (or for all actions when the default is used)
    href = lambda function: \
        '%s/%s%s' % (action, function, next if referrer_actions is DEFAULT or function in referrer_actions else '')
    if isinstance(prefix, str):
        prefix = T(prefix)
    if prefix:
        prefix = prefix.strip() + ' '

    def Anr(*a, **b):
        # anchor helper: every navbar link carries rel="nofollow"
        b['_rel'] = 'nofollow'
        return A(*a, **b)

    if self.user_id:  # User is logged in
        logout_next = self.settings.logout_next
        items.append({'name': T('Log Out'),
                      'href': '%s/logout?_next=%s' % (action, urllib_quote(logout_next)),
                      'icon': 'icon-off'})
        if 'profile' not in self.settings.actions_disabled:
            items.append({'name': T('Profile'), 'href': href('profile'),
                          'icon': 'icon-user'})
        if 'change_password' not in self.settings.actions_disabled:
            items.append({'name': T('Password'),
                          'href': href('change_password'),
                          'icon': 'icon-lock'})

        if user_identifier is DEFAULT:
            user_identifier = '%(first_name)s'
        if callable(user_identifier):
            user_identifier = user_identifier(self.user)
        elif ((isinstance(user_identifier, str) or
               type(user_identifier).__name__ == 'lazyT') and
              re.search(r'%\(.+\)s', user_identifier)):
            # identifier is a %-template interpolated with the user row
            user_identifier = user_identifier % self.user
        if not user_identifier:
            user_identifier = ''
    else:  # User is not logged in
        items.append({'name': T('Log In'), 'href': href('login'),
                      'icon': 'icon-off'})
        if 'register' not in self.settings.actions_disabled:
            items.append({'name': T('Sign Up'), 'href': href('register'),
                          'icon': 'icon-user'})
        if 'request_reset_password' not in self.settings.actions_disabled:
            items.append({'name': T('Lost password?'),
                          'href': href('request_reset_password'),
                          'icon': 'icon-lock'})
        if self.settings.use_username and 'retrieve_username' not in self.settings.actions_disabled:
            items.append({'name': T('Forgot username?'),
                          'href': href('retrieve_username'),
                          'icon': 'icon-edit'})

    def menu():  # For inclusion in MENU
        self.bar = [(items[0]['name'], False, items[0]['href'], [])]
        del items[0]
        for item in items:
            self.bar[0][3].append((item['name'], False, item['href']))

    def bootstrap3():  # Default web2py scaffolding
        def rename(icon): return icon + ' ' + icon.replace('icon', 'glyphicon')
        self.bar = UL(LI(Anr(I(_class=rename('icon ' + items[0]['icon'])),
                             ' ' + items[0]['name'],
                             _href=items[0]['href'])), _class='dropdown-menu')
        del items[0]
        for item in items:
            self.bar.insert(-1, LI(Anr(I(_class=rename('icon ' + item['icon'])),
                                       ' ' + item['name'],
                                       _href=item['href'])))
        self.bar.insert(-1, LI('', _class='divider'))
        if self.user_id:
            self.bar = LI(Anr(prefix, user_identifier,
                              _href='#', _class="dropdown-toggle",
                              data={'toggle': 'dropdown'}),
                          self.bar, _class='dropdown')
        else:
            self.bar = LI(Anr(T('Log In'),
                              _href='#', _class="dropdown-toggle",
                              data={'toggle': 'dropdown'}), self.bar,
                          _class='dropdown')

    def bare():
        """ In order to do advanced customization we only need the
        prefix, the user_identifier and the href attribute of items

        Examples:
            Use as::

                # in module custom_layout.py
                from gluon import *

                def navbar(auth_navbar):
                    bar = auth_navbar
                    user = bar["user"]

                    if not user:
                        btn_login = A(current.T("Login"),
                                      _href=bar["login"],
                                      _class="btn btn-success",
                                      _rel="nofollow")
                        btn_register = A(current.T("Sign up"),
                                         _href=bar["register"],
                                         _class="btn btn-primary",
                                         _rel="nofollow")
                        return DIV(btn_register, btn_login, _class="btn-group")
                    else:
                        toggletext = "%s back %s" % (bar["prefix"], user)
                        toggle = A(toggletext,
                                   _href="#",
                                   _class="dropdown-toggle",
                                   _rel="nofollow",
                                   **{"_data-toggle": "dropdown"})
                        li_profile = LI(A(I(_class="icon-user"), ' ',
                                          current.T("Account details"),
                                          _href=bar["profile"], _rel="nofollow"))
                        li_custom = LI(A(I(_class="icon-book"), ' ',
                                         current.T("My Agenda"),
                                         _href="#", rel="nofollow"))
                        li_logout = LI(A(I(_class="icon-off"), ' ',
                                         current.T("logout"),
                                         _href=bar["logout"], _rel="nofollow"))
                        dropdown = UL(li_profile,
                                      li_custom,
                                      LI('', _class="divider"),
                                      li_logout,
                                      _class="dropdown-menu", _role="menu")
                        return LI(toggle, dropdown, _class="dropdown")

                # in models db.py
                import custom_layout as custom

                # in layout.html
                <ul id="navbar" class="nav pull-right">
                    {{='auth' in globals() and \
                    custom.navbar(auth.navbar(mode='bare')) or ''}}</ul>

        """
        bare = {'prefix': prefix, 'user': user_identifier if self.user_id else None}
        # map each translated item name back to a stable key
        for i in items:
            if i['name'] == T('Log In'):
                k = 'login'
            elif i['name'] == T('Sign Up'):
                k = 'register'
            elif i['name'] == T('Lost password?'):
                k = 'request_reset_password'
            elif i['name'] == T('Forgot username?'):
                k = 'retrieve_username'
            elif i['name'] == T('Log Out'):
                k = 'logout'
            elif i['name'] == T('Profile'):
                k = 'profile'
            elif i['name'] == T('Password'):
                k = 'change_password'
            bare[k] = i['href']
        self.bar = bare

    options = {'asmenu': menu,
               'dropdown': bootstrap3,
               'bare': bare
               }  # Define custom modes.

    if mode in options and callable(options[mode]):
        options[mode]()
    else:
        # default rendering: SPAN with the configured separators
        s1, s2, s3 = separators
        if self.user_id:
            self.bar = SPAN(prefix, user_identifier, s1,
                            Anr(items[0]['name'],
                                _href=items[0]['href']), s3,
                            _class='auth_navbar')
        else:
            self.bar = SPAN(s1, Anr(items[0]['name'],
                                    _href=items[0]['href']), s3,
                            _class='auth_navbar')
        for item in items[1:]:
            self.bar.insert(-1, s2)
            self.bar.insert(-1, Anr(item['name'], _href=item['href']))
    return self.bar
def enable_record_versioning(self,
                             tables,
                             archive_db=None,
                             archive_names='%(tablename)s_archive',
                             current_record='current_record',
                             current_record_label=None):
    """
    Enable full record versioning (including for the auth tables)::

        auth = Auth(db)
        auth.define_tables(signature=True)
        # define our own tables
        db.define_table('mything', Field('name'), auth.signature)
        auth.enable_record_versioning(tables=db)

    ``tables`` can be the db (all tables) or a list of tables; only
    tables with ``modified_by``/``modified_on`` fields (as created by
    auth.signature) get versioning.  Old record versions are stored in
    an automatically defined '<table>_archive' table.  Records are
    never deleted but marked is_active=False, and a common_filter is
    installed on every versioned table to hide inactive rows.

    Note:
        Do not combine with auth.archive or you will end up with
        duplicates: auth.archive does explicitly what
        enable_record_versioning does automatically.
    """
    if not current_record_label:
        current_record_label = current.T(
            current_record.replace('_', ' ').title())
    for table in tables:
        names = table.fields()
        # only version tables carrying the auth signature fields and
        # not already holding a current_record pointer
        eligible = ('id' in names and 'modified_on' in names
                    and current_record not in names)
        if eligible:
            table._enable_record_versioning(
                archive_db=archive_db,
                archive_name=archive_names,
                current_record=current_record,
                current_record_label=current_record_label)
def define_tables(self, username=None, signature=None, enable_tokens=False,
                  migrate=None, fake_migrate=None):
    """
    To be called unless tables are defined manually

    Examples:
        Use as::

            # defines all needed tables and table files
            # 'myprefix_auth_user.table', ...
            auth.define_tables(migrate='myprefix_')

            # defines all needed tables without migration/table files
            auth.define_tables(migrate=False)

    """
    db = self.db
    if migrate is None:
        migrate = db._migrate
    if fake_migrate is None:
        fake_migrate = db._fake_migrate
    settings = self.settings
    settings.enable_tokens = enable_tokens
    # the base class defines the core user/group/membership/permission/
    # event tables and returns the signature field list
    signature_list = \
        super(Auth, self).define_tables(username, signature, migrate, fake_migrate)._table_signature_list

    now = current.request.now
    reference_table_user = 'reference %s' % settings.table_user_name
    if settings.cas_domains:
        if settings.table_cas_name not in db.tables:
            db.define_table(
                settings.table_cas_name,
                Field('user_id', reference_table_user, default=None,
                      label=self.messages.label_user_id),
                Field('created_on', 'datetime', default=now),
                Field('service', requires=IS_URL()),
                Field('ticket'),
                Field('renew', 'boolean', default=False),
                *settings.extra_fields.get(settings.table_cas_name, []),
                **dict(
                    migrate=self._get_migrate(
                        settings.table_cas_name, migrate),
                    fake_migrate=fake_migrate))
    if settings.enable_tokens:
        extra_fields = settings.extra_fields.get(
            settings.table_token_name, []) + signature_list
        if settings.table_token_name not in db.tables:
            db.define_table(
                settings.table_token_name,
                Field('user_id', reference_table_user, default=None,
                      label=self.messages.label_user_id),
                Field('expires_on', 'datetime', default=datetime.datetime(2999, 12, 31)),
                Field('token', writable=False, default=web2py_uuid, unique=True),
                *extra_fields,
                **dict(migrate=self._get_migrate(settings.table_token_name, migrate),
                       fake_migrate=fake_migrate))
    if not db._lazy_tables:
        # eagerly resolve table handles when lazy tables are disabled
        settings.table_user = db[settings.table_user_name]
        settings.table_group = db[settings.table_group_name]
        settings.table_membership = db[settings.table_membership_name]
        settings.table_permission = db[settings.table_permission_name]
        settings.table_event = db[settings.table_event_name]
        if settings.cas_domains:
            settings.table_cas = db[settings.table_cas_name]

    if settings.cas_provider:  # THIS IS NOT LAZY
        # acting as a CAS *client*: the remote server owns these actions
        settings.actions_disabled = \
            ['profile', 'register', 'change_password',
             'request_reset_password', 'retrieve_username']
        from gluon.contrib.login_methods.cas_auth import CasAuth
        maps = settings.cas_maps
        if not maps:
            table_user = self.table_user()
            maps = dict((name, lambda v, n=name: v.get(n, None)) for name in
                        table_user.fields if name != 'id'
                        and table_user[name].readable)
            maps['registration_id'] = \
                lambda v, p=settings.cas_provider: '%s/%s' % (p, v['user'])
        actions = [settings.cas_actions['login'],
                   settings.cas_actions['servicevalidate'],
                   settings.cas_actions['logout']]
        settings.login_form = CasAuth(
            casversion=2,
            urlbase=settings.cas_provider,
            actions=actions,
            maps=maps)
    return self
def get_or_create_user(self, keys, update_fields=None,
                       login=True, get=True):
    """
    Used for alternate login methods:
    If the user exists already then password is updated.
    If the user doesn't yet exist, then they are created.

    Args:
        keys: dict of candidate field values identifying the user
        update_fields: names of fields refreshed on an existing user;
            defaults to ['email'] (None sentinel replaces the previous
            mutable default argument)
        login: when True, assign the resulting user to self.user
        get: when False, return None for an existing match (used by
            register_bare so it never overwrites an existing user)

    Returns:
        the user row, or None when no identifying field was supplied
        (or when get=False and the user already exists).
    """
    if update_fields is None:
        # avoid the shared mutable-default-argument pitfall
        update_fields = ['email']
    table_user = self.table_user()
    create_user = self.settings.cas_create_user
    user = None
    checks = []
    # make a guess about who this user is
    guess_fields = ['registration_id', 'username', 'email']
    if self.settings.login_userfield:
        guess_fields.append(self.settings.login_userfield)
    for fieldname in guess_fields:
        if fieldname in table_user.fields() and \
                keys.get(fieldname, None):
            checks.append(fieldname)
            value = keys[fieldname]
            user = table_user(**{fieldname: value})
            if user:
                break
    if not checks:
        return None
    if 'registration_id' not in keys:
        keys['registration_id'] = keys[checks[0]]
    # if we think we found the user but registration_id does not match,
    # make new user
    if 'registration_id' in checks \
            and user \
            and user.registration_id \
            and ('registration_id' not in keys or user.registration_id != str(keys['registration_id'])):
        user = None  # THINK MORE ABOUT THIS? DO WE TRUST OPENID PROVIDER?
    if user:
        if not get:
            # added for register_bare to avoid overwriting users
            return None
        update_keys = dict(registration_id=keys['registration_id'])
        for key in update_fields:
            if key in keys:
                update_keys[key] = keys[key]
        user.update_record(**update_keys)
    elif checks:
        if create_user is False:
            # Remove current open session a send message
            self.logout(next=None, onlogout=None, log=None)
            raise HTTP(403, "Forbidden. User need to be created first.")
        if 'first_name' not in keys and 'first_name' in table_user.fields:
            guess = keys.get('email', 'anonymous').split('@')[0]
            keys['first_name'] = keys.get('username', guess)
        vars = table_user._filter_fields(keys)
        user_id = table_user.insert(**vars)
        user = table_user[user_id]
        if self.settings.create_user_groups:
            group_id = self.add_group(self.settings.create_user_groups % user)
            self.add_membership(group_id, user_id)
        if self.settings.everybody_group_id:
            self.add_membership(self.settings.everybody_group_id, user_id)
        if login:
            self.user = user
        if self.settings.register_onaccept:
            callback(self.settings.register_onaccept, Storage(vars=user))
    return user
def basic(self, basic_auth_realm=False):
    """
    Performs basic login.

    Args:
        basic_auth_realm: optional basic http authentication realm. Can take
            str or unicode or function or callable or boolean.

    Reads current.request.env.http_authorization
    and returns basic_allowed, basic_accepted, user.

    If basic_auth_realm is a callable, its return value is used to set
    the basic authentication realm; if it's a string its content is
    used instead.  Otherwise the basic authentication realm is set to
    the application name.  If basic_auth_realm is None or False (the
    default) the behavior is to skip sending any challenge.
    """
    if not self.settings.allow_basic_login:
        return (False, False, False)
    basic = current.request.env.http_authorization
    if basic_auth_realm:
        if callable(basic_auth_realm):
            # normalize a callable realm to its returned value first;
            # previously a callable's string result fell through every
            # branch and left basic_realm unbound (NameError below)
            basic_auth_realm = basic_auth_realm()
        if basic_auth_realm is True:
            basic_realm = '' + current.request.application
        else:
            basic_realm = to_unicode(basic_auth_realm)
        http_401 = HTTP(401, 'Not Authorized', **{'WWW-Authenticate': 'Basic realm="' + basic_realm + '"'})
    if not basic or not basic[:6].lower() == 'basic ':
        if basic_auth_realm:
            # challenge the client when a realm was requested
            raise http_401
        return (True, False, False)
    (username, sep, password) = base64.b64decode(basic[6:]).partition(b':')
    is_valid_user = sep and self.login_bare(username, password)
    if not is_valid_user and basic_auth_realm:
        raise http_401
    return (True, True, is_valid_user)
def _get_login_settings(self):
    """Resolve the user table plus the login and password field names."""
    table_user = self.table_user()
    userfield = self.settings.login_userfield
    if not userfield:
        # no explicit login field configured: prefer 'username' when
        # the table defines one, otherwise fall back to 'email'
        userfield = 'username' if 'username' in table_user.fields else 'email'
    passfield = self.settings.password_field
    return Storage({'table_user': table_user,
                    'userfield': userfield,
                    'passfield': passfield})
def login_bare(self, username, password):
    """
    Logins user as specified by username (or email) and password

    Returns the user row on success, False otherwise.
    """
    settings = self._get_login_settings()
    user = settings.table_user(**{settings.userfield: username})
    if user and user.get(settings.passfield, False):
        # run the raw password through the field validator so it is
        # hashed the same way as the stored value before comparing
        password = settings.table_user[
            settings.passfield].validate(password)[0]
        if ((user.registration_key is None or
             not user.registration_key.strip()) and
                password == user[settings.passfield]):
            self.login_user(user)
            return user
    else:
        # user not in database try other login methods
        for login_method in self.settings.login_methods:
            if login_method != self and login_method(username, password):
                self.user = user
                return user
    return False
def register_bare(self, **fields):
    """
    Registers a user as specified by username (or email)
    and a raw password.
    """
    settings = self._get_login_settings()
    # a password is optional: users registered without one will have
    # to reset their password before they can log in
    raw_password = fields.get(settings.passfield)
    if raw_password:
        fields[settings.passfield] = settings.table_user[
            settings.passfield].validate(raw_password, None)[0]
    if not fields.get(settings.userfield):
        raise ValueError('register_bare: userfield not provided or invalid')
    user = self.get_or_create_user(fields, login=False, get=False,
                                   update_fields=self.settings.update_fields)
    # get_or_create_user ignores duplicate records and returns None
    return user if user else False
def cas_login(self,
              next=DEFAULT,
              onvalidation=DEFAULT,
              onaccept=DEFAULT,
              log=DEFAULT,
              version=2,
              ):
    """CAS provider 'login' endpoint.

    Requires a ``service`` URL (from the request or the session) and a
    host within the configured cas_domains; on success issues an
    'ST-' ticket and sends the browser back to the service.
    """
    request = current.request
    response = current.response
    session = current.session
    db, table = self.db, self.table_cas()
    session._cas_service = request.vars.service or session._cas_service
    if request.env.http_host not in self.settings.cas_domains or \
            not session._cas_service:
        raise HTTP(403, 'not authorized')

    def allow_access(interactivelogin=False):
        # reuse an existing ticket for this service/user pair if present
        row = table(service=session._cas_service, user_id=self.user.id)
        if row:
            ticket = row.ticket
        else:
            ticket = 'ST-' + web2py_uuid()
            table.insert(service=session._cas_service,
                         user_id=self.user.id,
                         ticket=ticket,
                         created_on=request.now,
                         renew=interactivelogin)
        service = session._cas_service
        query_sep = '&' if '?' in service else '?'
        del session._cas_service
        if 'warn' in request.vars and not interactivelogin:
            # CAS 'warn' mode: show a link instead of redirecting at once
            response.headers[
                'refresh'] = "5;URL=%s" % service + query_sep + "ticket=" + ticket
            return A("Continue to %s" % service,
                     _href=service + query_sep + "ticket=" + ticket)
        else:
            redirect(service + query_sep + "ticket=" + ticket)

    if self.is_logged_in() and 'renew' not in request.vars:
        return allow_access()
    elif not self.is_logged_in() and 'gateway' in request.vars:
        # CAS 'gateway' mode: bounce back without forcing a login
        redirect(session._cas_service)

    def cas_onaccept(form, onaccept=onaccept):
        if onaccept is not DEFAULT:
            onaccept(form)
        return allow_access(interactivelogin=True)
    return self.login(next, onvalidation, cas_onaccept, log)
def cas_validate(self, version=2, proxy=False):
    """CAS provider ticket-validation endpoint (protocol v1/v2/v3).

    Looks up the submitted service ticket; a ticket is single-use
    (the row is deleted once validated).  The response is always
    delivered by raising HTTP(200, ...).
    """
    request = current.request
    db, table = self.db, self.table_cas()
    current.response.headers['Content-Type'] = 'text'
    ticket = request.vars.ticket
    renew = 'renew' in request.vars
    row = table(ticket=ticket)
    success = False
    if row:
        userfield = self.settings.login_userfield or 'username' \
            if 'username' in table.fields else 'email'
        # If ticket is a service Ticket and RENEW flag respected
        if ticket[0:3] == 'ST-' and \
                not ((row.renew and renew) ^ renew):
            user = self.table_user()(row.user_id)
            row.delete_record()
            success = True

    def build_response(body):
        # wrap *body* in the standard cas:serviceResponse XML envelope
        xml_body = to_native(TAG['cas:serviceResponse'](
            body, **{'_xmlns:cas': 'http://www.yale.edu/tp/cas'}).xml())
        return '<?xml version="1.0" encoding="UTF-8"?>\n' + xml_body

    if success:
        if version == 1:
            message = 'yes\n%s' % user[userfield]
        elif version == 3:
            username = user.get('username', user[userfield])
            message = build_response(
                TAG['cas:authenticationSuccess'](
                    TAG['cas:user'](username),
                    TAG['cas:attributes'](
                        *[TAG['cas:' + field.name](user[field.name])
                          for field in self.table_user()
                          if field.readable])))
        else:  # assume version 2
            username = user.get('username', user[userfield])
            message = build_response(
                TAG['cas:authenticationSuccess'](
                    TAG['cas:user'](username),
                    *[TAG['cas:' + field.name](user[field.name])
                      for field in self.table_user()
                      if field.readable]))
    else:
        if version == 1:
            message = 'no\n'
        elif row:
            message = build_response(TAG['cas:authenticationFailure']())
        else:
            message = build_response(
                TAG['cas:authenticationFailure'](
                    'Ticket %s not recognized' % ticket,
                    _code='INVALID TICKET'))
    raise HTTP(200, message)
def _reset_two_factor_auth(self, session):
"""
When two-step authentication is enabled, this function is used to
clear the session after successfully completing second challenge
or when the maximum number of tries allowed has expired.
"""
session.auth_two_factor_user = None
session.auth_two_factor = None
session.auth_two_factor_enabled = False
# Set the number of attempts. It should be more than 1.
session.auth_two_factor_tries_left = self.settings.auth_two_factor_tries_left
def when_is_logged_in_bypass_next_in_url(self, next, session):
    """
    Skip the credentials prompt when the user is already authenticated.

    Use this when a page whose URL contains
    "user/login?_next=NEXT_COMPONENT" is refreshed while the user
    already holds a valid session: instead of showing the login form
    again, redirect straight to *next*.
    """
    if self.is_logged_in():
        # consume the stored _next target so it is not reused later
        if next == session._auth_next:
            del session._auth_next
        redirect(next, client_side=self.settings.client_side)
def login(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a login form
"""
settings = self.settings
request = current.request
response = current.response
session = current.session
# use session for federated login
snext = self.get_vars_next()
if snext:
session._auth_next = snext
elif session._auth_next:
snext = session._auth_next
# pass
if next is DEFAULT:
# important for security
next = settings.login_next
if callable(next):
next = next()
user_next = snext
if user_next:
external = user_next.split('://')
if external[0].lower() in ['http', 'https', 'ftp']:
host_next = user_next.split('//', 1)[-1].split('/')[0]
if host_next in settings.cas_domains:
next = user_next
else:
next = user_next
# Avoid asking unnecessary user credentials when user is logged in
self.when_is_logged_in_bypass_next_in_url(next=next, session=session)
# Moved here to avoid unnecessary execution in case of redirection to next in case of logged in user
table_user = self.table_user()
if 'username' in table_user.fields or \
not settings.login_email_validate:
tmpvalidator = IS_NOT_EMPTY(error_message=self.messages.is_empty)
if not settings.username_case_sensitive:
tmpvalidator = [IS_LOWER(), tmpvalidator]
else:
tmpvalidator = IS_EMAIL(error_message=self.messages.invalid_email)
if not settings.email_case_sensitive:
tmpvalidator = [IS_LOWER(), tmpvalidator]
passfield = settings.password_field
try:
table_user[passfield].requires[-1].min_length = 0
except:
pass
if onvalidation is DEFAULT:
onvalidation = settings.login_onvalidation
if onaccept is DEFAULT:
onaccept = settings.login_onaccept
if log is DEFAULT:
log = self.messages['login_log']
onfail = settings.login_onfail
user = None # default
# Setup the default field used for the form
multi_login = False
if self.settings.login_userfield:
username = self.settings.login_userfield
else:
if 'username' in table_user.fields:
username = 'username'
else:
username = 'email'
if self.settings.multi_login:
multi_login = True
old_requires = table_user[username].requires
table_user[username].requires = tmpvalidator
# If two-factor authentication is enabled, and the maximum
# number of tries allowed is used up, reset the session to
# pre-login state with two-factor auth
if session.auth_two_factor_enabled and session.auth_two_factor_tries_left < 1:
# Exceeded maximum allowed tries for this code. Require user to enter
# username and password again.
user = None
accepted_form = False
self._reset_two_factor_auth(session)
# Redirect to the default 'next' page without logging
# in. If that page requires login, user will be redirected
# back to the main login form
redirect(next, client_side=settings.client_side)
# Before showing the default login form, check whether
# we are already on the second step of two-step authentication.
# If we are, then skip this login form and use the form for the
# second challenge instead.
# Note to devs: The code inside the if-block is unchanged from the
# previous version of this file, other than for indentation inside
# to put it inside the if-block
if session.auth_two_factor_user is None:
if settings.remember_me_form:
extra_fields = [
Field('remember_me', 'boolean', default=False,
label=self.messages.label_remember_me)]
else:
extra_fields = []
# do we use our own login form, or from a central source?
if settings.login_form == self:
form = SQLFORM(table_user,
fields=[username, passfield],
hidden=dict(_next=next),
showid=settings.showid,
submit_button=self.messages.login_button,
delete_label=self.messages.delete_label,
formstyle=settings.formstyle,
separator=settings.label_separator,
extra_fields=extra_fields,
)
captcha = settings.login_captcha or \
(settings.login_captcha is not False and settings.captcha)
if captcha:
addrow(form, captcha.label, captcha, captcha.comment,
settings.formstyle, 'captcha__row')
accepted_form = False
specific_error = self.messages.invalid_user
if form.accepts(request, session if self.csrf_prevention else None,
formname='login', dbio=False,
onvalidation=onvalidation,
hideerror=settings.hideerror):
accepted_form = True
# check for username in db
entered_username = form.vars[username]
if multi_login and '@' in entered_username:
# if '@' in username check for email, not username
user = table_user(email=entered_username)
else:
user = table_user(**{username: entered_username})
if user:
# user in db, check if registration pending or disabled
specific_error = self.messages.invalid_password
temp_user = user
if (temp_user.registration_key or '').startswith('pending'):
response.flash = self.messages.registration_pending
return form
elif temp_user.registration_key in ('disabled', 'blocked'):
response.flash = self.messages.login_disabled
return form
elif (temp_user.registration_key is not None and temp_user.registration_key.strip()):
response.flash = \
self.messages.registration_verifying
return form
# try alternate logins 1st as these have the
# current version of the password
user = None
for login_method in settings.login_methods:
if login_method != self and \
login_method(request.vars[username],
request.vars[passfield]):
if self not in settings.login_methods:
# do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(
form.vars, settings.update_fields)
break
if not user:
# alternates have failed, maybe because service inaccessible
if settings.login_methods[0] == self:
# try logging in locally using cached credentials
if form.vars.get(passfield, '') == temp_user[passfield]:
# success
user = temp_user
else:
# user not in db
if not settings.alternate_requires_registration:
# we're allowed to auto-register users from external systems
for login_method in settings.login_methods:
if login_method != self and \
login_method(request.vars[username],
request.vars[passfield]):
if self not in settings.login_methods:
# do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(
form.vars, settings.update_fields)
break
if not user:
self.log_event(self.messages['login_failed_log'],
request.post_vars)
# invalid login
session.flash = specific_error if self.settings.login_specify_error else self.messages.invalid_login
callback(onfail, None)
redirect(self.url(args=request.args, vars=request.get_vars),client_side=settings.client_side)
else: # use a central authentication server
cas = settings.login_form
cas_user = cas.get_user()
if cas_user:
cas_user[passfield] = None
user = self.get_or_create_user(
table_user._filter_fields(cas_user),
settings.update_fields)
elif hasattr(cas, 'login_form'):
return cas.login_form()
else:
# we need to pass through login again before going on
next = self.url(settings.function, args='login')
redirect(cas.login_url(next),
client_side=settings.client_side)
# Extra login logic for two-factor authentication
#################################################
# If the 'user' variable has a value, this means that the first
# authentication step was successful (i.e. user provided correct
# username and password at the first challenge).
# Check if this user is signed up for two-factor authentication
# If auth.settings.auth_two_factor_enabled it will enable two factor
# for all the app. Another way to anble two factor is that the user
# must be part of a group that is called auth.settings.two_factor_authentication_group
if user and self.settings.auth_two_factor_enabled is True:
session.auth_two_factor_enabled = True
elif user and self.settings.two_factor_authentication_group:
role = self.settings.two_factor_authentication_group
session.auth_two_factor_enabled = self.has_membership(user_id=user.id, role=role)
# challenge
if session.auth_two_factor_enabled:
form = SQLFORM.factory(
Field('authentication_code',
label=self.messages.label_two_factor,
required=True,
comment=self.messages.two_factor_comment),
hidden=dict(_next=next),
formstyle=settings.formstyle,
separator=settings.label_separator
)
# accepted_form is used by some default web2py code later in the
# function that handles running specified functions before redirect
# Set it to False until the challenge form is accepted.
accepted_form = False
# Handle the case when a user has submitted the login/password
# form successfully, and the password has been validated, but
# the two-factor form has not been displayed or validated yet.
if session.auth_two_factor_user is None and user is not None:
session.auth_two_factor_user = user # store the validated user and associate with this session
session.auth_two_factor = random.randint(100000, 999999)
session.auth_two_factor_tries_left = self.settings.auth_two_factor_tries_left
# Set the way we generate the code or we send the code. For example using SMS...
two_factor_methods = self.settings.two_factor_methods
if not two_factor_methods:
# TODO: Add some error checking to handle cases where email cannot be sent
self.settings.mailer.send(
to=user.email,
subject=self.messages.retrieve_two_factor_code_subject,
message=self.messages.retrieve_two_factor_code.format(session.auth_two_factor))
else:
# Check for all method. It is possible to have multiples
for two_factor_method in two_factor_methods:
try:
# By default we use session.auth_two_factor generated before.
session.auth_two_factor = two_factor_method(user, session.auth_two_factor)
except:
pass
else:
break
if form.accepts(request, session if self.csrf_prevention else None,
formname='login', dbio=False,
onvalidation=onvalidation,
hideerror=settings.hideerror):
accepted_form = True
"""
The lists is executed after form validation for each of the corresponding action.
For example, in your model:
In your models copy and paste:
# Before define tables, we add some extra field to auth_user
auth.settings.extra_fields['auth_user'] = [
Field('motp_secret', 'password', length=512, default='', label='MOTP Secret'),
Field('motp_pin', 'string', length=128, default='', label='MOTP PIN')]
OFFSET = 60 # Be sure is the same in your OTP Client
# Set session.auth_two_factor to None. Because the code is generated by external app.
# This will avoid to use the default setting and send a code by email.
def _set_two_factor(user, auth_two_factor):
return None
def verify_otp(user, otp):
import time
from hashlib import md5
epoch_time = int(time.time())
time_start = int(str(epoch_time - OFFSET)[:-1])
time_end = int(str(epoch_time + OFFSET)[:-1])
for t in range(time_start - 1, time_end + 1):
to_hash = str(t) + user.motp_secret + user.motp_pin
hash = md5(to_hash).hexdigest()[:6]
if otp == hash:
return hash
auth.settings.auth_two_factor_enabled = True
auth.messages.two_factor_comment = "Verify your OTP Client for the code."
auth.settings.two_factor_methods = [lambda user,
auth_two_factor: _set_two_factor(user, auth_two_factor)]
auth.settings.two_factor_onvalidation = [lambda user, otp: verify_otp(user, otp)]
"""
if self.settings.two_factor_onvalidation:
for two_factor_onvalidation in self.settings.two_factor_onvalidation:
try:
session.auth_two_factor = \
two_factor_onvalidation(session.auth_two_factor_user, form.vars['authentication_code'])
except:
pass
else:
break
if form.vars['authentication_code'] == str(session.auth_two_factor):
# Handle the case when the two-factor form has been successfully validated
# and the user was previously stored (the current user should be None because
# in this case, the previous username/password login form should not be displayed.
# This will allow the code after the 2-factor authentication block to proceed as
# normal.
if user is None or user == session.auth_two_factor_user:
user = session.auth_two_factor_user
# For security, because the username stored in the
# session somehow does not match the just validated
# user. Should not be possible without session stealing
# which is hard with SSL.
elif user != session.auth_two_factor_user:
user = None
# Either way, the user and code associated with this session should
# be removed. This handles cases where the session login may have
# expired but browser window is open, so the old session key and
# session usernamem will still exist
self._reset_two_factor_auth(session)
else:
session.auth_two_factor_tries_left -= 1
# If the number of retries are higher than auth_two_factor_tries_left
# Require user to enter username and password again.
if session.auth_two_factor_enabled and session.auth_two_factor_tries_left < 1:
# Exceeded maximum allowed tries for this code. Require user to enter
# username and password again.
user = None
accepted_form = False
self._reset_two_factor_auth(session)
# Redirect to the default 'next' page without logging
# in. If that page requires login, user will be redirected
# back to the main login form
redirect(next, client_side=settings.client_side)
response.flash = self.messages.invalid_two_factor_code.format(session.auth_two_factor_tries_left)
return form
else:
return form
# End login logic for two-factor authentication
# process authenticated users
if user:
user = Row(table_user._filter_fields(user, id=True))
# process authenticated users
# user wants to be logged in for longer
self.login_user(user)
session.auth.expiration = \
request.post_vars.remember_me and \
settings.long_expiration or \
settings.expiration
session.auth.remember_me = 'remember_me' in request.post_vars
self.log_event(log, user)
session.flash = self.messages.logged_in
# how to continue
if settings.login_form == self:
if accepted_form:
callback(onaccept, form)
if next == session._auth_next:
session._auth_next = None
next = replace_id(next, form)
redirect(next, client_side=settings.client_side)
table_user[username].requires = old_requires
return form
elif user:
callback(onaccept, None)
if next == session._auth_next:
del session._auth_next
redirect(next, client_side=settings.client_side)
def logout(self, next=DEFAULT, onlogout=DEFAULT, log=DEFAULT):
"""
Logouts and redirects to login
"""
# Clear out 2-step authentication information if user logs
# out. This information is also cleared on successful login.
self._reset_two_factor_auth(current.session)
if next is DEFAULT:
next = self.get_vars_next() or self.settings.logout_next
if onlogout is DEFAULT:
onlogout = self.settings.logout_onlogout
if onlogout:
onlogout(self.user)
if log is DEFAULT:
log = self.messages['logout_log']
if self.user:
self.log_event(log, self.user)
if self.settings.login_form != self:
cas = self.settings.login_form
cas_user = cas.get_user()
if cas_user:
next = cas.logout_url(next)
current.session.auth = None
self.user = None
if self.settings.renew_session_onlogout:
current.session.renew(clear_session=not self.settings.keep_session_onlogout)
current.session.flash = self.messages.logged_out
if next is not None:
redirect(next)
def logout_bare(self):
self.logout(next=None, onlogout=None, log=None)
    def register(self,
                 next=DEFAULT,
                 onvalidation=DEFAULT,
                 onaccept=DEFAULT,
                 log=DEFAULT,
                 ):
        """
        Returns a registration form.
        Args:
            next: redirect target after successful registration
                (defaults to settings.register_next)
            onvalidation: form validation callback(s)
                (defaults to settings.register_onvalidation)
            onaccept: post-accept callback(s)
                (defaults to settings.register_onaccept)
            log: log_event message template
                (defaults to messages['register_log'])
        """
        table_user = self.table_user()
        request = current.request
        response = current.response
        session = current.session
        # A logged-in user has no business on the registration page.
        if self.is_logged_in():
            redirect(self.settings.logged_url,
                     client_side=self.settings.client_side)
        # Resolve the DEFAULT sentinels from settings/messages.
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.register_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.register_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.register_onaccept
        if log is DEFAULT:
            log = self.messages['register_log']
        table_user = self.table_user()  # NOTE(review): redundant, already fetched above
        # Login identifier: explicit setting, else 'username' if present, else 'email'.
        if self.settings.login_userfield:
            username = self.settings.login_userfield
        elif 'username' in table_user.fields:
            username = 'username'
        else:
            username = 'email'
        # Ensure the username field is unique.
        unique_validator = IS_NOT_IN_DB(self.db, table_user[username])
        if not table_user[username].requires:
            table_user[username].requires = unique_validator
        elif isinstance(table_user[username].requires, (list, tuple)):
            # Only add the uniqueness check when none is present yet.
            if not any([isinstance(validator, IS_NOT_IN_DB) for validator in
                        table_user[username].requires]):
                if isinstance(table_user[username].requires, list):
                    table_user[username].requires.append(unique_validator)
                else:
                    table_user[username].requires += (unique_validator, )
        elif not isinstance(table_user[username].requires, IS_NOT_IN_DB):
            table_user[username].requires = [table_user[username].requires,
                                             unique_validator]
        passfield = self.settings.password_field
        formstyle = self.settings.formstyle
        try:  # Make sure we have our original minimum length as other auth forms change it
            table_user[passfield].requires[-1].min_length = self.settings.password_min_length
        except:
            pass
        # Optionally add a confirmation field that must equal the password.
        if self.settings.register_verify_password:
            if self.settings.register_fields is None:
                self.settings.register_fields = [f.name for f in table_user if f.writable and not f.compute]
            k = self.settings.register_fields.index(passfield)
            self.settings.register_fields.insert(k + 1, "password_two")
            extra_fields = [
                Field("password_two", "password",
                      requires=IS_EQUAL_TO(request.post_vars.get(passfield, None),
                                           error_message=self.messages.mismatched_password),
                      label=current.T("Confirm Password"))]
        else:
            extra_fields = []
        form = SQLFORM(table_user,
                       fields=self.settings.register_fields,
                       hidden=dict(_next=next),
                       showid=self.settings.showid,
                       submit_button=self.messages.register_button,
                       delete_label=self.messages.delete_label,
                       formstyle=formstyle,
                       separator=self.settings.label_separator,
                       extra_fields=extra_fields
                       )
        captcha = self.settings.register_captcha or self.settings.captcha
        if captcha:
            addrow(form, captcha.label, captcha,
                   captcha.comment, self.settings.formstyle, 'captcha__row')
        # Add a message if specified
        if self.settings.pre_registration_div:
            addrow(form, '',
                   DIV(_id="pre-reg", *self.settings.pre_registration_div),
                   '', formstyle, '')
        # New records start with a verification key; a 'pending-' prefix
        # marks records that additionally await admin approval.
        key = web2py_uuid()
        if self.settings.registration_requires_approval:
            key = 'pending-' + key
        table_user.registration_key.default = key
        if form.accepts(request, session if self.csrf_prevention else None,
                        formname='register',
                        onvalidation=onvalidation,
                        hideerror=self.settings.hideerror):
            description = self.messages.group_description % form.vars
            # Optionally give every new user a personal group.
            if self.settings.create_user_groups:
                group_id = self.add_group(self.settings.create_user_groups % form.vars, description)
                self.add_membership(group_id, form.vars.id)
            if self.settings.everybody_group_id:
                self.add_membership(self.settings.everybody_group_id, form.vars.id)
            if self.settings.registration_requires_verification:
                link = self.url(
                    self.settings.function, args=('verify_email', key), scheme=True)
                d = dict(form.vars)
                d.update(dict(key=key, link=link, username=form.vars[username],
                              firstname=form.vars['firstname'],
                              lastname=form.vars['lastname']))
                # Roll back the insert if the verification mail cannot be sent.
                if not (self.settings.mailer and self.settings.mailer.send(
                        to=form.vars.email,
                        subject=self.messages.verify_email_subject,
                        message=self.messages.verify_email % d)):
                    self.db.rollback()
                    response.flash = self.messages.unable_send_email
                    return form
                session.flash = self.messages.email_sent
            if self.settings.registration_requires_approval and \
               not self.settings.registration_requires_verification:
                table_user[form.vars.id] = dict(registration_key='pending')
                session.flash = self.messages.registration_pending
            elif (not self.settings.registration_requires_verification or self.settings.login_after_registration):
                if not self.settings.registration_requires_verification:
                    table_user[form.vars.id] = dict(registration_key='')
                session.flash = self.messages.registration_successful
                user = table_user(**{username: form.vars[username]})
                self.login_user(user)
                session.flash = self.messages.logged_in
            self.log_event(log, form.vars)
            callback(onaccept, form)
            if not next:
                next = self.url(args=request.args)
            else:
                next = replace_id(next, form)
            redirect(next, client_side=self.settings.client_side)
        return form
def verify_email(self,
next=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Action used to verify the registration email
"""
key = getarg(-1)
table_user = self.table_user()
user = table_user(registration_key=key)
if not user:
redirect(self.settings.login_url)
if self.settings.registration_requires_approval:
user.update_record(registration_key='pending')
current.session.flash = self.messages.registration_pending
else:
user.update_record(registration_key='')
current.session.flash = self.messages.email_verified
# make sure session has same user.registrato_key as db record
if current.session.auth and current.session.auth.user:
current.session.auth.user.registration_key = user.registration_key
if log is DEFAULT:
log = self.messages['verify_email_log']
if next is DEFAULT:
next = self.settings.verify_email_next
if onaccept is DEFAULT:
onaccept = self.settings.verify_email_onaccept
self.log_event(log, user)
callback(onaccept, user)
redirect(next)
    def retrieve_username(self,
                          next=DEFAULT,
                          onvalidation=DEFAULT,
                          onaccept=DEFAULT,
                          log=DEFAULT,
                          ):
        """
        Returns a form to retrieve the user username
        (only if there is a username field).
        Args:
            next: redirect target after success
                (defaults to settings.retrieve_username_next)
            onvalidation: form validation callback(s)
            onaccept: post-accept callback(s)
            log: log_event message template
        """
        table_user = self.table_user()
        # This action only makes sense when the user table has a username.
        if 'username' not in table_user.fields:
            raise HTTP(404)
        request = current.request
        response = current.response
        session = current.session
        # Captcha: the explicit retrieve_username_captcha wins; otherwise the
        # global captcha applies unless explicitly disabled with False.
        captcha = self.settings.retrieve_username_captcha or \
                (self.settings.retrieve_username_captcha is not False and self.settings.captcha)
        # Without a mailer there is no way to deliver the username.
        if not self.settings.mailer:
            response.flash = self.messages.function_disabled
            return ''
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.retrieve_username_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.retrieve_username_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.retrieve_username_onaccept
        if log is DEFAULT:
            log = self.messages['retrieve_username_log']
        # Temporarily require the email to exist in the table; restored below.
        old_requires = table_user.email.requires
        table_user.email.requires = [IS_IN_DB(self.db, table_user.email,
                                              error_message=self.messages.invalid_email)]
        form = SQLFORM(table_user,
                       fields=['email'],
                       hidden=dict(_next=next),
                       showid=self.settings.showid,
                       submit_button=self.messages.submit_button,
                       delete_label=self.messages.delete_label,
                       formstyle=self.settings.formstyle,
                       separator=self.settings.label_separator
                       )
        if captcha:
            addrow(form, captcha.label, captcha,
                   captcha.comment, self.settings.formstyle, 'captcha__row')
        if form.accepts(request, session if self.csrf_prevention else None,
                        formname='retrieve_username', dbio=False,
                        onvalidation=onvalidation, hideerror=self.settings.hideerror):
            users = table_user._db(table_user.email == form.vars.email).select()
            if not users:
                current.session.flash = \
                    self.messages.invalid_email
                redirect(self.url(args=request.args))
            # One email may map to several accounts; send them all at once.
            username = ', '.join(u.username for u in users)
            self.settings.mailer.send(to=form.vars.email,
                                      subject=self.messages.retrieve_username_subject,
                                      message=self.messages.retrieve_username % dict(username=username))
            session.flash = self.messages.email_sent
            for user in users:
                self.log_event(log, user)
            callback(onaccept, form)
            if not next:
                next = self.url(args=request.args)
            else:
                next = replace_id(next, form)
            redirect(next)
        table_user.email.requires = old_requires
        return form
def random_password(self):
import string
import random
password = ''
specials = r'!#$*'
for i in range(0, 3):
password += random.choice(string.ascii_lowercase)
password += random.choice(string.ascii_uppercase)
password += random.choice(string.digits)
password += random.choice(specials)
return ''.join(random.sample(password, len(password)))
    def reset_password_deprecated(self,
                                  next=DEFAULT,
                                  onvalidation=DEFAULT,
                                  onaccept=DEFAULT,
                                  log=DEFAULT,
                                  ):
        """
        Returns a form to reset the user password (deprecated).
        Generates a brand-new random password and emails it to the user in
        clear text, which is why this flow is deprecated in favour of
        request_reset_password / reset_password.
        """
        table_user = self.table_user()
        request = current.request
        response = current.response
        session = current.session
        # Without a mailer the new password could never be delivered.
        if not self.settings.mailer:
            response.flash = self.messages.function_disabled
            return ''
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.retrieve_password_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.retrieve_password_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.retrieve_password_onaccept
        if log is DEFAULT:
            log = self.messages['retrieve_password_log']
        # Temporarily require the email to exist in the table; restored below.
        old_requires = table_user.email.requires
        table_user.email.requires = [IS_IN_DB(self.db, table_user.email,
                                              error_message=self.messages.invalid_email)]
        form = SQLFORM(table_user,
                       fields=['email'],
                       hidden=dict(_next=next),
                       showid=self.settings.showid,
                       submit_button=self.messages.submit_button,
                       delete_label=self.messages.delete_label,
                       formstyle=self.settings.formstyle,
                       separator=self.settings.label_separator
                       )
        if form.accepts(request, session if self.csrf_prevention else None,
                        formname='retrieve_password', dbio=False,
                        onvalidation=onvalidation, hideerror=self.settings.hideerror):
            user = table_user(email=form.vars.email)
            if not user:
                current.session.flash = \
                    self.messages.invalid_email
                redirect(self.url(args=request.args))
            # Blocked / not-yet-approved accounts may not reset their password.
            key = user.registration_key
            if key in ('pending', 'disabled', 'blocked') or (key or '').startswith('pending'):
                current.session.flash = \
                    self.messages.registration_pending
                redirect(self.url(args=request.args))
            # Store the hashed new password and clear the registration key.
            password = self.random_password()
            passfield = self.settings.password_field
            d = {
                passfield: str(table_user[passfield].validate(password)[0]),
                'registration_key': ''
            }
            user.update_record(**d)
            if self.settings.mailer and \
               self.settings.mailer.send(to=form.vars.email,
                                         subject=self.messages.retrieve_password_subject,
                                         message=self.messages.retrieve_password % dict(password=password)):
                session.flash = self.messages.email_sent
            else:
                session.flash = self.messages.unable_send_email
            self.log_event(log, user)
            callback(onaccept, form)
            if not next:
                next = self.url(args=request.args)
            else:
                next = replace_id(next, form)
            redirect(next)
        table_user.email.requires = old_requires
        return form
    def confirm_registration(self,
                             next=DEFAULT,
                             onvalidation=DEFAULT,
                             onaccept=DEFAULT,
                             log=DEFAULT,
                             ):
        """
        Returns a form to confirm user registration.
        The reset key (timestamp-uuid, produced by email_registration) is
        taken from the request, optionally hidden in the session to keep it
        out of the URL, validated for age (24h), and then the user completes
        their profile and chooses a password.
        """
        table_user = self.table_user()
        request = current.request
        # response = current.response
        session = current.session
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.reset_password_next
        if self.settings.prevent_password_reset_attacks:
            # Move the key from the URL into the session and redirect to a
            # key-less URL, so the key does not leak via Referer headers/logs.
            key = request.vars.key
            if not key and len(request.args) > 1:
                key = request.args[-1]
            if key:
                session._reset_password_key = key
                if next:
                    redirect_vars = {'_next': next}
                else:
                    redirect_vars = {}
                redirect(self.url(args='confirm_registration',
                                  vars=redirect_vars))
            else:
                key = session._reset_password_key
        else:
            key = request.vars.key or getarg(-1)
        try:
            # Key format is "<epoch>-<uuid>"; reject keys older than 24 hours
            # or ones that do not match any user record.
            t0 = int(key.split('-')[0])
            if time.time() - t0 > 60 * 60 * 24:
                raise Exception
            user = table_user(reset_password_key=key)
            if not user:
                raise Exception
        except Exception as e:
            session.flash = self.messages.invalid_reset_password
            redirect(next, client_side=self.settings.client_side)
        passfield = self.settings.password_field
        form = SQLFORM.factory(
            Field('first_name',
                  label='First Name',
                  required=True),
            Field('last_name',
                  label='Last Name',
                  required=True),
            Field('new_password', 'password',
                  label=self.messages.new_password,
                  requires=self.table_user()[passfield].requires),
            Field('new_password2', 'password',
                  label=self.messages.verify_password,
                  requires=[IS_EXPR('value==%s' % repr(request.vars.new_password),
                                    self.messages.mismatched_password)]),
            submit_button='Confirm Registration',
            hidden=dict(_next=next),
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        if form.process().accepted:
            # Clearing both keys marks the account verified and consumes
            # the one-time reset key.
            user.update_record(
                **{passfield: str(form.vars.new_password),
                   'first_name': str(form.vars.first_name),
                   'last_name': str(form.vars.last_name),
                   'registration_key': '',
                   'reset_password_key': ''})
            session.flash = self.messages.password_changed
            if self.settings.login_after_password_change:
                self.login_user(user)
            redirect(next, client_side=self.settings.client_side)
        return form
def email_registration(self, subject, body, user):
"""
Sends and email invitation to a user informing they have been registered with the application
"""
reset_password_key = str(int(time.time())) + '-' + web2py_uuid()
link = self.url(self.settings.function,
args=('confirm_registration',), vars={'key': reset_password_key},
scheme=True)
d = dict(user)
d.update(dict(key=reset_password_key, link=link, site=current.request.env.http_host))
if self.settings.mailer and self.settings.mailer.send(
to=user.email,
subject=subject % d,
message=body % d):
user.update_record(reset_password_key=reset_password_key)
return True
return False
def bulk_register(self, max_emails=100):
"""
Creates a form for ther user to send invites to other users to join
"""
if not self.user:
redirect(self.settings.login_url)
if not self.settings.bulk_register_enabled:
return HTTP(404)
form = SQLFORM.factory(
Field('subject', 'string', default=self.messages.bulk_invite_subject, requires=IS_NOT_EMPTY()),
Field('emails', 'text', requires=IS_NOT_EMPTY()),
Field('message', 'text', default=self.messages.bulk_invite_body, requires=IS_NOT_EMPTY()),
formstyle=self.settings.formstyle)
if form.process().accepted:
emails = re.compile('[^\s\'"@<>,;:]+\@[^\s\'"@<>,;:]+').findall(form.vars.emails)
# send the invitations
emails_sent = []
emails_fail = []
emails_exist = []
for email in emails[:max_emails]:
if self.table_user()(email=email):
emails_exist.append(email)
else:
user = self.register_bare(email=email)
if self.email_registration(form.vars.subject, form.vars.message, user):
emails_sent.append(email)
else:
emails_fail.append(email)
emails_fail += emails[max_emails:]
form = DIV(H4('Emails sent'), UL(*[A(x, _href='mailto:' + x) for x in emails_sent]),
H4('Emails failed'), UL(*[A(x, _href='mailto:' + x) for x in emails_fail]),
H4('Emails existing'), UL(*[A(x, _href='mailto:' + x) for x in emails_exist]))
return form
def manage_tokens(self):
if not self.user:
redirect(self.settings.login_url)
table_token = self.table_token()
table_token.user_id.writable = False
table_token.user_id.default = self.user.id
table_token.token.writable = False
if current.request.args(1) == 'new':
table_token.token.readable = False
form = SQLFORM.grid(table_token, args=['manage_tokens'])
return form
    def reset_password(self,
                       next=DEFAULT,
                       onvalidation=DEFAULT,
                       onaccept=DEFAULT,
                       log=DEFAULT,
                       ):
        """
        Returns a form to reset the user password.
        The reset key (timestamp-uuid, produced by email_reset_password) is
        validated for age (24h) and existence before the user may choose a
        new password.
        """
        table_user = self.table_user()
        request = current.request
        # response = current.response
        session = current.session
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.reset_password_next
        if self.settings.prevent_password_reset_attacks:
            # Move the key from the URL into the session and redirect to a
            # key-less URL, so the key does not leak via Referer headers/logs.
            key = request.vars.key
            if key:
                session._reset_password_key = key
                redirect(self.url(args='reset_password'))
            else:
                key = session._reset_password_key
        else:
            key = request.vars.key
        try:
            # Key format is "<epoch>-<uuid>"; reject keys older than 24 hours
            # or ones that do not match any user record.
            t0 = int(key.split('-')[0])
            if time.time() - t0 > 60 * 60 * 24:
                raise Exception
            user = table_user(reset_password_key=key)
            if not user:
                raise Exception
        except Exception:
            session.flash = self.messages.invalid_reset_password
            redirect(next, client_side=self.settings.client_side)
        # Blocked / not-yet-approved accounts may not reset their password.
        key = user.registration_key
        if key in ('pending', 'disabled', 'blocked') or (key or '').startswith('pending'):
            session.flash = self.messages.registration_pending
            redirect(next, client_side=self.settings.client_side)
        if onvalidation is DEFAULT:
            onvalidation = self.settings.reset_password_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.reset_password_onaccept
        passfield = self.settings.password_field
        form = SQLFORM.factory(
            Field('new_password', 'password',
                  label=self.messages.new_password,
                  requires=self.table_user()[passfield].requires),
            Field('new_password2', 'password',
                  label=self.messages.verify_password,
                  requires=[IS_EXPR('value==%s' % repr(request.vars.new_password),
                                    self.messages.mismatched_password)]),
            submit_button=self.messages.password_reset_button,
            hidden=dict(_next=next),
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        if form.accepts(request, session, onvalidation=onvalidation,
                        hideerror=self.settings.hideerror):
            # Clearing both keys marks the account verified and consumes
            # the one-time reset key.
            user.update_record(
                **{passfield: str(form.vars.new_password),
                   'registration_key': '',
                   'reset_password_key': ''})
            session.flash = self.messages.password_changed
            if self.settings.login_after_password_change:
                self.login_user(user)
            callback(onaccept, form)
            redirect(next, client_side=self.settings.client_side)
        return form
def request_reset_password(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a form to reset the user password
"""
table_user = self.table_user()
request = current.request
response = current.response
session = current.session
captcha = self.settings.retrieve_password_captcha or \
(self.settings.retrieve_password_captcha is not False and self.settings.captcha)
if next is DEFAULT:
next = self.get_vars_next() or self.settings.request_reset_password_next
if not self.settings.mailer:
response.flash = self.messages.function_disabled
return ''
if onvalidation is DEFAULT:
onvalidation = self.settings.request_reset_password_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.request_reset_password_onaccept
if log is DEFAULT:
log = self.messages['reset_password_log']
userfield = self.settings.login_userfield or 'username' \
if self.settings.login_userfield or 'username' \
in table_user.fields else 'email'
if userfield == 'email':
table_user.email.requires = [
IS_EMAIL(error_message=self.messages.invalid_email),
IS_IN_DB(self.db, table_user.email,
error_message=self.messages.invalid_email)]
if not self.settings.email_case_sensitive:
table_user.email.requires.insert(0, IS_LOWER())
elif userfield == 'username':
table_user.username.requires = [
IS_IN_DB(self.db, table_user.username,
error_message=self.messages.invalid_username)]
if not self.settings.username_case_sensitive:
table_user.username.requires.insert(0, IS_LOWER())
form = SQLFORM(table_user,
fields=[userfield],
hidden=dict(_next=next),
showid=self.settings.showid,
submit_button=self.messages.password_reset_button,
delete_label=self.messages.delete_label,
formstyle=self.settings.formstyle,
separator=self.settings.label_separator
)
if captcha:
addrow(form, captcha.label, captcha,
captcha.comment, self.settings.formstyle, 'captcha__row')
if form.accepts(request, session if self.csrf_prevention else None,
formname='reset_password', dbio=False,
onvalidation=onvalidation,
hideerror=self.settings.hideerror):
user = table_user(**{userfield: form.vars.get(userfield)})
key = user.registration_key
if not user:
session.flash = self.messages['invalid_%s' % userfield]
redirect(self.url(args=request.args),
client_side=self.settings.client_side)
elif key in ('pending', 'disabled', 'blocked') or (key or '').startswith('pending'):
session.flash = self.messages.registration_pending
redirect(self.url(args=request.args),
client_side=self.settings.client_side)
if self.email_reset_password(user):
session.flash = self.messages.email_sent
else:
session.flash = self.messages.unable_send_email
self.log_event(log, user)
callback(onaccept, form)
if not next:
next = self.url(args=request.args)
else:
next = replace_id(next, form)
redirect(next, client_side=self.settings.client_side)
# old_requires = table_user.email.requires
return form
def email_reset_password(self, user):
reset_password_key = str(int(time.time())) + '-' + web2py_uuid()
link = self.url(self.settings.function,
args=('reset_password',), vars={'key': reset_password_key},
scheme=True)
d = dict(user)
d.update(dict(key=reset_password_key, link=link))
if self.settings.mailer and self.settings.mailer.send(
to=user.email,
subject=self.messages.reset_password_subject,
message=self.messages.reset_password % d):
user.update_record(reset_password_key=reset_password_key)
return True
return False
def retrieve_password(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
if self.settings.reset_password_requires_verification:
return self.request_reset_password(next, onvalidation, onaccept, log)
else:
return self.reset_password_deprecated(next, onvalidation, onaccept, log)
    def change_password(self,
                        next=DEFAULT,
                        onvalidation=DEFAULT,
                        onaccept=DEFAULT,
                        log=DEFAULT,
                        ):
        """
        Returns a form that lets the user change password.
        Args:
            next: redirect target after the password is changed
                (defaults to settings.change_password_next)
            onvalidation: form validation callback(s)
            onaccept: post-accept callback(s)
            log: log_event message template
        """
        if not self.is_logged_in():
            redirect(self.settings.login_url,
                     client_side=self.settings.client_side)
        # Go to external link to change the password
        if self.settings.login_form != self:
            cas = self.settings.login_form
            # To prevent error if change_password_url function is not defined in alternate login
            if hasattr(cas, 'change_password_url'):
                next = cas.change_password_url(next)
                if next is not None:
                    redirect(next)
        db = self.db
        table_user = self.table_user()
        # Query set for the current user's record; used to read and update.
        s = db(table_user.id == self.user.id)
        request = current.request
        session = current.session
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.change_password_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.change_password_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.change_password_onaccept
        if log is DEFAULT:
            log = self.messages['change_password_log']
        passfield = self.settings.password_field
        # Reuse only the CRYPT validator for the old-password field so that
        # the submitted value is hashed the same way as the stored one.
        requires = table_user[passfield].requires
        if not isinstance(requires, (list, tuple)):
            requires = [requires]
        requires = [t for t in requires if isinstance(t, CRYPT)]
        if requires:
            requires[0] = CRYPT(**requires[0].__dict__)  # Copy the existing CRYPT attributes
            requires[0].min_length = 0  # But do not enforce minimum length for the old password
        form = SQLFORM.factory(
            Field('old_password', 'password', requires=requires,
                  label=self.messages.old_password),
            Field('new_password', 'password',
                  label=self.messages.new_password,
                  requires=table_user[passfield].requires),
            Field('new_password2', 'password',
                  label=self.messages.verify_password,
                  requires=[IS_EXPR('value==%s' % repr(request.vars.new_password),
                                    self.messages.mismatched_password)]),
            submit_button=self.messages.password_change_button,
            hidden=dict(_next=next),
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        if form.accepts(request, session,
                        formname='change_password',
                        onvalidation=onvalidation,
                        hideerror=self.settings.hideerror):
            current_user = s.select(limitby=(0, 1), orderby_on_limitby=False).first()
            # Both sides are CRYPT-processed values at this point.
            if not form.vars['old_password'] == current_user[passfield]:
                form.errors['old_password'] = self.messages.invalid_password
            else:
                d = {passfield: str(form.vars.new_password)}
                s.update(**d)
                session.flash = self.messages.password_changed
                self.log_event(log, self.user)
                callback(onaccept, form)
                if not next:
                    next = self.url(args=request.args)
                else:
                    next = replace_id(next, form)
                redirect(next, client_side=self.settings.client_side)
        return form
    def profile(self,
                next=DEFAULT,
                onvalidation=DEFAULT,
                onaccept=DEFAULT,
                log=DEFAULT,
                ):
        """
        Returns a form that lets the user change his/her profile.
        Args:
            next: redirect target after the profile is saved
                (defaults to settings.profile_next)
            onvalidation: form validation callback(s)
            onaccept: post-accept callback(s)
            log: log_event message template
        """
        table_user = self.table_user()
        if not self.is_logged_in():
            redirect(self.settings.login_url,
                     client_side=self.settings.client_side)
        # Password and email are changed through dedicated flows, not here.
        passfield = self.settings.password_field
        table_user[passfield].writable = False
        table_user['email'].writable = False
        request = current.request
        session = current.session
        if next is DEFAULT:
            next = self.get_vars_next() or self.settings.profile_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.profile_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.profile_onaccept
        if log is DEFAULT:
            log = self.messages['profile_log']
        form = SQLFORM(
            table_user,
            self.user.id,
            fields=self.settings.profile_fields,
            hidden=dict(_next=next),
            showid=self.settings.showid,
            submit_button=self.messages.profile_save_button,
            delete_label=self.messages.delete_label,
            upload=self.settings.download_url,
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator,
            deletable=self.settings.allow_delete_accounts,
            )
        if form.accepts(request, session,
                        formname='profile',
                        onvalidation=onvalidation,
                        hideerror=self.settings.hideerror):
            extra_fields = self.settings.extra_fields.get(self.settings.table_user_name, [])
            if not form.deleted:
                # Computed extra fields require a fresh read from the db so
                # the session copy of the user reflects the computed values.
                if any(f.compute for f in extra_fields):
                    user = table_user[self.user.id]
                    self._update_session_user(user)
                    self.update_groups()
                else:
                    self.user.update(table_user._filter_fields(form.vars))
                session.flash = self.messages.profile_updated
                self.log_event(log, self.user)
                callback(onaccept, form)
            # Deleting the profile logs the user out.
            if form.deleted:
                return self.logout()
            if not next:
                next = self.url(args=request.args)
            else:
                next = replace_id(next, form)
            redirect(next, client_side=self.settings.client_side)
        return form
def run_login_onaccept(self):
onaccept = self.settings.login_onaccept
if onaccept:
form = Storage(dict(vars=self.user))
if not isinstance(onaccept, (list, tuple)):
onaccept = [onaccept]
for callback in onaccept:
callback(form)
def jwt(self):
"""
To use JWT authentication:
1) instantiate auth with::
auth = Auth(db, jwt = {'secret_key':'secret'})
where 'secret' is your own secret string.
2) Decorate functions that require login but should accept the JWT token credentials::
@auth.allows_jwt()
@auth.requires_login()
def myapi(): return 'hello %s' % auth.user.email
Notice jwt is allowed but not required. if user is logged in, myapi is accessible.
3) Use it!
Now API users can obtain a token with
http://.../app/default/user/jwt?username=...&password=....
(returns json object with a token attribute)
API users can refresh an existing token with
http://.../app/default/user/jwt?token=...
they can authenticate themselves when calling http:/.../myapi by injecting a header
Authorization: Bearer <the jwt token>
Any additional attributes in the jwt argument of Auth() below::
auth = Auth(db, jwt = {...})
are passed to the constructor of class AuthJWT. Look there for documentation.
"""
if not self.jwt_handler:
raise HTTP(401, "Not authorized")
else:
rtn = self.jwt_handler.jwt_token_manager()
raise HTTP(200, rtn, cookies=None, **current.response.headers)
def is_impersonating(self):
return self.is_logged_in() and 'impersonator' in current.session.auth
    def impersonate(self, user_id=DEFAULT):
        """
        To use this make a POST to
        `http://..../impersonate request.post_vars.user_id=<id>`
        Set request.post_vars.user_id to 0 to restore original user.

        requires impersonator is logged in and::

            has_permission('impersonate', 'auth_user', user_id)

        Returns:
            a form to pick a user (GET with no args), a readonly SQLFORM
            of the impersonated user, or None.
        """
        request = current.request
        session = current.session
        auth = session.auth
        table_user = self.table_user()
        if not self.is_logged_in():
            raise HTTP(401, "Not Authorized")
        current_id = auth.user.id
        requested_id = user_id
        user = None
        if user_id is DEFAULT:
            user_id = current.request.post_vars.user_id
        if user_id and user_id != self.user.id and user_id != '0':
            # switching TO another user: explicit permission required
            if not self.has_permission('impersonate',
                                       self.table_user(),
                                       user_id):
                raise HTTP(403, "Forbidden")
            user = table_user(user_id)
            if not user:
                raise HTTP(401, "Not Authorized")
            # snapshot the impersonator's entire session so it can be
            # restored later; the pickle payload is server-side data,
            # never user-supplied
            auth.impersonator = pickle.dumps(session, pickle.HIGHEST_PROTOCOL)
            auth.user.update(
                table_user._filter_fields(user, True))
            self.user = auth.user
            self.update_groups()
            log = self.messages['impersonate_log']
            self.log_event(log, dict(id=current_id, other_id=auth.user.id))
            self.run_login_onaccept()
        elif user_id in (0, '0'):
            # user_id == 0 means "stop impersonating": restore the
            # snapshot taken above (trusted, server-side pickle)
            if self.is_impersonating():
                session.clear()
                session.update(pickle.loads(auth.impersonator))
                self.user = session.auth.user
                self.update_groups()
                self.run_login_onaccept()
            return None
        if requested_id is DEFAULT and not request.post_vars:
            # plain GET: show a picker form
            return SQLFORM.factory(Field('user_id', 'integer'))
        elif not user:
            return None
        else:
            return SQLFORM(table_user, user.id, readonly=True)
def groups(self):
"""
Displays the groups and their roles for the logged in user
"""
if not self.is_logged_in():
redirect(self.settings.login_url)
table_membership = self.table_membership()
memberships = self.db(
table_membership.user_id == self.user.id).select()
table = TABLE()
for membership in memberships:
table_group = self.table_group()
groups = self.db(table_group.id == membership.group_id).select()
if groups:
group = groups[0]
table.append(TR(H3(group.role, '(%s)' % group.id)))
table.append(TR(P(group.description)))
if not memberships:
return None
return table
def not_authorized(self):
"""
You can change the view for this page to make it look as you like
"""
if current.request.ajax:
raise HTTP(403, 'ACCESS DENIED')
return self.messages.access_denied
def allows_jwt(self, otherwise=None):
if not self.jwt_handler:
raise HTTP(401, "Not authorized")
else:
return self.jwt_handler.allows_jwt(otherwise=otherwise)
    def requires(self, condition, requires_login=True, otherwise=None):
        """
        Decorator that prevents access to action if not logged in

        Args:
            condition: bool or callable; when falsy the request is
                rejected as "access denied"
            requires_login: bool or callable; when truthy an
                authenticated user (session or basic auth) is required
            otherwise: callable or URL used when authentication fails
        """
        def decorator(action):

            def f(*a, **b):
                # try HTTP basic auth first, then fall back to session user
                basic_allowed, basic_accepted, user = self.basic()
                user = user or self.user
                login_required = requires_login
                if callable(login_required):
                    login_required = login_required()

                if login_required:
                    if not user:
                        if current.request.ajax:
                            raise HTTP(401, self.messages.ajax_failed_authentication)
                        elif otherwise is not None:
                            if callable(otherwise):
                                return otherwise()
                            redirect(otherwise)
                        elif self.settings.allow_basic_login_only or \
                                basic_accepted or current.request.is_restful:
                            raise HTTP(403, "Not authorized")
                        else:
                            # remember where we came from, then go to login
                            next = self.here()
                            current.session.flash = current.response.flash
                            return call_or_redirect(self.settings.on_failed_authentication,
                                                    self.settings.login_url + '?_next=' + urllib_quote(next))

                if callable(condition):
                    flag = condition()
                else:
                    flag = condition
                if not flag:
                    current.session.flash = self.messages.access_denied
                    return call_or_redirect(
                        self.settings.on_failed_authorization)
                return action(*a, **b)
            # preserve the wrapped action's metadata for routing/doc tools
            f.__doc__ = action.__doc__
            f.__name__ = action.__name__
            f.__dict__.update(action.__dict__)
            return f

        return decorator
    def requires_login(self, otherwise=None):
        """
        Decorator that prevents access to action if not logged in
        (the condition is the constant True, so only the login check in
        requires() applies)
        """
        return self.requires(True, otherwise=otherwise)
    def requires_login_or_token(self, otherwise=None):
        """
        Like requires_login, but when settings.enable_tokens is True it
        first tries to log the user in from a token supplied via the
        ``web2py-user-token`` HTTP header or the ``_token`` request var.
        """
        if self.settings.enable_tokens is True:
            user = None
            request = current.request
            token = request.env.http_web2py_user_token or request.vars._token
            table_token = self.table_token()
            table_user = self.table_user()
            from gluon.settings import global_settings
            if global_settings.web2py_runtime_gae:
                # GAE has no joins: resolve token and user in two lookups
                row = table_token(token=token)
                if row:
                    user = table_user(row.user_id)
            else:
                row = self.db(table_token.token == token)(table_user.id == table_token.user_id).select().first()
                if row:
                    user = row[table_user._tablename]
            if user:
                self.login_user(user)
        return self.requires(True, otherwise=otherwise)
def requires_membership(self, role=None, group_id=None, otherwise=None):
"""
Decorator that prevents access to action if not logged in or
if user logged in is not a member of group_id.
If role is provided instead of group_id then the
group_id is calculated.
"""
def has_membership(self=self, group_id=group_id, role=role):
return self.has_membership(group_id=group_id, role=role)
return self.requires(has_membership, otherwise=otherwise)
def requires_permission(self, name, table_name='', record_id=0,
otherwise=None):
"""
Decorator that prevents access to action if not logged in or
if user logged in is not a member of any group (role) that
has 'name' access to 'table_name', 'record_id'.
"""
def has_permission(self=self, name=name, table_name=table_name, record_id=record_id):
return self.has_permission(name, table_name, record_id)
return self.requires(has_permission, otherwise=otherwise)
def requires_signature(self, otherwise=None, hash_vars=True, hash_extension=True):
"""
Decorator that prevents access to action if not logged in or
if user logged in is not a member of group_id.
If role is provided instead of group_id then the
group_id is calculated.
"""
def verify():
return URL.verify(current.request, user_signature=True, hash_vars=hash_vars, hash_extension=True)
return self.requires(verify, otherwise)
    def accessible_query(self, name, table, user_id=None):
        """
        Returns a query with all accessible records for user_id or
        the current logged in user
        this method does not work on GAE because uses JOIN and IN

        Example:
            Use as::

                db(auth.accessible_query('read', db.mytable)).select(db.mytable.ALL)

        """
        if not user_id:
            user_id = self.user_id
        db = self.db
        if isinstance(table, str) and table in self.db.tables():
            table = self.db[table]
        elif isinstance(table, (Set, Query)):
            # experimental: build a chained query for all tables
            if isinstance(table, Set):
                cquery = table.query
            else:
                cquery = table
            tablenames = db._adapter.tables(cquery)
            for tablename in tablenames:
                cquery &= self.accessible_query(name, tablename, user_id=user_id)
            return cquery
        # shortcut: a table-level permission (record_id 0) grants all records
        if not isinstance(table, str) and \
                self.has_permission(name, table, 0, user_id):
            return table.id > 0
        membership = self.table_membership()
        permission = self.table_permission()
        # records explicitly granted to any of the user's groups
        query = table.id.belongs(
            db(membership.user_id == user_id)
            (membership.group_id == permission.group_id)
            (permission.name == name)
            (permission.table_name == table)
            ._select(permission.record_id))
        if self.settings.everybody_group_id:
            # plus records granted to the "everybody" group
            query |= table.id.belongs(
                db(permission.group_id == self.settings.everybody_group_id)
                (permission.name == name)
                (permission.table_name == table)
                ._select(permission.record_id))
        return query
    @staticmethod
    def archive(form,
                archive_table=None,
                current_record='current_record',
                archive_current=False,
                fields=None):
        """
        If you have a table (db.mytable) that needs full revision history you
        can just do::

            form = crud.update(db.mytable, myrecord, onaccept=auth.archive)

        or::

            form = SQLFORM(db.mytable, myrecord).process(onaccept=auth.archive)

        crud.archive will define a new table "mytable_archive" and store
        a copy of the current record (if archive_current=True)
        or a copy of the previous record (if archive_current=False)
        in the newly created table including a reference
        to the current record.

        fields allows to specify extra fields that need to be archived.

        If you want to access such table you need to define it yourself
        in a model::

            db.define_table('mytable_archive',
                            Field('current_record', db.mytable),
                            db.mytable)

        Notice such table includes all fields of db.mytable plus one: current_record.
        crud.archive does not timestamp the stored record unless your original table
        has a fields like::

            db.define_table(...,
                Field('saved_on', 'datetime',
                      default=request.now, update=request.now, writable=False),
                Field('saved_by', auth.user,
                      default=auth.user_id, update=auth.user_id, writable=False),

        there is nothing special about these fields since they are filled before
        the record is archived.

        If you want to change the archive table name and the name of the reference field
        you can do, for example::

            db.define_table('myhistory',
                Field('parent_record', db.mytable), db.mytable)

        and use it as::

            form = crud.update(db.mytable, myrecord,
                               onaccept=lambda form:crud.archive(form,
                               archive_table=db.myhistory,
                               current_record='parent_record'))

        """
        # nothing to archive: new record and we only archive previous values
        if not archive_current and not form.record:
            return None
        table = form.table
        if not archive_table:
            archive_table_name = '%s_archive' % table
            if archive_table_name not in table._db:
                # lazily define <table>_archive: all original fields (with
                # unique constraints dropped) plus a reference back to the
                # live record
                table._db.define_table(
                    archive_table_name,
                    Field(current_record, table),
                    *[field.clone(unique=False) for field in table])
            archive_table = table._db[archive_table_name]
        new_record = {current_record: form.vars.id}
        for fieldname in archive_table.fields:
            if fieldname not in ['id', current_record]:
                if archive_current and fieldname in form.vars:
                    # archive the newly submitted values
                    new_record[fieldname] = form.vars[fieldname]
                elif form.record and fieldname in form.record:
                    # archive the previous values
                    new_record[fieldname] = form.record[fieldname]
        if fields:
            new_record.update(fields)
        id = archive_table.insert(**new_record)
        return id
    def wiki(self,
             slug=None,
             env=None,
             render='markmin',
             manage_permissions=False,
             force_prefix='',
             restrict_search=False,
             resolve=True,
             extra=None,
             menu_groups=None,
             templates=None,
             migrate=True,
             controller=None,
             function=None,
             force_render=False,
             groups=None):
        """
        Dispatch the current request to the built-in Wiki.

        Lazily instantiates ``self._wiki`` on first call with the given
        rendering/permission options; on later calls only ``extra`` and
        ``env`` are refreshed. With resolve=True the request is processed
        as a wiki call and the rendered content is returned (wrapped in
        XML when it is a string); with resolve=False only setup happens.
        """
        if controller and function:
            resolve = False

        if not hasattr(self, '_wiki'):
            self._wiki = Wiki(self, render=render,
                              manage_permissions=manage_permissions,
                              force_prefix=force_prefix,
                              restrict_search=restrict_search,
                              env=env, extra=extra or {},
                              menu_groups=menu_groups,
                              templates=templates,
                              migrate=migrate,
                              controller=controller,
                              function=function,
                              groups=groups)
        else:
            self._wiki.settings.extra = extra or {}
            self._wiki.env.update(env or {})

        # if resolve is set to True, process request as wiki call
        # resolve=False allows initial setup without wiki redirection
        wiki = None
        if resolve:
            if slug:
                wiki = self._wiki.read(slug, force_render)
                if isinstance(wiki, dict) and 'content' in wiki:
                    # We don't want to return a dict object, just the wiki
                    wiki = wiki['content']
            else:
                wiki = self._wiki()
            if isinstance(wiki, basestring):
                wiki = XML(wiki)
            return wiki
def wikimenu(self):
"""To be used in menu.py for app wide wiki menus"""
if (hasattr(self, "_wiki") and
self._wiki.settings.controller and
self._wiki.settings.function):
self._wiki.automenu()
class Crud(object):  # pragma: no cover
    """
    CRUD scaffolding: exposes create/read/update/delete/select/search
    forms for DAL tables, with optional Auth-based permission checks and
    event logging (configured via self.settings.auth / self.settings.logger).
    """

    default_messages = dict(
        submit_button='Submit',
        delete_label='Check to delete',
        record_created='Record Created',
        record_updated='Record Updated',
        record_deleted='Record Deleted',
        update_log='Record %(id)s updated',
        create_log='Record %(id)s created',
        read_log='Record %(id)s read',
        delete_log='Record %(id)s deleted',
    )

    def url(self, f=None, args=None, vars=None):
        """
        This should point to the controller that exposes
        download and crud
        """
        if args is None:
            args = []
        if vars is None:
            vars = {}
        return URL(c=self.settings.controller, f=f, args=args, vars=vars)

    def __init__(self, environment, db=None, controller='default'):
        """Initialize CRUD settings/messages; db may be passed as first
        or second argument."""
        self.db = db
        if not db and environment and isinstance(environment, DAL):
            self.db = environment
        elif not db:
            raise SyntaxError("must pass db as first or second argument")
        self.environment = current
        settings = self.settings = Settings()
        settings.auth = None
        settings.logger = None
        settings.create_next = None
        settings.update_next = None
        settings.controller = controller
        settings.delete_next = self.url()
        settings.download_url = self.url('download')
        settings.create_onvalidation = StorageList()
        settings.update_onvalidation = StorageList()
        settings.delete_onvalidation = StorageList()
        settings.create_onaccept = StorageList()
        settings.update_onaccept = StorageList()
        settings.update_ondelete = StorageList()
        settings.delete_onaccept = StorageList()
        settings.update_deletable = True
        settings.showid = False
        settings.keepvalues = False
        settings.create_captcha = None
        settings.update_captcha = None
        settings.captcha = None
        settings.formstyle = 'table3cols'
        settings.label_separator = ': '
        settings.hideerror = False
        settings.detect_record_change = True
        settings.hmac_key = None
        settings.lock_keys = True
        messages = self.messages = Messages(current.T)
        messages.update(Crud.default_messages)
        messages.lock_keys = True

    def __call__(self):
        """Dispatch request.args to the matching CRUD action
        (tables/create/select/search/read/update/delete)."""
        args = current.request.args
        if len(args) < 1:
            raise HTTP(404)
        elif args[0] == 'tables':
            return self.tables()
        elif len(args) > 1 and not args(1) in self.db.tables:
            raise HTTP(404)
        table = self.db[args(1)]
        if args[0] == 'create':
            return self.create(table)
        elif args[0] == 'select':
            return self.select(table, linkto=self.url(args='read'))
        elif args[0] == 'search':
            form, rows = self.search(table, linkto=self.url(args='read'))
            return DIV(form, SQLTABLE(rows))
        elif args[0] == 'read':
            return self.read(table, args(2))
        elif args[0] == 'update':
            return self.update(table, args(2))
        elif args[0] == 'delete':
            return self.delete(table, args(2))
        else:
            raise HTTP(404)

    def log_event(self, message, vars):
        # delegate to the configured logger (typically Auth), if any
        if self.settings.logger:
            self.settings.logger.log_event(message, vars, origin='crud')

    def has_permission(self, name, table, record=0):
        """True when no auth is configured, else defer to auth.has_permission."""
        if not self.settings.auth:
            return True
        try:
            record_id = record.id
        except:  # NOTE(review): bare except; record may be an id or a Row
            record_id = record
        return self.settings.auth.has_permission(name, str(table), record_id)

    def tables(self):
        """Return a TABLE of links to the 'select' page of every db table."""
        return TABLE(*[TR(A(name,
                            _href=self.url(args=('select', name))))
                       for name in self.db.tables])

    @staticmethod
    def archive(form, archive_table=None, current_record='current_record'):
        # thin wrapper over Auth.archive (see its docstring)
        return Auth.archive(form, archive_table=archive_table,
                            current_record=current_record)

    def update(self,
               table,
               record,
               next=DEFAULT,
               onvalidation=DEFAULT,
               onaccept=DEFAULT,
               ondelete=DEFAULT,
               log=DEFAULT,
               message=DEFAULT,
               deletable=DEFAULT,
               formname=DEFAULT,
               **attributes
               ):
        """
        Build and process an update form for (table, record).

        record may be a Row, an integer id, or None/0 (which turns this
        into a create form). Sets self.accepted / self.deleted flags.
        """
        if not (isinstance(table, Table) or table in self.db.tables) \
                or (isinstance(record, str) and not str(record).isdigit()):
            raise HTTP(404)
        if not isinstance(table, Table):
            table = self.db[table]
        try:
            record_id = record.id
        except:  # NOTE(review): bare except; record may be an id, not a Row
            record_id = record or 0
        if record_id and not self.has_permission('update', table, record_id):
            redirect(self.settings.auth.settings.on_failed_authorization)
        if not record_id and not self.has_permission('create', table, record_id):
            redirect(self.settings.auth.settings.on_failed_authorization)
        request = current.request
        response = current.response
        session = current.session
        if request.extension == 'json' and request.vars.json:
            request.vars.update(json.loads(request.vars.json))
        if next is DEFAULT:
            # _next comes from the client: sanitize against open redirects
            next = prevent_open_redirect(request.get_vars._next) \
                or prevent_open_redirect(request.post_vars._next) \
                or self.settings.update_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.update_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.update_onaccept
        if ondelete is DEFAULT:
            ondelete = self.settings.update_ondelete
        if log is DEFAULT:
            log = self.messages['update_log']
        if deletable is DEFAULT:
            deletable = self.settings.update_deletable
        if message is DEFAULT:
            message = self.messages.record_updated
        if 'hidden' not in attributes:
            attributes['hidden'] = {}
        attributes['hidden']['_next'] = next
        form = SQLFORM(
            table,
            record,
            showid=self.settings.showid,
            submit_button=self.messages.submit_button,
            delete_label=self.messages.delete_label,
            deletable=deletable,
            upload=self.settings.download_url,
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator,
            **attributes  # contains hidden
        )
        self.accepted = False
        self.deleted = False
        captcha = self.settings.update_captcha or self.settings.captcha
        if record and captcha:
            addrow(form, captcha.label, captcha, captcha.comment, self.settings.formstyle, 'captcha__row')
        captcha = self.settings.create_captcha or self.settings.captcha
        if not record and captcha:
            addrow(form, captcha.label, captcha, captcha.comment, self.settings.formstyle, 'captcha__row')
        # non-html requests (e.g. json) skip session/CSRF form tracking
        if request.extension not in ('html', 'load'):
            (_session, _formname) = (None, None)
        else:
            (_session, _formname) = (
                session, '%s/%s' % (table._tablename, form.record_id))
        if formname is not DEFAULT:
            _formname = formname
        keepvalues = self.settings.keepvalues
        if request.vars.delete_this_record:
            keepvalues = False
        if isinstance(onvalidation, StorageList):
            onvalidation = onvalidation.get(table._tablename, [])
        if form.accepts(request, _session, formname=_formname,
                        onvalidation=onvalidation, keepvalues=keepvalues,
                        hideerror=self.settings.hideerror,
                        detect_record_change=self.settings.detect_record_change):
            self.accepted = True
            response.flash = message
            if log:
                self.log_event(log, form.vars)
            if request.vars.delete_this_record:
                self.deleted = True
                message = self.messages.record_deleted
                callback(ondelete, form, table._tablename)
            response.flash = message
            callback(onaccept, form, table._tablename)
            if request.extension not in ('html', 'load'):
                raise HTTP(200, 'RECORD CREATED/UPDATED')
            if isinstance(next, (list, tuple)):  # fix issue with 2.6
                next = next[0]
            if next:  # Only redirect when explicit
                next = replace_id(next, form)
                session.flash = response.flash
                redirect(next)
        elif request.extension not in ('html', 'load'):
            raise HTTP(401, serializers.json(dict(errors=form.errors)))
        return form

    def create(self,
               table,
               next=DEFAULT,
               onvalidation=DEFAULT,
               onaccept=DEFAULT,
               log=DEFAULT,
               message=DEFAULT,
               formname=DEFAULT,
               **attributes
               ):
        """Create form: delegates to update() with record=None and the
        create_* defaults."""
        if next is DEFAULT:
            next = self.settings.create_next
        if onvalidation is DEFAULT:
            onvalidation = self.settings.create_onvalidation
        if onaccept is DEFAULT:
            onaccept = self.settings.create_onaccept
        if log is DEFAULT:
            log = self.messages['create_log']
        if message is DEFAULT:
            message = self.messages.record_created
        return self.update(table,
                           None,
                           next=next,
                           onvalidation=onvalidation,
                           onaccept=onaccept,
                           log=log,
                           message=message,
                           deletable=False,
                           formname=formname,
                           **attributes
                           )

    def read(self, table, record):
        """Read-only form for (table, record); non-html requests get the
        filtered record fields instead of a form."""
        if not (isinstance(table, Table) or table in self.db.tables) \
                or (isinstance(record, str) and not str(record).isdigit()):
            raise HTTP(404)
        if not isinstance(table, Table):
            table = self.db[table]
        if not self.has_permission('read', table, record):
            redirect(self.settings.auth.settings.on_failed_authorization)
        form = SQLFORM(
            table,
            record,
            readonly=True,
            comments=False,
            upload=self.settings.download_url,
            showid=self.settings.showid,
            formstyle=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        if current.request.extension not in ('html', 'load'):
            return table._filter_fields(form.record, id=True)
        return form

    def delete(self,
               table,
               record_id,
               next=DEFAULT,
               message=DEFAULT,
               ):
        """Delete record_id from table (with onvalidation/onaccept
        callbacks), flash a message and redirect."""
        if not (isinstance(table, Table) or table in self.db.tables):
            raise HTTP(404)
        if not isinstance(table, Table):
            table = self.db[table]
        if not self.has_permission('delete', table, record_id):
            redirect(self.settings.auth.settings.on_failed_authorization)
        request = current.request
        session = current.session
        if next is DEFAULT:
            # _next comes from the client: sanitize against open redirects
            next = prevent_open_redirect(request.get_vars._next) \
                or prevent_open_redirect(request.post_vars._next) \
                or self.settings.delete_next
        if message is DEFAULT:
            message = self.messages.record_deleted
        record = table[record_id]
        if record:
            callback(self.settings.delete_onvalidation, record)
            del table[record_id]
            callback(self.settings.delete_onaccept, record, table._tablename)
            session.flash = message
        redirect(next)

    def rows(self,
             table,
             query=None,
             fields=None,
             orderby=None,
             limitby=None,
             ):
        """Select rows of table matching query (default: all readable
        fields of all records), after a 'select' permission check."""
        if not (isinstance(table, Table) or table in self.db.tables):
            raise HTTP(404)
        if not self.has_permission('select', table):
            redirect(self.settings.auth.settings.on_failed_authorization)
        # if record_id and not self.has_permission('select', table):
        #    redirect(self.settings.auth.settings.on_failed_authorization)
        if not isinstance(table, Table):
            table = self.db[table]
        if not query:
            query = table.id > 0
        if not fields:
            fields = [field for field in table if field.readable]
        else:
            fields = [table[f] if isinstance(f, str) else f for f in fields]
        rows = self.db(query).select(*fields, **dict(orderby=orderby,
                                                     limitby=limitby))
        return rows

    def select(self,
               table,
               query=None,
               fields=None,
               orderby=None,
               limitby=None,
               headers=None,
               **attr
               ):
        """Render rows() as an SQLTABLE (or a list for non-html requests);
        returns None when there are no rows."""
        headers = headers or {}
        rows = self.rows(table, query, fields, orderby, limitby)
        if not rows:
            return None  # Nicer than an empty table.
        if 'upload' not in attr:
            attr['upload'] = self.url('download')
        if current.request.extension not in ('html', 'load'):
            return rows.as_list()
        if not headers:
            if isinstance(table, str):
                table = self.db[table]
            headers = dict((str(k), k.label) for k in table)
        return SQLTABLE(rows, headers=headers, **attr)

    def get_format(self, field):
        """For a reference field, return the referenced table's _format
        expression (stripped of '%(' and ')s'), else the field name."""
        rtable = field._db[field.type[10:]]
        format = rtable.get('_format', None)
        if format and isinstance(format, str):
            return format[2:-2]
        return field.name

    def get_query(self, field, op, value, refsearch=False):
        """
        Build a DAL query (or, for reference fields with refsearch=True,
        a row-filter callable) for the search operator 'op'.
        Returns None on any error (bare except below is deliberate).
        """
        try:
            if refsearch:
                format = self.get_format(field)
            if op == 'equals':
                if not refsearch:
                    return field == value
                else:
                    return lambda row: row[field.name][format] == value
            elif op == 'not equal':
                if not refsearch:
                    return field != value
                else:
                    return lambda row: row[field.name][format] != value
            elif op == 'greater than':
                if not refsearch:
                    return field > value
                else:
                    return lambda row: row[field.name][format] > value
            elif op == 'less than':
                if not refsearch:
                    return field < value
                else:
                    return lambda row: row[field.name][format] < value
            elif op == 'starts with':
                if not refsearch:
                    return field.like(value + '%')
                else:
                    return lambda row: str(row[field.name][format]).startswith(value)
            elif op == 'ends with':
                if not refsearch:
                    return field.like('%' + value)
                else:
                    return lambda row: str(row[field.name][format]).endswith(value)
            elif op == 'contains':
                if not refsearch:
                    return field.like('%' + value + '%')
                else:
                    return lambda row: value in row[field.name][format]
        except:
            return None

    def search(self, *tables, **args):
        """
        Creates a search form and its results for a table
        Examples:
            Use as::

                form, results = crud.search(db.test,
                    queries = ['equals', 'not equal', 'contains'],
                    query_labels={'equals':'Equals',
                                  'not equal':'Not equal'},
                    fields = ['id','children'],
                    field_labels = {
                        'id':'ID','children':'Children'},
                    zero='Please choose',
                    query = (db.test.id > 0)&(db.test.id != 3) )

        """
        table = tables[0]
        fields = args.get('fields', table.fields)
        validate = args.get('validate', True)
        request = current.request
        db = self.db
        if not (isinstance(table, Table) or table in db.tables):
            raise HTTP(404)
        attributes = {}
        for key in ('orderby', 'groupby', 'left', 'distinct', 'limitby', 'cache'):
            if key in args:
                attributes[key] = args[key]
        tbl = TABLE()
        selected = []
        refsearch = []
        results = []
        showall = args.get('showall', False)
        if showall:
            selected = fields
        chkall = args.get('chkall', False)
        if chkall:
            # pre-check every field's checkbox
            for f in fields:
                request.vars['chk%s' % f] = 'on'
        ops = args.get('queries', [])
        zero = args.get('zero', '')
        if not ops:
            ops = ['equals', 'not equal', 'greater than',
                   'less than', 'starts with',
                   'ends with', 'contains']
        ops.insert(0, zero)
        query_labels = args.get('query_labels', {})
        query = args.get('query', table.id > 0)
        field_labels = args.get('field_labels', {})
        for field in fields:
            field = table[field]
            if not field.readable:
                continue
            fieldname = field.name
            chkval = request.vars.get('chk' + fieldname, None)
            txtval = request.vars.get('txt' + fieldname, None)
            opval = request.vars.get('op' + fieldname, None)
            # one row per field: [checkbox, label, operator select, value input]
            row = TR(TD(INPUT(_type="checkbox", _name="chk" + fieldname,
                              _disabled=(field.type == 'id'),
                              value=(field.type == 'id' or chkval == 'on'))),
                     TD(field_labels.get(fieldname, field.label)),
                     TD(SELECT([OPTION(query_labels.get(op, op),
                                       _value=op) for op in ops],
                               _name="op" + fieldname,
                               value=opval)),
                     TD(INPUT(_type="text", _name="txt" + fieldname,
                              _value=txtval, _id='txt' + fieldname,
                              _class=str(field.type))))
            tbl.append(row)
            if request.post_vars and (chkval or field.type == 'id'):
                if txtval and opval != '':
                    if field.type[0:10] == 'reference ':
                        # reference fields are filtered post-select
                        refsearch.append(self.get_query(field, opval, txtval, refsearch=True))
                    elif validate:
                        value, error = field.validate(txtval)
                        if not error:
                            # TODO deal with 'starts with', 'ends with', 'contains' on GAE
                            query &= self.get_query(field, opval, value)
                        else:
                            row[3].append(DIV(error, _class='error'))
                    else:
                        query &= self.get_query(field, opval, txtval)
                selected.append(field)
        form = FORM(tbl, INPUT(_type="submit"))
        if selected:
            try:
                results = db(query).select(*selected, **attributes)
                for r in refsearch:
                    results = results.find(r)
            except:  # TODO: hmmm, we should do better here
                results = None
        return form, results
# Install a module-wide urllib2 opener that keeps cookies across requests
# (used by fetch() below when not running on Google App Engine).
urllib2.install_opener(urllib2.build_opener(urllib2.HTTPCookieProcessor()))
def fetch(url, data=None, headers=None,
          cookie=Cookie.SimpleCookie(),
          user_agent='Mozilla/5.0'):
    """
    Fetch url and return the response body as a string; when data is
    given it is urlencoded and the request becomes a POST. The given
    cookies and user agent are sent along. On Google App Engine uses
    urlfetch and follows redirects manually, reloading cookies from each
    response; elsewhere delegates to urllib2.

    NOTE(review): the mutable default `cookie` jar is shared across all
    calls that don't pass their own — confirm this is intentional.
    """
    headers = headers or {}
    if data is not None:
        data = urlencode(data)
    if user_agent:
        headers['User-agent'] = user_agent
    headers['Cookie'] = ' '.join(
        ['%s=%s;' % (c.key, c.value) for c in cookie.values()])
    try:
        from google.appengine.api import urlfetch
    except ImportError:
        # not on GAE: plain urllib2 request (redirects handled by opener)
        req = urllib2.Request(url, data, headers)
        html = urlopen(req).read()
    else:
        method = ((data is None) and urlfetch.GET) or urlfetch.POST
        while url is not None:
            response = urlfetch.fetch(url=url, payload=data,
                                      method=method, headers=headers,
                                      allow_truncated=False, follow_redirects=False,
                                      deadline=10)
            # next request will be a get, so no need to send the data again
            data = None
            method = urlfetch.GET
            # load cookies from the response
            cookie.load(response.headers.get('set-cookie', ''))
            url = response.headers.get('location')
        html = response.content
    return html
# extracts <lat>/<lng> from the Google geocoding XML response (see geocode())
regex_geocode = \
    re.compile(r"""<geometry>[\W]*?<location>[\W]*?<lat>(?P<la>[^<]*)</lat>[\W]*?<lng>(?P<lo>[^<]*)</lng>[\W]*?</location>""")
def geocode(address):
    """Best-effort geocoding: return (latitude, longitude) for address
    via the Google geocoding API, or (0.0, 0.0) on any failure."""
    try:
        quoted = urllib_quote(address)
        response = fetch('http://maps.googleapis.com/maps/api/geocode/xml?sensor=false&address=%s' % quoted)
        match = regex_geocode.search(response)
        return (float(match.group('la')), float(match.group('lo')))
    except:
        # deliberate best-effort: any failure yields a null coordinate
        return (0.0, 0.0)
def reverse_geocode(lat, lng, lang=None):
    """ Try to get an approximate address for a given latitude, longitude. """
    if not lang:
        lang = current.T.accepted_language
    try:
        url = ('http://maps.googleapis.com/maps/api/geocode/json'
               '?latlng=%(lat)s,%(lng)s&language=%(lang)s'
               % dict(lat=lat, lng=lng, lang=lang))
        payload = json.loads(fetch(url))
        return payload['results'][0]['formatted_address']
    except:
        # deliberate best-effort: any failure yields an empty address
        return ''
def universal_caller(f, *a, **b):
    """
    Call f with whatever subset of *a/**b matches its signature.

    Positional values bind to the leading parameters first; remaining
    required parameters are filled from keyword values. Extra values are
    ignored. Raises HTTP 404 when the required parameters cannot all be
    satisfied.
    """
    c = f.__code__.co_argcount
    n = f.__code__.co_varnames[:c]
    defaults = f.__defaults__ or ()
    # index where defaulted ("named") parameters begin; computed
    # explicitly because n[0:-len(defaults)] is wrong when there are no
    # defaults (slice [0:-0] is empty, misclassifying every required
    # parameter as optional)
    split = c - len(defaults)
    pos_args = n[:split]
    named_args = n[split:]
    arg_dict = {}
    # Fill the arg_dict with name and value for the submitted, positional values
    for pos_index, pos_val in enumerate(a[:c]):
        arg_dict[n[pos_index]] = pos_val  # n[pos_index] is the name of the argument
    # There might be pos_args left, that are sent as named_values. Gather them as well.
    # If a argument already is populated with values we simply replaces them.
    for arg_name in pos_args[len(arg_dict):]:
        if arg_name in b:
            arg_dict[arg_name] = b[arg_name]
    if len(arg_dict) >= len(pos_args):
        # All the positional arguments is found. The function may now be called.
        # However, we need to update the arg_dict with the values from the named arguments as well.
        for arg_name in named_args:
            if arg_name in b:
                arg_dict[arg_name] = b[arg_name]
        return f(**arg_dict)

    # Raise an error, the function cannot be called.
    raise HTTP(404, "Object does not exist")
class Service(object):
def __init__(self, environment=None, check_args=False):
self.check_args = check_args
self.run_procedures = {}
self.csv_procedures = {}
self.xml_procedures = {}
self.rss_procedures = {}
self.json_procedures = {}
self.jsonrpc_procedures = {}
self.jsonrpc2_procedures = {}
self.xmlrpc_procedures = {}
self.amfrpc_procedures = {}
self.amfrpc3_procedures = {}
self.soap_procedures = {}
def run(self, f):
"""
Example:
Use as::
service = Service()
@service.run
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/run/myfunction?a=3&b=4
"""
self.run_procedures[f.__name__] = f
return f
def csv(self, f):
"""
Example:
Use as::
service = Service()
@service.csv
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/csv/myfunction?a=3&b=4
"""
self.csv_procedures[f.__name__] = f
return f
def xml(self, f):
"""
Example:
Use as::
service = Service()
@service.xml
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/xml/myfunction?a=3&b=4
"""
self.xml_procedures[f.__name__] = f
return f
def rss(self, f):
"""
Example:
Use as::
service = Service()
@service.rss
def myfunction():
return dict(title=..., link=..., description=...,
created_on=..., entries=[dict(title=..., link=...,
description=..., created_on=...])
def call():
return service()
Then call it with:
wget http://..../app/default/call/rss/myfunction
"""
self.rss_procedures[f.__name__] = f
return f
def json(self, f):
"""
Example:
Use as::
service = Service()
@service.json
def myfunction(a, b):
return [{a: b}]
def call():
return service()
Then call it with:;
wget http://..../app/default/call/json/myfunction?a=hello&b=world
"""
self.json_procedures[f.__name__] = f
return f
def jsonrpc(self, f):
"""
Example:
Use as::
service = Service()
@service.jsonrpc
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with:
wget http://..../app/default/call/jsonrpc/myfunction?a=hello&b=world
"""
self.jsonrpc_procedures[f.__name__] = f
return f
def jsonrpc2(self, f):
"""
Example:
Use as::
service = Service()
@service.jsonrpc2
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with:
wget --post-data '{"jsonrpc": "2.0",
"id": 1,
"method": "myfunction",
"params": {"a": 1, "b": 2}}' http://..../app/default/call/jsonrpc2
"""
self.jsonrpc2_procedures[f.__name__] = f
return f
def xmlrpc(self, f):
"""
Example:
Use as::
service = Service()
@service.xmlrpc
def myfunction(a, b):
return a + b
def call():
return service()
The call it with:
wget http://..../app/default/call/xmlrpc/myfunction?a=hello&b=world
"""
self.xmlrpc_procedures[f.__name__] = f
return f
def amfrpc(self, f):
"""
Example:
Use as::
service = Service()
@service.amfrpc
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/amfrpc/myfunction?a=hello&b=world
"""
self.amfrpc_procedures[f.__name__] = f
return f
def amfrpc3(self, domain='default'):
"""
Example:
Use as::
service = Service()
@service.amfrpc3('domain')
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with:
wget http://..../app/default/call/amfrpc3/myfunction?a=hello&b=world
"""
if not isinstance(domain, str):
raise SyntaxError("AMF3 requires a domain for function")
def _amfrpc3(f):
if domain:
self.amfrpc3_procedures[domain + '.' + f.__name__] = f
else:
self.amfrpc3_procedures[f.__name__] = f
return f
return _amfrpc3
def soap(self, name=None, returns=None, args=None, doc=None, response_element_name=None):
"""
Example:
Use as::
service = Service()
@service.soap('MyFunction',returns={'result':int},args={'a':int,'b':int,})
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
from gluon.contrib.pysimplesoap.client import SoapClient
client = SoapClient(wsdl="http://..../app/default/call/soap?WSDL")
response = client.MyFunction(a=1,b=2)
return response['result']
It also exposes online generated documentation and xml example messages
at `http://..../app/default/call/soap`
"""
def _soap(f):
self.soap_procedures[name or f.__name__] = f, returns, args, doc, response_element_name
return f
return _soap
    def serve_run(self, args=None):
        """Dispatch args[0] to a run-registered procedure and return
        str(result); otherwise defer to self.error()."""
        request = current.request
        if not args:
            args = request.args
        if args and args[0] in self.run_procedures:
            return str(self.call_service_function(self.run_procedures[args[0]],
                                                  *args[1:], **dict(request.vars)))
        self.error()
    def serve_csv(self, args=None):
        """Serve a procedure registered with @service.csv as CSV text.

        The procedure result may be a Rows-like object (exported via
        export_to_csv_file), a sequence of dict/Storage rows (header row
        emitted from the first item's keys), or any other iterable of
        rows (written as-is).
        """
        request = current.request
        response = current.response
        response.headers['Content-Type'] = 'text/x-csv'
        if not args:
            args = request.args

        def none_exception(value):
            # Normalize a cell for csv.writer: encode text, trim ISO
            # timestamps to 'YYYY-MM-DD HH:MM:SS', and spell out NULLs.
            if isinstance(value, unicodeT):
                return value.encode('utf8')
            if hasattr(value, 'isoformat'):
                return value.isoformat()[:19].replace('T', ' ')
            if value is None:
                return '<NULL>'
            return value
        if args and args[0] in self.csv_procedures:
            import types
            r = self.call_service_function(self.csv_procedures[args[0]],
                                           *args[1:], **dict(request.vars))
            s = StringIO()
            if hasattr(r, 'export_to_csv_file'):
                # DAL Rows objects know how to serialize themselves
                r.export_to_csv_file(s)
            elif r and not isinstance(r, types.GeneratorType) and isinstance(r[0], (dict, Storage)):
                # list of dict-like rows: header from the first row's keys
                import csv
                writer = csv.writer(s)
                writer.writerow(list(r[0].keys()))
                for line in r:
                    writer.writerow([none_exception(v)
                                     for v in line.values()])
            else:
                # generic iterable of row sequences (incl. generators)
                import csv
                writer = csv.writer(s)
                for line in r:
                    writer.writerow(line)
            return s.getvalue()
        self.error()
def serve_xml(self, args=None):
request = current.request
response = current.response
response.headers['Content-Type'] = 'text/xml'
if not args:
args = request.args
if args and args[0] in self.xml_procedures:
s = self.call_service_function(self.xml_procedures[args[0]],
*args[1:], **dict(request.vars))
if hasattr(s, 'as_list'):
s = s.as_list()
return serializers.xml(s, quote=False)
self.error()
    def serve_rss(self, args=None):
        """Serve a procedure registered with @service.rss as an RSS feed."""
        request = current.request
        response = current.response
        if not args:
            args = request.args
        if args and args[0] in self.rss_procedures:
            feed = self.call_service_function(self.rss_procedures[args[0]],
                                              *args[1:], **dict(request.vars))
        else:
            # error() raises HTTP(404), so `feed` is only ever used when
            # the branch above bound it
            self.error()
        response.headers['Content-Type'] = 'application/rss+xml'
        return serializers.rss(feed)
def serve_json(self, args=None):
request = current.request
response = current.response
response.headers['Content-Type'] = 'application/json; charset=utf-8'
if not args:
args = request.args
d = dict(request.vars)
if args and args[0] in self.json_procedures:
s = self.call_service_function(self.json_procedures[args[0]], *args[1:], **d)
if hasattr(s, 'as_list'):
s = s.as_list()
return response.json(s)
self.error()
class JsonRpcException(Exception):
def __init__(self, code, info):
jrpc_error = Service.jsonrpc_errors.get(code)
if jrpc_error:
self.message, self.description = jrpc_error
self.code, self.info = code, info
# jsonrpc 2.0 error types. records the following structure {code: (message,meaning)}
jsonrpc_errors = {
-32700: ("Parse error. Invalid JSON was received by the server.",
"An error occurred on the server while parsing the JSON text."),
-32600: ("Invalid Request", "The JSON sent is not a valid Request object."),
-32601: ("Method not found", "The method does not exist / is not available."),
-32602: ("Invalid params", "Invalid method parameter(s)."),
-32603: ("Internal error", "Internal JSON-RPC error."),
-32099: ("Server error", "Reserved for implementation-defined server-errors.")}
    def serve_jsonrpc(self):
        """Serve procedures registered with @service.jsonrpc using the
        JSON-RPC 1.1 wire format; requests carrying a 'jsonrpc' member are
        handed over to serve_jsonrpc2."""
        def return_response(id, result):
            return serializers.json({'version': '1.1', 'id': id, 'result': result, 'error': None})

        def return_error(id, code, message, data=None):
            error = {'name': 'JSONRPCError',
                     'code': code, 'message': message}
            if data is not None:
                error['data'] = data
            return serializers.json({'id': id,
                                     'version': '1.1',
                                     'error': error,
                                     })
        request = current.request
        response = current.response
        response.headers['Content-Type'] = 'application/json; charset=utf-8'
        methods = self.jsonrpc_procedures
        data = json.loads(request.body.read())
        jsonrpc_2 = data.get('jsonrpc')
        if jsonrpc_2:  # hand over to version 2 of the protocol
            return self.serve_jsonrpc2(data)
        id, method, params = data.get('id'), data.get('method'), data.get('params', [])
        if id is None:
            return return_error(0, 100, 'missing id')
        if method not in methods:
            return return_error(id, 100, 'method "%s" does not exist' % method)
        try:
            if isinstance(params, dict):
                s = methods[method](**params)
            else:
                s = methods[method](*params)
            if hasattr(s, 'as_list'):
                s = s.as_list()
            return return_response(id, s)
        except Service.JsonRpcException as e:
            return return_error(id, e.code, e.info)
        except:
            # NOTE(review): bare except deliberately converts any failure
            # into a JSON-RPC error response; tracebacks are only exposed
            # to local requests (request.is_local).
            etype, eval, etb = sys.exc_info()
            message = '%s: %s' % (etype.__name__, eval)
            data = request.is_local and traceback.format_tb(etb)
            logger.warning('jsonrpc exception %s\n%s' % (message, traceback.format_tb(etb)))
            return return_error(id, 100, message, data)
def serve_jsonrpc2(self, data=None, batch_element=False):
def return_response(id, result):
if not must_respond:
return None
return serializers.json({'jsonrpc': '2.0', 'id': id, 'result': result})
def return_error(id, code, message=None, data=None):
error = {'code': code}
if code in Service.jsonrpc_errors:
error['message'] = Service.jsonrpc_errors[code][0]
error['data'] = Service.jsonrpc_errors[code][1]
if message is not None:
error['message'] = message
if data is not None:
error['data'] = data
return serializers.json({'jsonrpc': '2.0', 'id': id, 'error': error})
def validate(data):
"""
Validate request as defined in: http://www.jsonrpc.org/specification#request_object.
Args:
data(str): The json object.
Returns:
- True -- if successful
- False -- if no error should be reported (i.e. data is missing 'id' member)
Raises:
JsonRPCException
"""
iparms = set(data.keys())
mandatory_args = set(['jsonrpc', 'method'])
missing_args = mandatory_args - iparms
if missing_args:
raise Service.JsonRpcException(-32600, 'Missing arguments %s.' % list(missing_args))
if data['jsonrpc'] != '2.0':
raise Service.JsonRpcException(-32603, 'Unsupported jsonrpc version "%s"' % data['jsonrpc'])
if 'id' not in iparms:
return False
return True
request = current.request
response = current.response
if not data:
response.headers['Content-Type'] = 'application/json; charset=utf-8'
try:
data = json.loads(request.body.read())
except ValueError: # decoding error in json lib
return return_error(None, -32700)
# Batch handling
if isinstance(data, list) and not batch_element:
retlist = []
for c in data:
retstr = self.serve_jsonrpc2(c, batch_element=True)
if retstr: # do not add empty responses
retlist.append(retstr)
if len(retlist) == 0: # return nothing
return ''
else:
return "[" + ','.join(retlist) + "]"
methods = self.jsonrpc2_procedures
methods.update(self.jsonrpc_procedures)
try:
must_respond = validate(data)
except Service.JsonRpcException as e:
return return_error(None, e.code, e.info)
id, method, params = data.get('id'), data['method'], data.get('params', '')
if method not in methods:
return return_error(id, -32601, data='Method "%s" does not exist' % method)
try:
if isinstance(params, dict):
s = methods[method](**params)
else:
s = methods[method](*params)
if hasattr(s, 'as_list'):
s = s.as_list()
if must_respond:
return return_response(id, s)
else:
return ''
except HTTP as e:
raise e
except Service.JsonRpcException as e:
return return_error(id, e.code, e.info)
except:
etype, eval, etb = sys.exc_info()
data = '%s: %s\n' % (etype.__name__, eval) + str(request.is_local and traceback.format_tb(etb))
logger.warning('%s: %s\n%s' % (etype.__name__, eval, traceback.format_tb(etb)))
return return_error(id, -32099, data=data)
def serve_xmlrpc(self):
request = current.request
response = current.response
services = list(self.xmlrpc_procedures.values())
return response.xmlrpc(request, services)
    def serve_amfrpc(self, version=0):
        """Serve AMF-RPC (Flash remoting) requests via pyamf.

        version=3 selects the AMF3 registry/encoding; anything else uses
        the AMF0 registry with an explicit codec context.
        """
        try:
            import pyamf
            import pyamf.remoting.gateway
        except:
            return "pyamf not installed or not in Python sys.path"
        request = current.request
        response = current.response
        if version == 3:
            services = self.amfrpc3_procedures
            base_gateway = pyamf.remoting.gateway.BaseGateway(services)
            pyamf_request = pyamf.remoting.decode(request.body)
        else:
            services = self.amfrpc_procedures
            base_gateway = pyamf.remoting.gateway.BaseGateway(services)
            # AMF0 needs an explicit context shared by decode and encode
            context = pyamf.get_context(pyamf.AMF0)
            pyamf_request = pyamf.remoting.decode(request.body, context)
        pyamf_response = pyamf.remoting.Envelope(pyamf_request.amfVersion)
        # run every message in the envelope through its gateway processor
        for name, message in pyamf_request:
            pyamf_response[name] = base_gateway.getProcessor(message)(message)
        response.headers['Content-Type'] = pyamf.remoting.CONTENT_TYPE
        if version == 3:
            return pyamf.remoting.encode(pyamf_response).getvalue()
        else:
            return pyamf.remoting.encode(pyamf_response, context).getvalue()
    def serve_soap(self, version="1.1"):
        """Serve SOAP requests via pysimplesoap.

        POST requests are dispatched to the registered procedures; GET
        with ?WSDL returns the service description; GET with ?op=...
        returns an HTML help page for one method; otherwise an HTML index
        of all methods is returned.
        """
        try:
            from gluon.contrib.pysimplesoap.server import SoapDispatcher
        except:
            return "pysimplesoap not installed in contrib"
        request = current.request
        response = current.response
        procedures = self.soap_procedures
        location = "%s://%s%s" % (request.env.wsgi_url_scheme,
                                  request.env.http_host,
                                  URL(r=request, f="call/soap", vars={}))
        namespace = 'namespace' in response and response.namespace or location
        documentation = response.description or ''
        dispatcher = SoapDispatcher(
            name=response.title,
            location=location,
            action=location,  # SOAPAction
            namespace=namespace,
            prefix='pys',
            documentation=documentation,
            ns=True)
        for method, (function, returns, args, doc, resp_elem_name) in iteritems(procedures):
            dispatcher.register_function(method, function, returns, args, doc, resp_elem_name)
        if request.env.request_method == 'POST':
            fault = {}
            # Process normal Soap Operation
            response.headers['Content-Type'] = 'text/xml'
            xml = dispatcher.dispatch(request.body.read(), fault=fault)
            if fault:
                # May want to consider populating a ticket here...
                response.status = 500
            # return the soap response
            return xml
        elif 'WSDL' in request.vars:
            # Return Web Service Description
            response.headers['Content-Type'] = 'text/xml'
            return dispatcher.wsdl()
        elif 'op' in request.vars:
            # Return method help webpage
            response.headers['Content-Type'] = 'text/html'
            method = request.vars['op']
            sample_req_xml, sample_res_xml, doc = dispatcher.help(method)
            body = [H1("Welcome to Web2Py SOAP webservice gateway"),
                    A("See all webservice operations",
                      _href=URL(r=request, f="call/soap", vars={})),
                    H2(method),
                    P(doc),
                    UL(LI("Location: %s" % dispatcher.location),
                       LI("Namespace: %s" % dispatcher.namespace),
                       LI("SoapAction: %s" % dispatcher.action),
                       ),
                    H3("Sample SOAP XML Request Message:"),
                    CODE(sample_req_xml, language="xml"),
                    H3("Sample SOAP XML Response Message:"),
                    CODE(sample_res_xml, language="xml"),
                    ]
            return {'body': body}
        else:
            # Return general help and method list webpage
            response.headers['Content-Type'] = 'text/html'
            body = [H1("Welcome to Web2Py SOAP webservice gateway"),
                    P(response.description),
                    P("The following operations are available"),
                    A("See WSDL for webservice description",
                      _href=URL(r=request, f="call/soap", vars={"WSDL": None})),
                    UL([LI(A("%s: %s" % (method, doc or ''),
                             _href=URL(r=request, f="call/soap", vars={'op': method})))
                        for method, doc in dispatcher.list_methods()]),
                    ]
            return {'body': body}
def __call__(self):
"""
Registers services with::
service = Service()
@service.run
@service.rss
@service.json
@service.jsonrpc
@service.xmlrpc
@service.amfrpc
@service.amfrpc3('domain')
@service.soap('Method', returns={'Result':int}, args={'a':int,'b':int,})
Exposes services with::
def call():
return service()
You can call services with::
http://..../app/default/call/run?[parameters]
http://..../app/default/call/rss?[parameters]
http://..../app/default/call/json?[parameters]
http://..../app/default/call/jsonrpc
http://..../app/default/call/xmlrpc
http://..../app/default/call/amfrpc
http://..../app/default/call/amfrpc3
http://..../app/default/call/soap
"""
request = current.request
if len(request.args) < 1:
raise HTTP(404, "Not Found")
arg0 = request.args(0)
if arg0 == 'run':
return self.serve_run(request.args[1:])
elif arg0 == 'rss':
return self.serve_rss(request.args[1:])
elif arg0 == 'csv':
return self.serve_csv(request.args[1:])
elif arg0 == 'xml':
return self.serve_xml(request.args[1:])
elif arg0 == 'json':
return self.serve_json(request.args[1:])
elif arg0 == 'jsonrpc':
return self.serve_jsonrpc()
elif arg0 == 'jsonrpc2':
return self.serve_jsonrpc2()
elif arg0 == 'xmlrpc':
return self.serve_xmlrpc()
elif arg0 == 'amfrpc':
return self.serve_amfrpc()
elif arg0 == 'amfrpc3':
return self.serve_amfrpc(3)
elif arg0 == 'soap':
return self.serve_soap()
else:
self.error()
def error(self):
raise HTTP(404, "Object does not exist")
# we make this a method so that subclasses can override it if they want to do more specific argument-checking
# but the default implmentation is the simplest: just pass the arguments we got, with no checking
def call_service_function(self, f, *a, **b):
if self.check_args:
return universal_caller(f, *a, **b)
else:
return f(*a, **b)
def completion(callback):
    """
    Decorator factory: after the decorated action finishes (normally or
    with an exception), run ``callback`` in a new thread with the action's
    return value (None if it raised).

    Example:
        Use as::

            from gluon.tools import completion
            @completion(lambda d: logging.info(repr(d)))
            def index():
                return dict(message='hello')

        It logs the output of the function every time input is called.
    """
    def _completion(f):
        def __completion(*a, **b):
            result = None
            try:
                result = f(*a, **b)
                return result
            finally:
                # fire-and-forget: the callback runs on its own thread
                thread.start_new_thread(callback, (result,))
        return __completion
    return _completion
def prettydate(d, T=lambda x: x, utc=False):
    """
    Return a human-readable relative date string such as '3 days ago',
    '1 hour from now' or 'now'.

    Args:
        d: a datetime.datetime or datetime.date; falsy values yield '',
            anything else yields '[invalid date]'
        T: translation callable applied to the format string
        utc: compare against utcnow() instead of now()
    """
    reference = datetime.datetime.utcnow() if utc else datetime.datetime.now()
    if isinstance(d, datetime.datetime):
        delta = reference - d
    elif isinstance(d, datetime.date):
        delta = reference.date() - d
    elif not d:
        return ''
    else:
        return '[invalid date]'
    if delta.days < 0:
        suffix = ' from now'
        delta = -delta
    else:
        suffix = ' ago'
    days, seconds = delta.days, delta.seconds
    # (matched, format, quantity) rows from coarsest to finest; the first
    # matching row wins. quantity None means the format has no %d slot.
    table = [
        (days >= 2 * 365, '%d years', days // 365),
        (days >= 365, '1 year', None),
        (days >= 60, '%d months', days // 30),
        (days >= 27, '1 month', None),  # 4 weeks rounds up to a month
        (days >= 14, '%d weeks', days // 7),
        (days >= 7, '1 week', None),
        (days > 1, '%d days', days),
        (days == 1, '1 day', None),
        (seconds >= 2 * 60 * 60, '%d hours', seconds // 3600),
        (seconds >= 60 * 60, '1 hour', None),
        (seconds >= 2 * 60, '%d minutes', seconds // 60),
        (seconds >= 60, '1 minute', None),
        (seconds > 1, '%d seconds', seconds),
        (seconds == 1, '1 second', None),
    ]
    for matched, fmt, quantity in table:
        if matched:
            text = T(fmt + suffix)
            return text if quantity is None else text % int(quantity)
    return T('now')
def test_thread_separation():
    """Doctest helper for PluginManager: shows that instances living in
    different threads do not share attribute state (returns 5, not the 7
    written by the worker thread)."""
    def f():
        c = PluginManager()
        lock1.acquire()
        lock2.acquire()
        c.x = 7
        lock1.release()
        lock2.release()
    lock1 = thread.allocate_lock()
    lock2 = thread.allocate_lock()
    # hold lock1 so the worker blocks until the main thread has set a.x
    lock1.acquire()
    thread.start_new_thread(f, ())
    a = PluginManager()
    a.x = 5
    lock1.release()
    # wait until the worker has finished writing its own instance
    lock2.acquire()
    return a.x
class PluginManager(object):
    """
    Plugin Manager is similar to a storage object but it is a single level
    singleton. This means that multiple instances within the same thread share
    the same attributes.
    Its constructor is also special. The first argument is the name of the
    plugin you are defining.
    The named arguments are parameters needed by the plugin with default values.
    If the parameters were previous defined, the old values are used.

    Example:
        in some general configuration file::

            plugins = PluginManager()
            plugins.me.param1=3

        within the plugin model::

            _ = PluginManager('me',param1=5,param2=6,param3=7)

        where the plugin is used::

            >>> print(plugins.me.param1)
            3
            >>> print(plugins.me.param2)
            6
            >>> plugins.me.param3 = 8
            >>> print(plugins.me.param3)
            8

    Here are some tests::

        >>> a=PluginManager()
        >>> a.x=6
        >>> b=PluginManager('check')
        >>> print(b.x)
        6
        >>> b=PluginManager() # reset settings
        >>> print(b.x)
        <Storage {}>
        >>> b.x=7
        >>> print(a.x)
        7
        >>> a.y.z=8
        >>> print(b.y.z)
        8
        >>> test_thread_separation()
        5
        >>> plugins=PluginManager('me',db='mydb')
        >>> print(plugins.me.db)
        mydb
        >>> print('me' in plugins)
        True
        >>> print(plugins.me.installed)
        True
    """
    # one singleton instance per thread id
    instances = {}

    def __new__(cls, *a, **b):
        id = thread.get_ident()
        # NOTE(review): this lock is created per call, so it does not
        # actually serialize access to cls.instances across threads;
        # kept as-is since each thread only touches its own key.
        lock = thread.allocate_lock()
        try:
            lock.acquire()
            try:
                return cls.instances[id]
            except KeyError:
                # BUGFIX: object.__new__ must be called with the class
                # only; forwarding *a/**b raises TypeError on Python 3
                # when __new__ is overridden (e.g. PluginManager('me')).
                instance = object.__new__(cls)
                cls.instances[id] = instance
                return instance
        finally:
            lock.release()

    def __init__(self, plugin=None, **defaults):
        """Reset all settings when called without a plugin name; otherwise
        register `plugin` and fill in any not-yet-defined defaults."""
        if not plugin:
            self.__dict__.clear()
        settings = self.__getattr__(plugin)
        settings.installed = True
        # previously-set values win over the declared defaults
        settings.update(
            (k, v) for k, v in defaults.items() if k not in settings)

    def __getattr__(self, key):
        # auto-vivify a Storage for every plugin/attribute name
        if key not in self.__dict__:
            self.__dict__[key] = Storage()
        return self.__dict__[key]

    def keys(self):
        return list(self.__dict__.keys())

    def __contains__(self, key):
        return key in self.__dict__
class Expose(object):
def __init__(self, base=None, basename=None, extensions=None,
allow_download=True, follow_symlink_out=False):
"""
Examples:
Use as::
def static():
return dict(files=Expose())
or::
def static():
path = os.path.join(request.folder,'static','public')
return dict(files=Expose(path,basename='public'))
Args:
extensions: an optional list of file extensions for filtering
displayed files: e.g. `['.py', '.jpg']`
allow_download: whether to allow downloading selected files
follow_symlink_out: whether to follow symbolic links that points
points outside of `base`.
Warning: setting this to `True` might pose a security risk
if you don't also have complete control over writing
and file creation under `base`.
"""
self.follow_symlink_out = follow_symlink_out
self.base = self.normalize_path(
base or os.path.join(current.request.folder, 'static'))
self.basename = basename or current.request.function
self.base = base = os.path.realpath(base or os.path.join(current.request.folder, 'static'))
basename = basename or current.request.function
self.basename = basename
if current.request.raw_args:
self.args = [arg for arg in current.request.raw_args.split('/') if arg]
else:
self.args = [arg for arg in current.request.args if arg]
filename = os.path.join(self.base, *self.args)
if not os.path.exists(filename):
raise HTTP(404, "FILE NOT FOUND")
if not self.in_base(filename):
raise HTTP(401, "NOT AUTHORIZED")
if allow_download and not os.path.isdir(filename):
current.response.headers['Content-Type'] = contenttype(filename)
raise HTTP(200, open(filename, 'rb'), **current.response.headers)
self.path = path = os.path.join(filename, '*')
dirname_len = len(path) - 1
allowed = [f for f in sorted(glob.glob(path))
if not any([self.isprivate(f), self.issymlink_out(f)])]
self.folders = [f[dirname_len:]
for f in allowed if os.path.isdir(f)]
self.filenames = [f[dirname_len:]
for f in allowed if not os.path.isdir(f)]
if 'README' in self.filenames:
with open(os.path.join(filename, 'README')) as f:
readme = f.read()
self.paragraph = MARKMIN(readme)
else:
self.paragraph = None
if extensions:
self.filenames = [f for f in self.filenames
if os.path.splitext(f)[-1] in extensions]
def breadcrumbs(self, basename):
path = []
span = SPAN()
span.append(A(basename, _href=URL()))
for arg in self.args:
span.append('/')
path.append(arg)
span.append(A(arg, _href=URL(args='/'.join(path))))
return span
def table_folders(self):
if self.folders:
return SPAN(H3('Folders'),
TABLE(*[TR(TD(A(folder, _href=URL(args=self.args + [folder]))))
for folder in self.folders], **dict(_class="table")))
return ''
@staticmethod
def __in_base(subdir, basedir, sep=os.path.sep):
"""True if subdir/ is under basedir/"""
s = lambda f: '%s%s' % (f.rstrip(sep), sep) # f -> f/
# The trailing '/' is for the case of '/foobar' in_base of '/foo':
# - becase '/foobar' starts with '/foo'
# - but '/foobar/' doesn't start with '/foo/'
return s(subdir).startswith(s(basedir))
def in_base(self, f):
"""True if f/ is under self.base/
Where f ans slef.base are normalized paths
"""
return self.__in_base(self.normalize_path(f), self.base)
def normalize_path(self, f):
if self.follow_symlink_out:
return os.path.normpath(f)
else:
return os.path.realpath(f)
def issymlink_out(self, f):
"""True if f is a symlink and is pointing outside of self.base"""
return os.path.islink(f) and not self.in_base(f)
@staticmethod
def isprivate(f):
# remove '/private' prefix to deal with symbolic links on OSX
if f.startswith('/private/'):
f = f[8:]
return 'private' in f or f.startswith('.') or f.endswith('~')
@staticmethod
def isimage(f):
return os.path.splitext(f)[-1].lower() in (
'.png', '.jpg', '.jpeg', '.gif', '.tiff')
def table_files(self, width=160):
if self.filenames:
return SPAN(H3('Files'),
TABLE(*[TR(TD(A(f, _href=URL(args=self.args + [f]))),
TD(IMG(_src=URL(args=self.args + [f]),
_style='max-width:%spx' % width)
if width and self.isimage(f) else ''))
for f in self.filenames], **dict(_class="table")))
return ''
def xml(self):
return DIV(
H2(self.breadcrumbs(self.basename)),
self.paragraph or '',
self.table_folders(),
self.table_files()).xml()
class Wiki(object):
    """Self-contained wiki engine backed by the Auth database
    (wiki_page / wiki_tag / wiki_media tables)."""
    everybody = 'everybody'  # group name meaning public access
    rows_page = 25  # pagination size for listings and search results
def markmin_base(self, body):
return MARKMIN(body, extra=self.settings.extra,
url=True, environment=self.env,
autolinks=lambda link: expand_one(link, {})).xml()
def render_tags(self, tags):
return DIV(
_class='w2p_wiki_tags',
*[A(t.strip(), _href=URL(args='_search', vars=dict(q=t)))
for t in tags or [] if t.strip()])
def markmin_render(self, page):
return self.markmin_base(page.body) + self.render_tags(page.tags).xml()
def html_render(self, page):
html = page.body
# @///function -> http://..../function
html = replace_at_urls(html, URL)
# http://...jpg -> <img src="http://...jpg/> or embed
html = replace_autolinks(html, lambda link: expand_one(link, {}))
# @{component:name} -> <script>embed component name</script>
html = replace_components(html, self.env)
html = html + self.render_tags(page.tags).xml()
return html
@staticmethod
def component(text):
"""
In wiki docs allows `@{component:controller/function/args}`
which renders as a `LOAD(..., ajax=True)`
"""
items = text.split('/')
controller, function, args = items[0], items[1], items[2:]
return LOAD(controller, function, args=args, ajax=True).xml()
def get_renderer(self):
if isinstance(self.settings.render, basestring):
r = getattr(self, "%s_render" % self.settings.render)
elif callable(self.settings.render):
r = self.settings.render
elif isinstance(self.settings.render, dict):
def custom_render(page):
if page.render:
if page.render in self.settings.render.keys():
my_render = self.settings.render[page.render]
else:
my_render = getattr(self, "%s_render" % page.render)
else:
my_render = self.markmin_render
return my_render(page)
r = custom_render
else:
raise ValueError(
"Invalid render type %s" % type(self.settings.render))
return r
    def __init__(self, auth, env=None, render='markmin',
                 manage_permissions=False, force_prefix='',
                 restrict_search=False, extra=None,
                 menu_groups=None, templates=None, migrate=True,
                 controller=None, function=None, groups=None):
        settings = self.settings = auth.settings.wiki
        """
        Args:
            render:
                - "markmin"
                - "html"
                - `<function>` : Sets a custom render function
                - `dict(html=<function>, markmin=...)`: dict(...) allows
                   multiple custom render functions
                - "multiple" : Is the same as `{}`. It enables per-record
                   formats using builtins
        """
        engines = set(['markmin', 'html'])
        show_engine = False
        if render == "multiple":
            render = {}
        if isinstance(render, dict):
            # custom engines become selectable values of wiki_page.render
            [engines.add(key) for key in render]
            show_engine = True
        settings.render = render
        perms = settings.manage_permissions = manage_permissions
        settings.force_prefix = force_prefix
        settings.restrict_search = restrict_search
        settings.extra = extra or {}
        settings.menu_groups = menu_groups
        settings.templates = templates
        settings.controller = controller
        settings.function = function
        settings.groups = list(auth.user_groups.values()) \
            if groups is None else groups
        db = auth.db
        self.env = env or {}
        self.env['component'] = Wiki.component
        self.auth = auth
        self.wiki_menu_items = None
        # force_prefix may contain %(id)s-style placeholders filled from
        # the logged-in user record
        if self.auth.user:
            self.settings.force_prefix = force_prefix % self.auth.user
        else:
            self.settings.force_prefix = force_prefix
        self.host = current.request.env.http_host
        table_definitions = [
            ('wiki_page', {
                'args': [
                    Field('slug',
                          requires=[IS_SLUG(),
                                    IS_NOT_IN_DB(db, 'wiki_page.slug')],
                          writable=False),
                    Field('title', length=255, unique=True),
                    Field('body', 'text', notnull=True),
                    Field('tags', 'list:string'),
                    Field('can_read', 'list:string',
                          writable=perms,
                          readable=perms,
                          default=[Wiki.everybody]),
                    Field('can_edit', 'list:string',
                          writable=perms, readable=perms,
                          default=[Wiki.everybody]),
                    Field('changelog'),
                    Field('html', 'text',
                          compute=self.get_renderer(),
                          readable=False, writable=False),
                    Field('render', default="markmin",
                          readable=show_engine,
                          writable=show_engine,
                          requires=IS_EMPTY_OR(
                              IS_IN_SET(engines))),
                    auth.signature],
                'vars': {'format': '%(title)s', 'migrate': migrate}}),
            ('wiki_tag', {
                'args': [
                    Field('name'),
                    Field('wiki_page', 'reference wiki_page'),
                    auth.signature],
                'vars':{'format': '%(title)s', 'migrate': migrate}}),
            ('wiki_media', {
                'args': [
                    Field('wiki_page', 'reference wiki_page'),
                    Field('title', required=True),
                    Field('filename', 'upload', required=True),
                    auth.signature],
                'vars': {'format': '%(title)s', 'migrate': migrate}}),
        ]
        # define only non-existent tables
        for key, value in table_definitions:
            args = []
            if key not in db.tables():
                # look for wiki_ extra fields in auth.settings
                extra_fields = auth.settings.extra_fields
                if extra_fields:
                    if key in extra_fields:
                        if extra_fields[key]:
                            for field in extra_fields[key]:
                                args.append(field)
                args += value['args']
                db.define_table(key, *args, **value['vars'])
        if self.settings.templates is None and not self.settings.manage_permissions:
            self.settings.templates = \
                db.wiki_page.tags.contains('template') & db.wiki_page.can_read.contains('everybody')

        def update_tags_insert(page, id, db=db):
            # keep wiki_tag rows in sync after a page insert
            for tag in page.tags or []:
                tag = tag.strip().lower()
                if tag:
                    db.wiki_tag.insert(name=tag, wiki_page=id)

        def update_tags_update(dbset, page, db=db):
            # re-create wiki_tag rows from scratch after a page update
            page = dbset.select(limitby=(0, 1)).first()
            db(db.wiki_tag.wiki_page == page.id).delete()
            for tag in page.tags or []:
                tag = tag.strip().lower()
                if tag:
                    db.wiki_tag.insert(name=tag, wiki_page=page.id)
        db.wiki_page._after_insert.append(update_tags_insert)
        db.wiki_page._after_update.append(update_tags_update)
        # auto-enroll the current (non-GAE) user as wiki_editor when no
        # explicit groups were supplied
        if (auth.user and
            check_credentials(current.request, gae_login=False) and
            'wiki_editor' not in auth.user_groups.values() and
                self.settings.groups == list(auth.user_groups.values())):
            group = db.auth_group(role='wiki_editor')
            gid = group.id if group else db.auth_group.insert(
                role='wiki_editor')
            auth.add_membership(gid)
        settings.lock_keys = True
# WIKI ACCESS POLICY
def not_authorized(self, page=None):
raise HTTP(401)
def can_read(self, page):
if 'everybody' in page.can_read or not self.settings.manage_permissions:
return True
elif self.auth.user:
groups = self.settings.groups
if ('wiki_editor' in groups or
set(groups).intersection(set(page.can_read + page.can_edit)) or
page.created_by == self.auth.user.id):
return True
return False
def can_edit(self, page=None):
if not self.auth.user:
redirect(self.auth.settings.login_url)
groups = self.settings.groups
return ('wiki_editor' in groups or
(page is None and 'wiki_author' in groups) or
page is not None and (set(groups).intersection(set(page.can_edit)) or
page.created_by == self.auth.user.id))
def can_manage(self):
if not self.auth.user:
return False
groups = self.settings.groups
return 'wiki_editor' in groups
def can_search(self):
return True
def can_see_menu(self):
if self.auth.user:
if self.settings.menu_groups is None:
return True
else:
groups = self.settings.groups
if any(t in self.settings.menu_groups for t in groups):
return True
return False
# END POLICY
def automenu(self):
"""adds the menu if not present"""
if (not self.wiki_menu_items and self.settings.controller and self.settings.function):
self.wiki_menu_items = self.menu(self.settings.controller,
self.settings.function)
current.response.menu += self.wiki_menu_items
    def __call__(self):
        """Route the current request: numeric arg -> media download,
        plain slug -> read, '_xxx' args -> the matching special action."""
        request = current.request
        settings = self.settings
        settings.controller = settings.controller or request.controller
        settings.function = settings.function or request.function
        self.automenu()
        zero = request.args(0) or 'index'
        if zero and zero.isdigit():
            # numeric first arg is a wiki_media record id
            return self.media(int(zero))
        elif not zero or not zero.startswith('_'):
            return self.read(zero)
        elif zero == '_edit':
            return self.edit(request.args(1) or 'index', request.args(2) or 0)
        elif zero == '_editmedia':
            return self.editmedia(request.args(1) or 'index')
        elif zero == '_create':
            return self.create()
        elif zero == '_pages':
            return self.pages()
        elif zero == '_search':
            return self.search()
        elif zero == '_recent':
            # pages recently created by a given author, newest first
            ipage = int(request.vars.page or 0)
            query = self.auth.db.wiki_page.created_by == request.args(
                1, cast=int)
            return self.search(query=query,
                               orderby=~self.auth.db.wiki_page.created_on,
                               limitby=(ipage * self.rows_page,
                                        (ipage + 1) * self.rows_page),
                               )
        elif zero == '_cloud':
            return self.cloud()
        elif zero == '_preview':
            return self.preview(self.get_renderer())
def first_paragraph(self, page):
if not self.can_read(page):
mm = (page.body or '').replace('\r', '')
ps = [p for p in mm.split('\n\n') if not p.startswith('#') and p.strip()]
if ps:
return ps[0]
return ''
def fix_hostname(self, body):
return (body or '').replace('://HOSTNAME', '://%s' % self.host)
def read(self, slug, force_render=False):
if slug in '_cloud':
return self.cloud()
elif slug in '_search':
return self.search()
page = self.auth.db.wiki_page(slug=slug)
if page and (not self.can_read(page)):
return self.not_authorized(page)
if current.request.extension == 'html':
if not page:
url = URL(args=('_create', slug))
return dict(content=A('Create page "%s"' % slug, _href=url, _class="btn"))
else:
html = page.html if not force_render else self.get_renderer()(page)
content = XML(self.fix_hostname(html))
return dict(title=page.title,
slug=page.slug,
page=page,
content=content,
tags=page.tags,
created_on=page.created_on,
modified_on=page.modified_on)
elif current.request.extension == 'load':
return self.fix_hostname(page.html) if page else ''
else:
if not page:
raise HTTP(404)
else:
return dict(title=page.title,
slug=page.slug,
page=page,
content=page.body,
tags=page.tags,
created_on=page.created_on,
modified_on=page.modified_on)
    def edit(self, slug, from_template=0):
        """Serve the edit form for page `slug` (creating defaults for a
        new page, optionally seeded from page id `from_template`), plus
        the client-side preview/media JavaScript."""
        auth = self.auth
        db = auth.db
        page = db.wiki_page(slug=slug)
        if not self.can_edit(page):
            return self.not_authorized(page)
        title_guess = ' '.join(c.capitalize() for c in slug.split('-'))
        if not page:
            # new page: non-managers must respect the slug prefix policy
            if not (self.can_manage() or
                    slug.startswith(self.settings.force_prefix)):
                current.session.flash = 'slug must have "%s" prefix' \
                    % self.settings.force_prefix
                redirect(URL(args=('_create')))
            db.wiki_page.can_read.default = [Wiki.everybody]
            db.wiki_page.can_edit.default = [auth.user_group_role()]
            db.wiki_page.title.default = title_guess
            db.wiki_page.slug.default = slug
            if slug == 'wiki-menu':
                db.wiki_page.body.default = \
                    '- Menu Item > @////index\n- - Submenu > http://web2py.com'
            else:
                db.wiki_page.body.default = db(db.wiki_page.id == from_template).select(db.wiki_page.body)[0].body \
                    if int(from_template) > 0 else '## %s\n\npage content' % title_guess
        vars = current.request.post_vars
        if vars.body:
            # store host-independent links; read() re-expands them
            vars.body = vars.body.replace('://%s' % self.host, '://HOSTNAME')
        form = SQLFORM(db.wiki_page, page, deletable=True,
                       formstyle='table2cols', showid=False).process()
        if form.deleted:
            current.session.flash = 'page deleted'
            redirect(URL())
        elif form.accepted:
            current.session.flash = 'page created'
            redirect(URL(args=slug))
        # client-side preview/media toolbar; placeholders filled below
        script = """
        jQuery(function() {
            if (!jQuery('#wiki_page_body').length) return;
            var pagecontent = jQuery('#wiki_page_body');
            pagecontent.css('font-family',
                            'Monaco,Menlo,Consolas,"Courier New",monospace');
            var prevbutton = jQuery('<button class="btn nopreview">Preview</button>');
            var preview = jQuery('<div id="preview"></div>').hide();
            var previewmedia = jQuery('<div id="previewmedia"></div>');
            var form = pagecontent.closest('form');
            preview.insertBefore(form);
            prevbutton.insertBefore(form);
            if(%(link_media)s) {
              var mediabutton = jQuery('<button class="btn nopreview">Media</button>');
              mediabutton.insertBefore(form);
              previewmedia.insertBefore(form);
              mediabutton.click(function() {
                if (mediabutton.hasClass('nopreview')) {
                  web2py_component('%(urlmedia)s', 'previewmedia');
                } else {
                  previewmedia.empty();
                }
                mediabutton.toggleClass('nopreview');
              });
            }
            prevbutton.click(function(e) {
                e.preventDefault();
                if (prevbutton.hasClass('nopreview')) {
                    prevbutton.addClass('preview').removeClass(
                        'nopreview').html('Edit Source');
                    try{var wiki_render = jQuery('#wiki_page_render').val()}
                    catch(e){var wiki_render = null;}
                    web2py_ajax_page('post', \
                        '%(url)s', {body: jQuery('#wiki_page_body').val(), \
                                    render: wiki_render}, 'preview');
                    form.fadeOut('fast', function() {preview.fadeIn()});
                } else {
                    prevbutton.addClass(
                        'nopreview').removeClass('preview').html('Preview');
                    preview.fadeOut('fast', function() {form.fadeIn()});
                }
            })
        })
        """ % dict(url=URL(args=('_preview', slug)), link_media=('true' if page else 'false'),
                   urlmedia=URL(extension='load',
                                args=('_editmedia', slug),
                                vars=dict(embedded=1)))
        return dict(content=TAG[''](form, SCRIPT(script)))
def editmedia(self, slug):
    """Render a grid for managing the media attached to page ``slug``."""
    db = self.auth.db
    page = db.wiki_page(slug=slug)
    # Media can only be edited on an existing page the user may edit.
    if not (page and self.can_edit(page)):
        return self.not_authorized(page)
    media_table = db.wiki_media
    # Show each media id as its embeddable markmin reference when a
    # file has been uploaded for it.
    media_table.id.represent = lambda id, row: \
        id if not row.filename else \
        SPAN('@////%i/%s.%s' % (id, IS_SLUG.urlify(row.title.split('.')[0]), row.filename.split('.')[-1]))
    media_table.wiki_page.default = page.id
    media_table.wiki_page.writable = False
    grid_links = []
    allow_csv = allow_create = True
    if current.request.vars.embedded:
        # Embedded mode (inside the page editor): replace csv/create
        # controls with a one-click "copy into source" link.
        onclick = "var c = jQuery('#wiki_page_body'); c.val(c.val() + jQuery('%s').text()); return false;"
        render_id = media_table.id.represent
        allow_csv = allow_create = False
        grid_links = [lambda row: A('copy into source', _href='#', _onclick=onclick % (render_id(row.id, row)))]
    grid = SQLFORM.grid(
        media_table.wiki_page == page.id,
        orderby=media_table.title,
        links=grid_links,
        csv=allow_csv,
        create=allow_create,
        args=['_editmedia', slug],
        user_signature=False)
    return dict(content=grid)
def create(self):
    """Serve the "create page" form.

    Asks for a slug (and optionally a template page to clone) and, on
    successful submission, redirects to the edit view for the new slug.
    Returns a dict with the form as ``content``.

    Fix: the original built an unused ``options`` list from a
    ``db(db.wiki_page.id > 0)`` select on every request — dead code and
    a needless database query; both removed.
    """
    if not self.can_edit():
        return self.not_authorized()
    db = self.auth.db
    fields = [Field("slug", default=current.request.args(1) or
                    self.settings.force_prefix,
                    requires=(IS_SLUG(), IS_NOT_IN_DB(db, db.wiki_page.slug))), ]
    if self.settings.templates:
        # Let the user clone an existing page matching the templates query.
        fields.append(
            Field("from_template", "reference wiki_page",
                  requires=IS_EMPTY_OR(IS_IN_DB(db(self.settings.templates), db.wiki_page._id, '%(slug)s')),
                  comment=current.T("Choose Template or empty for new Page")))
    form = SQLFORM.factory(*fields, **dict(_class="well"))
    form.element("[type=submit]").attributes["_value"] = \
        current.T("Create Page from Slug")
    if form.process().accepted:
        # Normalize an empty template choice to 0 ("no template").
        form.vars.from_template = 0 if not form.vars.from_template else form.vars.from_template
        redirect(URL(args=('_edit', form.vars.slug, form.vars.from_template or 0)))  # added param
    return dict(content=form)
def pages(self):
    """Administrative grid listing every wiki page (managers only)."""
    if not self.can_manage():
        return self.not_authorized()
    table = self.auth.db.wiki_page
    # Render the slug as its markmin reference and the title as a link
    # to the page itself.
    table.slug.represent = lambda slug, row: SPAN(
        '@////%s' % slug)
    table.title.represent = lambda title, row: \
        A(title, _href=URL(args=row.slug))
    edit_link = lambda row: \
        A('edit', _href=URL(args=('_edit', row.slug)), _class='btn')
    media_link = lambda row: \
        A('media', _href=URL(args=('_editmedia', row.slug)), _class='btn')
    grid = SQLFORM.grid(
        table,
        fields=[table.slug,
                table.title, table.tags,
                table.can_read, table.can_edit],
        links=[edit_link, media_link],
        details=False, editable=False, deletable=False, create=False,
        orderby=table.title,
        args=['_pages'],
        user_signature=False)
    return dict(content=grid)
def media(self, id):
    """Stream the media record ``id`` as a cacheable download.

    Raises HTTP(404) for an unknown id; enforces page read permission
    when ``manage_permissions`` is enabled.
    """
    request = current.request
    response = current.response
    db = self.auth.db
    record = db.wiki_media(id)
    if not record:
        raise HTTP(404)
    if self.settings.manage_permissions:
        # Media inherit read permission from their owning page.
        owner_page = db.wiki_page(record.wiki_page)
        if not self.can_read(owner_page):
            return self.not_authorized(owner_page)
    request.args = [record.filename]
    stream = response.download(request, db)
    current.session.forget()  # get rid of the cookie
    headers = response.headers
    headers['Last-Modified'] = \
        request.utcnow.strftime("%a, %d %b %Y %H:%M:%S GMT")
    if 'Content-Disposition' in headers:
        del headers['Content-Disposition']
    headers['Pragma'] = 'cache'
    headers['Cache-Control'] = 'private'
    return stream
def menu(self, controller='default', function='index'):
    """Build the application menu from the special 'wiki-menu' page.

    Each matching line of wiki-menu has the form ``- Title > link``:
    the run of leading dashes gives the nesting depth and the link is
    either a plain URL or an ``@/app/controller/function`` reference.
    A ``[Wiki]`` submenu with contextual actions is appended for users
    allowed to see it.  Returns a web2py menu structure (nested list of
    ``(label, active, url, children)`` tuples).

    Fix: the regex is now a raw string — ``\\s``, ``\\w``, ``\\S``,
    ``\\-`` and ``\\>`` are regex escapes, and in a non-raw literal they
    are invalid string escapes (SyntaxWarning on modern Python).
    """
    db = self.auth.db
    request = current.request
    menu_page = db.wiki_page(slug='wiki-menu')
    menu = []
    if menu_page:
        tree = {'': menu}
        regex = re.compile(r'[\r\n\t]*(?P<base>(\s*\-\s*)+)(?P<title>\w.*?)\s+\>\s+(?P<link>\S+)')
        for match in regex.finditer(self.fix_hostname(menu_page.body)):
            base = match.group('base').replace(' ', '')
            title = match.group('title')
            link = match.group('link')
            title_page = None
            if link.startswith('@'):
                # @/app/controller/function[/args...] reference.
                items = link[2:].split('/')
                if len(items) > 3:
                    title_page = items[3]
                link = URL(a=items[0] or None, c=items[1] or controller,
                           f=items[2] or function, args=items[3:])
            # One fewer dash addresses the parent level; unknown depths
            # fall back to the root menu.
            parent = tree.get(base[1:], tree[''])
            subtree = []
            tree[base] = subtree
            parent.append((current.T(title),
                           request.args(0) == title_page,
                           link, subtree))
    if self.can_see_menu():
        submenu = []
        menu.append((current.T('[Wiki]'), None, None, submenu))
        if URL() == URL(controller, function):
            # Work out which wiki view we are on so only the relevant
            # actions are offered.
            if not str(request.args(0)).startswith('_'):
                slug = request.args(0) or 'index'
                mode = 1
            elif request.args(0) == '_edit':
                slug = request.args(1) or 'index'
                mode = 2
            elif request.args(0) == '_editmedia':
                slug = request.args(1) or 'index'
                mode = 3
            else:
                mode = 0
            if mode in (2, 3):
                submenu.append((current.T('View Page'), None,
                                URL(controller, function, args=slug)))
            if mode in (1, 3):
                submenu.append((current.T('Edit Page'), None,
                                URL(controller, function, args=('_edit', slug))))
            if mode in (1, 2):
                submenu.append((current.T('Edit Page Media'), None,
                                URL(controller, function, args=('_editmedia', slug))))

            submenu.append((current.T('Create New Page'), None,
                            URL(controller, function, args=('_create'))))
            # Moved next if to inside self.auth.user check
            if self.can_manage():
                submenu.append((current.T('Manage Pages'), None,
                                URL(controller, function, args=('_pages'))))
                submenu.append((current.T('Edit Menu'), None,
                                URL(controller, function, args=('_edit', 'wiki-menu'))))
            # Also moved inside self.auth.user check
            submenu.append((current.T('Search Pages'), None,
                            URL(controller, function, args=('_search'))))
    return menu
def search(self, tags=None, query=None, cloud=True, preview=True,
           limitby=(0, 100), orderby=None):
    """Search wiki pages by tag list and/or an explicit DAL query.

    With neither ``tags`` nor ``query`` given, renders the search form
    and derives the tag list from the ``q`` request variable.  For
    'html'/'load' requests results are rendered as HTML; otherwise a
    list of page dicts is returned.  Falls back to the tag cloud when
    there is nothing to search for and ``cloud`` is true.
    """
    if not self.can_search():
        return self.not_authorized()
    request = current.request
    content = CAT()
    if tags is None and query is None:
        form = FORM(INPUT(_name='q', requires=IS_NOT_EMPTY(),
                          value=request.vars.q),
                    INPUT(_type="submit", _value=current.T('Search')),
                    _method='GET')
        content.append(DIV(form, _class='w2p_wiki_form'))
        if request.vars.q:
            # Comma-separated, lower-cased tag list from the form input.
            tags = [v.strip() for v in request.vars.q.split(',')]
            tags = [v.lower() for v in tags if v]
    if tags or query is not None:
        db = self.auth.db
        count = db.wiki_tag.wiki_page.count()
        fields = [db.wiki_page.id, db.wiki_page.slug,
                  db.wiki_page.title, db.wiki_page.tags,
                  db.wiki_page.can_read, db.wiki_page.can_edit]
        if preview:
            fields.append(db.wiki_page.body)
        if query is None:
            # Match pages by tag, or by title substring as a fallback.
            query = (db.wiki_page.id == db.wiki_tag.wiki_page) &\
                (db.wiki_tag.name.belongs(tags))
            query = query | db.wiki_page.title.contains(request.vars.q)
        if self.settings.restrict_search and not self.can_manage():
            # Non-managers only see their own pages when restricted.
            query = query & (db.wiki_page.created_by == self.auth.user_id)
        pages = db(query).select(count,
                                 *fields, **dict(orderby=orderby or ~count,
                                                 groupby=reduce(lambda a, b: a | b, fields),
                                                 distinct=True,
                                                 limitby=limitby))
        if request.extension in ('html', 'load'):
            if not pages:
                content.append(DIV(current.T("No results"),
                                   _class='w2p_wiki_form'))

            def link(t):
                # Anchor that re-runs the search for a single tag.
                return A(t, _href=URL(args='_search', vars=dict(q=t)))
            items = [DIV(H3(A(p.wiki_page.title, _href=URL(
                args=p.wiki_page.slug))),
                MARKMIN(self.first_paragraph(p.wiki_page))
                if preview else '',
                DIV(_class='w2p_wiki_tags',
                    *[link(t.strip()) for t in
                      p.wiki_page.tags or [] if t.strip()]),
                _class='w2p_wiki_search_item')
                for p in pages]
            content.append(DIV(_class='w2p_wiki_pages', *items))
        else:
            # Non-HTML request: return raw page dicts and skip the cloud.
            cloud = False
            content = [p.wiki_page.as_dict() for p in pages]
    elif cloud:
        content.append(self.cloud()['content'])
    if request.extension == 'load':
        return content
    return dict(content=content)
def cloud(self):
    """Tag cloud: links for the 20 most used tags, sized by frequency."""
    db = self.auth.db
    count = db.wiki_tag.wiki_page.count(distinct=True)
    rows = db(db.wiki_tag).select(
        db.wiki_tag.name, count,
        distinct=True,
        groupby=db.wiki_tag.name,
        orderby=~count, limitby=(0, 20))
    if rows:
        # Rows are ordered by descending frequency, so the extremes
        # bound the scaling below.
        most, least = rows[0](count), rows[-1](count)

    def tag_style(freq):
        # Scale font size linearly between the least and most used tag.
        template = 'padding:0 0.2em;line-height:%.2fem;font-size:%.2fem'
        size = (1.5 * (freq - least) / max(most - least, 1) + 1.3)
        return template % (1.3, size)
    anchors = []
    for row in rows:
        anchors.append(A(row.wiki_tag.name,
                         _style=tag_style(row(count)),
                         _href=URL(args='_search',
                                   vars=dict(q=row.wiki_tag.name))))
        anchors.append(' ')
    return dict(content=DIV(_class='w2p_cloud', *anchors))
def preview(self, render):
    """Render the posted page body with ``render`` and return the result."""
    post_vars = current.request.post_vars
    # FIXME: This is an ugly hack to ensure a default render
    # engine if not specified (with multiple render engines)
    if 'render' not in post_vars:
        post_vars.render = None
    return render(post_vars)
class Config(object):
    """Thin wrapper around ConfigParser managing one section of an
    ini-style file, falling back to the web2py session when the file
    cannot be written.

    Fixes over the original:
    - ``default_values`` no longer uses a mutable ``{}`` default
      (shared across calls); ``None`` is substituted at call time.
    - ``save()`` closes the file handle via ``with`` (was leaked).
    - bare ``except:`` narrowed to ``except Exception`` so
      KeyboardInterrupt/SystemExit are not swallowed.
    """

    def __init__(
        self,
        filename,
        section,
        default_values=None
    ):
        # Substitute the mutable default here; {} and None are
        # equivalent for ConfigParser's defaults.
        self.config = configparser.ConfigParser(default_values or {})
        self.config.read(filename)
        if not self.config.has_section(section):
            self.config.add_section(section)
        self.section = section
        self.filename = filename

    def read(self):
        """Return the section's options as a dict, preferring a copy
        previously stashed in the session by a failed save()."""
        if not (isinstance(current.session['settings_%s' % self.section], dict)):
            settings = dict(self.config.items(self.section))
        else:
            settings = current.session['settings_%s' % self.section]
        return settings

    def save(self, options):
        """Set each (option, value) pair and write the file.

        On write failure the current options are kept in the session so
        they survive until the file becomes writable.  Returns True on
        success, False otherwise.
        """
        for option, value in options:
            self.config.set(self.section, option, value)
        try:
            # 'with' guarantees the handle is closed (the original
            # passed an unclosed open() result to config.write).
            with open(self.filename, 'w') as config_file:
                self.config.write(config_file)
            result = True
        except Exception:
            current.session['settings_%s' % self.section] = dict(self.config.items(self.section))
            result = False
        return result
if __name__ == '__main__':
    # Run the module's embedded doctests when executed directly.
    import doctest
    doctest.testmod()
| open_redirect | {
"code": [
" host = current.request.env.http_host",
" return self.prevent_open_redirect(next, host)",
" @staticmethod",
" def prevent_open_redirect(next, host):",
" if next:",
" parts = next.split('/')",
" if ':' not in parts[0] and parts[:2] != ['', '']:",
" return next",
" elif len(parts) > 2 and parts[0].endswith(':') and parts[1:3] == ['', host]:",
" return next",
" return None",
" next = request.get_vars._next \\",
" or request.post_vars._next \\",
" next = request.get_vars._next \\",
" or request.post_vars._next \\"
],
"line_no": [
1755,
1759,
1762,
1763,
1766,
1767,
1768,
1769,
1770,
1771,
1772,
4279,
4280,
4425,
4426
]
} | {
"code": [
"def prevent_open_redirect(url):",
" print(host)",
" return None",
" parts = url.split('/')",
" return url",
" return url",
" return prevent_open_redirect(next)",
" next = prevent_open_redirect(request.get_vars._next) \\",
" or prevent_open_redirect(request.post_vars._next) \\",
" next = prevent_open_redirect(request.get_vars._next) \\",
" or prevent_open_redirect(request.post_vars._next) \\"
],
"line_no": [
110,
114,
116,
118,
120,
122,
1774,
4282,
4283,
4428,
4429
]
} |
import .base64
from functools import .reduce
from gluon._compat import .pickle, thread, urllib2, Cookie, StringIO, urlencode
from gluon._compat import .configparser, MIMEBase, MIMEMultipart, MIMEText, Header
from gluon._compat import Encoders, Charset, long, urllib_quote, iteritems
from gluon._compat import .to_bytes, to_native, add_charset, string_types
from gluon._compat import .charset_QP, basestring, unicodeT, to_unicode
from gluon._compat import .urllib2, urlopen
import .datetime
import .logging
import .sys
import glob
import .os
import .re
import .time
import .fnmatch
import .traceback
import .smtplib
import email.utils
import .random
import hmac
import hashlib
import .json
from email import .message_from_string
from gluon.authapi import .AuthAPI
from gluon.contenttype import .contenttype
from gluon.storage import Storage, StorageList, Settings, Messages
from gluon.utils import web2py_uuid, compare
from gluon.fileutils import .read_file, check_credentials
from gluon import *
from gluon.contrib.autolinks import expand_one
from gluon.contrib.markmin.markmin2html import .replace_at_urls
from gluon.contrib.markmin.markmin2html import .replace_autolinks
from gluon.contrib.markmin.markmin2html import .replace_components
from pydal.objects import Row, Set, Query
import gluon.serializers as serializers
VAR_0 = DAL.Table
VAR_1 = DAL.Field
__all__ = ['Mail', 'Auth', 'Recaptcha2', 'Crud', 'Service', 'Wiki',
'PluginManager', 'fetch', 'geocode', 'reverse_geocode', 'prettydate']
VAR_2 = logging.getLogger("web2py")
VAR_3 = lambda: None
def FUNC_0(VAR_4, VAR_5=None):
VAR_11 = VAR_263.request.args
if VAR_4 < 0 and len(VAR_11) >= -VAR_4:
return VAR_11[VAR_4]
elif VAR_4 >= 0 and len(VAR_11) > VAR_4:
return VAR_11[VAR_4]
else:
return VAR_5
def VAR_26(VAR_6, VAR_7, VAR_8=None):
if VAR_6:
if VAR_8 and isinstance(VAR_6, dict):
VAR_6 = actions.get(VAR_8, [])
if not isinstance(VAR_6, (list, tuple)):
VAR_6 = [actions]
[VAR_114(VAR_7) for VAR_114 in VAR_6]
def FUNC_2(*VAR_9):
VAR_13 = []
for VAR_305 in VAR_9:
if isinstance(VAR_305, (list, tuple)):
VAR_13 = b + list(VAR_305)
else:
VAR_13.append(VAR_305)
return VAR_13
def FUNC_3(VAR_10, *VAR_11):
if callable(VAR_10):
redirect(VAR_10(*VAR_11))
else:
redirect(VAR_10)
def FUNC_4(VAR_12, VAR_7):
if VAR_12:
VAR_12 = VAR_12.replace('[VAR_215]', str(VAR_7.vars.id))
if VAR_12[0] == '/' or VAR_12[:4] == 'http':
return VAR_12
return URL(VAR_12)
class CLASS_0(object):
class CLASS_10(MIMEBase):
def __init__(
self,
VAR_81,
VAR_94=None,
VAR_220=None,
VAR_221=None,
VAR_42='utf-8'):
if isinstance(VAR_81, str):
if VAR_94 is None:
VAR_94 = VAR_418.path.basename(VAR_81)
VAR_81 = read_file(VAR_81, 'rb')
else:
if VAR_94 is None:
raise Exception('Missing VAR_417 name')
VAR_81 = payload.read()
VAR_94 = filename.encode(VAR_42)
if VAR_221 is None:
VAR_221 = contenttype(VAR_94)
self.my_filename = VAR_94
self.my_payload = VAR_81
MIMEBase.__init__(self, *VAR_221.split('/', 1))
self.set_payload(VAR_81)
self['Content-Disposition'] = Header('attachment; VAR_94="%s"' % to_native(VAR_94, VAR_42), 'utf-8')
if VAR_220 is not None:
self['Content-Id'] = '<%VAR_278>' % to_native(VAR_220, VAR_42)
Encoders.encode_base64(self)
def __init__(self, VAR_31=None, VAR_32=None, VAR_33=None, VAR_34=True):
VAR_222 = self.settings = Settings()
VAR_222.server = VAR_31
VAR_222.sender = VAR_32
VAR_222.login = VAR_33
VAR_222.tls = VAR_34
VAR_222.timeout = 5 # seconds
VAR_222.hostname = None
VAR_222.ssl = False
VAR_222.cipher_type = None
VAR_222.gpg_home = None
VAR_222.sign = True
VAR_222.sign_passphrase = None
VAR_222.encrypt = True
VAR_222.x509_sign_keyfile = None
VAR_222.x509_sign_certfile = None
VAR_222.x509_sign_chainfile = None
VAR_222.x509_nocerts = False
VAR_222.x509_crypt_certfiles = None
VAR_222.debug = False
VAR_222.lock_keys = True
self.result = {}
self.error = None
def FUNC_13(self,
VAR_35,
VAR_36='[no VAR_36]',
VAR_37='[no VAR_37]',
VAR_38=None,
VAR_39=None,
VAR_40=None,
VAR_41=None,
VAR_32=None,
VAR_42='utf-8',
VAR_43=False,
VAR_18={},
VAR_44=None,
VAR_45=None,
VAR_46=None,
VAR_47=None,
VAR_48=None,
VAR_49=None,
VAR_50=None,
VAR_51=None,
VAR_52=None,
VAR_53=None
):
add_charset('utf-8', charset_QP, charset_QP, 'utf-8')
def FUNC_151(VAR_199):
if [VAR_14 for VAR_14 in VAR_199 if 32 > ord(VAR_14) or ord(VAR_14) > 127]:
return Header(VAR_199.encode('utf-8'), 'utf-8')
else:
return VAR_199
def FUNC_152(VAR_213):
if VAR_43:
VAR_213 = FUNC_151(VAR_213)
return VAR_213
VAR_32 = VAR_32 or self.settings.sender
if not isinstance(self.settings.server, str):
raise Exception('Server VAR_22 not specified')
if not isinstance(VAR_32, str):
raise Exception('Sender VAR_22 not specified')
if not VAR_43 and VAR_38:
VAR_345 = MIMEMultipart('mixed')
elif VAR_43:
if not isinstance(VAR_37, basestring):
VAR_37 = VAR_37.read()
if isinstance(VAR_37, unicodeT):
VAR_213 = VAR_37.encode('utf-8')
elif not VAR_42 == 'utf-8':
VAR_213 = VAR_37.decode(VAR_42).encode('utf-8')
else:
VAR_213 = VAR_37
VAR_345 = MIMEText(VAR_213)
if VAR_35:
if not isinstance(VAR_35, (list, tuple)):
VAR_35 = [to]
else:
raise Exception('Target receiver VAR_22 not specified')
if VAR_41:
if not isinstance(VAR_41, (list, tuple)):
VAR_41 = [reply_to]
if VAR_39:
if not isinstance(VAR_39, (list, tuple)):
VAR_39 = [cc]
if VAR_40:
if not isinstance(VAR_40, (list, tuple)):
VAR_40 = [bcc]
if VAR_37 is None:
VAR_213 = VAR_303 = None
elif isinstance(VAR_37, (list, tuple)):
VAR_213, VAR_303 = VAR_37
elif VAR_37.strip().startswith('<html') and \
VAR_37.strip().endswith('</VAR_303>'):
VAR_213 = self.settings.server == 'gae' and VAR_37 or None
VAR_303 = VAR_37
else:
VAR_213 = VAR_37
VAR_303 = None
if (VAR_213 is not None or VAR_303 is not None) and (not VAR_43):
if VAR_213 is not None:
if not isinstance(VAR_213, basestring):
VAR_213 = VAR_213.read()
if isinstance(VAR_213, unicodeT):
VAR_213 = VAR_213.encode('utf-8')
elif not VAR_42 == 'utf-8':
VAR_213 = VAR_213.decode(VAR_42).encode('utf-8')
if VAR_303 is not None:
if not isinstance(VAR_303, basestring):
VAR_303 = html.read()
if isinstance(VAR_303, unicodeT):
VAR_303 = html.encode('utf-8')
elif not VAR_42 == 'utf-8':
VAR_303 = html.decode(VAR_42).encode('utf-8')
if VAR_213 is not None and VAR_303:
VAR_417 = MIMEMultipart('alternative')
VAR_417.attach(MIMEText(VAR_213, _charset='utf-8'))
VAR_417.attach(MIMEText(VAR_303, 'html', _charset='utf-8'))
elif VAR_213 is not None:
VAR_417 = MIMEText(VAR_213, _charset='utf-8')
elif VAR_303:
VAR_417 = MIMEText(VAR_303, 'html', _charset='utf-8')
if VAR_38:
VAR_345.attach(VAR_417)
else:
VAR_345 = VAR_417
if (VAR_38 is None) or VAR_43:
pass
elif isinstance(VAR_38, (list, tuple)):
for VAR_417 in VAR_38:
VAR_345.attach(VAR_417)
else:
VAR_345.attach(VAR_38)
VAR_38 = [VAR_38]
VAR_45 = cipher_type or self.settings.cipher_type
VAR_46 = VAR_46 if VAR_46 is not None else self.settings.sign
VAR_47 = sign_passphrase or self.settings.sign_passphrase
VAR_48 = VAR_48 if VAR_48 is not None else self.settings.encrypt
if VAR_45 == 'gpg':
if self.settings.gpg_home:
import .os
VAR_418.environ['GNUPGHOME'] = self.settings.gpg_home
if not VAR_46 and not VAR_48:
self.error = "No VAR_46 and no VAR_48 is set but VAR_421 type VAR_35 gpg"
return False
from pyme import .core, errors
from pyme.constants.sig import .mode
if VAR_46:
import .string
core.check_version(None)
VAR_419 = VAR_80.replace(VAR_345.as_string(), '\n', '\VAR_391\n')
VAR_420 = core.Data(VAR_419)
VAR_240 = core.Data()
VAR_14 = core.Context()
VAR_14.set_armor(1)
VAR_14.signers_clear()
for sigkey in VAR_14.op_keylist_all(VAR_32, 1):
if sigkey.can_sign:
VAR_14.signers_add(sigkey)
if not VAR_14.signers_enum(0):
self.error = 'No VAR_199 for signing [%VAR_278]' % VAR_32
return False
VAR_14.set_passphrase_cb(lambda VAR_30, y, z: VAR_47)
try:
VAR_14.op_sign(VAR_420, VAR_240, VAR_118.DETACH)
VAR_240.seek(0, 0)
VAR_81 = MIMEMultipart('signed',
boundary=None,
_subparts=None,
**dict(micalg="pgp-sha1",
protocol="application/pgp-signature"))
payload.attach(VAR_345)
VAR_446 = MIMEBase("application", 'pgp-signature')
VAR_446.set_payload(VAR_240.read())
VAR_81.attach(VAR_446)
VAR_345 = VAR_81
except errors.GPGMEError as ex:
self.error = "GPG VAR_394: %s" % ex.getstring()
return False
if VAR_48:
core.check_version(None)
VAR_420 = core.Data(VAR_345.as_string())
VAR_421 = core.Data()
VAR_14 = core.Context()
VAR_14.set_armor(1)
VAR_422 = []
VAR_423 = VAR_35[:]
if VAR_39:
VAR_423.extend(VAR_39)
if VAR_40:
VAR_423.extend(VAR_40)
for addr in VAR_423:
VAR_14.op_keylist_start(addr, 0)
VAR_391 = VAR_14.op_keylist_next()
if VAR_391 is None:
self.error = 'No VAR_199 for [%VAR_278]' % addr
return False
VAR_422.append(VAR_391)
try:
VAR_14.op_encrypt(VAR_422, 1, VAR_420, VAR_421)
cipher.seek(0, 0)
VAR_81 = MIMEMultipart('encrypted',
boundary=None,
_subparts=None,
**dict(protocol="application/pgp-encrypted"))
VAR_446 = MIMEBase("application", 'pgp-encrypted')
VAR_446.set_payload("Version: 1\VAR_391\n")
VAR_81.attach(VAR_446)
VAR_446 = MIMEBase("application", 'octet-stream')
VAR_446.set_payload(VAR_421.read())
VAR_81.attach(VAR_446)
except errors.GPGMEError as ex:
self.error = "GPG VAR_394: %s" % ex.getstring()
return False
elif VAR_45 == 'x509':
if not VAR_46 and not VAR_48:
self.error = "No VAR_46 and no VAR_48 is set but VAR_421 type VAR_35 x509"
return False
import .os
VAR_49 = x509_sign_keyfile or self.settings.x509_sign_keyfile
VAR_50 = x509_sign_chainfile or self.settings.x509_sign_chainfile
VAR_51 = x509_sign_certfile or self.settings.x509_sign_certfile or \
VAR_49 or self.settings.x509_sign_certfile
VAR_52 = x509_crypt_certfiles or self.settings.x509_crypt_certfiles
VAR_53 = x509_nocerts or\
self.settings.x509_nocerts
try:
from M2Crypto import BIO, SMIME, X509
except Exception as e:
self.error = "Can't load M2Crypto module"
return False
VAR_424 = BIO.MemoryBuffer(VAR_345.as_string())
VAR_278 = SMIME.SMIME()
if VAR_46:
try:
VAR_459 = BIO.openfile(VAR_49)\
if VAR_418.path.isfile(VAR_49)\
else BIO.MemoryBuffer(VAR_49)
VAR_460 = BIO.openfile(VAR_51)\
if VAR_418.path.isfile(VAR_51)\
else BIO.MemoryBuffer(VAR_51)
VAR_278.load_key_bio(VAR_459, VAR_460,
VAR_26=lambda VAR_30: VAR_47)
if VAR_50:
VAR_462 = X509.X509_Stack()
VAR_466 = X509.load_cert(VAR_50)\
if VAR_418.path.isfile(VAR_50)\
else X509.load_cert_string(VAR_50)
VAR_462.push(VAR_466)
VAR_278.set_x509_stack(VAR_462)
except Exception as e:
self.error = "Something went wrong on certificate / private VAR_199 loading: <%VAR_278>" % str(e)
return False
try:
if VAR_53:
VAR_467 = SMIME.PKCS7_NOCERTS
else:
VAR_467 = 0
if not VAR_48:
VAR_467 += SMIME.PKCS7_DETACHED
VAR_461 = VAR_278.sign(VAR_424, VAR_467=flags)
VAR_424 = BIO.MemoryBuffer(VAR_345.as_string(
)) # Recreate coz VAR_46() has consumed it.
except Exception as e:
self.error = "Something went wrong on signing: <%VAR_278> %s" % (
str(e), str(VAR_467))
return False
if VAR_48:
try:
VAR_462 = X509.X509_Stack()
if not isinstance(VAR_52, (list, tuple)):
VAR_52 = [x509_crypt_certfiles]
for crypt_certfile in VAR_52:
VAR_468 = X509.load_cert(crypt_certfile)\
if VAR_418.path.isfile(crypt_certfile)\
else X509.load_cert_string(crypt_certfile)
VAR_462.push(VAR_468)
VAR_278.set_x509_stack(VAR_462)
VAR_278.set_cipher(SMIME.Cipher('des_ede3_cbc'))
VAR_463 = BIO.MemoryBuffer()
if VAR_46:
VAR_278.write(VAR_463, VAR_461)
else:
VAR_463.write(VAR_345.as_string())
VAR_461 = VAR_278.encrypt(VAR_463)
except Exception as e:
self.error = "Something went wrong on encrypting: <%VAR_278>" % str(e)
return False
VAR_425 = BIO.MemoryBuffer()
if VAR_48:
VAR_278.write(VAR_425, VAR_461)
else:
if VAR_46:
VAR_278.write(VAR_425, VAR_461, VAR_424, SMIME.PKCS7_DETACHED)
else:
VAR_425.write('\VAR_391\n')
VAR_425.write(VAR_345.as_string())
VAR_425.close()
VAR_426 = str(VAR_425.read())
VAR_81 = message_from_string(VAR_426)
else:
VAR_81 = VAR_345
if VAR_44:
VAR_81['From'] = FUNC_152(to_unicode(VAR_44, VAR_42))
else:
VAR_81['From'] = FUNC_152(to_unicode(VAR_32, VAR_42))
VAR_223 = VAR_35[:]
if VAR_35:
VAR_81['To'] = FUNC_152(to_unicode(', '.join(VAR_35), VAR_42))
if VAR_41:
VAR_81['Reply-To'] = FUNC_152(to_unicode(', '.join(VAR_41), VAR_42))
if VAR_39:
VAR_81['Cc'] = FUNC_152(to_unicode(', '.join(VAR_39), VAR_42))
VAR_35.extend(VAR_39)
if VAR_40:
VAR_35.extend(VAR_40)
VAR_81['Subject'] = FUNC_152(to_unicode(VAR_36, VAR_42))
VAR_81['Date'] = email.utils.formatdate()
for VAR_346, v in iteritems(VAR_18):
VAR_81[VAR_346] = FUNC_152(to_unicode(v, VAR_42))
VAR_224 = {}
try:
if self.settings.server == 'logging':
VAR_427 = 'email not sent\VAR_181%VAR_278\nFrom: %VAR_278\nTo: %VAR_278\nSubject: %VAR_278\VAR_181\VAR_181%VAR_278\VAR_181%VAR_278\n' % \
('-' * 40, VAR_32, ', '.join(VAR_35), VAR_36, VAR_213 or VAR_303, '-' * 40)
VAR_2.warning(VAR_427)
elif self.settings.server.startswith('logging:'):
VAR_427 = 'email not sent\VAR_181%VAR_278\nFrom: %VAR_278\nTo: %VAR_278\nSubject: %VAR_278\VAR_181\VAR_181%VAR_278\VAR_181%VAR_278\n' % \
('-' * 40, VAR_32, ', '.join(VAR_35), VAR_36, VAR_213 or VAR_303, '-' * 40)
open(self.settings.server[8:], 'a').write(VAR_427)
elif self.settings.server == 'gae':
VAR_464 = dict()
if VAR_39:
VAR_464['cc'] = VAR_39
if VAR_40:
VAR_464['bcc'] = VAR_40
if VAR_41:
VAR_464['reply_to'] = VAR_41
from google.appengine.api import .mail
VAR_38 = VAR_38 and [mail.Attachment(
VAR_9.my_filename,
VAR_9.my_payload,
VAR_220='<VAR_417-%VAR_278>' % VAR_346
) for VAR_346, VAR_9 in enumerate(VAR_38) if not VAR_43]
if VAR_38:
VAR_224 = mail.send_mail(
VAR_32=sender, VAR_35=VAR_223,
VAR_36=to_unicode(VAR_36, VAR_42),
VAR_82=to_unicode(VAR_213 or '', VAR_42),
VAR_303=html,
VAR_38=attachments, **VAR_464)
elif VAR_303 and (not VAR_43):
VAR_224 = mail.send_mail(
VAR_32=sender, VAR_35=VAR_223,
VAR_36=to_unicode(VAR_36, VAR_42), VAR_82=to_unicode(VAR_213 or '', VAR_42), VAR_303=html, **VAR_464)
else:
VAR_224 = mail.send_mail(
VAR_32=sender, VAR_35=VAR_223,
VAR_36=to_unicode(VAR_36, VAR_42), VAR_82=to_unicode(VAR_213 or '', VAR_42), **VAR_464)
elif self.settings.server == 'aws':
import .boto3
from botocore.exceptions import ClientError
VAR_469 = boto3.client('ses')
try:
VAR_43 = {'Data': VAR_81.as_string()}
VAR_244 = VAR_469.send_raw_email(RawMessage=VAR_43,
Source=VAR_32,
Destinations=VAR_35)
return True
except ClientError as e:
return False
else:
VAR_470 = self.settings.server.split(':')
VAR_351 = dict(timeout=self.settings.timeout)
VAR_471 = smtplib.SMTP_SSL if self.settings.ssl else smtplib.SMTP
VAR_31 = VAR_471(*VAR_470, **VAR_351)
try:
if self.settings.tls and not self.settings.ssl:
VAR_31.ehlo(self.settings.hostname)
VAR_31.starttls()
VAR_31.ehlo(self.settings.hostname)
if self.settings.login:
VAR_31.login(*self.settings.login.split(':', 1))
VAR_224 = VAR_31.sendmail(VAR_32, VAR_35, VAR_81.as_string())
finally:
try:
VAR_31.quit()
except smtplib.SMTPException:
try:
VAR_31.close()
except Exception:
pass
except Exception as e:
VAR_2.warning('Mail.send failure:%s' % e)
self.result = VAR_224
self.error = e
return False
self.result = VAR_224
self.error = None
return True
class CLASS_1(DIV):
VAR_54 = 'https://www.google.com/recaptcha/api.js'
VAR_55 = 'https://www.google.com/recaptcha/api/siteverify'
def __init__(self,
VAR_56=None,
VAR_57='',
VAR_58='',
VAR_59='invalid',
VAR_60='Verify:',
VAR_61=None,
VAR_62='',
):
VAR_56 = VAR_56 or VAR_263.request
self.request_vars = VAR_56 and VAR_56.vars or VAR_263.request.vars
self.remote_addr = VAR_56.env.remote_addr
self.public_key = VAR_57
self.private_key = VAR_58
self.errors = Storage()
self.error_message = VAR_59
self.components = []
self.attributes = {}
self.label = VAR_60
self.options = VAR_61 or {}
self.comment = VAR_62
def FUNC_14(self):
VAR_225 = self.request_vars.pop('g-recaptcha-response', None)
VAR_226 = self.remote_addr
if not VAR_225:
self.errors['captcha'] = self.error_message
return False
VAR_227 = urlencode({
'secret': self.private_key,
'remoteip': VAR_226,
'response': VAR_225,
}).encode('utf-8')
VAR_56 = urllib2.Request(
VAR_12=self.VERIFY_SERVER,
VAR_17=to_bytes(VAR_227),
VAR_18={'Content-type': 'application/VAR_30-www-VAR_7-urlencoded',
'User-agent': 'reCAPTCHA Python'})
VAR_228 = urlopen(VAR_56)
VAR_229 = VAR_228.read()
VAR_228.close()
try:
VAR_347 = FUNC_98.loads(to_native(VAR_229))
except:
self.errors['captcha'] = self.error_message
return False
if VAR_347.get('success', False):
self.request_vars.captcha = ''
return True
else:
self.errors['captcha'] = self.error_message
return False
def VAR_404(self):
VAR_230 = self.API_URI
VAR_231 = self.options.pop('hl', None)
if VAR_231:
VAR_230 = self.API_URI + '?VAR_231=%s' % VAR_231
VAR_57 = self.public_key
self.options['sitekey'] = VAR_57
VAR_232 = DIV(
SCRIPT(_src=VAR_230, _async='', _defer=''),
DIV(_class="g-recaptcha", VAR_17=self.options),
TAG.noscript(XML("""
<div VAR_15="width: 302px; height: 352px;">
<div VAR_15="width: 302px; height: 352px; VAR_4: relative;">
<div VAR_15="width: 302px; height: 352px; VAR_4: absolute;">
<iframe src="https://www.google.com/recaptcha/api/fallback?VAR_346=%(VAR_57)s"
frameborder="0" scrolling="no"
VAR_15="width: 302px; height:352px; border-VAR_15: none;">
</iframe>
</div>
<div VAR_15="width: 250px; height: 80px; VAR_4: absolute; border-VAR_15: none;
bottom: 21px; left: 25px; margin: 0px; padding: 0px; right: 25px;">
<textarea VAR_215="g-recaptcha-response" VAR_148="g-recaptcha-response"
class="g-recaptcha-response"
VAR_15="width: 250px; height: 80px; border: 1px solid #c1c1c1;
margin: 0px; padding: 0px; resize: none;" VAR_179="">
</textarea>
</div>
</div>
</div>""" % dict(VAR_57=public_key))
)
)
if not self.errors.captcha:
return XML(VAR_232).xml()
else:
VAR_232.append(DIV(self.errors['captcha'], _class='error'))
return XML(VAR_232).xml()
def FUNC_5(VAR_7, VAR_9, VAR_13, VAR_14, VAR_15, VAR_16, VAR_4=-1):
if VAR_15 == "divs":
VAR_7[0].insert(VAR_4, DIV(DIV(LABEL(VAR_9), _class='w2p_fl'),
DIV(VAR_13, _class='w2p_fw'),
DIV(VAR_14, _class='w2p_fc'),
VAR_16=_id))
elif VAR_15 == "table2cols":
VAR_7[0].insert(VAR_4, TR(TD(LABEL(VAR_9), _class='w2p_fl'),
TD(VAR_14, _class='w2p_fc')))
VAR_7[0].insert(VAR_4 + 1, TR(TD(VAR_13, _class='w2p_fw'),
_colspan=2, VAR_16=_id))
elif VAR_15 == "ul":
VAR_7[0].insert(VAR_4, LI(DIV(LABEL(VAR_9), _class='w2p_fl'),
DIV(VAR_13, _class='w2p_fw'),
DIV(VAR_14, _class='w2p_fc'),
VAR_16=_id))
elif VAR_15 == "bootstrap":
VAR_7[0].insert(VAR_4, DIV(LABEL(VAR_9, _class='control-label'),
DIV(VAR_13, SPAN(VAR_14, _class='inline-help'),
_class='controls'),
_class='control-group', VAR_16=_id))
elif VAR_15 in ("bootstrap3_inline", "bootstrap4_inline"):
VAR_7[0].insert(VAR_4, DIV(LABEL(VAR_9, _class='control-VAR_60 col-sm-3'),
DIV(VAR_13, SPAN(VAR_14, _class='help-block'),
_class='col-sm-9'),
_class='form-VAR_405 row', VAR_16=_id))
elif VAR_15 in ("bootstrap3_stacked", "bootstrap4_stacked"):
VAR_7[0].insert(VAR_4, DIV(LABEL(VAR_9, _class='control-label'),
VAR_13, SPAN(VAR_14, _class='help-block'),
_class='form-VAR_405 row', VAR_16=_id))
else:
VAR_7[0].insert(VAR_4, TR(TD(LABEL(VAR_9), _class='w2p_fl'),
TD(VAR_13, _class='w2p_fw'),
TD(VAR_14, _class='w2p_fc'), VAR_16=_id))
class CLASS_2(object):
    # JWT (JSON Web Token) handler: issues, verifies and refreshes
    # HMAC-signed tokens (HS256/HS384/HS512) and exposes a decorator that
    # loads a request's token payload back into the auth machinery.
    #
    # NOTE(review): the method *definitions* below carry mangled names
    # (FUNC_16..FUNC_27) while their call sites use the original web2py
    # names (self.jwt_b64e, self.load_token, self.verify_signature, ...).
    # These must be reconciled before this class can actually run.
    def __init__(self,
                 VAR_63,
                 VAR_64,
                 VAR_65='HS256',
                 VAR_66=True,
                 VAR_67=30,
                 VAR_68=60 * 5,
                 VAR_69=True,
                 VAR_70=60 * 60,
                 VAR_71='Bearer',
                 VAR_72=None,
                 VAR_73='username',
                 VAR_74='password',
                 VAR_75='Login required',
                 VAR_76=None,
                 VAR_77=None,
                 VAR_78=None,
                 VAR_79=4 * 1024,
                 ):
        # VAR_63: auth instance; VAR_64: signing secret; VAR_65: HMAC alg;
        # VAR_66/VAR_67: expiration check + leeway seconds; VAR_68: token
        # lifetime; VAR_69/VAR_70: refresh enable + window; VAR_71: value
        # expected before the token in the Authorization header.
        self.secret_key = VAR_64
        self.auth = VAR_63
        self.algorithm = VAR_65
        if self.algorithm not in ('HS256', 'HS384', 'HS512'):
            raise NotImplementedError('Algorithm %VAR_278 not allowed' % VAR_65)
        self.verify_expiration = VAR_66
        self.leeway = VAR_67
        self.expiration = VAR_68
        self.allow_refresh = VAR_69
        self.refresh_expiration_delta = VAR_70
        self.header_prefix = VAR_71
        self.jwt_add_header = VAR_72 or {}
        # Pre-serialize and cache the base64url JWT header; it never
        # changes for this instance.
        VAR_233 = {'alg': self.algorithm, 'typ': 'JWT'}
        for VAR_346, v in iteritems(self.jwt_add_header):
            VAR_233[VAR_346] = v
        self.cached_b64h = self.jwt_b64e(FUNC_98.dumps(VAR_233))
        VAR_234 = {
            'HS256': hashlib.sha256,
            'HS384': hashlib.sha384,
            'HS512': hashlib.sha512
        }
        self.digestmod = VAR_234[VAR_65]
        self.user_param = VAR_73
        self.pass_param = VAR_74
        self.realm = VAR_75
        self.salt = VAR_76
        self.additional_payload = VAR_77
        self.before_authorization = VAR_78
        self.max_header_length = VAR_79
        # Last token seen in a request (set by the request parser below).
        self.recvd_token = None
    # base64url-encode without trailing '=' padding (called as jwt_b64e).
    @staticmethod
    def FUNC_16(VAR_80):
        VAR_80 = to_bytes(VAR_80)
        return base64.urlsafe_b64encode(VAR_80).strip(b'=')
    # base64url-decode, restoring the stripped padding (called as jwt_b64d).
    @staticmethod
    def FUNC_17(VAR_80):
        VAR_80 = to_bytes(VAR_80, 'ascii', 'ignore')
        return base64.urlsafe_b64decode(VAR_80 + b'=' * (-len(VAR_80) % 4))
    def FUNC_18(self, VAR_81):
        # Build "<b64 header>.<b64 payload>.<b64 hmac>" for VAR_81.
        VAR_84 = to_bytes(self.secret_key)
        if self.salt:
            if callable(self.salt):
                VAR_84 = "%VAR_278$%s" % (VAR_84, self.salt(VAR_81))
            else:
                VAR_84 = "%VAR_278$%s" % (VAR_84, self.salt)
            # NOTE(review): 'secret' is not defined in this scope; this
            # line would raise NameError — presumably it should re-encode
            # VAR_84 itself.
            if isinstance(VAR_84, unicodeT):
                VAR_84 = secret.encode('ascii', 'ignore')
        VAR_235 = self.cached_b64h
        VAR_236 = self.jwt_b64e(serializers.json(VAR_81))
        VAR_237 = VAR_235 + b'.' + VAR_236
        VAR_238 = hmac.new(VAR_199=VAR_84, msg=VAR_237, digestmod=self.digestmod)
        VAR_239 = self.jwt_b64e(VAR_238.digest())
        return to_native(VAR_237 + b'.' + VAR_239)
    def FUNC_19(self, VAR_82, VAR_83, VAR_84):
        # Recompute the HMAC of VAR_82 and compare with signature VAR_83
        # via compare() (presumably constant-time — defined elsewhere).
        VAR_238 = hmac.new(VAR_199=VAR_84, msg=VAR_82, digestmod=self.digestmod)
        return compare(self.jwt_b64e(VAR_238.digest()), VAR_83)
    def FUNC_20(self, VAR_85):
        # Parse + verify a token string; returns the payload dict or
        # raises HTTP(400) on any header/signature/expiry failure.
        VAR_85 = to_bytes(VAR_85, 'utf-8', 'strict')
        VAR_82, VAR_240 = VAR_85.rsplit(b'.', 1)
        VAR_235, VAR_241 = VAR_82.split(b'.', 1)
        if VAR_235 != self.cached_b64h:
            raise HTTP(400, 'Invalid JWT Header')
        VAR_84 = self.secret_key
        VAR_89 = serializers.loads_json(to_native(self.jwt_b64d(VAR_241)))
        if self.salt:
            if callable(self.salt):
                VAR_84 = "%VAR_278$%s" % (VAR_84, self.salt(VAR_89))
            else:
                VAR_84 = "%VAR_278$%s" % (VAR_84, self.salt)
        VAR_84 = to_bytes(VAR_84, 'ascii', 'ignore')
        if not self.verify_signature(VAR_82, VAR_240, VAR_84):
            raise HTTP(400, 'Token VAR_83 is invalid')
        if self.verify_expiration:
            VAR_193 = time.mktime(datetime.datetime.utcnow().timetuple())
            if VAR_89['exp'] + self.leeway < VAR_193:
                raise HTTP(400, 'Token is expired')
        # Optional user hook, run before the payload is trusted.
        if callable(self.before_authorization):
            self.before_authorization(VAR_89)
        return VAR_89
    def FUNC_21(self, VAR_86):
        # Flatten the auth session into a JWT payload dict.
        # NOTE(review): keyword names VAR_103/VAR_141 become dict keys
        # here, but the loader (FUNC_25) reads 'hmac_key' and 'user' —
        # these keys look mangled and would not round-trip; confirm.
        VAR_193 = time.mktime(datetime.datetime.utcnow().timetuple())
        VAR_242 = VAR_193 + self.expiration
        VAR_81 = dict(
            VAR_103=VAR_86['hmac_key'],
            user_groups=VAR_86['user_groups'],
            VAR_141=VAR_86['user'].as_dict(),
            iat=VAR_193,
            exp=VAR_242
        )
        return VAR_81
    def FUNC_22(self, VAR_87):
        # Extend a still-refreshable payload: new exp/iat and a fresh key.
        VAR_193 = time.mktime(datetime.datetime.utcnow().timetuple())
        if self.verify_expiration:
            VAR_348 = VAR_87['exp']
            if VAR_348 + self.leeway < VAR_193:
                raise HTTP(400, 'Token already expired')
        VAR_243 = VAR_87.get('orig_iat') or VAR_87['iat']
        if VAR_243 + self.refresh_expiration_delta < VAR_193:
            raise HTTP(400, 'Token issued too long ago')
        VAR_242 = VAR_193 + self.expiration
        # NOTE(review): 'orig_iat' is undefined here (NameError) and the
        # keyword name VAR_243 does not match the 'orig_iat' key read
        # above; presumably this should be orig_iat=VAR_243.
        VAR_87.update(
            VAR_243=orig_iat,
            iat=VAR_193,
            exp=VAR_242,
            VAR_103=web2py_uuid()
        )
        self.alter_payload(VAR_87)
        return VAR_87
    def FUNC_23(self, VAR_81):
        # Merge/transform with user-supplied additional payload.
        if self.additional_payload:
            if callable(self.additional_payload):
                VAR_81 = self.additional_payload(VAR_81)
            elif isinstance(self.additional_payload, dict):
                VAR_81.update(self.additional_payload)
        return VAR_81
    def FUNC_24(self, VAR_88='_token'):
        # Token endpoint: refresh an existing token, or log in with the
        # configured user/password request vars and issue a new one.
        # Returns a JSON body {'token': ...}; raises HTTP(401) otherwise.
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        VAR_139 = VAR_263.session
        # Sessions are not used with JWT; drop the cookie.
        VAR_139.forget(VAR_244)
        VAR_245 = None
        VAR_246 = None
        VAR_85 = None
        try:
            VAR_85 = self.recvd_token or self.get_jwt_token_from_request(VAR_88)
        except HTTP:
            pass
        if VAR_85:
            if not self.allow_refresh:
                raise HTTP(403, 'Refreshing VAR_85 is not allowed')
            VAR_89 = self.load_token(VAR_85)
            VAR_349 = self.refresh_token(VAR_89)
            VAR_246 = {'token': self.generate_token(VAR_349)}
        elif self.user_param in VAR_56.vars and self.pass_param in VAR_56.vars:
            VAR_124 = VAR_56.vars[self.user_param]
            VAR_132 = VAR_56.vars[self.pass_param]
            VAR_245 = self.auth.login_bare(VAR_124, VAR_132)
        else:
            VAR_245 = self.auth.user
            self.auth.login_user(VAR_245)
        if VAR_245:
            VAR_81 = self.serialize_auth_session(VAR_139.auth)
            self.alter_payload(VAR_81)
            VAR_246 = {'token': self.generate_token(VAR_81)}
        elif VAR_246 is None:
            raise HTTP(401,
                       'Not Authorized - need VAR_35 be logged in, VAR_35 pass VAR_9 VAR_85 '
                       'for refresh or VAR_124 and VAR_132 for login',
                       **{'WWW-Authenticate': 'JWT VAR_75="%s"' % self.realm})
        VAR_244.headers['Content-Type'] = 'application/json'
        return serializers.json(VAR_246)
    def FUNC_25(self, VAR_89):
        # Push the token payload into the auth instance (called as
        # inject_token). Reads keys 'user', 'user_groups', 'hmac_key'.
        self.auth.user = Storage(VAR_89['user'])
        self.auth.user_groups = VAR_89['user_groups']
        self.auth.hmac_key = VAR_89['hmac_key']
    def FUNC_26(self, VAR_88='_token'):
        # Extract the raw token from the Authorization header (preferred)
        # or from request var VAR_88; raises HTTP(400) when absent or
        # malformed. Also caches it on self.recvd_token.
        VAR_85 = None
        VAR_247 = VAR_263.request.env.http_authorization
        if VAR_247:
            VAR_350 = VAR_247.split()
            if VAR_350[0].lower() != self.header_prefix.lower():
                raise HTTP(400, 'Invalid JWT header')
            elif len(VAR_350) == 1:
                raise HTTP(400, 'Invalid JWT header, missing token')
            elif len(VAR_350) > 2:
                raise HTTP(400, 'Invalid JWT header, VAR_85 contains spaces')
            VAR_85 = VAR_350[1]
        else:
            VAR_85 = VAR_263.request.vars.get(VAR_88)
            if VAR_85 is None:
                raise HTTP(400, 'JWT header not found and JWT parameter {} missing in request'.format(VAR_88))
        self.recvd_token = VAR_85
        return VAR_85
    def FUNC_27(self, VAR_90=None, VAR_91=True, VAR_66=True, VAR_88='_token'):
        # Decorator factory: validate + inject a JWT before the action.
        # VAR_91: whether a missing token is fatal; VAR_66: whether to
        # check expiry for this action.
        def FUNC_153(VAR_114):
            def VAR_10(*VAR_11, **VAR_351):
                try:
                    # NOTE(review): 'token_param' is undefined here —
                    # presumably this should pass VAR_88.
                    VAR_85 = self.get_jwt_token_from_request(VAR_88=token_param)
                except HTTP as e:
                    if VAR_91:
                        raise e
                    VAR_85 = None
                if VAR_85 and len(VAR_85) < self.max_header_length:
                    # Temporarily override expiry checking for this call.
                    VAR_447 = self.verify_expiration
                    try:
                        self.verify_expiration = VAR_66
                        VAR_89 = self.load_token(VAR_85)
                    except ValueError:
                        raise HTTP(400, 'Invalid JWT header, wrong VAR_85 format')
                    finally:
                        self.verify_expiration = VAR_447
                    self.inject_token(VAR_89)
                return VAR_114(*VAR_11, **VAR_351)
            VAR_10.__doc__ = VAR_114.__doc__
            VAR_10.__name__ = VAR_114.__name__
            VAR_10.__dict__.update(VAR_114.__dict__)
            return VAR_10
        return FUNC_153
class CLASS_3(AuthAPI):
VAR_92 = dict(AuthAPI.default_settings,
allow_basic_login=False,
allow_basic_login_only=False,
allow_delete_accounts=False,
alternate_requires_registration=False,
auth_manager_role=None,
auth_two_factor_enabled=False,
auth_two_factor_tries_left=3,
bulk_register_enabled=False,
VAR_232=None,
cas_maps=None,
client_side=True,
VAR_273=None,
hideerror=False,
label_separator=None,
login_after_password_change=True,
login_after_registration=False,
login_captcha=None,
login_specify_error=False,
long_expiration=3600 * 30 * 24, # one month
VAR_102=None,
manager_actions={},
VAR_270=False,
on_failed_authentication=lambda VAR_30: redirect(VAR_30),
pre_registration_div=None,
prevent_open_redirect_attacks=True,
prevent_password_reset_attacks=True,
profile_fields=None,
register_captcha=None,
register_fields=None,
register_verify_password=True,
remember_me_form=True,
reset_password_requires_verification=False,
retrieve_password_captcha=None,
retrieve_username_captcha=None,
showid=False,
FUNC_34=None,
table_cas_name='auth_cas',
table_event=None,
VAR_381=None,
VAR_282=None,
table_permission=None,
table_token_name='auth_token',
VAR_254=None,
two_factor_authentication_group=None,
VAR_129=['email'],
VAR_287=Settings()
)
VAR_93 = dict(AuthAPI.default_messages,
access_denied='Insufficient privileges',
bulk_invite_body='You have been invited VAR_35 join %(site)VAR_278, click %(VAR_276)VAR_278 VAR_35 complete '
'the process',
bulk_invite_subject='Invitation VAR_35 join %(site)s',
delete_label='Check VAR_35 delete',
email_sent='Email sent',
email_verified='Email verified',
function_disabled='Function disabled',
impersonate_log='User %(VAR_215)VAR_278 is impersonating %(other_id)s',
invalid_reset_password='Invalid reset password',
invalid_two_factor_code='Incorrect VAR_309. {0} more attempt(VAR_278) remaining.',
is_empty="Cannot be empty",
label_client_ip='Client IP',
label_description='Description',
label_email='E-mail',
label_first_name='First name',
label_group_id='Group ID',
label_last_name='Last name',
label_name='Name',
label_origin='Origin',
label_password='Password',
label_record_id='Record ID',
label_registration_id='Registration identifier',
label_registration_key='Registration key',
label_remember_me="Remember me (for 30 days)",
label_reset_password_key='Reset Password key',
label_role='Role',
label_table_name='Object or VAR_153 name',
label_time_stamp='Timestamp',
label_two_factor='Authentication code',
label_user_id='User ID',
label_username='Username',
login_button='Log In',
login_disabled='Login disabled by administrator',
new_password='New password',
new_password_sent='A new VAR_132 was emailed VAR_35 you',
old_password='Old password',
password_change_button='Change password',
password_reset_button='Request reset password',
profile_save_button='Apply changes',
register_button='Sign Up',
FUNC_61='Click on the VAR_276 %(VAR_276)VAR_278 VAR_35 reset your password',
reset_password_log='User %(VAR_215)VAR_278 Password reset',
reset_password_subject='Password reset',
FUNC_64='Your VAR_132 is: %(VAR_132)s',
retrieve_password_log='User %(VAR_215)VAR_278 Password retrieved',
retrieve_password_subject='Password retrieve',
retrieve_two_factor_code='Your temporary VAR_33 VAR_309 is {0}',
retrieve_two_factor_code_subject='Two-step Login Authentication Code',
FUNC_54='Your VAR_124 is: %(VAR_124)s',
retrieve_username_log='User %(VAR_215)VAR_278 Username retrieved',
retrieve_username_subject='Username retrieve',
submit_button='Submit',
two_factor_comment='This VAR_309 was emailed VAR_35 you and is VAR_91 for VAR_33.',
unable_send_email='Unable VAR_35 FUNC_13 email',
username_sent='Your VAR_124 was emailed VAR_35 you',
FUNC_53='Welcome %(VAR_124)VAR_278! Click on the VAR_276 %(VAR_276)VAR_278 VAR_35 FUNC_161 your email',
verify_email_log='User %(VAR_215)VAR_278 Verification email sent',
verify_email_subject='Email verification',
verify_password='Verify Password',
verify_password_comment='please input your VAR_132 again'
)
"""
Class for authentication, authorization, VAR_146 based access control.
Includes:
- registration and FUNC_66
- VAR_33 and FUNC_50
- VAR_124 and VAR_132 retrieval
- event logging
- VAR_146 creation and assignment
- VAR_141 defined VAR_405/VAR_146 based VAR_285
Args:
VAR_100: is there for legacy but unused (awful)
VAR_101: has VAR_35 be the database where VAR_35 VAR_337 VAR_119 for authentication
VAR_102: `CLASS_0(...)` or None (no VAR_102) or True (make VAR_9 VAR_102)
VAR_103: can be VAR_9 VAR_103 or VAR_103=CLASS_3.get_or_create_key()
VAR_104: (where is the VAR_141 VAR_114?)
VAR_106: (delegate authentication VAR_35 the URL, CAS2)
Authentication Example::
from gluon.contrib.utils import *
mail=CLASS_0()
mail.settings.server='smtp.gmail.com:587'
mail.settings.sender='you@somewhere.com'
mail.settings.login='username:password'
VAR_63=CLASS_3(VAR_101)
VAR_63.settings.mailer=mail
VAR_63.define_tables()
def authentication():
return dict(VAR_7=VAR_63())
Exposes:
- `http://.../{application}/{VAR_104}/authentication/VAR_33`
- `http://.../{application}/{VAR_104}/authentication/FUNC_50`
- `http://.../{application}/{VAR_104}/authentication/FUNC_52`
- `http://.../{application}/{VAR_104}/authentication/FUNC_53`
- `http://.../{application}/{VAR_104}/authentication/FUNC_54`
- `http://.../{application}/{VAR_104}/authentication/FUNC_64`
- `http://.../{application}/{VAR_104}/authentication/FUNC_61`
- `http://.../{application}/{VAR_104}/authentication/FUNC_66`
- `http://.../{application}/{VAR_104}/authentication/FUNC_65`
On registration VAR_9 VAR_405 with VAR_146=new_user.id is created
and VAR_141 is given VAR_284 of this VAR_405.
You can VAR_337 VAR_9 VAR_405 with::
VAR_147=VAR_63.add_group('Manager', 'can access the manage action')
VAR_63.add_permission(VAR_147, 'access VAR_35 manage')
Here "access VAR_35 manage" is just VAR_9 VAR_141 defined VAR_80.
You can give access VAR_35 VAR_9 VAR_141::
VAR_63.add_membership(VAR_147, VAR_143)
If VAR_141 VAR_215 is omitted, the logged in VAR_141 is assumed
Then you can decorate any VAR_114::
@VAR_63.requires_permission('access VAR_35 manage')
def manage():
return dict()
You can restrict VAR_9 VAR_285 VAR_35 VAR_9 specific VAR_153::
VAR_63.add_permission(VAR_147, 'edit', VAR_101.sometable)
@VAR_63.requires_permission('edit', VAR_101.sometable)
Or VAR_35 VAR_9 specific VAR_168::
VAR_63.add_permission(VAR_147, 'edit', VAR_101.sometable, 45)
@VAR_63.requires_permission('edit', VAR_101.sometable, 45)
If authorization is not granted calls::
VAR_63.settings.on_failed_authorization
Other VAR_61::
VAR_63.settings.mailer=None
VAR_63.settings.expiration=3600 # seconds
...
...
"""
@staticmethod
def FUNC_28(VAR_94=None, VAR_95='sha512'):
VAR_56 = VAR_263.request
if not VAR_94:
VAR_94 = VAR_418.path.join(VAR_56.folder, 'private', 'auth.key')
if VAR_418.path.exists(VAR_94):
VAR_199 = open(VAR_94, 'r').read().strip()
else:
VAR_199 = VAR_95 + ':' + web2py_uuid()
open(VAR_94, 'w').write(VAR_199)
return VAR_199
    def VAR_12(self, VAR_10=None, VAR_11=None, VAR_96=None, VAR_97=False):
        # Build a URL to an action of the auth controller.
        # NOTE(review): the keyword *values* below (FUNC_118, args, vars,
        # scheme, host) are not defined in this scope and would raise
        # NameError; they look like renaming残留 of the local parameters
        # VAR_10/VAR_11/VAR_96/VAR_97/VAR_98 — confirm before running.
        if VAR_11 is None:
            VAR_11 = []
        if VAR_96 is None:
            VAR_96 = {}
        VAR_98 = VAR_97 and self.settings.host
        return URL(VAR_14=self.settings.controller,
                   VAR_10=FUNC_118, VAR_11=args, VAR_96=vars, VAR_97=scheme, VAR_98=host)
    def FUNC_30(self):
        # URL that reproduces the current request (same args + GET vars).
        return URL(VAR_11=VAR_263.request.args, VAR_96=VAR_263.request.get_vars)
def FUNC_31(self, VAR_98, VAR_99=None):
if VAR_98:
if VAR_99:
for VAR_305 in VAR_99:
if fnmatch.fnmatch(VAR_98, VAR_305):
break
else:
raise HTTP(403, "Invalid Hostname")
elif VAR_99:
VAR_98 = VAR_99[0]
else:
VAR_98 = 'localhost'
return VAR_98
    def __init__(self, VAR_100=None, VAR_101=None, VAR_102=True,
                 VAR_103=None, VAR_104='default', VAR_105='user',
                 VAR_106=None, VAR_83=True, VAR_107=False,
                 VAR_108=True, VAR_109=None,
                 VAR_110=None, VAR_111=None, VAR_99=None):
        # Wires the auth instance to the DAL (VAR_101), restores any
        # logged-in user from the session (expiring stale ones), builds
        # the default settings/messages, and optionally attaches a JWT
        # handler (VAR_111 = CLASS_2 kwargs).
        # Legacy: first positional arg may be the DAL itself.
        if not VAR_101 and VAR_100 and isinstance(VAR_100, DAL):
            VAR_101 = VAR_100
        self.db = VAR_101
        self.environment = VAR_263
        self.csrf_prevention = VAR_108
        VAR_56 = VAR_263.request
        VAR_139 = VAR_263.session
        VAR_63 = VAR_139.auth
        self.user_groups = VAR_63 and VAR_63.user_groups or {}
        if VAR_107:
            VAR_56.requires_https()
        VAR_193 = VAR_56.now
        if VAR_63:
            # Session carries an auth record: keep it if still fresh,
            # otherwise wipe it and renew the session id.
            VAR_352 = datetime.timedelta(days=0, seconds=VAR_63.expiration)
            if VAR_63.last_visit and VAR_63.last_visit + VAR_352 > VAR_193:
                self.user = VAR_63.user
                # Throttle session writes: only refresh last_visit after
                # a tenth of the expiration window has passed.
                if (VAR_193 - VAR_63.last_visit).seconds > (VAR_63.expiration // 10):
                    VAR_63.last_visit = VAR_193
            else:
                self.user = None
                if VAR_139.auth:
                    del VAR_139.auth
                VAR_139.renew(clear_session=True)
        else:
            self.user = None
            if VAR_139.auth:
                del VAR_139.auth
        # NOTE(review): 'url_index' is undefined here (NameError);
        # presumably it should read VAR_110.  Several keyword *values*
        # in the settings.update(...) call below (cas_provider,
        # controller, function, mailer, hmac_key, host) are likewise
        # undefined残留 of the renamed parameters — confirm.
        VAR_110 = url_index or URL(VAR_104, 'index')
        VAR_248 = URL(VAR_104, VAR_105, VAR_11='login',
                      extension=VAR_109)
        VAR_222 = self.settings = Settings()
        VAR_222.update(CLASS_3.default_settings)
        VAR_98 = self.select_host(VAR_56.env.http_host, VAR_99)
        VAR_222.update(
            cas_domains=[VAR_98],
            VAR_125=False,
            VAR_106=cas_provider,
            cas_actions=dict(VAR_33='login',
                             VAR_292='validate',
                             servicevalidate='serviceValidate',
                             proxyvalidate='proxyValidate',
                             FUNC_50='logout'),
            cas_create_user=True,
            VAR_360={},
            actions_disabled=[],
            VAR_104=controller,
            VAR_105=function,
            login_url=VAR_248,
            logged_url=URL(VAR_104, VAR_105, VAR_11='profile'),
            download_url=URL(VAR_104, 'download'),
            VAR_102=(mailer is True) and CLASS_0() or VAR_102,
            on_failed_authorization=URL(VAR_104, VAR_105, VAR_11='not_authorized'),
            login_next=VAR_110,
            login_onvalidation=[],
            login_onaccept=[],
            login_onfail=[],
            login_methods=[self],
            login_form=self,
            VAR_353=VAR_110,
            logout_onlogout=None,
            register_next=VAR_110,
            register_onvalidation=[],
            register_onaccept=[],
            verify_email_next=VAR_248,
            verify_email_onaccept=[],
            profile_next=VAR_110,
            profile_onvalidation=[],
            profile_onaccept=[],
            retrieve_username_next=VAR_110,
            retrieve_password_next=VAR_110,
            request_reset_password_next=VAR_248,
            reset_password_next=VAR_110,
            change_password_next=VAR_110,
            change_password_onvalidation=[],
            change_password_onaccept=[],
            retrieve_password_onvalidation=[],
            request_reset_password_onvalidation=[],
            request_reset_password_onaccept=[],
            reset_password_onvalidation=[],
            reset_password_onaccept=[],
            VAR_103=hmac_key,
            VAR_273=VAR_263.response.formstyle,
            label_separator=VAR_263.response.form_label_separator,
            VAR_430=[],
            two_factor_onvalidation=[],
            VAR_98=host,
        )
        VAR_222.lock_keys = True
        VAR_249 = self.messages = Messages(VAR_263.T)
        VAR_249.update(CLASS_3.default_messages)
        VAR_249.update(ajax_failed_authentication=
                       DIV(H4('NOT AUTHORIZED'),
                           'Please ',
                           A('login',
                             _href=self.settings.login_url +
                             ('?_next=' + urllib_quote(VAR_263.request.env.http_web2py_component_location))
                             if VAR_263.request.env.http_web2py_component_location else ''),
                           ' VAR_35 view this VAR_229.',
                           _class='not-authorized alert alert-block'))
        VAR_249.lock_keys = True
        VAR_244 = VAR_263.response
        # Honor "remember me": stretch the session cookie lifetime.
        if VAR_63 and VAR_63.remember_me:
            VAR_244.session_cookie_expires = VAR_63.expiration
        if VAR_83:
            self.define_signature()
        else:
            self.signature = None
        # Optional JWT support (kwargs forwarded to the JWT handler).
        self.jwt_handler = VAR_111 and CLASS_2(self, **VAR_111)
def FUNC_32(self):
VAR_112 = VAR_263.request.vars._next
VAR_98 = VAR_263.request.env.http_host
if isinstance(VAR_112, (list, tuple)):
VAR_112 = next[0]
if VAR_112 and self.settings.prevent_open_redirect_attacks:
return self.prevent_open_redirect(VAR_112, VAR_98)
return VAR_112 or None
@staticmethod
def FUNC_33(VAR_112, VAR_98):
if VAR_112:
VAR_350 = VAR_112.split('/')
if ':' not in VAR_350[0] and VAR_350[:2] != ['', '']:
return VAR_112
elif len(VAR_350) > 2 and VAR_350[0].endswith(':') and VAR_350[1:3] == ['', VAR_98]:
return VAR_112
return None
    def FUNC_34(self):
        # Lazy accessor for the CAS ticket table.
        return self.db[self.settings.table_cas_name]
    def VAR_277(self):
        # Lazy accessor for the API-token table.
        return self.db[self.settings.table_token_name]
    def FUNC_36(self, *VAR_9, **VAR_13):
        # Central hook for aborting the request; simply raises HTTP.
        raise HTTP(*VAR_9, **VAR_13)
    def __call__(self):
        # Dispatcher for the auth controller action: routes
        # request.args[0] to the matching method, plus the CAS provider
        # endpoints (login/validate/serviceValidate/proxyValidate/logout,
        # with 'p3' variants) when this instance acts as a CAS server.
        VAR_56 = VAR_263.request
        VAR_11 = VAR_56.args
        if not VAR_11:
            redirect(self.url(VAR_11='login', VAR_96=VAR_56.vars))
        elif VAR_11[0] in self.settings.actions_disabled:
            raise HTTP(404)
        if VAR_11[0] in ('login', 'logout', 'register', 'verify_email',
                         'retrieve_username', 'retrieve_password',
                         'reset_password', 'request_reset_password',
                         'change_password', 'profile', 'groups',
                         'impersonate', 'not_authorized', 'confirm_registration',
                         'bulk_register', 'manage_tokens', 'jwt'):
            # 'impersonate' may carry the target user id as second arg.
            if len(VAR_56.args) >= 2 and VAR_11[0] == 'impersonate':
                return getattr(self, VAR_11[0])(VAR_56.args[1])
            else:
                return getattr(self, VAR_11[0])()
        elif VAR_11[0] == 'cas' and not self.settings.cas_provider:
            if VAR_11(1) == self.settings.cas_actions['login']:
                return self.cas_login(VAR_137=2)
            elif VAR_11(1) == self.settings.cas_actions['validate']:
                return self.cas_validate(VAR_137=1)
            elif VAR_11(1) == self.settings.cas_actions['servicevalidate']:
                return self.cas_validate(VAR_137=2, VAR_138=False)
            elif VAR_11(1) == self.settings.cas_actions['proxyvalidate']:
                return self.cas_validate(VAR_137=2, VAR_138=True)
            elif (VAR_11(1) == 'p3'
                  and VAR_11(2) == self.settings.cas_actions['servicevalidate']):
                return self.cas_validate(VAR_137=3, VAR_138=False)
            elif (VAR_11(1) == 'p3'
                  and VAR_11(2) == self.settings.cas_actions['proxyvalidate']):
                return self.cas_validate(VAR_137=3, VAR_138=True)
            elif VAR_11(1) == self.settings.cas_actions['logout']:
                return self.logout(VAR_112=VAR_56.vars.service or VAR_3)
        else:
            raise HTTP(404)
def FUNC_37(self, VAR_113='Welcome', VAR_114=None,
VAR_115=(' [ ', ' | ', ' ] '), VAR_116=VAR_3,
VAR_117=VAR_3, VAR_118='default'):
VAR_250 = [] # Hold all VAR_342 VAR_250 in VAR_9 list
self.bar = '' # The final
VAR_28 = VAR_263.T
VAR_117 = [] if not VAR_117 else VAR_117
if not VAR_114:
VAR_114 = self.url(self.settings.function)
VAR_56 = VAR_263.request
if URL() == VAR_114:
VAR_112 = ''
else:
VAR_112 = '?_next=' + urllib_quote(URL(VAR_11=VAR_56.args,
VAR_96=VAR_56.get_vars))
VAR_251 = lambda VAR_105: \
'%VAR_278/%VAR_278%s' % (VAR_114, VAR_105, VAR_112 if VAR_117 is VAR_3 or VAR_105 in VAR_117 else '')
if isinstance(VAR_113, str):
VAR_113 = VAR_28(VAR_113)
if VAR_113:
VAR_113 = VAR_113.strip() + ' '
def FUNC_154(*VAR_9, **VAR_13):
b['_rel'] = 'nofollow'
return A(*VAR_9, **VAR_13)
if self.user_id: # User is logged in
VAR_353 = self.settings.logout_next
VAR_250.append({'name': VAR_28('Log Out'),
'href': '%VAR_278/FUNC_50?_next=%s' % (VAR_114, urllib_quote(VAR_353)),
'icon': 'icon-off'})
if 'profile' not in self.settings.actions_disabled:
VAR_250.append({'name': VAR_28('Profile'), 'href': VAR_251('profile'),
'icon': 'icon-user'})
if 'change_password' not in self.settings.actions_disabled:
VAR_250.append({'name': VAR_28('Password'),
'href': VAR_251('change_password'),
'icon': 'icon-lock'})
if VAR_116 is VAR_3:
VAR_116 = '%(first_name)s'
if callable(VAR_116):
VAR_116 = VAR_116(self.user)
elif ((isinstance(VAR_116, str) or
type(VAR_116).__name__ == 'lazyT') and
re.search(r'%\(.+\)s', VAR_116)):
VAR_116 = VAR_116 % self.user
if not VAR_116:
VAR_116 = ''
else: # User is not logged in
VAR_250.append({'name': VAR_28('Log In'), 'href': VAR_251('login'),
'icon': 'icon-off'})
if 'register' not in self.settings.actions_disabled:
VAR_250.append({'name': VAR_28('Sign Up'), 'href': VAR_251('register'),
'icon': 'icon-user'})
if 'request_reset_password' not in self.settings.actions_disabled:
VAR_250.append({'name': VAR_28('Lost VAR_132?'),
'href': VAR_251('request_reset_password'),
'icon': 'icon-lock'})
if self.settings.use_username and 'retrieve_username' not in self.settings.actions_disabled:
VAR_250.append({'name': VAR_28('Forgot VAR_124?'),
'href': VAR_251('retrieve_username'),
'icon': 'icon-edit'})
def VAR_342(): # For inclusion in MENU
self.bar = [(VAR_250[0]['name'], False, VAR_250[0]['href'], [])]
del VAR_250[0]
for VAR_305 in VAR_250:
self.bar[0][3].append((VAR_305['name'], False, VAR_305['href']))
def FUNC_155(): # Default web2py scaffolding
def FUNC_172(VAR_354): return VAR_354 + ' ' + VAR_354.replace('icon', 'glyphicon')
self.bar = UL(LI(FUNC_154(I(_class=FUNC_172('icon ' + VAR_250[0]['icon'])),
' ' + VAR_250[0]['name'],
_href=VAR_250[0]['href'])), _class='dropdown-menu')
del VAR_250[0]
for VAR_305 in VAR_250:
self.bar.insert(-1, LI(FUNC_154(I(_class=FUNC_172('icon ' + VAR_305['icon'])),
' ' + VAR_305['name'],
_href=VAR_305['href'])))
self.bar.insert(-1, LI('', _class='divider'))
if self.user_id:
self.bar = LI(FUNC_154(VAR_113, VAR_116,
_href='#', _class="dropdown-toggle",
VAR_17={'toggle': 'dropdown'}),
self.bar, _class='dropdown')
else:
self.bar = LI(FUNC_154(VAR_28('Log In'),
_href='#', _class="dropdown-toggle",
VAR_17={'toggle': 'dropdown'}), self.bar,
_class='dropdown')
def VAR_355():
VAR_355 = {'prefix': VAR_113, 'user': VAR_116 if self.user_id else None}
for i in VAR_250:
if i['name'] == VAR_28('Log In'):
VAR_346 = 'login'
elif i['name'] == VAR_28('Sign Up'):
VAR_346 = 'register'
elif i['name'] == VAR_28('Lost VAR_132?'):
VAR_346 = 'request_reset_password'
elif i['name'] == VAR_28('Forgot VAR_124?'):
VAR_346 = 'retrieve_username'
elif i['name'] == VAR_28('Log Out'):
VAR_346 = 'logout'
elif i['name'] == VAR_28('Profile'):
VAR_346 = 'profile'
elif i['name'] == VAR_28('Password'):
VAR_346 = 'change_password'
VAR_355[VAR_346] = i['href']
self.bar = VAR_355
VAR_61 = {'asmenu': VAR_342,
'dropdown': FUNC_155,
'bare': VAR_355
} # Define custom modes.
if VAR_118 in VAR_61 and callable(VAR_61[VAR_118]):
VAR_61[VAR_118]()
else:
VAR_356, VAR_357, VAR_358 = VAR_115
if self.user_id:
self.bar = SPAN(VAR_113, VAR_116, VAR_356,
FUNC_154(VAR_250[0]['name'],
_href=VAR_250[0]['href']), VAR_358,
_class='auth_navbar')
else:
self.bar = SPAN(VAR_356, FUNC_154(VAR_250[0]['name'],
_href=VAR_250[0]['href']), VAR_358,
_class='auth_navbar')
for VAR_305 in VAR_250[1:]:
self.bar.insert(-1, VAR_357)
self.bar.insert(-1, FUNC_154(VAR_305['name'], _href=VAR_305['href']))
return self.bar
def FUNC_38(self,
VAR_119,
VAR_120=None,
VAR_121='%(VAR_8)s_archive',
VAR_122='current_record',
VAR_123=None):
VAR_123 = current_record_label or VAR_263.T(
VAR_122.replace('_', ' ').title())
for VAR_153 in VAR_119:
VAR_359 = VAR_153.fields()
if 'id' in VAR_359 and 'modified_on' in VAR_359 and VAR_122 not in VAR_359:
VAR_153._enable_record_versioning(VAR_120=archive_db,
archive_name=VAR_121,
VAR_122=current_record,
VAR_123=current_record_label)
    def FUNC_39(self, VAR_124=None, VAR_83=None, VAR_125=False,
                VAR_126=None, VAR_127=None):
        # Define the auth tables (user/group/membership/permission/event
        # via the AuthAPI superclass, plus CAS and token tables here).
        # VAR_125 toggles the token table; VAR_126/VAR_127 are migrate /
        # fake-migrate flags defaulting to the DAL's own.
        VAR_101 = self.db
        if VAR_126 is None:
            VAR_126 = VAR_101._migrate
        if VAR_127 is None:
            VAR_127 = VAR_101._fake_migrate
        VAR_222 = self.settings
        VAR_222.enable_tokens = VAR_125
        VAR_252 = \
            super(CLASS_3, self).define_tables(VAR_124, VAR_83, VAR_126, VAR_127)._table_signature_list
        VAR_193 = VAR_263.request.now
        VAR_253 = 'reference %s' % VAR_222.table_user_name
        if VAR_222.cas_domains:
            if VAR_222.table_cas_name not in VAR_101.tables:
                # NOTE(review): the value 'fake_migrate' in the **dict
                # below is undefined in this scope (NameError);
                # presumably it should be VAR_127.
                VAR_101.define_table(
                    VAR_222.table_cas_name,
                    VAR_1('user_id', VAR_253, VAR_5=None,
                          VAR_60=self.messages.label_user_id),
                    VAR_1('created_on', 'datetime', VAR_5=VAR_193),
                    VAR_1('service', VAR_279=IS_URL()),
                    VAR_1('ticket'),
                    VAR_1('renew', 'boolean', VAR_5=False),
                    *VAR_222.extra_fields.get(VAR_222.table_cas_name, []),
                    **dict(
                        VAR_126=self._get_migrate(
                            VAR_222.table_cas_name, VAR_126),
                        VAR_127=fake_migrate))
        if VAR_222.enable_tokens:
            VAR_360 = VAR_222.extra_fields.get(
                VAR_222.table_token_name, []) + VAR_252
            if VAR_222.table_token_name not in VAR_101.tables:
                VAR_101.define_table(
                    VAR_222.table_token_name,
                    VAR_1('user_id', VAR_253, VAR_5=None,
                          VAR_60=self.messages.label_user_id),
                    VAR_1('expires_on', 'datetime', VAR_5=datetime.datetime(2999, 12, 31)),
                    VAR_1('token', writable=False, VAR_5=web2py_uuid, unique=True),
                    *VAR_360,
                    **dict(VAR_126=self._get_migrate(VAR_222.table_token_name, VAR_126),
                           VAR_127=fake_migrate))
        # Eagerly resolve the table handles unless the DAL is lazy.
        if not VAR_101._lazy_tables:
            VAR_222.table_user = VAR_101[VAR_222.table_user_name]
            VAR_222.table_group = VAR_101[VAR_222.table_group_name]
            VAR_222.table_membership = VAR_101[VAR_222.table_membership_name]
            VAR_222.table_permission = VAR_101[VAR_222.table_permission_name]
            VAR_222.table_event = VAR_101[VAR_222.table_event_name]
            if VAR_222.cas_domains:
                VAR_222.table_cas = VAR_101[VAR_222.table_cas_name]
        if VAR_222.cas_provider:  # THIS IS NOT LAZY
            # Acting as a CAS *client*: local account actions are
            # disabled and the login form is delegated to the provider.
            VAR_222.actions_disabled = \
                ['profile', 'register', 'change_password',
                 'request_reset_password', 'retrieve_username']
            from gluon.contrib.login_methods.cas_auth import CasAuth
            VAR_361 = VAR_222.cas_maps
            if not VAR_361:
                VAR_254 = self.table_user()
                VAR_361 = dict((VAR_148, lambda v, VAR_181=VAR_148: v.get(VAR_181, None)) for VAR_148 in
                               VAR_254.fields if VAR_148 != 'id'
                               and VAR_254[VAR_148].readable)
                VAR_361['registration_id'] = \
                    lambda v, VAR_446=VAR_222.cas_provider: '%VAR_278/%s' % (VAR_446, v['user'])
            VAR_6 = [VAR_222.cas_actions['login'],
                     VAR_222.cas_actions['servicevalidate'],
                     VAR_222.cas_actions['logout']]
            # NOTE(review): the values 'actions' and 'maps' below are
            # undefined (NameError); presumably VAR_6 and VAR_361.
            VAR_222.login_form = CasAuth(
                casversion=2,
                urlbase=VAR_222.cas_provider,
                VAR_6=actions,
                VAR_361=maps)
        return self
def FUNC_40(self, VAR_128, VAR_129=['email'],
VAR_33=True, VAR_130=True):
VAR_254 = self.table_user()
VAR_255 = self.settings.cas_create_user
VAR_141 = None
VAR_256 = []
VAR_257 = ['registration_id', 'username', 'email']
if self.settings.login_userfield:
VAR_257.append(self.settings.login_userfield)
for VAR_385 in VAR_257:
if VAR_385 in VAR_254.fields() and \
VAR_128.get(VAR_385, None):
VAR_256.append(VAR_385)
VAR_179 = VAR_128[VAR_385]
VAR_141 = VAR_254(**{VAR_385: VAR_179})
if VAR_141:
break
if not VAR_256:
return None
if 'registration_id' not in VAR_128:
FUNC_119['registration_id'] = VAR_128[VAR_256[0]]
if 'registration_id' in VAR_256 \
and VAR_141 \
and VAR_141.registration_id \
and ('registration_id' not in VAR_128 or VAR_141.registration_id != str(VAR_128['registration_id'])):
VAR_141 = None # THINK MORE ABOUT THIS? DO WE TRUST OPENID PROVIDER?
if VAR_141:
if not VAR_130:
return None
VAR_362 = dict(registration_id=VAR_128['registration_id'])
for VAR_199 in VAR_129:
if VAR_199 in VAR_128:
VAR_362[VAR_199] = VAR_128[VAR_199]
VAR_141.update_record(**VAR_362)
elif VAR_256:
if VAR_255 is False:
self.logout(VAR_112=None, VAR_140=None, VAR_136=None)
raise HTTP(403, "Forbidden. User need VAR_35 be created first.")
if 'first_name' not in VAR_128 and 'first_name' in VAR_254.fields:
VAR_448 = VAR_128.get('email', 'anonymous').split('@')[0]
VAR_128['first_name'] = VAR_128.get('username', VAR_448)
VAR_96 = VAR_254._filter_fields(VAR_128)
VAR_143 = VAR_254.insert(**VAR_96)
VAR_141 = VAR_254[VAR_143]
if self.settings.create_user_groups:
VAR_147 = self.add_group(self.settings.create_user_groups % VAR_141)
self.add_membership(VAR_147, VAR_143)
if self.settings.everybody_group_id:
self.add_membership(self.settings.everybody_group_id, VAR_143)
if VAR_33:
self.user = VAR_141
if self.settings.register_onaccept:
VAR_26(self.settings.register_onaccept, Storage(VAR_96=VAR_141))
return VAR_141
    def VAR_258(self, VAR_131=False):
        # HTTP Basic authentication: returns the triple
        # (basic_allowed, header_present, logged_in_user_or_False).
        if not self.settings.allow_basic_login:
            return (False, False, False)
        VAR_258 = VAR_263.request.env.http_authorization
        if VAR_131:
            # NOTE(review): when VAR_131 is callable, VAR_449 is never
            # assigned before being used in the HTTP header below —
            # UnboundLocalError; confirm intended behavior.
            if callable(VAR_131):
                VAR_131 = VAR_131()
            elif isinstance(VAR_131, string_types):
                VAR_449 = to_unicode(VAR_131)
            elif VAR_131 is True:
                VAR_449 = '' + VAR_263.request.application
            VAR_363 = HTTP(401, 'Not Authorized', **{'WWW-Authenticate': 'Basic VAR_75="' + VAR_449 + '"'})
        if not VAR_258 or not VAR_258[:6].lower() == 'basic ':
            if VAR_131:
                raise VAR_363
            return (True, False, False)
        # Decode "user:password" from the base64 credentials.
        (VAR_124, VAR_207, VAR_132) = base64.b64decode(VAR_258[6:]).partition(b':')
        VAR_259 = VAR_207 and self.login_bare(VAR_124, VAR_132)
        if not VAR_259 and VAR_131:
            raise VAR_363
        return (True, True, VAR_259)
    def FUNC_42(self):
        # Bundle the user table plus the login user/password field names.
        # NOTE(review): the conditional expression below looks mangled —
        # `self.settings.login_userfield or 'username' in ...` binds in a
        # way that always yields 'username' when login_userfield is set;
        # the intent was presumably `'username' if 'username' in
        # VAR_254.fields else 'email'`.  Confirm before relying on it.
        VAR_254 = self.table_user()
        VAR_260 = self.settings.login_userfield or ('username' \
            if self.settings.login_userfield or 'username' \
            in VAR_254.fields else 'email')
        VAR_261 = self.settings.password_field
        return Storage({'table_user': VAR_254,
                        'userfield': VAR_260,
                        'passfield': VAR_261})
    def FUNC_43(self, VAR_124, VAR_132):
        # Programmatic login (no form): validate credentials against the
        # user table, falling back to the alternate login methods.
        # Returns the user row on success, else False.
        VAR_222 = self._get_login_settings()
        VAR_141 = VAR_222.table_user(**{VAR_222.userfield: VAR_124})
        if VAR_141 and VAR_141.get(VAR_222.passfield, False):
            # Run the candidate password through the field validator
            # (hashing) before comparing with the stored value.
            VAR_132 = VAR_222.table_user[
                VAR_222.passfield].validate(VAR_132)[0]
            # An empty registration_key means the account is active.
            if ((VAR_141.registration_key is None or
                 not VAR_141.registration_key.strip()) and
                VAR_132 == VAR_141[VAR_222.passfield]):
                self.login_user(VAR_141)
                return VAR_141
        else:
            for login_method in self.settings.login_methods:
                if login_method != self and login_method(VAR_124, VAR_132):
                    self.user = VAR_141
                    return VAR_141
        return False
    def FUNC_44(self, **VAR_133):
        # Programmatic registration (no form): validate/hash the password
        # field, require the userfield, then create-or-fetch the user.
        # Returns the user row, or False when creation failed.
        VAR_222 = self._get_login_settings()
        if VAR_133.get(VAR_222.passfield):
            VAR_133[VAR_222.passfield] = \
                VAR_222.table_user[VAR_222.passfield].validate(VAR_133[VAR_222.passfield], None)[0]
        if not VAR_133.get(VAR_222.userfield):
            raise ValueError('register_bare: VAR_260 not provided or invalid')
        VAR_141 = self.get_or_create_user(VAR_133, VAR_33=False, VAR_130=False,
                                          VAR_129=self.settings.update_fields)
        if not VAR_141:
            return False
        return VAR_141
    def FUNC_45(self,
                VAR_112=VAR_3,
                VAR_134=VAR_3,
                VAR_135=VAR_3,
                VAR_136=VAR_3,
                VAR_137=2,
                ):
        # CAS *server* login endpoint: validates the requesting service
        # against cas_domains, then either issues a service ticket for an
        # already-logged-in user or falls back to the normal login form.
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        VAR_139 = VAR_263.session
        VAR_101, VAR_153 = self.db, self.table_cas()
        VAR_139._cas_service = VAR_56.vars.service or VAR_139._cas_service
        if VAR_56.env.http_host not in self.settings.cas_domains or \
                not VAR_139._cas_service:
            raise HTTP(403, 'not authorized')
        def FUNC_157(VAR_262=False):
            # Issue (or reuse) a ticket and bounce back to the service.
            # NOTE(review): the keyword names VAR_364/VAR_143/VAR_264/
            # VAR_265 in the table lookup/insert below should be the DB
            # field names ('service', 'user_id', 'ticket', 'renew'), and
            # the value 'ticket' on the insert line is undefined
            # (NameError) — presumably VAR_264.  Confirm.
            VAR_266 = VAR_153(VAR_364=VAR_139._cas_service, VAR_143=self.user.id)
            if VAR_266:
                VAR_264 = VAR_266.ticket
            else:
                VAR_264 = 'ST-' + web2py_uuid()
                VAR_153.insert(VAR_364=VAR_139._cas_service,
                               VAR_143=self.user.id,
                               VAR_264=ticket,
                               created_on=VAR_56.now,
                               VAR_265=VAR_262)
            VAR_364 = VAR_139._cas_service
            VAR_365 = '&' if '?' in VAR_364 else '?'
            del VAR_139._cas_service
            if 'warn' in VAR_56.vars and not VAR_262:
                # CAS 'warn' mode: show a confirmation link instead of
                # redirecting immediately.
                VAR_244.headers[
                    'refresh'] = "5;URL=%s" % VAR_364 + VAR_365 + "ticket=" + VAR_264
                return A("Continue VAR_35 %s" % VAR_364,
                         _href=VAR_364 + VAR_365 + "ticket=" + VAR_264)
            else:
                redirect(VAR_364 + VAR_365 + "ticket=" + VAR_264)
        if self.is_logged_in() and 'renew' not in VAR_56.vars:
            return FUNC_157()
        elif not self.is_logged_in() and 'gateway' in VAR_56.vars:
            redirect(VAR_139._cas_service)
        def FUNC_158(VAR_7, VAR_135=onaccept):
            # NOTE(review): the default value 'onaccept' is undefined in
            # this scope (NameError at def time) — presumably the outer
            # VAR_135 was intended.
            if VAR_135 is not VAR_3:
                VAR_135(VAR_7)
            return FUNC_157(VAR_262=True)
        return self.login(VAR_112, VAR_134, FUNC_158, VAR_136)
    def FUNC_46(self, VAR_137=2, VAR_138=False):
        # CAS server ticket-validation endpoint (protocol versions 1-3).
        # Emits 'yes/no' for v1 or a cas:serviceResponse XML document.
        VAR_56 = VAR_263.request
        VAR_101, VAR_153 = self.db, self.table_cas()
        VAR_263.response.headers['Content-Type'] = 'text'
        VAR_264 = VAR_56.vars.ticket
        VAR_265 = 'renew' in VAR_56.vars
        # NOTE(review): the value 'ticket' in the lookup below and the
        # bare name 'renew' in the XOR test further down are undefined in
        # this scope (NameError); presumably VAR_264 and VAR_265.
        VAR_266 = VAR_153(VAR_264=ticket)
        VAR_267 = False
        if VAR_266:
            VAR_260 = self.settings.login_userfield or 'username' \
                if 'username' in VAR_153.fields else 'email'
            # Service tickets are single-use: delete after a match.
            if VAR_264[0:3] == 'ST-' and \
                    not ((VAR_266.renew and VAR_265) ^ renew):
                VAR_141 = self.table_user()(VAR_266.user_id)
                VAR_266.delete_record()
                VAR_267 = True
        def FUNC_159(VAR_82):
            # Wrap VAR_82 in the cas:serviceResponse envelope with the
            # Yale CAS namespace and an XML declaration.
            VAR_366 = to_native(TAG['cas:serviceResponse'](
                VAR_82, **{'_xmlns:cas': 'http://www.yale.edu/tp/cas'}).xml())
            return '<?VAR_404 VAR_137="1.0" VAR_42="UTF-8"?>\n' + VAR_366
        if VAR_267:
            if VAR_137 == 1:
                VAR_37 = 'yes\VAR_181%s' % VAR_141[VAR_260]
            elif VAR_137 == 3:
                # v3: user element plus attributes block.
                VAR_124 = VAR_141.get('username', VAR_141[VAR_260])
                VAR_37 = FUNC_159(
                    TAG['cas:authenticationSuccess'](
                        TAG['cas:user'](VAR_124),
                        TAG['cas:attributes'](
                            *[TAG['cas:' + VAR_177.name](VAR_141[VAR_177.name])
                              for VAR_177 in self.table_user()
                              if VAR_177.readable])))
            else:  # assume VAR_137 2
                VAR_124 = VAR_141.get('username', VAR_141[VAR_260])
                VAR_37 = FUNC_159(
                    TAG['cas:authenticationSuccess'](
                        TAG['cas:user'](VAR_124),
                        *[TAG['cas:' + VAR_177.name](VAR_141[VAR_177.name])
                          for VAR_177 in self.table_user()
                          if VAR_177.readable]))
        else:
            if VAR_137 == 1:
                VAR_37 = 'no\n'
            elif VAR_266:
                VAR_37 = FUNC_159(TAG['cas:authenticationFailure']())
            else:
                VAR_37 = FUNC_159(
                    TAG['cas:authenticationFailure'](
                        'Ticket %VAR_278 not recognized' % VAR_264,
                        _code='INVALID TICKET'))
        raise HTTP(200, VAR_37)
def FUNC_47(self, VAR_139):
VAR_139.auth_two_factor_user = None
VAR_139.auth_two_factor = None
VAR_139.auth_two_factor_enabled = False
VAR_139.auth_two_factor_tries_left = self.settings.auth_two_factor_tries_left
def FUNC_48(self, VAR_112, VAR_139):
if self.is_logged_in():
if VAR_112 == VAR_139._auth_next:
del VAR_139._auth_next
redirect(VAR_112, client_side=self.settings.client_side)
    def VAR_33(self,
               VAR_112=VAR_3,
               VAR_134=VAR_3,
               VAR_135=VAR_3,
               VAR_136=VAR_3,
               ):
        """Return (and process) the login form.

        Handles the whole login pipeline visible here: resolving the
        post-login destination, building the SQLFORM (or delegating to an
        alternate/CAS login form), trying the configured login methods,
        running the optional two-factor step, and finally logging the
        user in and redirecting. Several identifiers in this rendering
        (`next`, `session`, `extra_fields`, `onvalidation`, `cas_user`,
        `role`) appear unresolved — flagged inline; confirm against the
        upstream source before relying on those paths.
        """
        VAR_222 = self.settings
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        VAR_139 = VAR_263.session
        # Persist the requested ?_next destination across the round-trip.
        VAR_268 = self.get_vars_next()
        if VAR_268:
            VAR_139._auth_next = VAR_268
        elif VAR_139._auth_next:
            VAR_268 = VAR_139._auth_next
        if VAR_112 is VAR_3:
            VAR_112 = VAR_222.login_next
            if callable(VAR_112):
                VAR_112 = VAR_112()
            # Only allow absolute URLs whose host is a configured CAS
            # domain; relative URLs are accepted as-is (open-redirect guard).
            VAR_367 = VAR_268
            if VAR_367:
                VAR_428 = VAR_367.split('://')
                if VAR_428[0].lower() in ['http', 'https', 'ftp']:
                    VAR_450 = VAR_367.split('//', 1)[-1].split('/')[0]
                    if VAR_450 in VAR_222.cas_domains:
                        VAR_112 = VAR_367
                else:
                    VAR_112 = VAR_367
        # NOTE(review): `next` and `session` are unresolved here (presumably
        # VAR_112 and VAR_139) — confirm against upstream.
        self.when_is_logged_in_bypass_next_in_url(VAR_112=next, VAR_139=session)
        VAR_254 = self.table_user()
        # Pick the validator for the userid field: plain non-empty for
        # username logins, full email validation otherwise.
        if 'username' in VAR_254.fields or \
                not VAR_222.login_email_validate:
            VAR_368 = IS_NOT_EMPTY(VAR_59=self.messages.is_empty)
            if not VAR_222.username_case_sensitive:
                VAR_368 = [IS_LOWER(), VAR_368]
        else:
            VAR_368 = IS_EMAIL(VAR_59=self.messages.invalid_email)
            if not VAR_222.email_case_sensitive:
                VAR_368 = [IS_LOWER(), VAR_368]
        VAR_261 = VAR_222.password_field
        # Do not enforce a minimum password length on login.
        try:
            VAR_254[VAR_261].requires[-1].min_length = 0
        except:
            pass
        if VAR_134 is VAR_3:
            VAR_134 = VAR_222.login_onvalidation
        if VAR_135 is VAR_3:
            VAR_135 = VAR_222.login_onaccept
        if VAR_136 is VAR_3:
            VAR_136 = self.messages['login_log']
        VAR_269 = VAR_222.login_onfail
        VAR_141 = None  # the authenticated user record, once found
        VAR_270 = False
        if self.settings.login_userfield:
            VAR_124 = self.settings.login_userfield
        else:
            if 'username' in VAR_254.fields:
                VAR_124 = 'username'
            else:
                VAR_124 = 'email'
            if self.settings.multi_login:
                VAR_270 = True
        VAR_271 = VAR_254[VAR_124].requires
        VAR_254[VAR_124].requires = VAR_368
        # Out of two-factor attempts: reset and bounce.
        if VAR_139.auth_two_factor_enabled and VAR_139.auth_two_factor_tries_left < 1:
            VAR_141 = None
            VAR_369 = False
            self._reset_two_factor_auth(VAR_139)
            redirect(VAR_112, client_side=VAR_222.client_side)
        # Phase 1: no 2FA user pending yet — run the primary credential check.
        if VAR_139.auth_two_factor_user is None:
            if VAR_222.remember_me_form:
                VAR_360 = [
                    VAR_1('remember_me', 'boolean', VAR_5=False,
                          VAR_60=self.messages.label_remember_me)]
            else:
                VAR_360 = []
            if VAR_222.login_form == self:
                # NOTE(review): `extra_fields` below is unresolved
                # (presumably VAR_360) — confirm against upstream.
                VAR_7 = SQLFORM(VAR_254,
                                VAR_133=[VAR_124, VAR_261],
                                hidden=dict(_next=VAR_112),
                                showid=VAR_222.showid,
                                submit_button=self.messages.login_button,
                                delete_label=self.messages.delete_label,
                                VAR_273=VAR_222.formstyle,
                                separator=VAR_222.label_separator,
                                VAR_360=extra_fields,
                                )
                VAR_232 = VAR_222.login_captcha or \
                    (VAR_222.login_captcha is not False and VAR_222.captcha)
                if VAR_232:
                    FUNC_5(VAR_7, VAR_232.label, VAR_232, VAR_232.comment,
                           VAR_222.formstyle, 'captcha__row')
                VAR_369 = False
                VAR_429 = self.messages.invalid_user
                # NOTE(review): `onvalidation` unresolved (presumably VAR_134).
                if VAR_7.accepts(VAR_56, VAR_139 if self.csrf_prevention else None,
                                 VAR_171='login', dbio=False,
                                 VAR_134=onvalidation,
                                 hideerror=VAR_222.hideerror):
                    VAR_369 = True
                    VAR_451 = VAR_7.vars[VAR_124]
                    # multi_login: an '@' in the value means look up by email.
                    if VAR_270 and '@' in VAR_451:
                        VAR_141 = VAR_254(email=VAR_451)
                    else:
                        VAR_141 = VAR_254(**{VAR_124: VAR_451})
                    if VAR_141:
                        VAR_429 = self.messages.invalid_password
                        VAR_465 = VAR_141
                        # Refuse accounts whose registration_key marks them
                        # pending / disabled / unverified.
                        if (VAR_465.registration_key or '').startswith('pending'):
                            VAR_244.flash = self.messages.registration_pending
                            return VAR_7
                        elif VAR_465.registration_key in ('disabled', 'blocked'):
                            VAR_244.flash = self.messages.login_disabled
                            return VAR_7
                        elif (VAR_465.registration_key is not None and VAR_465.registration_key.strip()):
                            VAR_244.flash = \
                                self.messages.registration_verifying
                            return VAR_7
                        # Try every configured alternate login method first.
                        VAR_141 = None
                        for login_method in VAR_222.login_methods:
                            if login_method != self and \
                                    login_method(VAR_56.vars[VAR_124],
                                                 VAR_56.vars[VAR_261]):
                                if self not in VAR_222.login_methods:
                                    VAR_7.vars[VAR_261] = None
                                VAR_141 = self.get_or_create_user(
                                    VAR_7.vars, VAR_222.update_fields)
                                break
                        if not VAR_141:
                            # Fall back to the local password check.
                            if VAR_222.login_methods[0] == self:
                                if VAR_7.vars.get(VAR_261, '') == VAR_465[VAR_261]:
                                    VAR_141 = VAR_465
                    else:
                        # Unknown userid: alternate methods may still
                        # authenticate and auto-register the user.
                        if not VAR_222.alternate_requires_registration:
                            for login_method in VAR_222.login_methods:
                                if login_method != self and \
                                        login_method(VAR_56.vars[VAR_124],
                                                     VAR_56.vars[VAR_261]):
                                    if self not in VAR_222.login_methods:
                                        VAR_7.vars[VAR_261] = None
                                    VAR_141 = self.get_or_create_user(
                                        VAR_7.vars, VAR_222.update_fields)
                                    break
                    if not VAR_141:
                        # Authentication failed: log, flash, fire onfail, retry.
                        self.log_event(self.messages['login_failed_log'],
                                       VAR_56.post_vars)
                        VAR_139.flash = VAR_429 if self.settings.login_specify_error else self.messages.invalid_login
                        VAR_26(VAR_269, None)
                        redirect(self.url(VAR_11=VAR_56.args, VAR_96=VAR_56.get_vars),client_side=VAR_222.client_side)
            else:  # use a central authentication service (e.g. CAS)
                VAR_370 = VAR_222.login_form
                VAR_371 = VAR_370.get_user()
                if VAR_371:
                    # NOTE(review): `cas_user` unresolved (presumably VAR_371).
                    cas_user[VAR_261] = None
                    VAR_141 = self.get_or_create_user(
                        VAR_254._filter_fields(VAR_371),
                        VAR_222.update_fields)
                elif hasattr(VAR_370, 'login_form'):
                    return VAR_370.login_form()
                else:
                    # External login: send the user to the provider.
                    VAR_112 = self.url(VAR_222.function, VAR_11='login')
                    redirect(VAR_370.login_url(VAR_112),
                             client_side=VAR_222.client_side)
        # Decide whether the second factor applies to this user.
        if VAR_141 and self.settings.auth_two_factor_enabled is True:
            VAR_139.auth_two_factor_enabled = True
        elif VAR_141 and self.settings.two_factor_authentication_group:
            VAR_146 = self.settings.two_factor_authentication_group
            # NOTE(review): `role` unresolved (presumably VAR_146).
            VAR_139.auth_two_factor_enabled = self.has_membership(VAR_143=VAR_141.id, VAR_146=role)
        # Phase 2: two-factor challenge form.
        if VAR_139.auth_two_factor_enabled:
            VAR_7 = SQLFORM.factory(
                VAR_1('authentication_code',
                      VAR_60=self.messages.label_two_factor,
                      VAR_91=True,
                      VAR_62=self.messages.two_factor_comment),
                hidden=dict(_next=VAR_112),
                VAR_273=VAR_222.formstyle,
                separator=VAR_222.label_separator
            )
            VAR_369 = False
            if VAR_139.auth_two_factor_user is None and VAR_141 is not None:
                VAR_139.auth_two_factor_user = VAR_141  # store the validated user and associate with this session
                VAR_139.auth_two_factor = random.randint(100000, 999999)
                VAR_139.auth_two_factor_tries_left = self.settings.auth_two_factor_tries_left
                # Deliver the code: by mail unless custom methods are set.
                VAR_430 = self.settings.two_factor_methods
                if not VAR_430:
                    self.settings.mailer.send(
                        VAR_35=VAR_141.email,
                        VAR_36=self.messages.retrieve_two_factor_code_subject,
                        VAR_37=self.messages.retrieve_two_factor_code.format(VAR_139.auth_two_factor))
                else:
                    # First delivery method that does not raise wins.
                    for two_factor_method in VAR_430:
                        try:
                            VAR_139.auth_two_factor = two_factor_method(VAR_141, VAR_139.auth_two_factor)
                        except:
                            pass
                        else:
                            break
            if VAR_7.accepts(VAR_56, VAR_139 if self.csrf_prevention else None,
                             VAR_171='login', dbio=False,
                             VAR_134=onvalidation,
                             hideerror=VAR_222.hideerror):
                VAR_369 = True
                """
                The lists is executed after VAR_7 validation for each of the corresponding VAR_114.
                For example, in your model:
                In your models copy and paste:
                VAR_63.settings.extra_fields['auth_user'] = [
                    VAR_1('motp_secret', 'password', length=512, VAR_5='', VAR_60='MOTP Secret'),
                    VAR_1('motp_pin', 'string', length=128, VAR_5='', VAR_60='MOTP PIN')]
                OFFSET = 60 # Be sure is the same in your OTP Client
                def _set_two_factor(VAR_141, auth_two_factor):
                    return None
                def verify_otp(VAR_141, otp):
                    import .time
                    from hashlib import .md5
                    epoch_time = int(time.time())
                    time_start = int(str(epoch_time - OFFSET)[:-1])
                    time_end = int(str(epoch_time + OFFSET)[:-1])
                    for VAR_445 in range(time_start - 1, time_end + 1):
                        to_hash = str(VAR_445) + VAR_141.motp_secret + VAR_141.motp_pin
                        hash = md5(to_hash).hexdigest()[:6]
                        if otp == hash:
                            return hash
                VAR_63.settings.auth_two_factor_enabled = True
                VAR_63.messages.two_factor_comment = "Verify your OTP Client for the VAR_309."
                VAR_63.settings.two_factor_methods = [lambda VAR_141,
                                                      auth_two_factor: _set_two_factor(VAR_141, auth_two_factor)]
                VAR_63.settings.two_factor_onvalidation = [lambda VAR_141, otp: verify_otp(VAR_141, otp)]
                """
                if self.settings.two_factor_onvalidation:
                    # Custom validators may replace the expected code.
                    for two_factor_onvalidation in self.settings.two_factor_onvalidation:
                        try:
                            VAR_139.auth_two_factor = \
                                two_factor_onvalidation(VAR_139.auth_two_factor_user, VAR_7.vars['authentication_code'])
                        except:
                            pass
                        else:
                            break
                if VAR_7.vars['authentication_code'] == str(VAR_139.auth_two_factor):
                    # Code accepted: promote the stored 2FA user and clear state.
                    if VAR_141 is None or VAR_141 == VAR_139.auth_two_factor_user:
                        VAR_141 = VAR_139.auth_two_factor_user
                    elif VAR_141 != VAR_139.auth_two_factor_user:
                        VAR_141 = None
                    self._reset_two_factor_auth(VAR_139)
                else:
                    # Wrong code: burn an attempt; reset and bounce when spent.
                    VAR_139.auth_two_factor_tries_left -= 1
                    if VAR_139.auth_two_factor_enabled and VAR_139.auth_two_factor_tries_left < 1:
                        VAR_141 = None
                        VAR_369 = False
                        self._reset_two_factor_auth(VAR_139)
                        redirect(VAR_112, client_side=VAR_222.client_side)
                    VAR_244.flash = self.messages.invalid_two_factor_code.format(VAR_139.auth_two_factor_tries_left)
                    return VAR_7
            else:
                return VAR_7
        # Phase 3: fully authenticated — log the user in.
        if VAR_141:
            VAR_141 = Row(VAR_254._filter_fields(VAR_141, VAR_215=True))
            self.login_user(VAR_141)
            VAR_139.auth.expiration = \
                VAR_56.post_vars.remember_me and \
                VAR_222.long_expiration or \
                VAR_222.expiration
            VAR_139.auth.remember_me = 'remember_me' in VAR_56.post_vars
            self.log_event(VAR_136, VAR_141)
            VAR_139.flash = self.messages.logged_in
        # How to continue depends on whether we own the login form.
        if VAR_222.login_form == self:
            if VAR_369:
                VAR_26(VAR_135, VAR_7)
                if VAR_112 == VAR_139._auth_next:
                    VAR_139._auth_next = None
                VAR_112 = FUNC_4(VAR_112, VAR_7)
                redirect(VAR_112, client_side=VAR_222.client_side)
            VAR_254[VAR_124].requires = VAR_271
            return VAR_7
        elif VAR_141:
            VAR_26(VAR_135, None)
            if VAR_112 == VAR_139._auth_next:
                del VAR_139._auth_next
            redirect(VAR_112, client_side=VAR_222.client_side)
def FUNC_50(self, VAR_112=VAR_3, VAR_140=VAR_3, VAR_136=VAR_3):
self._reset_two_factor_auth(VAR_263.session)
if VAR_112 is VAR_3:
VAR_112 = self.get_vars_next() or self.settings.logout_next
if VAR_140 is VAR_3:
VAR_140 = self.settings.logout_onlogout
if VAR_140:
onlogout(self.user)
if VAR_136 is VAR_3:
VAR_136 = self.messages['logout_log']
if self.user:
self.log_event(VAR_136, self.user)
if self.settings.login_form != self:
VAR_370 = self.settings.login_form
VAR_371 = VAR_370.get_user()
if VAR_371:
VAR_112 = VAR_370.logout_url(VAR_112)
VAR_263.session.auth = None
self.user = None
if self.settings.renew_session_onlogout:
VAR_263.session.renew(clear_session=not self.settings.keep_session_onlogout)
VAR_263.session.flash = self.messages.logged_out
if VAR_112 is not None:
redirect(VAR_112)
def FUNC_51(self):
self.logout(VAR_112=None, VAR_140=None, VAR_136=None)
    def FUNC_52(self,
                VAR_112=VAR_3,
                VAR_134=VAR_3,
                VAR_135=VAR_3,
                VAR_136=VAR_3,
                ):
        """Return (and process) the user registration form.

        Enforces uniqueness of the login field, optionally adds a
        "confirm password" field and captcha, and on accept creates the
        per-user group, sends the verification email (rolling back the
        insert if sending fails), applies the approval/verification
        workflow flags, optionally logs the new user in, and redirects.
        Identifiers `formstyle`, `extra_fields`, `onvalidation`, `key`,
        `FUNC_173` and `captcha.comment` appear unresolved in this
        rendering — flagged inline.
        """
        VAR_254 = self.table_user()
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        VAR_139 = VAR_263.session
        if self.is_logged_in():
            redirect(self.settings.logged_url,
                     client_side=self.settings.client_side)
        if VAR_112 is VAR_3:
            VAR_112 = self.get_vars_next() or self.settings.register_next
        if VAR_134 is VAR_3:
            VAR_134 = self.settings.register_onvalidation
        if VAR_135 is VAR_3:
            VAR_135 = self.settings.register_onaccept
        if VAR_136 is VAR_3:
            VAR_136 = self.messages['register_log']
        VAR_254 = self.table_user()
        if self.settings.login_userfield:
            VAR_124 = self.settings.login_userfield
        elif 'username' in VAR_254.fields:
            VAR_124 = 'username'
        else:
            VAR_124 = 'email'
        # Ensure the login field carries an IS_NOT_IN_DB validator exactly once.
        VAR_272 = IS_NOT_IN_DB(self.db, VAR_254[VAR_124])
        if not VAR_254[VAR_124].requires:
            VAR_254[VAR_124].requires = VAR_272
        elif isinstance(VAR_254[VAR_124].requires, (list, tuple)):
            if not any([isinstance(validator, IS_NOT_IN_DB) for validator in
                        VAR_254[VAR_124].requires]):
                if isinstance(VAR_254[VAR_124].requires, list):
                    VAR_254[VAR_124].requires.append(VAR_272)
                else:
                    VAR_254[VAR_124].requires += (VAR_272, )
        elif not isinstance(VAR_254[VAR_124].requires, IS_NOT_IN_DB):
            VAR_254[VAR_124].requires = [VAR_254[VAR_124].requires,
                                         VAR_272]
        VAR_261 = self.settings.password_field
        VAR_273 = self.settings.formstyle
        try:  # Make sure we have our original minimum length as other auth forms change it
            VAR_254[VAR_261].requires[-1].min_length = self.settings.password_min_length
        except:
            pass
        if self.settings.register_verify_password:
            # Insert a "password_two" confirmation field right after the
            # password field.
            if self.settings.register_fields is None:
                self.settings.register_fields = [VAR_10.name for VAR_10 in VAR_254 if VAR_10.writable and not VAR_10.compute]
                VAR_346 = self.settings.register_fields.index(VAR_261)
                self.settings.register_fields.insert(VAR_346 + 1, "password_two")
            VAR_360 = [
                VAR_1("password_two", "password",
                      VAR_279=IS_EQUAL_TO(VAR_56.post_vars.get(VAR_261, None),
                                          VAR_59=self.messages.mismatched_password),
                      VAR_60=VAR_263.T("Confirm Password"))]
        else:
            VAR_360 = []
        # NOTE(review): `formstyle` and `extra_fields` below are unresolved
        # (presumably VAR_273 and VAR_360) — confirm against upstream.
        VAR_7 = SQLFORM(VAR_254,
                        VAR_133=self.settings.register_fields,
                        hidden=dict(_next=VAR_112),
                        showid=self.settings.showid,
                        submit_button=self.messages.register_button,
                        delete_label=self.messages.delete_label,
                        VAR_273=formstyle,
                        separator=self.settings.label_separator,
                        VAR_360=extra_fields
                        )
        VAR_232 = self.settings.register_captcha or self.settings.captcha
        if VAR_232:
            # NOTE(review): `captcha.comment` unresolved (presumably
            # VAR_232.comment).
            FUNC_5(VAR_7, VAR_232.label, VAR_232,
                   captcha.comment, self.settings.formstyle, 'captcha__row')
        if self.settings.pre_registration_div:
            FUNC_5(VAR_7, '',
                   DIV(VAR_16="pre-reg", *self.settings.pre_registration_div),
                   '', VAR_273, '')
        # Pre-assign the registration key (prefixed when approval is needed).
        VAR_199 = web2py_uuid()
        if self.settings.registration_requires_approval:
            VAR_199 = 'pending-' + VAR_199
        VAR_254.registration_key.default = VAR_199
        if VAR_7.accepts(VAR_56, VAR_139 if self.csrf_prevention else None,
                         VAR_171='register',
                         VAR_134=onvalidation,
                         hideerror=self.settings.hideerror):
            VAR_372 = self.messages.group_description % VAR_7.vars
            if self.settings.create_user_groups:
                VAR_147 = self.add_group(self.settings.create_user_groups % VAR_7.vars, VAR_372)
                self.add_membership(VAR_147, VAR_7.vars.id)
            if self.settings.everybody_group_id:
                self.add_membership(self.settings.everybody_group_id, VAR_7.vars.id)
            if self.settings.registration_requires_verification:
                VAR_276 = self.url(
                    self.settings.function, VAR_11=('verify_email', VAR_199), VAR_97=True)
                VAR_27 = dict(VAR_7.vars)
                # NOTE(review): `key` and `FUNC_173` unresolved (presumably
                # VAR_199 and VAR_276) — confirm against upstream.
                VAR_27.update(dict(VAR_199=key, VAR_276=FUNC_173, VAR_124=VAR_7.vars[VAR_124],
                                   firstname=VAR_7.vars['firstname'],
                                   lastname=VAR_7.vars['lastname']))
                # If the verification mail cannot be sent, undo the insert.
                if not (self.settings.mailer and self.settings.mailer.send(
                        VAR_35=VAR_7.vars.email,
                        VAR_36=self.messages.verify_email_subject,
                        VAR_37=self.messages.verify_email % VAR_27)):
                    self.db.rollback()
                    VAR_244.flash = self.messages.unable_send_email
                    return VAR_7
                VAR_139.flash = self.messages.email_sent
            if self.settings.registration_requires_approval and \
               not self.settings.registration_requires_verification:
                VAR_254[VAR_7.vars.id] = dict(registration_key='pending')
                VAR_139.flash = self.messages.registration_pending
            elif (not self.settings.registration_requires_verification or self.settings.login_after_registration):
                if not self.settings.registration_requires_verification:
                    VAR_254[VAR_7.vars.id] = dict(registration_key='')
                VAR_139.flash = self.messages.registration_successful
                VAR_141 = VAR_254(**{VAR_124: VAR_7.vars[VAR_124]})
                self.login_user(VAR_141)
                VAR_139.flash = self.messages.logged_in
            self.log_event(VAR_136, VAR_7.vars)
            VAR_26(VAR_135, VAR_7)
            if not VAR_112:
                VAR_112 = self.url(VAR_11=VAR_56.args)
            else:
                VAR_112 = FUNC_4(VAR_112, VAR_7)
            redirect(VAR_112, client_side=self.settings.client_side)
        return VAR_7
    def FUNC_53(self,
                VAR_112=VAR_3,
                VAR_135=VAR_3,
                VAR_136=VAR_3,
                ):
        """Email-verification endpoint.

        Looks up the user by the registration key taken from the URL
        (via FUNC_0(-1)); unknown keys bounce to the login page. The key
        is then either moved to 'pending' (approval workflow) or cleared
        (verified), the cached session user is kept in sync, the event is
        logged, the accept callback runs, and the user is redirected.
        """
        VAR_199 = FUNC_0(-1)
        VAR_254 = self.table_user()
        VAR_141 = VAR_254(registration_key=VAR_199)
        if not VAR_141:
            redirect(self.settings.login_url)
        if self.settings.registration_requires_approval:
            VAR_141.update_record(registration_key='pending')
            VAR_263.session.flash = self.messages.registration_pending
        else:
            VAR_141.update_record(registration_key='')
            VAR_263.session.flash = self.messages.email_verified
        # Make sure the session copy of the user reflects the new key.
        if VAR_263.session.auth and VAR_263.session.auth.user:
            VAR_263.session.auth.user.registration_key = VAR_141.registration_key
        if VAR_136 is VAR_3:
            VAR_136 = self.messages['verify_email_log']
        if VAR_112 is VAR_3:
            VAR_112 = self.settings.verify_email_next
        if VAR_135 is VAR_3:
            VAR_135 = self.settings.verify_email_onaccept
        self.log_event(VAR_136, VAR_141)
        VAR_26(VAR_135, VAR_141)
        redirect(VAR_112)
    def FUNC_54(self,
                VAR_112=VAR_3,
                VAR_134=VAR_3,
                VAR_135=VAR_3,
                VAR_136=VAR_3,
                ):
        """Return (and process) the "retrieve username" form.

        Only available when the user table has a ``username`` field and a
        mailer is configured. On accept, emails every username registered
        under the submitted address. `onvalidation`, `captcha.comment`
        and `username` appear unresolved in this rendering — flagged
        inline.
        """
        VAR_254 = self.table_user()
        if 'username' not in VAR_254.fields:
            raise HTTP(404)
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        VAR_139 = VAR_263.session
        VAR_232 = self.settings.retrieve_username_captcha or \
            (self.settings.retrieve_username_captcha is not False and self.settings.captcha)
        if not self.settings.mailer:
            VAR_244.flash = self.messages.function_disabled
            return ''
        if VAR_112 is VAR_3:
            VAR_112 = self.get_vars_next() or self.settings.retrieve_username_next
        if VAR_134 is VAR_3:
            VAR_134 = self.settings.retrieve_username_onvalidation
        if VAR_135 is VAR_3:
            VAR_135 = self.settings.retrieve_username_onaccept
        if VAR_136 is VAR_3:
            VAR_136 = self.messages['retrieve_username_log']
        # Temporarily require that the email exists in the table.
        VAR_271 = VAR_254.email.requires
        VAR_254.email.requires = [IS_IN_DB(self.db, VAR_254.email,
                                           VAR_59=self.messages.invalid_email)]
        VAR_7 = SQLFORM(VAR_254,
                        VAR_133=['email'],
                        hidden=dict(_next=VAR_112),
                        showid=self.settings.showid,
                        submit_button=self.messages.submit_button,
                        delete_label=self.messages.delete_label,
                        VAR_273=self.settings.formstyle,
                        separator=self.settings.label_separator
                        )
        if VAR_232:
            # NOTE(review): `captcha.comment` unresolved (presumably
            # VAR_232.comment) — confirm against upstream.
            FUNC_5(VAR_7, VAR_232.label, VAR_232,
                   captcha.comment, self.settings.formstyle, 'captcha__row')
        if VAR_7.accepts(VAR_56, VAR_139 if self.csrf_prevention else None,
                         VAR_171='retrieve_username', dbio=False,
                         VAR_134=onvalidation, hideerror=self.settings.hideerror):
            VAR_373 = VAR_254._db(VAR_254.email == VAR_7.vars.email).select()
            if not VAR_373:
                VAR_263.session.flash = \
                    self.messages.invalid_email
                redirect(self.url(VAR_11=VAR_56.args))
            # One address may own several usernames; mail them all at once.
            VAR_124 = ', '.join(u.username for u in VAR_373)
            # NOTE(review): `username` in the dict value is unresolved
            # (presumably VAR_124) — confirm against upstream.
            self.settings.mailer.send(VAR_35=VAR_7.vars.email,
                                      VAR_36=self.messages.retrieve_username_subject,
                                      VAR_37=self.messages.retrieve_username % dict(VAR_124=username))
            VAR_139.flash = self.messages.email_sent
            for VAR_141 in VAR_373:
                self.log_event(VAR_136, VAR_141)
            VAR_26(VAR_135, VAR_7)
            if not VAR_112:
                VAR_112 = self.url(VAR_11=VAR_56.args)
            else:
                VAR_112 = FUNC_4(VAR_112, VAR_7)
            redirect(VAR_112)
        VAR_254.email.requires = VAR_271
        return VAR_7
def FUNC_55(self):
import .string
import .random
VAR_132 = ''
VAR_274 = r'!#$*'
for i in range(0, 3):
VAR_132 += random.choice(VAR_80.ascii_lowercase)
VAR_132 += random.choice(VAR_80.ascii_uppercase)
VAR_132 += random.choice(VAR_80.digits)
VAR_132 += random.choice(VAR_274)
return ''.join(random.sample(VAR_132, len(VAR_132)))
    def FUNC_56(self,
                VAR_112=VAR_3,
                VAR_134=VAR_3,
                VAR_135=VAR_3,
                VAR_136=VAR_3,
                ):
        """Deprecated password-recovery flow.

        On accept it generates a brand-new random password, stores its
        hash on the user record (clearing the registration key) and
        emails the plaintext to the user. Requires a configured mailer.
        `onvalidation` and `password` appear unresolved in this
        rendering — flagged inline.
        """
        VAR_254 = self.table_user()
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        VAR_139 = VAR_263.session
        if not self.settings.mailer:
            VAR_244.flash = self.messages.function_disabled
            return ''
        if VAR_112 is VAR_3:
            VAR_112 = self.get_vars_next() or self.settings.retrieve_password_next
        if VAR_134 is VAR_3:
            VAR_134 = self.settings.retrieve_password_onvalidation
        if VAR_135 is VAR_3:
            VAR_135 = self.settings.retrieve_password_onaccept
        if VAR_136 is VAR_3:
            VAR_136 = self.messages['retrieve_password_log']
        # Temporarily require that the email exists in the table.
        VAR_271 = VAR_254.email.requires
        VAR_254.email.requires = [IS_IN_DB(self.db, VAR_254.email,
                                           VAR_59=self.messages.invalid_email)]
        VAR_7 = SQLFORM(VAR_254,
                        VAR_133=['email'],
                        hidden=dict(_next=VAR_112),
                        showid=self.settings.showid,
                        submit_button=self.messages.submit_button,
                        delete_label=self.messages.delete_label,
                        VAR_273=self.settings.formstyle,
                        separator=self.settings.label_separator
                        )
        if VAR_7.accepts(VAR_56, VAR_139 if self.csrf_prevention else None,
                         VAR_171='retrieve_password', dbio=False,
                         VAR_134=onvalidation, hideerror=self.settings.hideerror):
            VAR_141 = VAR_254(email=VAR_7.vars.email)
            if not VAR_141:
                VAR_263.session.flash = \
                    self.messages.invalid_email
                redirect(self.url(VAR_11=VAR_56.args))
            # Refuse accounts still pending/disabled/blocked.
            VAR_199 = VAR_141.registration_key
            if VAR_199 in ('pending', 'disabled', 'blocked') or (VAR_199 or '').startswith('pending'):
                VAR_263.session.flash = \
                    self.messages.registration_pending
                redirect(self.url(VAR_11=VAR_56.args))
            VAR_132 = self.random_password()
            VAR_261 = self.settings.password_field
            # Store the validated (hashed) password and clear the key.
            VAR_27 = {
                VAR_261: str(VAR_254[VAR_261].validate(VAR_132)[0]),
                'registration_key': ''
            }
            VAR_141.update_record(**VAR_27)
            # NOTE(review): `password` in the dict value is unresolved
            # (presumably VAR_132) — confirm against upstream.
            if self.settings.mailer and \
               self.settings.mailer.send(VAR_35=VAR_7.vars.email,
                                         VAR_36=self.messages.retrieve_password_subject,
                                         VAR_37=self.messages.retrieve_password % dict(VAR_132=password)):
                VAR_139.flash = self.messages.email_sent
            else:
                VAR_139.flash = self.messages.unable_send_email
            self.log_event(VAR_136, VAR_141)
            VAR_26(VAR_135, VAR_7)
            if not VAR_112:
                VAR_112 = self.url(VAR_11=VAR_56.args)
            else:
                VAR_112 = FUNC_4(VAR_112, VAR_7)
            redirect(VAR_112)
        VAR_254.email.requires = VAR_271
        return VAR_7
    def FUNC_57(self,
                VAR_112=VAR_3,
                VAR_134=VAR_3,
                VAR_135=VAR_3,
                VAR_136=VAR_3,
                ):
        """Confirm a registration via an emailed, timestamped key.

        When ``prevent_password_reset_attacks`` is on, the key is moved
        out of the URL into the session and the page redirects to itself
        so the key never stays visible. Keys older than 24 hours (the
        timestamp is the prefix before '-') or unknown keys are rejected.
        On accept, names and the new password are stored and both keys
        are cleared.
        """
        VAR_254 = self.table_user()
        VAR_56 = VAR_263.request
        VAR_139 = VAR_263.session
        if VAR_112 is VAR_3:
            VAR_112 = self.get_vars_next() or self.settings.reset_password_next
        if self.settings.prevent_password_reset_attacks:
            VAR_199 = VAR_56.vars.key
            if not VAR_199 and len(VAR_56.args) > 1:
                VAR_199 = VAR_56.args[-1]
            if VAR_199:
                # Stash the key in the session and strip it from the URL.
                VAR_139._reset_password_key = VAR_199
                if VAR_112:
                    VAR_452 = {'_next': VAR_112}
                else:
                    VAR_452 = {}
                redirect(self.url(VAR_11='confirm_registration',
                                  VAR_96=VAR_452))
            else:
                VAR_199 = VAR_139._reset_password_key
        else:
            VAR_199 = VAR_56.vars.key or FUNC_0(-1)
        try:
            # Key format: '<epoch-seconds>-<uuid>'; expire after 24h.
            VAR_374 = int(VAR_199.split('-')[0])
            if time.time() - VAR_374 > 60 * 60 * 24:
                raise Exception
            VAR_141 = VAR_254(VAR_275=VAR_199)
            if not VAR_141:
                raise Exception
        except Exception as e:
            VAR_139.flash = self.messages.invalid_reset_password
            redirect(VAR_112, client_side=self.settings.client_side)
        VAR_261 = self.settings.password_field
        VAR_7 = SQLFORM.factory(
            VAR_1('first_name',
                  VAR_60='First Name',
                  VAR_91=True),
            VAR_1('last_name',
                  VAR_60='Last Name',
                  VAR_91=True),
            VAR_1('new_password', 'password',
                  VAR_60=self.messages.new_password,
                  VAR_279=self.table_user()[VAR_261].requires),
            VAR_1('new_password2', 'password',
                  VAR_60=self.messages.verify_password,
                  VAR_279=[IS_EXPR('value==%s' % repr(VAR_56.vars.new_password),
                                   self.messages.mismatched_password)]),
            submit_button='Confirm Registration',
            hidden=dict(_next=VAR_112),
            VAR_273=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        if VAR_7.process().accepted:
            # Persist names and password; clear both workflow keys.
            VAR_141.update_record(
                **{VAR_261: str(VAR_7.vars.new_password),
                   'first_name': str(VAR_7.vars.first_name),
                   'last_name': str(VAR_7.vars.last_name),
                   'registration_key': '',
                   'reset_password_key': ''})
            VAR_139.flash = self.messages.password_changed
            if self.settings.login_after_password_change:
                self.login_user(VAR_141)
            redirect(VAR_112, client_side=self.settings.client_side)
        return VAR_7
def FUNC_58(self, VAR_36, VAR_82, VAR_141):
VAR_275 = str(int(time.time())) + '-' + web2py_uuid()
VAR_276 = self.url(self.settings.function,
VAR_11=('confirm_registration',), VAR_96={'key': VAR_275},
VAR_97=True)
VAR_27 = dict(VAR_141)
VAR_27.update(dict(VAR_199=VAR_275, VAR_276=FUNC_173, site=VAR_263.request.env.http_host))
if self.settings.mailer and self.settings.mailer.send(
VAR_35=VAR_141.email,
VAR_36=subject % VAR_27,
VAR_37=VAR_82 % VAR_27):
VAR_141.update_record(VAR_275=reset_password_key)
return True
return False
    def FUNC_59(self, VAR_142=100):
        """Bulk-invite form: register and email up to *VAR_142* addresses.

        Requires a logged-in user and ``bulk_register_enabled``. Parses
        addresses out of the submitted text, skips ones that already
        exist, registers the rest bare and sends each the registration
        email; returns a DIV summarizing sent / failed / existing
        addresses (addresses beyond the limit are reported as failed).
        """
        if not self.user:
            redirect(self.settings.login_url)
        if not self.settings.bulk_register_enabled:
            return HTTP(404)
        VAR_7 = SQLFORM.factory(
            VAR_1('subject', 'string', VAR_5=self.messages.bulk_invite_subject, VAR_279=IS_NOT_EMPTY()),
            VAR_1('emails', 'text', VAR_279=IS_NOT_EMPTY()),
            VAR_1('message', 'text', VAR_5=self.messages.bulk_invite_body, VAR_279=IS_NOT_EMPTY()),
            VAR_273=self.settings.formstyle)
        if VAR_7.process().accepted:
            # Loose email matcher: token@token with quotes/brackets excluded.
            VAR_375 = re.compile('[^\VAR_278\'"@<>,;:]+\@[^\VAR_278\'"@<>,;:]+').findall(VAR_7.vars.emails)
            VAR_376 = []
            VAR_377 = []
            VAR_378 = []
            for email in VAR_375[:VAR_142]:
                if self.table_user()(email=email):
                    VAR_378.append(email)
                else:
                    VAR_141 = self.register_bare(email=email)
                    if self.email_registration(VAR_7.vars.subject, VAR_7.vars.message, VAR_141):
                        VAR_376.append(email)
                    else:
                        VAR_377.append(email)
            # Everything past the limit counts as failed.
            VAR_377 += VAR_375[VAR_142:]
            VAR_7 = DIV(H4('Emails sent'), UL(*[A(VAR_30, _href='mailto:' + VAR_30) for VAR_30 in VAR_376]),
                        H4('Emails failed'), UL(*[A(VAR_30, _href='mailto:' + VAR_30) for VAR_30 in VAR_377]),
                        H4('Emails existing'), UL(*[A(VAR_30, _href='mailto:' + VAR_30) for VAR_30 in VAR_378]))
        return VAR_7
def FUNC_60(self):
if not self.user:
redirect(self.settings.login_url)
VAR_277 = self.table_token()
VAR_277.user_id.writable = False
VAR_277.user_id.default = self.user.id
VAR_277.token.writable = False
if VAR_263.request.args(1) == 'new':
VAR_277.token.readable = False
VAR_7 = SQLFORM.grid(VAR_277, VAR_11=['manage_tokens'])
return VAR_7
    def FUNC_61(self,
                VAR_112=VAR_3,
                VAR_134=VAR_3,
                VAR_135=VAR_3,
                VAR_136=VAR_3,
                ):
        """Return (and process) the password-reset form for a keyed link.

        With ``prevent_password_reset_attacks`` the key is moved from the
        URL into the session and the page redirects to itself. Keys older
        than 24 hours or unknown are rejected, as are accounts whose
        registration key is pending/disabled/blocked. On accept, the new
        password is stored and both workflow keys are cleared.
        `onvalidation` appears unresolved in this rendering — flagged
        inline.
        """
        VAR_254 = self.table_user()
        VAR_56 = VAR_263.request
        VAR_139 = VAR_263.session
        if VAR_112 is VAR_3:
            VAR_112 = self.get_vars_next() or self.settings.reset_password_next
        if self.settings.prevent_password_reset_attacks:
            VAR_199 = VAR_56.vars.key
            if VAR_199:
                # Hide the key from the URL by stashing it in the session.
                VAR_139._reset_password_key = VAR_199
                redirect(self.url(VAR_11='reset_password'))
            else:
                VAR_199 = VAR_139._reset_password_key
        else:
            VAR_199 = VAR_56.vars.key
        try:
            # Key format: '<epoch-seconds>-<uuid>'; expire after 24h.
            VAR_374 = int(VAR_199.split('-')[0])
            if time.time() - VAR_374 > 60 * 60 * 24:
                raise Exception
            VAR_141 = VAR_254(VAR_275=VAR_199)
            if not VAR_141:
                raise Exception
        except Exception:
            VAR_139.flash = self.messages.invalid_reset_password
            redirect(VAR_112, client_side=self.settings.client_side)
        VAR_199 = VAR_141.registration_key
        if VAR_199 in ('pending', 'disabled', 'blocked') or (VAR_199 or '').startswith('pending'):
            VAR_139.flash = self.messages.registration_pending
            redirect(VAR_112, client_side=self.settings.client_side)
        if VAR_134 is VAR_3:
            VAR_134 = self.settings.reset_password_onvalidation
        if VAR_135 is VAR_3:
            VAR_135 = self.settings.reset_password_onaccept
        VAR_261 = self.settings.password_field
        VAR_7 = SQLFORM.factory(
            VAR_1('new_password', 'password',
                  VAR_60=self.messages.new_password,
                  VAR_279=self.table_user()[VAR_261].requires),
            VAR_1('new_password2', 'password',
                  VAR_60=self.messages.verify_password,
                  VAR_279=[IS_EXPR('value==%s' % repr(VAR_56.vars.new_password),
                                   self.messages.mismatched_password)]),
            submit_button=self.messages.password_reset_button,
            hidden=dict(_next=VAR_112),
            VAR_273=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        # NOTE(review): `onvalidation` unresolved (presumably VAR_134).
        if VAR_7.accepts(VAR_56, VAR_139, VAR_134=onvalidation,
                         hideerror=self.settings.hideerror):
            VAR_141.update_record(
                **{VAR_261: str(VAR_7.vars.new_password),
                   'registration_key': '',
                   'reset_password_key': ''})
            VAR_139.flash = self.messages.password_changed
            if self.settings.login_after_password_change:
                self.login_user(VAR_141)
            VAR_26(VAR_135, VAR_7)
            redirect(VAR_112, client_side=self.settings.client_side)
        return VAR_7
    def FUNC_62(self,
                VAR_112=VAR_3,
                VAR_134=VAR_3,
                VAR_135=VAR_3,
                VAR_136=VAR_3,
                ):
        """Return (and process) the "request password reset" form.

        Asks for the login field (email or username), validates it
        against the database, refuses pending/disabled/blocked accounts,
        then emails a reset link via ``self.email_reset_password``.
        Requires a configured mailer. `captcha.comment` and
        `onvalidation` appear unresolved in this rendering — flagged
        inline.
        """
        VAR_254 = self.table_user()
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        VAR_139 = VAR_263.session
        VAR_232 = self.settings.retrieve_password_captcha or \
            (self.settings.retrieve_password_captcha is not False and self.settings.captcha)
        if VAR_112 is VAR_3:
            VAR_112 = self.get_vars_next() or self.settings.request_reset_password_next
        if not self.settings.mailer:
            VAR_244.flash = self.messages.function_disabled
            return ''
        if VAR_134 is VAR_3:
            VAR_134 = self.settings.request_reset_password_onvalidation
        if VAR_135 is VAR_3:
            VAR_135 = self.settings.request_reset_password_onaccept
        if VAR_136 is VAR_3:
            VAR_136 = self.messages['reset_password_log']
        VAR_260 = self.settings.login_userfield or 'username' \
            if self.settings.login_userfield or 'username' \
            in VAR_254.fields else 'email'
        # Require the submitted identifier to exist in the table.
        if VAR_260 == 'email':
            VAR_254.email.requires = [
                IS_EMAIL(VAR_59=self.messages.invalid_email),
                IS_IN_DB(self.db, VAR_254.email,
                         VAR_59=self.messages.invalid_email)]
            if not self.settings.email_case_sensitive:
                VAR_254.email.requires.insert(0, IS_LOWER())
        elif VAR_260 == 'username':
            VAR_254.username.requires = [
                IS_IN_DB(self.db, VAR_254.username,
                         VAR_59=self.messages.invalid_username)]
            if not self.settings.username_case_sensitive:
                VAR_254.username.requires.insert(0, IS_LOWER())
        VAR_7 = SQLFORM(VAR_254,
                        VAR_133=[VAR_260],
                        hidden=dict(_next=VAR_112),
                        showid=self.settings.showid,
                        submit_button=self.messages.password_reset_button,
                        delete_label=self.messages.delete_label,
                        VAR_273=self.settings.formstyle,
                        separator=self.settings.label_separator
                        )
        if VAR_232:
            # NOTE(review): `captcha.comment` unresolved (presumably
            # VAR_232.comment) — confirm against upstream.
            FUNC_5(VAR_7, VAR_232.label, VAR_232,
                   captcha.comment, self.settings.formstyle, 'captcha__row')
        if VAR_7.accepts(VAR_56, VAR_139 if self.csrf_prevention else None,
                         VAR_171='reset_password', dbio=False,
                         VAR_134=onvalidation,
                         hideerror=self.settings.hideerror):
            VAR_141 = VAR_254(**{VAR_260: VAR_7.vars.get(VAR_260)})
            VAR_199 = VAR_141.registration_key
            if not VAR_141:
                VAR_139.flash = self.messages['invalid_%s' % VAR_260]
                redirect(self.url(VAR_11=VAR_56.args),
                         client_side=self.settings.client_side)
            elif VAR_199 in ('pending', 'disabled', 'blocked') or (VAR_199 or '').startswith('pending'):
                VAR_139.flash = self.messages.registration_pending
                redirect(self.url(VAR_11=VAR_56.args),
                         client_side=self.settings.client_side)
            if self.email_reset_password(VAR_141):
                VAR_139.flash = self.messages.email_sent
            else:
                VAR_139.flash = self.messages.unable_send_email
            self.log_event(VAR_136, VAR_141)
            VAR_26(VAR_135, VAR_7)
            if not VAR_112:
                VAR_112 = self.url(VAR_11=VAR_56.args)
            else:
                VAR_112 = FUNC_4(VAR_112, VAR_7)
            redirect(VAR_112, client_side=self.settings.client_side)
        return VAR_7
def FUNC_63(self, VAR_141):
VAR_275 = str(int(time.time())) + '-' + web2py_uuid()
VAR_276 = self.url(self.settings.function,
VAR_11=('reset_password',), VAR_96={'key': VAR_275},
VAR_97=True)
VAR_27 = dict(VAR_141)
VAR_27.update(dict(VAR_199=VAR_275, VAR_276=FUNC_173))
if self.settings.mailer and self.settings.mailer.send(
VAR_35=VAR_141.email,
VAR_36=self.messages.reset_password_subject,
VAR_37=self.messages.reset_password % VAR_27):
VAR_141.update_record(VAR_275=reset_password_key)
return True
return False
def FUNC_64(self,
VAR_112=VAR_3,
VAR_134=VAR_3,
VAR_135=VAR_3,
VAR_136=VAR_3,
):
if self.settings.reset_password_requires_verification:
return self.request_reset_password(VAR_112, VAR_134, VAR_135, VAR_136)
else:
return self.reset_password_deprecated(VAR_112, VAR_134, VAR_135, VAR_136)
    def FUNC_65(self,
                VAR_112=VAR_3,
                VAR_134=VAR_3,
                VAR_135=VAR_3,
                VAR_136=VAR_3,
                ):
        """Return a change-password form for the currently logged-in user.

        Redirects to the login page when not logged in; may delegate to an
        alternate login form's own change-password URL. On accept, verifies
        the old password, stores the new one, flashes a message, logs the
        event and redirects to `VAR_112`.
        """
        if not self.is_logged_in():
            redirect(self.settings.login_url,
                     client_side=self.settings.client_side)
        # Alternate login backends may manage passwords themselves.
        if self.settings.login_form != self:
            VAR_370 = self.settings.login_form
            if hasattr(VAR_370, 'change_password_url'):
                VAR_112 = VAR_370.change_password_url(VAR_112)
                if VAR_112 is not None:
                    redirect(VAR_112)
        VAR_101 = self.db
        VAR_254 = self.table_user()
        # Row set restricted to the current user's record.
        VAR_278 = VAR_101(VAR_254.id == self.user.id)
        VAR_56 = VAR_263.request
        VAR_139 = VAR_263.session
        # Fill unset keyword arguments from settings/messages defaults.
        if VAR_112 is VAR_3:
            VAR_112 = self.get_vars_next() or self.settings.change_password_next
        if VAR_134 is VAR_3:
            VAR_134 = self.settings.change_password_onvalidation
        if VAR_135 is VAR_3:
            VAR_135 = self.settings.change_password_onaccept
        if VAR_136 is VAR_3:
            VAR_136 = self.messages['change_password_log']
        VAR_261 = self.settings.password_field
        VAR_279 = VAR_254[VAR_261].requires
        if not isinstance(VAR_279, (list, tuple)):
            # NOTE(review): `FUNC_73` is a free name (obfuscation artifact);
            # presumably the single validator being wrapped in a list.
            VAR_279 = [FUNC_73]
        # Keep only CRYPT validators; the old-password field must hash the
        # same way but without enforcing the minimum-length rule.
        VAR_279 = [VAR_445 for VAR_445 in VAR_279 if isinstance(VAR_445, CRYPT)]
        if VAR_279:
            VAR_279[0] = CRYPT(**VAR_279[0].__dict__)  # Copy the existing CRYPT VAR_172
            VAR_279[0].min_length = 0  # But do not enforce minimum length for the old VAR_132
        VAR_7 = SQLFORM.factory(
            VAR_1('old_password', 'password', VAR_279=FUNC_73,
                  VAR_60=self.messages.old_password),
            VAR_1('new_password', 'password',
                  VAR_60=self.messages.new_password,
                  VAR_279=VAR_254[VAR_261].requires),
            VAR_1('new_password2', 'password',
                  VAR_60=self.messages.verify_password,
                  VAR_279=[IS_EXPR('value==%s' % repr(VAR_56.vars.new_password),
                                   self.messages.mismatched_password)]),
            submit_button=self.messages.password_change_button,
            hidden=dict(_next=VAR_112),
            VAR_273=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        if VAR_7.accepts(VAR_56, VAR_139,
                         VAR_171='change_password',
                         VAR_134=onvalidation,
                         hideerror=self.settings.hideerror):
            # Compare the (hashed) submitted old password against the stored one.
            VAR_379 = VAR_278.select(VAR_175=(0, 1), orderby_on_limitby=False).first()
            if not VAR_7.vars['old_password'] == VAR_379[VAR_261]:
                VAR_7.errors['old_password'] = self.messages.invalid_password
            else:
                VAR_27 = {VAR_261: str(VAR_7.vars.new_password)}
                VAR_278.update(**VAR_27)
                VAR_139.flash = self.messages.password_changed
                self.log_event(VAR_136, self.user)
                VAR_26(VAR_135, VAR_7)
                if not VAR_112:
                    VAR_112 = self.url(VAR_11=VAR_56.args)
                else:
                    VAR_112 = FUNC_4(VAR_112, VAR_7)
                redirect(VAR_112, client_side=self.settings.client_side)
        return VAR_7
    def FUNC_66(self,
                VAR_112=VAR_3,
                VAR_134=VAR_3,
                VAR_135=VAR_3,
                VAR_136=VAR_3,
                ):
        """Return an edit-profile form for the currently logged-in user.

        Password and email fields are made read-only. On accept, refreshes
        the session copy of the user, logs the event, and redirects to
        `VAR_112`; a deletion (when allowed) triggers logout.
        """
        VAR_254 = self.table_user()
        if not self.is_logged_in():
            redirect(self.settings.login_url,
                     client_side=self.settings.client_side)
        VAR_261 = self.settings.password_field
        # Credentials cannot be edited through the profile form.
        VAR_254[VAR_261].writable = False
        VAR_254['email'].writable = False
        VAR_56 = VAR_263.request
        VAR_139 = VAR_263.session
        if VAR_112 is VAR_3:
            VAR_112 = self.get_vars_next() or self.settings.profile_next
        if VAR_134 is VAR_3:
            VAR_134 = self.settings.profile_onvalidation
        if VAR_135 is VAR_3:
            VAR_135 = self.settings.profile_onaccept
        if VAR_136 is VAR_3:
            VAR_136 = self.messages['profile_log']
        VAR_7 = SQLFORM(
            VAR_254,
            self.user.id,
            VAR_133=self.settings.profile_fields,
            hidden=dict(_next=VAR_112),
            showid=self.settings.showid,
            submit_button=self.messages.profile_save_button,
            delete_label=self.messages.delete_label,
            upload=self.settings.download_url,
            VAR_273=self.settings.formstyle,
            separator=self.settings.label_separator,
            VAR_170=self.settings.allow_delete_accounts,
        )
        if VAR_7.accepts(VAR_56, VAR_139,
                         VAR_171='profile',
                         VAR_134=onvalidation,
                         hideerror=self.settings.hideerror):
            VAR_360 = self.settings.extra_fields.get(self.settings.table_user_name, [])
            if not VAR_7.deleted:
                # Computed extra fields require a fresh re-read of the record;
                # otherwise the submitted values are enough to refresh the session.
                if any(VAR_10.compute for VAR_10 in VAR_360):
                    VAR_141 = VAR_254[self.user.id]
                    self._update_session_user(VAR_141)
                    self.update_groups()
                else:
                    self.user.update(VAR_254._filter_fields(VAR_7.vars))
            VAR_139.flash = self.messages.profile_updated
            self.log_event(VAR_136, self.user)
            VAR_26(VAR_135, VAR_7)
            if VAR_7.deleted:
                return self.logout()
            if not VAR_112:
                VAR_112 = self.url(VAR_11=VAR_56.args)
            else:
                VAR_112 = FUNC_4(VAR_112, VAR_7)
            redirect(VAR_112, client_side=self.settings.client_side)
        return VAR_7
    def FUNC_67(self):
        """Run the configured login `onaccept` callback(s) with a fake form.

        The form only carries the current user in its vars; callbacks are
        invoked via VAR_26 one at a time.
        """
        VAR_135 = self.settings.login_onaccept
        if VAR_135:
            VAR_7 = Storage(dict(VAR_96=self.user))
            if not isinstance(VAR_135, (list, tuple)):
                # NOTE(review): `onaccept` is a free name (obfuscation artifact);
                # presumably this wraps the single callback VAR_135 in a list.
                VAR_135 = [onaccept]
            for VAR_26 in VAR_135:
                VAR_26(VAR_7)
def VAR_111(self):
if not self.jwt_handler:
raise HTTP(401, "Not authorized")
else:
VAR_380 = self.jwt_handler.jwt_token_manager()
raise HTTP(200, VAR_380, cookies=None, **VAR_263.response.headers)
def FUNC_69(self):
return self.is_logged_in() and 'impersonator' in VAR_263.session.auth
    def FUNC_70(self, VAR_143=VAR_3):
        """Impersonate another user (or stop impersonating).

        With a target user id: requires the 'impersonate' permission, saves
        the current session pickled under `impersonator`, and switches the
        session user. With 0/'0': restores the saved session. Without post
        vars, returns a small form asking for a user id.
        """
        VAR_56 = VAR_263.request
        VAR_139 = VAR_263.session
        VAR_63 = VAR_139.auth
        VAR_254 = self.table_user()
        if not self.is_logged_in():
            raise HTTP(401, "Not Authorized")
        VAR_280 = VAR_63.user.id
        VAR_281 = VAR_143  # remember whether an explicit id was passed
        VAR_141 = None
        if VAR_143 is VAR_3:
            VAR_143 = VAR_263.request.post_vars.user_id
        if VAR_143 and VAR_143 != self.user.id and VAR_143 != '0':
            if not self.has_permission('impersonate',
                                       self.table_user(),
                                       VAR_143):
                raise HTTP(403, "Forbidden")
            VAR_141 = VAR_254(VAR_143)
            if not VAR_141:
                raise HTTP(401, "Not Authorized")
            # Snapshot the whole current session so it can be restored later.
            VAR_63.impersonator = pickle.dumps(VAR_139, pickle.HIGHEST_PROTOCOL)
            VAR_63.user.update(
                VAR_254._filter_fields(VAR_141, True))
            self.user = VAR_63.user
            self.update_groups()
            VAR_136 = self.messages['impersonate_log']
            self.log_event(VAR_136, dict(VAR_215=VAR_280, other_id=VAR_63.user.id))
            self.run_login_onaccept()
        elif VAR_143 in (0, '0'):
            # Stop impersonating: restore the pickled original session.
            if self.is_impersonating():
                VAR_139.clear()
                VAR_139.update(pickle.loads(VAR_63.impersonator))
                self.user = VAR_139.auth.user
                self.update_groups()
                self.run_login_onaccept()
            return None
        if VAR_281 is VAR_3 and not VAR_56.post_vars:
            return SQLFORM.factory(VAR_1('user_id', 'integer'))
        elif not VAR_141:
            return None
        else:
            return SQLFORM(VAR_254, VAR_141.id, readonly=True)
    def VAR_167(self):
        """Return a TABLE listing the logged-in user's groups and descriptions.

        Redirects to login when not authenticated; returns None when the
        user has no memberships.
        """
        if not self.is_logged_in():
            redirect(self.settings.login_url)
        VAR_282 = self.table_membership()
        VAR_283 = self.db(
            VAR_282.user_id == self.user.id).select()
        VAR_153 = TABLE()
        for VAR_284 in VAR_283:
            VAR_381 = self.table_group()
            VAR_167 = self.db(VAR_381.id == VAR_284.group_id).select()
            if VAR_167:
                VAR_405 = VAR_167[0]
                # NOTE(review): '(%VAR_278)' looks like a damaged '(%s)' format
                # string (obfuscation artifact) — would raise at runtime; verify.
                VAR_153.append(TR(H3(VAR_405.role, '(%VAR_278)' % VAR_405.id)))
                VAR_153.append(TR(P(VAR_405.description)))
        if not VAR_283:
            return None
        return VAR_153
def FUNC_72(self):
if VAR_263.request.ajax:
raise HTTP(403, 'ACCESS DENIED')
return self.messages.access_denied
    def FUNC_27(self, VAR_90=None):
        """Delegate JWT authorization to the configured handler.

        Raises 401 when no JWT handler exists.
        """
        if not self.jwt_handler:
            raise HTTP(401, "Not authorized")
        else:
            # NOTE(review): `otherwise` is a free name (obfuscation artifact);
            # presumably this forwards VAR_90. Verify before relying on it.
            return self.jwt_handler.allows_jwt(VAR_90=otherwise)
    def VAR_279(self, VAR_144, VAR_145=True, VAR_90=None):
        """Decorator factory enforcing login and an arbitrary condition.

        `VAR_144` is the condition (value or callable); `VAR_145` toggles the
        login requirement (value or callable); `VAR_90` is the on-failure
        target (URL or callable). Returns a decorator preserving the wrapped
        function's metadata.
        """
        def FUNC_153(VAR_114):
            def VAR_10(*VAR_9, **VAR_13):
                # Try HTTP basic auth first, then fall back to the session user.
                VAR_431, VAR_432, VAR_141 = self.basic()
                VAR_141 = VAR_141 or self.user
                VAR_433 = VAR_145
                if callable(VAR_433):
                    # NOTE(review): assigns free name `login_required`
                    # (obfuscation artifact); presumably should rebind VAR_433.
                    login_required = VAR_433()
                if VAR_433:
                    if not VAR_141:
                        # Unauthenticated: pick the failure mode by request type.
                        if VAR_263.request.ajax:
                            raise HTTP(401, self.messages.ajax_failed_authentication)
                        elif VAR_90 is not None:
                            if callable(VAR_90):
                                return VAR_90()
                            redirect(VAR_90)
                        elif self.settings.allow_basic_login_only or \
                                VAR_432 or VAR_263.request.is_restful:
                            raise HTTP(403, "Not authorized")
                        else:
                            VAR_112 = self.here()
                            VAR_263.session.flash = VAR_263.response.flash
                            return FUNC_3(self.settings.on_failed_authentication,
                                          self.settings.login_url + '?_next=' + urllib_quote(VAR_112))
                # Evaluate the authorization condition (may be callable).
                if callable(VAR_144):
                    VAR_453 = VAR_144()
                else:
                    VAR_453 = VAR_144
                if not VAR_453:
                    VAR_263.session.flash = self.messages.access_denied
                    return FUNC_3(
                        self.settings.on_failed_authorization)
                return VAR_114(*VAR_9, **VAR_13)
            # Preserve the decorated function's identity for introspection.
            VAR_10.__doc__ = VAR_114.__doc__
            VAR_10.__name__ = VAR_114.__name__
            VAR_10.__dict__.update(VAR_114.__dict__)
            return VAR_10
        return FUNC_153
    def VAR_145(self, VAR_90=None):
        """Decorator requiring only that a user is logged in."""
        # NOTE(review): `otherwise` is a free name (obfuscation artifact);
        # presumably forwards VAR_90.
        return self.requires(True, VAR_90=otherwise)
    def FUNC_75(self, VAR_90=None):
        """Decorator requiring login, also honoring token-based authentication.

        When tokens are enabled, looks up the token from the request header
        or `_token` var and logs the matching user in before deferring to
        the standard login requirement.
        """
        if self.settings.enable_tokens is True:
            VAR_141 = None
            VAR_56 = VAR_263.request
            VAR_85 = VAR_56.env.http_web2py_user_token or VAR_56.vars._token
            VAR_277 = self.table_token()
            VAR_254 = self.table_user()
            from gluon.settings import global_settings
            if global_settings.web2py_runtime_gae:
                # GAE has no joins: fetch the token row, then the user row.
                # NOTE(review): `token` is a free name (obfuscation artifact);
                # presumably VAR_85.
                VAR_266 = VAR_277(VAR_85=token)
                if VAR_266:
                    VAR_141 = VAR_254(VAR_266.user_id)
            else:
                VAR_266 = self.db(VAR_277.token == VAR_85)(VAR_254.id == VAR_277.user_id).select().first()
                if VAR_266:
                    VAR_141 = VAR_266[VAR_254._tablename]
            if VAR_141:
                self.login_user(VAR_141)
        # NOTE(review): `otherwise` is a free name (obfuscation artifact).
        return self.requires(True, VAR_90=otherwise)
    def FUNC_76(self, VAR_146=None, VAR_147=None, VAR_90=None):
        """Decorator requiring membership in a group (by role or group id)."""
        # NOTE(review): `group_id`, `role`, `otherwise` are free names
        # (obfuscation artifacts) — presumably VAR_147, VAR_146 and VAR_90.
        def FUNC_160(self=self, VAR_147=group_id, VAR_146=role):
            return self.has_membership(VAR_147=group_id, VAR_146=role)
        return self.requires(FUNC_160, VAR_90=otherwise)
    def FUNC_77(self, VAR_148, VAR_149='', VAR_150=0,
                VAR_90=None):
        """Decorator requiring a named permission on a table/record."""
        # NOTE(review): `name`, `table_name`, `record_id`, `otherwise` are
        # free names (obfuscation artifacts) — presumably VAR_148/149/150/90.
        def FUNC_84(self=self, VAR_148=name, VAR_149=table_name, VAR_150=record_id):
            return self.has_permission(VAR_148, VAR_149, VAR_150)
        return self.requires(FUNC_84, VAR_90=otherwise)
    def FUNC_78(self, VAR_90=None, VAR_151=True, VAR_152=True):
        """Decorator requiring a valid digitally-signed URL."""
        # NOTE(review): `hash_vars` is a free name (obfuscation artifact,
        # presumably VAR_151); note VAR_152 is hardcoded True in the call.
        def FUNC_161():
            return URL.verify(VAR_263.request, user_signature=True, VAR_151=hash_vars, VAR_152=True)
        return self.requires(FUNC_161, VAR_90)
    def FUNC_79(self, VAR_148, VAR_153, VAR_143=None):
        """Build a query selecting the records of `VAR_153` accessible to a user.

        `VAR_148` is the permission name; `VAR_153` may be a table, table
        name, Set or Query (Sets/Queries are expanded per referenced table).
        Users with table-wide permission get an always-true query.
        """
        if not VAR_143:
            VAR_143 = self.user_id
        VAR_101 = self.db
        if isinstance(VAR_153, str) and VAR_153 in self.db.tables():
            VAR_153 = self.db[VAR_153]
        elif isinstance(VAR_153, (Set, Query)):
            # Recurse over every table referenced by the set/query.
            if isinstance(VAR_153, Set):
                VAR_454 = VAR_153.query
            else:
                VAR_454 = VAR_153
            VAR_434 = VAR_101._adapter.tables(VAR_454)
            for VAR_8 in VAR_434:
                # NOTE(review): `user_id` is a free name (obfuscation artifact,
                # presumably VAR_143).
                VAR_454 &= self.accessible_query(VAR_148, VAR_8, VAR_143=user_id)
            return VAR_454
        if not isinstance(VAR_153, str) and \
                self.has_permission(VAR_148, VAR_153, 0, VAR_143):
            # Table-wide permission: everything is accessible.
            return VAR_153.id > 0
        VAR_284 = self.table_membership()
        VAR_285 = self.table_permission()
        # Records granted through group memberships.
        VAR_173 = VAR_153.id.belongs(
            VAR_101(VAR_284.user_id == VAR_143)
            (VAR_284.group_id == VAR_285.group_id)
            (VAR_285.name == VAR_148)
            (VAR_285.table_name == VAR_153)
            ._select(VAR_285.record_id))
        if self.settings.everybody_group_id:
            # Also include records granted to the "everybody" group.
            VAR_173 |= VAR_153.id.belongs(
                VAR_101(VAR_285.group_id == self.settings.everybody_group_id)
                (VAR_285.name == VAR_148)
                (VAR_285.table_name == VAR_153)
                ._select(VAR_285.record_id))
        return VAR_173
    @staticmethod
    def FUNC_80(VAR_7,
                VAR_154=None,
                VAR_122='current_record',
                VAR_155=False,
                VAR_133=None):
        """Archive the record edited by form `VAR_7` into an archive table.

        Creates `<table>_archive` on the fly when no archive table is given.
        With `VAR_155` (archive-current), values come from the form vars;
        otherwise from the stored record. Returns the new archive row id,
        or None when there is nothing to archive.
        """
        if not VAR_155 and not VAR_7.record:
            return None
        VAR_153 = VAR_7.table
        if not VAR_154:
            VAR_382 = '%s_archive' % VAR_153
            if VAR_382 not in VAR_153._db:
                # Mirror all fields, dropping uniqueness so history can repeat.
                VAR_153._db.define_table(
                    VAR_382,
                    VAR_1(VAR_122, VAR_153),
                    *[VAR_177.clone(unique=False) for VAR_177 in VAR_153])
            VAR_154 = VAR_153._db[VAR_382]
        VAR_286 = {VAR_122: VAR_7.vars.id}
        for VAR_385 in VAR_154.fields:
            if VAR_385 not in ['id', VAR_122]:
                if VAR_155 and VAR_385 in VAR_7.vars:
                    VAR_286[VAR_385] = VAR_7.vars[VAR_385]
                elif VAR_7.record and VAR_385 in VAR_7.record:
                    VAR_286[VAR_385] = VAR_7.record[VAR_385]
        if VAR_133:
            VAR_286.update(VAR_133)
        VAR_215 = VAR_154.insert(**VAR_286)
        return VAR_215
    def VAR_287(self,
                VAR_156=None,
                VAR_157=None,
                VAR_158='markmin',
                VAR_159=False,
                VAR_160='',
                VAR_161=False,
                VAR_162=True,
                VAR_163=None,
                VAR_164=None,
                VAR_165=None,
                VAR_126=True,
                VAR_104=None,
                VAR_105=None,
                VAR_166=False,
                VAR_167=None):
        """Lazily build (once) and optionally render the wiki.

        When a controller/function pair is supplied the wiki is configured
        but not rendered here. Returns the rendered content (XML-wrapped
        string) or None.
        """
        if VAR_104 and VAR_105:
            VAR_162 = False
        if not hasattr(self, '_wiki'):
            # First call: construct the wiki singleton.
            # NOTE(review): the values `render`, `manage_permissions`, etc.
            # are free names (obfuscation artifacts) — presumably the
            # corresponding VAR_15x parameters.
            self._wiki = CLASS_8(self, VAR_158=render,
                                 VAR_159=manage_permissions,
                                 VAR_160=force_prefix,
                                 VAR_161=restrict_search,
                                 VAR_157=env, VAR_163=extra or {},
                                 VAR_164=menu_groups,
                                 VAR_165=templates,
                                 VAR_126=migrate,
                                 VAR_104=controller,
                                 VAR_105=function,
                                 VAR_167=FUNC_71)
        else:
            # Subsequent calls may refresh the extra vars and environment.
            self._wiki.settings.extra = VAR_163 or {}
            self._wiki.env.update(VAR_157 or {})
        VAR_287 = None
        if VAR_162:
            if VAR_156:
                VAR_287 = self._wiki.read(VAR_156, VAR_166)
                if isinstance(VAR_287, dict) and 'content' in VAR_287:
                    VAR_287 = VAR_287['content']
            else:
                VAR_287 = self._wiki()
            if isinstance(VAR_287, basestring):
                VAR_287 = XML(VAR_287)
        return VAR_287
def FUNC_82(self):
if (hasattr(self, "_wiki") and
self._wiki.settings.controller and
self._wiki.settings.function):
self._wiki.automenu()
class CLASS_4(object):  # pragma: no cover
    """CRUD helper exposing create/read/update/delete/select/search actions
    over database tables, with permission checks and event logging.

    Dispatched via __call__ from request args; individual actions are also
    callable directly.
    """

    # Default flash/log message templates.
    # NOTE(review): '%(VAR_215)VAR_278' looks like damaged '%(id)s' format
    # strings (obfuscation artifacts); they would raise if %-formatted.
    VAR_93 = dict(
        submit_button='Submit',
        delete_label='Check VAR_35 delete',
        record_created='Record Created',
        record_updated='Record Updated',
        record_deleted='Record Deleted',
        update_log='Record %(VAR_215)VAR_278 updated',
        create_log='Record %(VAR_215)VAR_278 created',
        read_log='Record %(VAR_215)VAR_278 read',
        delete_log='Record %(VAR_215)VAR_278 deleted',
    )

    def VAR_12(self, VAR_10=None, VAR_11=None, VAR_96=None):
        """Build a URL inside this CRUD controller."""
        if VAR_11 is None:
            VAR_11 = []
        if VAR_96 is None:
            VAR_96 = {}
        # NOTE(review): `FUNC_118`, `args`, `vars` are free names
        # (obfuscation artifacts) — presumably VAR_10, VAR_11, VAR_96.
        return URL(VAR_14=self.settings.controller, VAR_10=FUNC_118, VAR_11=args, VAR_96=vars)

    def __init__(self, VAR_100, VAR_101=None, VAR_104='default'):
        """Initialize settings and messages; accepts (environment, db) or (db,)."""
        self.db = VAR_101
        if not VAR_101 and VAR_100 and isinstance(VAR_100, DAL):
            self.db = VAR_100
        elif not VAR_101:
            raise SyntaxError("must pass VAR_101 as first or second argument")
        self.environment = VAR_263
        VAR_222 = self.settings = Settings()
        VAR_222.auth = None
        VAR_222.logger = None
        VAR_222.create_next = None
        VAR_222.update_next = None
        VAR_222.controller = VAR_104
        VAR_222.delete_next = self.url()
        VAR_222.download_url = self.url('download')
        # Per-event callback lists.
        VAR_222.create_onvalidation = StorageList()
        VAR_222.update_onvalidation = StorageList()
        VAR_222.delete_onvalidation = StorageList()
        VAR_222.create_onaccept = StorageList()
        VAR_222.update_onaccept = StorageList()
        VAR_222.update_ondelete = StorageList()
        VAR_222.delete_onaccept = StorageList()
        VAR_222.update_deletable = True
        VAR_222.showid = False
        VAR_222.keepvalues = False
        VAR_222.create_captcha = None
        VAR_222.update_captcha = None
        VAR_222.captcha = None
        VAR_222.formstyle = 'table3cols'
        VAR_222.label_separator = ': '
        VAR_222.hideerror = False
        VAR_222.detect_record_change = True
        VAR_222.hmac_key = None
        VAR_222.lock_keys = True
        VAR_249 = self.messages = Messages(VAR_263.T)
        # NOTE(review): `default_messages` is the original attribute name;
        # the class attribute above was renamed VAR_93 (obfuscation artifact).
        VAR_249.update(CLASS_4.default_messages)
        VAR_249.lock_keys = True

    def __call__(self):
        """Dispatch on request args: tables/create/select/search/read/update/delete."""
        VAR_11 = VAR_263.request.args
        if len(VAR_11) < 1:
            raise HTTP(404)
        elif VAR_11[0] == 'tables':
            return self.tables()
        elif len(VAR_11) > 1 and not VAR_11(1) in self.db.tables:
            raise HTTP(404)
        VAR_153 = self.db[VAR_11(1)]
        if VAR_11[0] == 'create':
            return self.create(VAR_153)
        elif VAR_11[0] == 'select':
            return self.select(VAR_153, linkto=self.url(VAR_11='read'))
        elif VAR_11[0] == 'search':
            VAR_7, VAR_289 = self.search(VAR_153, linkto=self.url(VAR_11='read'))
            return DIV(VAR_7, SQLTABLE(VAR_289))
        elif VAR_11[0] == 'read':
            return self.read(VAR_153, VAR_11(2))
        elif VAR_11[0] == 'update':
            return self.update(VAR_153, VAR_11(2))
        elif VAR_11[0] == 'delete':
            return self.delete(VAR_153, VAR_11(2))
        else:
            raise HTTP(404)

    def FUNC_83(self, VAR_37, VAR_96):
        """Forward an event to the configured logger, tagged 'crud'."""
        if self.settings.logger:
            self.settings.logger.log_event(VAR_37, VAR_96, origin='crud')

    def FUNC_84(self, VAR_148, VAR_153, VAR_168=0):
        """Check a permission via the attached auth; allow-all when no auth."""
        if not self.settings.auth:
            return True
        try:
            # VAR_168 may be a record (with .id) or a bare id.
            VAR_150 = VAR_168.id
        except:
            VAR_150 = VAR_168
        return self.settings.auth.has_permission(VAR_148, str(VAR_153), VAR_150)

    def VAR_119(self):
        """Return a TABLE of links to the select view of each table."""
        return TABLE(*[TR(A(VAR_148,
                            _href=self.url(VAR_11=('select', VAR_148))))
                       for VAR_148 in self.db.tables])

    @staticmethod
    def FUNC_80(VAR_7, VAR_154=None, VAR_122='current_record'):
        """Archive a form's record; thin wrapper over the Auth-side archive."""
        # NOTE(review): `archive_table`, `current_record` are free names
        # (obfuscation artifacts) — presumably VAR_154 and VAR_122.
        return CLASS_3.archive(VAR_7, VAR_154=archive_table,
                               VAR_122=current_record)

    def FUNC_86(self,
                VAR_153,
                VAR_168,
                VAR_112=VAR_3,
                VAR_134=VAR_3,
                VAR_135=VAR_3,
                VAR_169=VAR_3,
                VAR_136=VAR_3,
                VAR_37=VAR_3,
                VAR_170=VAR_3,
                VAR_171=VAR_3,
                **VAR_172
                ):
        """Return an update form for `VAR_168` of `VAR_153` (create when falsy).

        Checks update/create permissions, handles captchas, accepts the
        form (with optional deletion) and redirects to `VAR_112` on success.
        Sets self.accepted / self.deleted flags.
        """
        if not (isinstance(VAR_153, VAR_0) or VAR_153 in self.db.tables) \
                or (isinstance(VAR_168, str) and not str(VAR_168).isdigit()):
            raise HTTP(404)
        if not isinstance(VAR_153, VAR_0):
            VAR_153 = self.db[VAR_153]
        try:
            VAR_150 = VAR_168.id
        except:
            VAR_150 = VAR_168 or 0
        # Permission gate: 'update' for existing records, 'create' for new.
        if VAR_150 and not self.has_permission('update', VAR_153, VAR_150):
            redirect(self.settings.auth.settings.on_failed_authorization)
        if not VAR_150 and not self.has_permission('create', VAR_153, VAR_150):
            redirect(self.settings.auth.settings.on_failed_authorization)
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        VAR_139 = VAR_263.session
        # JSON requests may carry the payload in a 'json' var.
        if VAR_56.extension == 'json' and VAR_56.vars.json:
            VAR_56.vars.update(FUNC_98.loads(VAR_56.vars.json))
        if VAR_112 is VAR_3:
            VAR_112 = VAR_56.get_vars._next \
                or VAR_56.post_vars._next \
                or self.settings.update_next
        if VAR_134 is VAR_3:
            VAR_134 = self.settings.update_onvalidation
        if VAR_135 is VAR_3:
            VAR_135 = self.settings.update_onaccept
        if VAR_169 is VAR_3:
            VAR_169 = self.settings.update_ondelete
        if VAR_136 is VAR_3:
            VAR_136 = self.messages['update_log']
        if VAR_170 is VAR_3:
            VAR_170 = self.settings.update_deletable
        if VAR_37 is VAR_3:
            VAR_37 = self.messages.record_updated
        if 'hidden' not in VAR_172:
            # NOTE(review): `attributes` is a free name (obfuscation artifact,
            # presumably VAR_172).
            attributes['hidden'] = {}
        VAR_172['hidden']['_next'] = VAR_112
        VAR_7 = SQLFORM(
            VAR_153,
            VAR_168,
            showid=self.settings.showid,
            submit_button=self.messages.submit_button,
            delete_label=self.messages.delete_label,
            VAR_170=deletable,
            upload=self.settings.download_url,
            VAR_273=self.settings.formstyle,
            separator=self.settings.label_separator,
            **VAR_172  # contains hidden
        )
        self.accepted = False
        self.deleted = False
        # Update-forms and create-forms may carry different captchas.
        VAR_232 = self.settings.update_captcha or self.settings.captcha
        if VAR_168 and VAR_232:
            FUNC_5(VAR_7, VAR_232.label, VAR_232, VAR_232.comment, self.settings.formstyle, 'captcha__row')
        VAR_232 = self.settings.create_captcha or self.settings.captcha
        if not VAR_168 and VAR_232:
            FUNC_5(VAR_7, VAR_232.label, VAR_232, VAR_232.comment, self.settings.formstyle, 'captcha__row')
        # Non-HTML (API) requests skip session/formname-based CSRF handling.
        if VAR_56.extension not in ('html', 'load'):
            (VAR_383, VAR_384) = (None, None)
        else:
            (VAR_383, VAR_384) = (
                VAR_139, '%VAR_278/%s' % (VAR_153._tablename, VAR_7.record_id))
        if VAR_171 is not VAR_3:
            VAR_384 = VAR_171
        VAR_288 = self.settings.keepvalues
        if VAR_56.vars.delete_this_record:
            VAR_288 = False
        if isinstance(VAR_134, StorageList):
            VAR_134 = onvalidation.get(VAR_153._tablename, [])
        if VAR_7.accepts(VAR_56, VAR_383, VAR_171=VAR_384,
                         VAR_134=onvalidation, VAR_288=keepvalues,
                         hideerror=self.settings.hideerror,
                         detect_record_change=self.settings.detect_record_change):
            self.accepted = True
            VAR_244.flash = VAR_37
            if VAR_136:
                self.log_event(VAR_136, VAR_7.vars)
            if VAR_56.vars.delete_this_record:
                self.deleted = True
                VAR_37 = self.messages.record_deleted
                VAR_26(VAR_169, VAR_7, VAR_153._tablename)
                VAR_244.flash = VAR_37
            VAR_26(VAR_135, VAR_7, VAR_153._tablename)
            if VAR_56.extension not in ('html', 'load'):
                raise HTTP(200, 'RECORD CREATED/UPDATED')
            if isinstance(VAR_112, (list, tuple)):  # fix issue with 2.6
                VAR_112 = next[0]
            if VAR_112:  # Only redirect when explicit
                VAR_112 = FUNC_4(VAR_112, VAR_7)
                VAR_139.flash = VAR_244.flash
                redirect(VAR_112)
        elif VAR_56.extension not in ('html', 'load'):
            raise HTTP(401, serializers.json(dict(errors=VAR_7.errors)))
        return VAR_7

    def VAR_337(self,
                VAR_153,
                VAR_112=VAR_3,
                VAR_134=VAR_3,
                VAR_135=VAR_3,
                VAR_136=VAR_3,
                VAR_37=VAR_3,
                VAR_171=VAR_3,
                **VAR_172
                ):
        """Return a create form: delegates to update() with record=None."""
        if VAR_112 is VAR_3:
            VAR_112 = self.settings.create_next
        if VAR_134 is VAR_3:
            VAR_134 = self.settings.create_onvalidation
        if VAR_135 is VAR_3:
            VAR_135 = self.settings.create_onaccept
        if VAR_136 is VAR_3:
            VAR_136 = self.messages['create_log']
        if VAR_37 is VAR_3:
            VAR_37 = self.messages.record_created
        # NOTE(review): `next`, `onvalidation`, `onaccept`, `log`, `message`,
        # `formname` are free names (obfuscation artifacts) — presumably the
        # local VAR_* values just resolved above.
        return self.update(VAR_153,
                           None,
                           VAR_112=next,
                           VAR_134=onvalidation,
                           VAR_135=onaccept,
                           VAR_136=log,
                           VAR_37=message,
                           VAR_170=False,
                           VAR_171=formname,
                           **VAR_172
                           )

    def FUNC_88(self, VAR_153, VAR_168):
        """Return a read-only form for one record (dict for API requests)."""
        if not (isinstance(VAR_153, VAR_0) or VAR_153 in self.db.tables) \
                or (isinstance(VAR_168, str) and not str(VAR_168).isdigit()):
            raise HTTP(404)
        if not isinstance(VAR_153, VAR_0):
            VAR_153 = self.db[VAR_153]
        if not self.has_permission('read', VAR_153, VAR_168):
            redirect(self.settings.auth.settings.on_failed_authorization)
        VAR_7 = SQLFORM(
            VAR_153,
            VAR_168,
            readonly=True,
            comments=False,
            upload=self.settings.download_url,
            showid=self.settings.showid,
            VAR_273=self.settings.formstyle,
            separator=self.settings.label_separator
        )
        if VAR_263.request.extension not in ('html', 'load'):
            return VAR_153._filter_fields(VAR_7.record, VAR_215=True)
        return VAR_7

    def FUNC_89(self,
                VAR_153,
                VAR_150,
                VAR_112=VAR_3,
                VAR_37=VAR_3,
                ):
        """Delete one record (permission-checked) and redirect."""
        if not (isinstance(VAR_153, VAR_0) or VAR_153 in self.db.tables):
            raise HTTP(404)
        if not isinstance(VAR_153, VAR_0):
            VAR_153 = self.db[VAR_153]
        if not self.has_permission('delete', VAR_153, VAR_150):
            redirect(self.settings.auth.settings.on_failed_authorization)
        VAR_56 = VAR_263.request
        VAR_139 = VAR_263.session
        if VAR_112 is VAR_3:
            VAR_112 = VAR_56.get_vars._next \
                or VAR_56.post_vars._next \
                or self.settings.delete_next
        if VAR_37 is VAR_3:
            VAR_37 = self.messages.record_deleted
        VAR_168 = VAR_153[VAR_150]
        if VAR_168:
            # Run delete callbacks around the actual deletion.
            VAR_26(self.settings.delete_onvalidation, VAR_168)
            del VAR_153[VAR_150]
            VAR_26(self.settings.delete_onaccept, VAR_168, VAR_153._tablename)
            VAR_139.flash = VAR_37
        redirect(VAR_112)

    def VAR_289(self,
                VAR_153,
                VAR_173=None,
                VAR_133=None,
                VAR_174=None,
                VAR_175=None,
                ):
        """Select rows from `VAR_153` (permission-checked); defaults to all
        readable fields and an all-records query."""
        if not (isinstance(VAR_153, VAR_0) or VAR_153 in self.db.tables):
            raise HTTP(404)
        if not self.has_permission('select', VAR_153):
            redirect(self.settings.auth.settings.on_failed_authorization)
        if not isinstance(VAR_153, VAR_0):
            VAR_153 = self.db[VAR_153]
        if not VAR_173:
            VAR_173 = VAR_153.id > 0
        if not VAR_133:
            VAR_133 = [VAR_177 for VAR_177 in VAR_153 if VAR_177.readable]
        else:
            # Accept field names as strings.
            VAR_133 = [VAR_153[VAR_10] if isinstance(VAR_10, str) else VAR_10 for VAR_10 in VAR_133]
        # NOTE(review): `orderby`, `limitby` are free names (obfuscation
        # artifacts) — presumably VAR_174 and VAR_175.
        VAR_289 = self.db(VAR_173).select(*VAR_133, **dict(VAR_174=orderby,
                                                           VAR_175=limitby))
        return VAR_289

    def FUNC_91(self,
                VAR_153,
                VAR_173=None,
                VAR_133=None,
                VAR_174=None,
                VAR_175=None,
                VAR_18=None,
                **VAR_176
                ):
        """Render selected rows as an SQLTABLE (list for API requests)."""
        VAR_18 = VAR_18 or {}
        VAR_289 = self.rows(VAR_153, VAR_173, VAR_133, VAR_174, VAR_175)
        if not VAR_289:
            return None  # Nicer than an empty VAR_153.
        if 'upload' not in VAR_176:
            # NOTE(review): `attr` is a free name (obfuscation artifact,
            # presumably VAR_176).
            attr['upload'] = self.url('download')
        if VAR_263.request.extension not in ('html', 'load'):
            return VAR_289.as_list()
        if not VAR_18:
            if isinstance(VAR_153, str):
                VAR_153 = self.db[VAR_153]
            # Default headers from the field labels.
            VAR_18 = dict((str(VAR_346), k.label) for VAR_346 in VAR_153)
        # NOTE(review): `headers` is a free name (artifact, presumably VAR_18).
        return SQLTABLE(VAR_289, VAR_18=headers, **VAR_176)

    def FUNC_92(self, VAR_177):
        """Return the represent-format of a reference field's target table,
        stripped of its '%(' ... ')s' wrapper, else the field name."""
        VAR_290 = VAR_177._db[VAR_177.type[10:]]
        VAR_291 = VAR_290.get('_format', None)
        if VAR_291 and isinstance(VAR_291, str):
            return VAR_291[2:-2]
        return VAR_177.name

    def FUNC_93(self, VAR_177, VAR_178, VAR_179, VAR_180=False):
        """Translate (field, op, value) into a DAL query, or — for reference
        fields (`VAR_180`) — into a row-filter predicate. Returns None on error."""
        try:
            if VAR_180:
                VAR_291 = self.get_format(VAR_177)
            if VAR_178 == 'equals':
                if not VAR_180:
                    return VAR_177 == VAR_179
                else:
                    return lambda VAR_266: VAR_266[VAR_177.name][VAR_291] == VAR_179
            elif VAR_178 == 'not equal':
                if not VAR_180:
                    return VAR_177 != VAR_179
                else:
                    return lambda VAR_266: VAR_266[VAR_177.name][VAR_291] != VAR_179
            elif VAR_178 == 'greater than':
                if not VAR_180:
                    return VAR_177 > VAR_179
                else:
                    return lambda VAR_266: VAR_266[VAR_177.name][VAR_291] > VAR_179
            elif VAR_178 == 'less than':
                if not VAR_180:
                    return VAR_177 < VAR_179
                else:
                    return lambda VAR_266: VAR_266[VAR_177.name][VAR_291] < VAR_179
            elif VAR_178 == 'starts with':
                if not VAR_180:
                    return VAR_177.like(VAR_179 + '%')
                else:
                    return lambda VAR_266: str(VAR_266[VAR_177.name][VAR_291]).startswith(VAR_179)
            elif VAR_178 == 'ends with':
                if not VAR_180:
                    return VAR_177.like('%' + VAR_179)
                else:
                    return lambda VAR_266: str(VAR_266[VAR_177.name][VAR_291]).endswith(VAR_179)
            elif VAR_178 == 'contains':
                if not VAR_180:
                    return VAR_177.like('%' + VAR_179 + '%')
                else:
                    return lambda VAR_266: VAR_179 in VAR_266[VAR_177.name][VAR_291]
        except:
            return None

    def FUNC_94(self, *VAR_119, **VAR_11):
        """Build an interactive search form over a table and run the query.

        Returns (form, rows-or-None). Field conditions are read from
        chk<field>/op<field>/txt<field> request vars; reference-field
        conditions are applied as post-filters on the result rows.
        """
        VAR_153 = VAR_119[0]
        VAR_133 = VAR_11.get('fields', VAR_153.fields)
        VAR_292 = VAR_11.get('validate', True)
        VAR_56 = VAR_263.request
        VAR_101 = self.db
        if not (isinstance(VAR_153, VAR_0) or VAR_153 in VAR_101.tables):
            raise HTTP(404)
        VAR_172 = {}
        # Pass through select() keyword options when provided.
        for VAR_199 in ('orderby', 'groupby', 'left', 'distinct', 'limitby', 'cache'):
            if VAR_199 in VAR_11:
                VAR_172[VAR_199] = VAR_11[VAR_199]
        VAR_293 = TABLE()
        VAR_294 = []
        VAR_180 = []
        VAR_295 = []
        VAR_296 = VAR_11.get('showall', False)
        if VAR_296:
            VAR_294 = VAR_133
        VAR_297 = VAR_11.get('chkall', False)
        if VAR_297:
            # Pre-check every field's checkbox.
            for VAR_10 in VAR_133:
                VAR_56.vars['chk%s' % VAR_10] = 'on'
        VAR_298 = VAR_11.get('queries', [])
        VAR_299 = VAR_11.get('zero', '')
        if not VAR_298:
            # NOTE(review): assigns free name `ops` (obfuscation artifact);
            # presumably the default operator list should go into VAR_298.
            ops = ['equals', 'not equal', 'greater than',
                   'less than', 'starts with',
                   'ends with', 'contains']
        VAR_298.insert(0, VAR_299)
        VAR_300 = VAR_11.get('query_labels', {})
        VAR_173 = VAR_11.get('query', VAR_153.id > 0)
        VAR_301 = VAR_11.get('field_labels', {})
        for VAR_177 in VAR_133:
            VAR_177 = VAR_153[VAR_177]
            if not VAR_177.readable:
                continue
            VAR_385 = VAR_177.name
            VAR_386 = VAR_56.vars.get('chk' + VAR_385, None)
            VAR_387 = VAR_56.vars.get('txt' + VAR_385, None)
            VAR_388 = VAR_56.vars.get('op' + VAR_385, None)
            # One row per field: [checkbox, label, operator select, value input].
            VAR_266 = TR(TD(INPUT(_type="checkbox", _name="chk" + VAR_385,
                                  _disabled=(VAR_177.type == 'id'),
                                  VAR_179=(VAR_177.type == 'id' or VAR_386 == 'on'))),
                         TD(VAR_301.get(VAR_385, VAR_177.label)),
                         TD(SELECT([OPTION(VAR_300.get(VAR_178, op),
                                           _value=VAR_178) for VAR_178 in VAR_298],
                                   _name="op" + VAR_385,
                                   VAR_179=VAR_388)),
                         TD(INPUT(_type="text", _name="txt" + VAR_385,
                                  _value=VAR_387, VAR_16='txt' + VAR_385,
                                  _class=str(VAR_177.type))))
            VAR_293.append(VAR_266)
            if VAR_56.post_vars and (VAR_386 or VAR_177.type == 'id'):
                if VAR_387 and VAR_388 != '':
                    if VAR_177.type[0:10] == 'reference ':
                        # Reference fields become post-filters, not SQL.
                        VAR_180.append(self.get_query(VAR_177, VAR_388, VAR_387, VAR_180=True))
                    elif VAR_292:
                        VAR_179, VAR_394 = VAR_177.validate(VAR_387)
                        if not VAR_394:
                            VAR_173 &= self.get_query(VAR_177, VAR_388, VAR_179)
                        else:
                            VAR_266[3].append(DIV(VAR_394, _class='error'))
                    else:
                        VAR_173 &= self.get_query(VAR_177, VAR_388, VAR_387)
                VAR_294.append(VAR_177)
        VAR_7 = FORM(VAR_293, INPUT(_type="submit"))
        if VAR_294:
            try:
                VAR_295 = VAR_101(VAR_173).select(*VAR_294, **VAR_172)
                for VAR_391 in VAR_180:
                    # NOTE(review): `results` is a free name (obfuscation
                    # artifact, presumably VAR_295).
                    VAR_295 = results.find(VAR_391)
            except:  # TODO: hmmm, we should do better FUNC_30
                VAR_295 = None
        return VAR_7, VAR_295
# Install a module-wide urllib2 opener that keeps cookies across requests.
urllib2.install_opener(urllib2.build_opener(urllib2.HTTPCookieProcessor()))
def FUNC_6(VAR_12, VAR_17=None, VAR_18=None,
           VAR_19=Cookie.SimpleCookie(),
           VAR_20='Mozilla/5.0'):
    """Fetch `VAR_12` (GET, or POST when `VAR_17` is given) and return the body.

    `VAR_18` are extra headers, `VAR_19` a cookie jar carried across calls,
    `VAR_20` the User-agent string. On Google App Engine it uses urlfetch
    and follows redirects manually so cookies are preserved.

    NOTE(review): the mutable default `VAR_19` is shared across calls —
    apparently intentional (persistent cookie jar), but verify.
    """
    VAR_18 = VAR_18 or {}
    if VAR_17 is not None:
        VAR_17 = urlencode(VAR_17)
    if VAR_20:
        VAR_18['User-agent'] = VAR_20
    # FIX: was '%VAR_278=%s;' — '%V' is an invalid format character and
    # raised ValueError; restore the two-placeholder cookie format.
    VAR_18['Cookie'] = ' '.join(
        ['%s=%s;' % (VAR_14.key, VAR_14.value) for VAR_14 in VAR_19.values()])
    try:
        # FIX: was 'from google.appengine.api import .urlfetch' (SyntaxError).
        from google.appengine.api import urlfetch
    except ImportError:
        VAR_389 = urllib2.Request(VAR_12, VAR_17, VAR_18)
        VAR_303 = urlopen(VAR_389).read()
    else:
        VAR_302 = ((VAR_17 is None) and urlfetch.GET) or urlfetch.POST
        while VAR_12 is not None:
            # FIX: kwarg values `method`/`headers` were undefined names
            # (NameError); pass the locals VAR_302/VAR_18 instead.
            VAR_244 = urlfetch.fetch(VAR_12=VAR_12, VAR_81=VAR_17,
                                     VAR_302=VAR_302, VAR_18=VAR_18,
                                     allow_truncated=False, follow_redirects=False,
                                     deadline=10)
            # After the first request, redirects are plain GETs.
            VAR_17 = None
            VAR_302 = urlfetch.GET
            VAR_19.load(VAR_244.headers.get('set-cookie', ''))
            VAR_12 = VAR_244.headers.get('location')
        VAR_303 = VAR_244.content
    return VAR_303
# Regex pulling latitude/longitude out of the Google geocoder XML response.
# NOTE(review): tag and group names look obfuscation-damaged (e.g. <VAR_316>
# was probably <location>, groups 'VAR_306'/'VAR_307' were 'la'/'lo' —
# FUNC_7 below still reads .group('la')/.group('lo')); verify upstream.
VAR_21 = \
    re.compile(r"""<geometry>[\W]*?<VAR_316>[\W]*?<VAR_23>(?P<VAR_306>[^<]*)</VAR_23>[\W]*?<VAR_24>(?P<VAR_307>[^<]*)</VAR_24>[\W]*?</VAR_316>""")
def FUNC_7(VAR_22):
    """Geocode an address string via the Google geocoding API.

    Returns (latitude, longitude) as floats, or (0.0, 0.0) on any failure
    (network error, no match, parse error).
    """
    try:
        VAR_9 = urllib_quote(VAR_22)
        VAR_304 = FUNC_6('http://VAR_361.googleapis.com/VAR_361/api/FUNC_7/VAR_404?sensor=false&VAR_22=%s' % VAR_9)
        # NOTE(review): group names 'la'/'lo' do not match the named groups
        # in VAR_21 above (obfuscation artifact) — would raise, which the
        # bare except converts to (0.0, 0.0).
        VAR_305 = VAR_21.search(VAR_304)
        (VAR_306, VAR_307) = (float(VAR_305.group('la')), float(VAR_305.group('lo')))
        return (VAR_306, VAR_307)
    except:
        return (0.0, 0.0)
def FUNC_8(VAR_23, VAR_24, VAR_25=None):
    """Reverse-geocode (lat, lng) into a formatted address string.

    Falls back to the request's accepted language when `VAR_25` is unset.
    Returns '' on any failure.
    """
    if not VAR_25:
        # NOTE(review): assigns free name `lang` (obfuscation artifact);
        # presumably meant to rebind VAR_25.
        lang = VAR_263.T.accepted_language
    try:
        # NOTE(review): '%(VAR_23)VAR_278' looks like a damaged '%(lat)s'
        # format string — '%V' would raise ValueError, caught below.
        return FUNC_98.loads(FUNC_6('http://VAR_361.googleapis.com/VAR_361/api/FUNC_7/FUNC_98?latlng=%(VAR_23)VAR_278,%(VAR_24)VAR_278&language=%(VAR_25)s' % locals()))['results'][0]['formatted_address']
    except:
        return ''
def FUNC_9(VAR_10, *VAR_9, **VAR_13):
    """Call `VAR_10` with only the arguments it declares.

    Positional values fill parameters left to right; remaining required
    parameters are taken from `VAR_13` by name, then optional (defaulted)
    parameters likewise. Extra keyword arguments are silently dropped.
    Raises HTTP(404) when a required parameter cannot be satisfied.

    NOTE: when `VAR_10` declares no defaults, every parameter is treated as
    optional (quirk preserved from the original implementation).
    """
    code = VAR_10.__code__
    declared = code.co_varnames[:code.co_argcount]
    defaults = VAR_10.__defaults__ or []
    required = declared[0:-len(defaults)]
    optional = declared[-len(defaults):]
    # Map positional arguments onto the declared parameter names.
    collected = {}
    for name, value in zip(declared, VAR_9):
        collected[name] = value
    # Fill still-missing required parameters from keyword arguments.
    for name in required[len(collected):]:
        if name in VAR_13:
            collected[name] = VAR_13[name]
    if len(collected) < len(required):
        raise HTTP(404, "Object does not exist")
    # Finally pick up any optional parameters supplied by keyword.
    for name in optional:
        if name in VAR_13:
            collected[name] = VAR_13[name]
    return VAR_10(**collected)
class CLASS_5(object):
def __init__(self, VAR_100=None, VAR_186=False):
self.check_args = VAR_186
self.run_procedures = {}
self.csv_procedures = {}
self.xml_procedures = {}
self.rss_procedures = {}
self.json_procedures = {}
self.jsonrpc_procedures = {}
self.jsonrpc2_procedures = {}
self.xmlrpc_procedures = {}
self.amfrpc_procedures = {}
self.amfrpc3_procedures = {}
self.soap_procedures = {}
def FUNC_95(self, VAR_10):
self.run_procedures[VAR_10.__name__] = VAR_10
return VAR_10
def VAR_336(self, VAR_10):
self.csv_procedures[VAR_10.__name__] = VAR_10
return VAR_10
def VAR_404(self, VAR_10):
self.xml_procedures[VAR_10.__name__] = VAR_10
return VAR_10
def FUNC_97(self, VAR_10):
self.rss_procedures[VAR_10.__name__] = VAR_10
return VAR_10
def FUNC_98(self, VAR_10):
self.json_procedures[VAR_10.__name__] = VAR_10
return VAR_10
def FUNC_99(self, VAR_10):
self.jsonrpc_procedures[VAR_10.__name__] = VAR_10
return VAR_10
def FUNC_100(self, VAR_10):
self.jsonrpc2_procedures[VAR_10.__name__] = VAR_10
return VAR_10
def FUNC_101(self, VAR_10):
self.xmlrpc_procedures[VAR_10.__name__] = VAR_10
return VAR_10
def FUNC_102(self, VAR_10):
self.amfrpc_procedures[VAR_10.__name__] = VAR_10
return VAR_10
def FUNC_103(self, VAR_187='default'):
if not isinstance(VAR_187, str):
raise SyntaxError("AMF3 VAR_279 VAR_9 VAR_187 for function")
def FUNC_162(VAR_10):
if VAR_187:
self.amfrpc3_procedures[VAR_187 + '.' + VAR_10.__name__] = VAR_10
else:
self.amfrpc3_procedures[VAR_10.__name__] = VAR_10
return VAR_10
return FUNC_162
def FUNC_104(self, VAR_148=None, VAR_188=None, VAR_11=None, VAR_189=None, VAR_190=None):
def FUNC_163(VAR_10):
self.soap_procedures[VAR_148 or VAR_10.__name__] = VAR_10, VAR_188, VAR_11, VAR_189, VAR_190
return VAR_10
return FUNC_163
    def FUNC_105(self, VAR_11=None):
        """Execute a registered 'run' procedure named by the first request
        arg, passing remaining args/vars; returns str(result) or errors."""
        VAR_56 = VAR_263.request
        if not VAR_11:
            VAR_11 = VAR_56.args
        if VAR_11 and VAR_11[0] in self.run_procedures:
            return str(self.call_service_function(self.run_procedures[VAR_11[0]],
                                                  *VAR_11[1:], **dict(VAR_56.vars)))
        self.error()
def FUNC_106(self, VAR_11=None):
VAR_56 = VAR_263.request
VAR_244 = VAR_263.response
VAR_244.headers['Content-Type'] = 'text/VAR_30-csv'
if not VAR_11:
VAR_11 = VAR_56.args
def FUNC_164(VAR_179):
if isinstance(VAR_179, unicodeT):
return VAR_179.encode('utf8')
if hasattr(VAR_179, 'isoformat'):
return VAR_179.isoformat()[:19].replace('T', ' ')
if VAR_179 is None:
return '<NULL>'
return VAR_179
if VAR_11 and VAR_11[0] in self.csv_procedures:
import .types
VAR_391 = self.call_service_function(self.csv_procedures[VAR_11[0]],
*VAR_11[1:], **dict(VAR_56.vars))
VAR_278 = StringIO()
if hasattr(VAR_391, 'export_to_csv_file'):
VAR_391.export_to_csv_file(VAR_278)
elif VAR_391 and not isinstance(VAR_391, types.GeneratorType) and isinstance(VAR_391[0], (dict, Storage)):
import .csv
VAR_455 = VAR_336.writer(VAR_278)
VAR_455.writerow(list(VAR_391[0].keys()))
for line in VAR_391:
VAR_455.writerow([FUNC_164(v)
for v in line.values()])
else:
import .csv
VAR_455 = VAR_336.writer(VAR_278)
for line in VAR_391:
VAR_455.writerow(line)
return VAR_278.getvalue()
self.error()
    def FUNC_107(self, VAR_11=None):
        """Execute a registered XML procedure and serialize the result to XML."""
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        VAR_244.headers['Content-Type'] = 'text/xml'
        if not VAR_11:
            VAR_11 = VAR_56.args
        if VAR_11 and VAR_11[0] in self.xml_procedures:
            VAR_278 = self.call_service_function(self.xml_procedures[VAR_11[0]],
                                                 *VAR_11[1:], **dict(VAR_56.vars))
            # Rows objects serialize via their plain-list representation.
            if hasattr(VAR_278, 'as_list'):
                VAR_278 = VAR_278.as_list()
            return serializers.xml(VAR_278, quote=False)
        self.error()
    def FUNC_108(self, VAR_11=None):
        """Execute a registered RSS procedure and serialize the result as RSS."""
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        if not VAR_11:
            VAR_11 = VAR_56.args
        if VAR_11 and VAR_11[0] in self.rss_procedures:
            VAR_392 = self.call_service_function(self.rss_procedures[VAR_11[0]],
                                                 *VAR_11[1:], **dict(VAR_56.vars))
        else:
            self.error()
        VAR_244.headers['Content-Type'] = 'application/FUNC_97+xml'
        return serializers.rss(VAR_392)
def FUNC_109(self, VAR_11=None):
VAR_56 = VAR_263.request
VAR_244 = VAR_263.response
VAR_244.headers['Content-Type'] = 'application/FUNC_98; charset=utf-8'
if not VAR_11:
VAR_11 = VAR_56.args
VAR_27 = dict(VAR_56.vars)
if VAR_11 and VAR_11[0] in self.json_procedures:
VAR_278 = self.call_service_function(self.json_procedures[VAR_11[0]], *VAR_11[1:], **VAR_27)
if hasattr(VAR_278, 'as_list'):
VAR_278 = VAR_278.as_list()
return VAR_244.json(VAR_278)
self.error()
    class CLASS_11(Exception):
        """JSON-RPC error carrying a numeric code plus caller-supplied info."""
        def __init__(self, VAR_309, VAR_310):
            # Resolve the standard (message, description) pair for the code,
            # when it is one of the predefined JSON-RPC error codes.
            # NOTE(review): reads CLASS_5.jsonrpc_errors — presumably the
            # mapping defined just below (VAR_191); confirm attribute name.
            VAR_393 = CLASS_5.jsonrpc_errors.get(VAR_309)
            if VAR_393:
                self.message, self.description = VAR_393
            self.code, self.info = VAR_309, VAR_310
VAR_191 = {
-32700: ("Parse VAR_394. Invalid JSON was received by the VAR_31.",
"An VAR_394 occurred on the VAR_31 while parsing the JSON VAR_213."),
-32600: ("Invalid Request", "The JSON sent is not VAR_9 valid Request object."),
-32601: ("Method not found", "The VAR_302 does not exist / is not available."),
-32602: ("Invalid params", "Invalid VAR_302 parameter(VAR_278)."),
-32603: ("Internal error", "Internal JSON-RPC VAR_394."),
-32099: ("Server error", "Reserved for implementation-defined VAR_31-errors.")}
    def FUNC_110(self):
        """Serve a JSON-RPC 1.1 request from the request body.

        Requests that declare a 'jsonrpc' member are delegated to the
        version-2 handler. Errors are reported with the legacy code 100.
        """
        def FUNC_165(VAR_215, VAR_224):
            # Successful v1.1 response envelope.
            return serializers.json({'version': '1.1', 'id': VAR_215, 'result': VAR_224, 'error': None})
        def FUNC_166(VAR_215, VAR_309, VAR_37, VAR_17=None):
            # Error v1.1 response envelope; 'data' is attached only when given.
            VAR_394 = {'name': 'JSONRPCError',
                       'code': VAR_309, 'message': VAR_37}
            if VAR_17 is not None:
                VAR_394['data'] = VAR_17
            return serializers.json({'id': VAR_215,
                                     'version': '1.1',
                                     'error': VAR_394,
                                     })
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        VAR_244.headers['Content-Type'] = 'application/FUNC_98; charset=utf-8'
        VAR_311 = self.jsonrpc_procedures
        VAR_17 = FUNC_98.loads(VAR_56.body.read())
        VAR_312 = VAR_17.get('jsonrpc')
        if VAR_312:  # hand over to version 2 of the protocol
            return self.serve_jsonrpc2(VAR_17)
        VAR_215, VAR_302, VAR_227 = VAR_17.get('id'), VAR_17.get('method'), VAR_17.get('params', [])
        if VAR_215 is None:
            return FUNC_166(0, 100, 'missing id')
        if VAR_302 not in VAR_311:
            return FUNC_166(VAR_215, 100, 'method "%s" does not exist' % VAR_302)
        try:
            # dict params are spread as keywords, list params positionally.
            if isinstance(VAR_227, dict):
                VAR_278 = VAR_311[VAR_302](**VAR_227)
            else:
                VAR_278 = VAR_311[VAR_302](*VAR_227)
            if hasattr(VAR_278, 'as_list'):
                VAR_278 = VAR_278.as_list()
            return FUNC_165(VAR_215, VAR_278)
        except CLASS_5.JsonRpcException as e:
            return FUNC_166(VAR_215, e.code, e.info)
        except:
            # Any other failure: log the traceback; expose it to the caller
            # only for local requests.
            VAR_435, VAR_436, VAR_437 = sys.exc_info()
            VAR_37 = '%VAR_278: %s' % (VAR_435.__name__, VAR_436)
            VAR_17 = VAR_56.is_local and traceback.format_tb(VAR_437)
            VAR_2.warning('jsonrpc exception %VAR_278\VAR_181%s' % (VAR_37, traceback.format_tb(VAR_437)))
            return FUNC_166(VAR_215, 100, VAR_37, VAR_17)
    def FUNC_111(self, VAR_17=None, VAR_192=False):
        """Serve a JSON-RPC 2.0 request (single or batch).

        Args:
            VAR_17: an already-decoded request dict, or None to read and
                decode the HTTP request body.
            VAR_192: True when handling one element of a batch, which
                suppresses re-entrant batch processing and header setup.
        Notifications (requests without an 'id') produce no response.
        """
        def FUNC_165(VAR_215, VAR_224):
            # Notifications get no reply at all.
            if not VAR_399:
                return None
            return serializers.json({'jsonrpc': '2.0', 'id': VAR_215, 'result': VAR_224})
        def FUNC_166(VAR_215, VAR_309, VAR_37=None, VAR_17=None):
            # Build a v2.0 error envelope; known codes supply default
            # message/data, explicit arguments override them.
            VAR_394 = {'code': VAR_309}
            if VAR_309 in CLASS_5.jsonrpc_errors:
                VAR_394['message'] = CLASS_5.jsonrpc_errors[VAR_309][0]
                VAR_394['data'] = CLASS_5.jsonrpc_errors[VAR_309][1]
            if VAR_37 is not None:
                VAR_394['message'] = VAR_37
            if VAR_17 is not None:
                VAR_394['data'] = VAR_17
            return serializers.json({'jsonrpc': '2.0', 'id': VAR_215, 'error': VAR_394})
        def VAR_292(VAR_17):
            # Validate the request envelope; returns True when a response
            # is expected (an 'id' is present), False for notifications.
            VAR_395 = set(VAR_17.keys())
            VAR_396 = set(['jsonrpc', 'method'])
            VAR_397 = VAR_396 - VAR_395
            if VAR_397:
                raise CLASS_5.JsonRpcException(-32600, 'Missing arguments %VAR_278.' % list(VAR_397))
            if VAR_17['jsonrpc'] != '2.0':
                raise CLASS_5.JsonRpcException(-32603, 'Unsupported FUNC_99 VAR_137 "%s"' % VAR_17['jsonrpc'])
            if 'id' not in VAR_395:
                return False
            return True
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        if not VAR_17:
            VAR_244.headers['Content-Type'] = 'application/FUNC_98; charset=utf-8'
            try:
                VAR_17 = FUNC_98.loads(VAR_56.body.read())
            except ValueError:  # decoding error in the json library
                return FUNC_166(None, -32700)
        if isinstance(VAR_17, list) and not VAR_192:
            # Batch request: recurse once per element, dropping empty
            # (notification) responses, and join into a JSON array.
            VAR_398 = []
            for VAR_14 in VAR_17:
                VAR_438 = self.serve_jsonrpc2(VAR_14, VAR_192=True)
                if VAR_438:  # do not add empty responses
                    VAR_398.append(VAR_438)
            if len(VAR_398) == 0:  # return nothing
                return ''
            else:
                return "[" + ','.join(VAR_398) + "]"
        VAR_311 = self.jsonrpc2_procedures
        VAR_311.update(self.jsonrpc_procedures)
        try:
            VAR_399 = VAR_292(VAR_17)
        except CLASS_5.JsonRpcException as e:
            return FUNC_166(None, e.code, e.info)
        VAR_215, VAR_302, VAR_227 = VAR_17.get('id'), VAR_17['method'], VAR_17.get('params', '')
        if VAR_302 not in VAR_311:
            return FUNC_166(VAR_215, -32601, VAR_17='Method "%s" does not exist' % VAR_302)
        try:
            # dict params spread as keywords, sequences positionally.
            if isinstance(VAR_227, dict):
                VAR_278 = VAR_311[VAR_302](**VAR_227)
            else:
                VAR_278 = VAR_311[VAR_302](*VAR_227)
            if hasattr(VAR_278, 'as_list'):
                VAR_278 = VAR_278.as_list()
            if VAR_399:
                return FUNC_165(VAR_215, VAR_278)
            else:
                return ''
        except HTTP as e:
            raise e
        except CLASS_5.JsonRpcException as e:
            return FUNC_166(VAR_215, e.code, e.info)
        except:
            VAR_435, VAR_436, VAR_437 = sys.exc_info()
            VAR_17 = '%VAR_278: %VAR_278\n' % (VAR_435.__name__, VAR_436) + str(VAR_56.is_local and traceback.format_tb(VAR_437))
            VAR_2.warning('%VAR_278: %VAR_278\VAR_181%s' % (VAR_435.__name__, VAR_436, traceback.format_tb(VAR_437)))
            # NOTE(review): "data" here is undefined in this scope —
            # presumably a mangled reference to VAR_17; confirm upstream.
            return FUNC_166(VAR_215, -32099, VAR_17=data)
def FUNC_112(self):
VAR_56 = VAR_263.request
VAR_244 = VAR_263.response
VAR_313 = list(self.xmlrpc_procedures.values())
return VAR_244.xmlrpc(VAR_56, VAR_313)
    def FUNC_113(self, VAR_137=0):
        """Serve an AMF-RPC request via pyamf (AMF0 by default, AMF3 when
        VAR_137 == 3); returns a plain message when pyamf is unavailable."""
        try:
            # NOTE(review): "import .pyamf" is not valid Python syntax —
            # presumably mangled "import pyamf"; confirm against upstream.
            import .pyamf
            import .pyamf.remoting.gateway
        except:
            return "pyamf not installed or not in Python sys.path"
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        if VAR_137 == 3:
            VAR_313 = self.amfrpc3_procedures
            VAR_400 = pyamf.remoting.gateway.BaseGateway(VAR_313)
            VAR_401 = pyamf.remoting.decode(VAR_56.body)
        else:
            VAR_313 = self.amfrpc_procedures
            VAR_400 = pyamf.remoting.gateway.BaseGateway(VAR_313)
            VAR_402 = pyamf.get_context(pyamf.AMF0)
            VAR_401 = pyamf.remoting.decode(VAR_56.body, VAR_402)
        VAR_314 = pyamf.remoting.Envelope(VAR_401.amfVersion)
        for VAR_148, VAR_37 in VAR_401:
            # NOTE(review): "(message)" references an undefined name —
            # presumably the loop's message value (VAR_37); confirm upstream.
            VAR_314[VAR_148] = VAR_400.getProcessor(VAR_37)(message)
        VAR_244.headers['Content-Type'] = pyamf.remoting.CONTENT_TYPE
        if VAR_137 == 3:
            return pyamf.remoting.encode(VAR_314).getvalue()
        else:
            return pyamf.remoting.encode(VAR_314, VAR_402).getvalue()
    def FUNC_114(self, VAR_137="1.1"):
        """Serve a SOAP endpoint via pysimplesoap.

        POST requests are dispatched to registered procedures; GET with
        ?WSDL returns the WSDL document; GET with ?op=<name> shows sample
        request/response messages; otherwise an HTML index is rendered.
        NOTE(review): several keyword values below (location, namespace,
        documentation, fault, FUNC_96, FUNC_87) are undefined names —
        presumably mangled self-references of the keyword's own target
        (e.g. VAR_316=VAR_316); confirm against upstream before running.
        """
        try:
            from gluon.contrib.pysimplesoap.server import SoapDispatcher
        except:
            return "pysimplesoap not installed in contrib"
        VAR_56 = VAR_263.request
        VAR_244 = VAR_263.response
        VAR_315 = self.soap_procedures
        # Absolute URL of this SOAP endpoint, used as default namespace.
        VAR_316 = "%VAR_278://%VAR_278%s" % (VAR_56.env.wsgi_url_scheme,
                                             VAR_56.env.http_host,
                                             URL(VAR_391=VAR_56, VAR_10="call/soap", VAR_96={}))
        VAR_317 = 'namespace' in VAR_244 and VAR_244.namespace or VAR_316
        VAR_318 = VAR_244.description or ''
        VAR_319 = SoapDispatcher(
            VAR_148=VAR_244.title,
            VAR_316=location,
            VAR_114=VAR_316,  # SOAPAction
            VAR_317=namespace,
            VAR_113='pys',
            VAR_318=documentation,
            ns=True)
        for VAR_302, (VAR_105, VAR_188, VAR_11, VAR_189, resp_elem_name) in iteritems(VAR_315):
            VAR_319.register_function(VAR_302, VAR_105, VAR_188, VAR_11, VAR_189, resp_elem_name)
        if VAR_56.env.request_method == 'POST':
            VAR_403 = {}
            # Dispatch the SOAP envelope; a populated fault dict maps to 500.
            VAR_244.headers['Content-Type'] = 'text/xml'
            VAR_404 = VAR_319.dispatch(VAR_56.body.read(), VAR_403=fault)
            if VAR_403:
                VAR_244.status = 500
            return VAR_404
        elif 'WSDL' in VAR_56.vars:
            VAR_244.headers['Content-Type'] = 'text/xml'
            return VAR_319.wsdl()
        elif 'op' in VAR_56.vars:
            # Help page for a single operation with sample XML messages.
            VAR_244.headers['Content-Type'] = 'text/html'
            VAR_302 = VAR_56.vars['op']
            VAR_456, VAR_457, VAR_189 = VAR_319.help(VAR_302)
            VAR_82 = [H1("Welcome VAR_35 Web2Py SOAP webservice gateway"),
                      A("See all webservice operations",
                        _href=URL(VAR_391=VAR_56, VAR_10="call/soap", VAR_96={})),
                      H2(VAR_302),
                      P(VAR_189),
                      UL(LI("Location: %s" % VAR_319.location),
                         LI("Namespace: %s" % VAR_319.namespace),
                         LI("SoapAction: %s" % VAR_319.action),
                         ),
                      H3("Sample SOAP XML Request Message:"),
                      CODE(VAR_456, language="xml"),
                      H3("Sample SOAP XML Response Message:"),
                      CODE(VAR_457, language="xml"),
                      ]
            return {'body': VAR_82}
        else:
            # Index page listing all registered operations.
            VAR_244.headers['Content-Type'] = 'text/html'
            VAR_82 = [H1("Welcome VAR_35 Web2Py SOAP webservice gateway"),
                      P(VAR_244.description),
                      P("The following operations are available"),
                      A("See WSDL for webservice description",
                        _href=URL(VAR_391=VAR_56, VAR_10="call/soap", VAR_96={"WSDL": None})),
                      UL([LI(A("%VAR_278: %s" % (VAR_302, VAR_189 or ''),
                               _href=URL(VAR_391=VAR_56, VAR_10="call/soap", VAR_96={'op': VAR_302})))
                          for VAR_302, VAR_189 in VAR_319.list_methods()]),
                      ]
            return {'body': VAR_82}
def __call__(self):
VAR_56 = VAR_263.request
if len(VAR_56.args) < 1:
raise HTTP(404, "Not Found")
VAR_320 = VAR_56.args(0)
if VAR_320 == 'run':
return self.serve_run(VAR_56.args[1:])
elif VAR_320 == 'rss':
return self.serve_rss(VAR_56.args[1:])
elif VAR_320 == 'csv':
return self.serve_csv(VAR_56.args[1:])
elif VAR_320 == 'xml':
return self.serve_xml(VAR_56.args[1:])
elif VAR_320 == 'json':
return self.serve_json(VAR_56.args[1:])
elif VAR_320 == 'jsonrpc':
return self.serve_jsonrpc()
elif VAR_320 == 'jsonrpc2':
return self.serve_jsonrpc2()
elif VAR_320 == 'xmlrpc':
return self.serve_xmlrpc()
elif VAR_320 == 'amfrpc':
return self.serve_amfrpc()
elif VAR_320 == 'amfrpc3':
return self.serve_amfrpc(3)
elif VAR_320 == 'soap':
return self.serve_soap()
else:
self.error()
    def VAR_394(self):
        """Abort the current request with an HTTP 404."""
        raise HTTP(404, "Object does not exist")
    def FUNC_116(self, VAR_10, *VAR_9, **VAR_13):
        """Invoke service function VAR_10 with the given arguments.

        When self.check_args is set the call is routed through FUNC_9,
        which presumably validates/coerces the arguments first — confirm.
        """
        if self.check_args:
            return FUNC_9(VAR_10, *VAR_9, **VAR_13)
        else:
            return VAR_10(*VAR_9, **VAR_13)
def FUNC_10(VAR_26):
    """Decorator factory: after the wrapped function returns (or raises),
    run VAR_26(result) in a freshly spawned thread.

    The callback fires from a finally block, so it also runs — with None —
    when the wrapped function raises before producing a result.
    """
    def _decorate(VAR_10):
        def _wrapped(*VAR_9, **VAR_13):
            outcome = None
            try:
                outcome = VAR_10(*VAR_9, **VAR_13)
                return outcome
            finally:
                thread.start_new_thread(VAR_26, (outcome,))
        return _wrapped
    return _decorate
def FUNC_11(VAR_27, VAR_28=lambda VAR_30: VAR_30, VAR_29=False):
    """Return a human-readable relative-time string for a date/datetime.

    Args:
        VAR_27: a ``datetime.datetime`` or ``datetime.date`` to describe.
            Falsy values yield ``''``; any other type yields
            ``'[invalid date]'``.
        VAR_28: translation callable applied to the message template
            before ``%`` interpolation (defaults to identity).
        VAR_29: when True, compare against UTC now instead of local now.

    Returns:
        A string such as ``'3 weeks ago'`` or ``'2 days from now'``.
    """
    VAR_193 = datetime.datetime.utcnow() if VAR_29 else datetime.datetime.now()
    if isinstance(VAR_27, datetime.datetime):
        VAR_321 = VAR_193 - VAR_27
    elif isinstance(VAR_27, datetime.date):
        VAR_321 = VAR_193.date() - VAR_27
    elif not VAR_27:
        return ''
    else:
        return '[invalid date]'
    if VAR_321.days < 0:
        VAR_322 = ' from now'
        # BUG FIX: was "VAR_321 = -dt", which raised NameError ('dt' is
        # undefined here); negate the computed delta so future dates are
        # reported as positive offsets.
        VAR_321 = -VAR_321
    else:
        VAR_322 = ' ago'
    # BUG FIX: the plural templates below were broken format strings
    # ('%VAR_27 ...'; '%V' is not a valid conversion and raised
    # ValueError) — restored the intended '%d' placeholders.
    if VAR_321.days >= 2 * 365:
        return VAR_28('%d years' + VAR_322) % int(VAR_321.days // 365)
    elif VAR_321.days >= 365:
        return VAR_28('1 year' + VAR_322)
    elif VAR_321.days >= 60:
        return VAR_28('%d months' + VAR_322) % int(VAR_321.days // 30)
    elif VAR_321.days >= 27:  # approximately four weeks
        return VAR_28('1 month' + VAR_322)
    elif VAR_321.days >= 14:
        return VAR_28('%d weeks' + VAR_322) % int(VAR_321.days // 7)
    elif VAR_321.days >= 7:
        return VAR_28('1 week' + VAR_322)
    elif VAR_321.days > 1:
        return VAR_28('%d days' + VAR_322) % VAR_321.days
    elif VAR_321.days == 1:
        return VAR_28('1 day' + VAR_322)
    elif VAR_321.seconds >= 2 * 60 * 60:
        return VAR_28('%d hours' + VAR_322) % int(VAR_321.seconds // 3600)
    elif VAR_321.seconds >= 60 * 60:
        return VAR_28('1 hour' + VAR_322)
    elif VAR_321.seconds >= 2 * 60:
        return VAR_28('%d minutes' + VAR_322) % int(VAR_321.seconds // 60)
    elif VAR_321.seconds >= 60:
        return VAR_28('1 minute' + VAR_322)
    elif VAR_321.seconds > 1:
        return VAR_28('%d seconds' + VAR_322) % VAR_321.seconds
    elif VAR_321.seconds == 1:
        return VAR_28('1 second' + VAR_322)
    else:
        return VAR_28('now')
def FUNC_12():
    """Thread-isolation self-test for CLASS_6.

    Spawns a worker thread that sets x=7 on its own CLASS_6 instance while
    the main thread sets x=5 on its own; two locks sequence the threads.
    Returns the main thread's value (5 when per-thread isolation works).
    """
    def VAR_10():
        # Worker: write to this thread's singleton under both locks.
        VAR_14 = CLASS_6()
        VAR_194.acquire()
        VAR_195.acquire()
        VAR_14.x = 7
        VAR_194.release()
        VAR_195.release()
    VAR_194 = thread.allocate_lock()
    VAR_195 = thread.allocate_lock()
    VAR_194.acquire()
    thread.start_new_thread(VAR_10, ())
    VAR_9 = CLASS_6()
    VAR_9.x = 5
    VAR_194.release()
    # Wait until the worker has run before reading back our value.
    VAR_195.acquire()
    return VAR_9.x
class CLASS_6(object):
    """Per-thread singleton with Storage-backed attribute namespaces.

    ``__new__`` hands every thread its own shared instance (keyed by
    thread id), so attribute state is isolated between threads.
    """
    # Maps thread id -> that thread's singleton instance.
    VAR_196 = {}
    def __new__(VAR_197, *VAR_9, **VAR_13):
        VAR_215 = thread.get_ident()
        VAR_323 = thread.allocate_lock()
        try:
            VAR_323.acquire()
            try:
                # NOTE(review): reads VAR_197.instances — presumably the
                # class-level registry above (VAR_196); confirm the name.
                return VAR_197.instances[VAR_215]
            except KeyError:
                VAR_458 = object.__new__(VAR_197, *VAR_9, **VAR_13)
                VAR_197.instances[VAR_215] = VAR_458
                return VAR_458
        finally:
            VAR_323.release()
    def __init__(self, VAR_198=None, **VAR_182):
        # Without a namespace name, reset all state for this thread.
        if not VAR_198:
            self.__dict__.clear()
        VAR_222 = self.__getattr__(VAR_198)
        VAR_222.installed = True
        # Only fill in keys that are not already present in the namespace.
        VAR_222.update(
            (VAR_346, v) for VAR_346, v in VAR_182.items() if VAR_346 not in VAR_222)
    def __getattr__(self, VAR_199):
        # Auto-create a Storage namespace on first access.
        if VAR_199 not in self.__dict__:
            self.__dict__[VAR_199] = Storage()
        return self.__dict__[VAR_199]
    def VAR_128(self):
        """Return the names of all namespaces held by this instance."""
        return list(self.__dict__.keys())
    def __contains__(self, VAR_199):
        return VAR_199 in self.__dict__
class CLASS_7(object):
    """Expose a directory of static files through the web layer.

    Serves a file directly when the request path names one, otherwise
    collects the folder/file listing for rendering. Requests that resolve
    outside the base directory are rejected (path-traversal defense).
    """
    def __init__(self, VAR_200=None, VAR_201=None, VAR_202=None,
                 VAR_203=True, VAR_204=False):
        # When True, symlinks pointing outside the base dir may be followed.
        self.follow_symlink_out = VAR_204
        self.base = self.normalize_path(
            VAR_200 or VAR_418.path.join(VAR_263.request.folder, 'static'))
        self.basename = VAR_201 or VAR_263.request.function
        # NOTE(review): the two assignments below overwrite base/basename
        # set just above, replacing the follow_symlink_out-aware
        # normalize_path() with an unconditional realpath() — this looks
        # like a merge artifact and changes the symlink containment
        # policy; confirm which variant is intended.
        self.base = VAR_200 = VAR_418.path.realpath(VAR_200 or VAR_418.path.join(VAR_263.request.folder, 'static'))
        VAR_201 = VAR_201 or VAR_263.request.function
        self.basename = VAR_201
        if VAR_263.request.raw_args:
            self.args = [arg for arg in VAR_263.request.raw_args.split('/') if arg]
        else:
            self.args = [arg for arg in VAR_263.request.args if arg]
        VAR_94 = VAR_418.path.join(self.base, *self.args)
        if not VAR_418.path.exists(VAR_94):
            raise HTTP(404, "FILE NOT FOUND")
        # Containment check: reject paths escaping the base directory.
        if not self.in_base(VAR_94):
            raise HTTP(401, "NOT AUTHORIZED")
        if VAR_203 and not VAR_418.path.isdir(VAR_94):
            # Plain file: stream it out immediately with its content type.
            VAR_263.response.headers['Content-Type'] = contenttype(VAR_94)
            raise HTTP(200, open(VAR_94, 'rb'), **VAR_263.response.headers)
        self.path = VAR_324 = VAR_418.path.join(VAR_94, '*')
        VAR_325 = len(VAR_324) - 1
        # Enumerate directory entries, skipping private names and symlinks
        # that escape the base directory.
        VAR_326 = [VAR_10 for VAR_10 in sorted(glob.glob(VAR_324))
                   if not any([self.isprivate(VAR_10), self.issymlink_out(VAR_10)])]
        self.folders = [VAR_10[VAR_325:]
                        for VAR_10 in VAR_326 if VAR_418.path.isdir(VAR_10)]
        self.filenames = [VAR_10[VAR_325:]
                          for VAR_10 in VAR_326 if not VAR_418.path.isdir(VAR_10)]
        # A README file becomes the listing's introductory paragraph.
        if 'README' in self.filenames:
            with open(VAR_418.path.join(VAR_94, 'README')) as VAR_10:
                VAR_439 = VAR_10.read()
            self.paragraph = MARKMIN(VAR_439)
        else:
            self.paragraph = None
        if VAR_202:
            self.filenames = [VAR_10 for VAR_10 in self.filenames
                              if VAR_418.path.splitext(VAR_10)[-1] in VAR_202]
    def FUNC_120(self, VAR_201):
        """Build a clickable breadcrumb trail from the request args."""
        VAR_324 = []
        VAR_327 = SPAN()
        VAR_327.append(A(VAR_201, _href=URL()))
        for arg in self.args:
            VAR_327.append('/')
            VAR_324.append(arg)
            VAR_327.append(A(arg, _href=URL(VAR_11='/'.join(VAR_324))))
        return VAR_327
    def FUNC_121(self):
        """Render the sub-folder listing as an HTML table ('' when empty)."""
        if self.folders:
            return SPAN(H3('Folders'),
                        TABLE(*[TR(TD(A(folder, _href=URL(VAR_11=self.args + [folder]))))
                                for folder in self.folders], **dict(_class="table")))
        return ''
    @staticmethod
    def FUNC_122(VAR_205, VAR_206, VAR_207=VAR_418.path.sep):
        """True when path VAR_205 lies inside directory VAR_206."""
        # Append a trailing separator so 'foo2' does not match base 'foo'.
        VAR_278 = lambda VAR_10: '%VAR_278%s' % (VAR_10.rstrip(VAR_207), sep)  # f -> f/
        return VAR_278(VAR_205).startswith(VAR_278(VAR_206))
    def FUNC_123(self, VAR_10):
        """True when the normalized path stays inside the exposed base."""
        return self.__in_base(self.normalize_path(VAR_10), self.base)
    def FUNC_124(self, VAR_10):
        """Normalize a path: resolve symlinks unless they may leave base."""
        if self.follow_symlink_out:
            return VAR_418.path.normpath(VAR_10)
        else:
            return VAR_418.path.realpath(VAR_10)
    def FUNC_125(self, VAR_10):
        """True for symlinks whose target escapes the base directory."""
        return VAR_418.path.islink(VAR_10) and not self.in_base(VAR_10)
    @staticmethod
    def FUNC_126(VAR_10):
        """True for paths that must never be served (private/hidden/backup)."""
        # NOTE(review): "FUNC_118" is undefined here — presumably a mangled
        # reference to VAR_10 (strip a leading '/private/'); confirm.
        if VAR_10.startswith('/private/'):
            VAR_10 = FUNC_118[8:]
        return 'private' in VAR_10 or VAR_10.startswith('.') or VAR_10.endswith('~')
    @staticmethod
    def FUNC_127(VAR_10):
        """True when the filename extension denotes a displayable image."""
        return VAR_418.path.splitext(VAR_10)[-1].lower() in (
            '.png', '.jpg', '.jpeg', '.gif', '.tiff')
    def FUNC_128(self, VAR_208=160):
        """Render the file listing (with inline image previews) as a table."""
        if self.filenames:
            return SPAN(H3('Files'),
                        TABLE(*[TR(TD(A(VAR_10, _href=URL(VAR_11=self.args + [VAR_10]))),
                                   TD(IMG(_src=URL(VAR_11=self.args + [VAR_10]),
                                          _style='max-VAR_208:%spx' % VAR_208)
                                      if VAR_208 and self.isimage(VAR_10) else ''))
                                for VAR_10 in self.filenames], **dict(_class="table")))
        return ''
    def VAR_404(self):
        """Render the whole listing page (breadcrumbs, README, tables)."""
        return DIV(
            H2(self.breadcrumbs(self.basename)),
            self.paragraph or '',
            self.table_folders(),
            self.table_files()).xml()
class CLASS_8(object):
    # Group name that grants universal access to a page.
    VAR_209 = 'everybody'
    # Default page size used by paginated listings/searches.
    VAR_210 = 25
    def FUNC_129(self, VAR_82):
        """Render markmin text VAR_82 to XML/HTML with wiki extras enabled."""
        return MARKMIN(VAR_82, VAR_163=self.settings.extra,
                       VAR_12=True, VAR_100=self.env,
                       autolinks=lambda VAR_276: expand_one(VAR_276, {})).xml()
def FUNC_130(self, VAR_211):
return DIV(
_class='w2p_wiki_tags',
*[A(VAR_445.strip(), _href=URL(VAR_11='_search', VAR_96=dict(q=VAR_445)))
for VAR_445 in VAR_211 or [] if VAR_445.strip()])
    def FUNC_131(self, VAR_212):
        """Render a markmin wiki page body and append its tag links."""
        return self.markmin_base(VAR_212.body) + self.render_tags(VAR_212.tags).xml()
def FUNC_132(self, VAR_212):
VAR_303 = VAR_212.body
VAR_303 = replace_at_urls(VAR_303, URL)
VAR_303 = replace_autolinks(VAR_303, lambda VAR_276: expand_one(VAR_276, {}))
VAR_303 = replace_components(VAR_303, self.env)
VAR_303 = VAR_303 + self.render_tags(VAR_212.tags).xml()
return VAR_303
    @staticmethod
    def FUNC_133(VAR_213):
        """Embed a controller/function component given 'app/controller/args…'.

        NOTE(review): "VAR_11=args" passes an undefined lowercase name —
        presumably the mangled form of VAR_11=VAR_11; confirm upstream.
        """
        VAR_250 = VAR_213.split('/')
        VAR_104, VAR_105, VAR_11 = VAR_250[0], VAR_250[1], VAR_250[2:]
        return LOAD(VAR_104, VAR_105, VAR_11=args, ajax=True).xml()
    def FUNC_134(self):
        """Resolve the configured page renderer.

        settings.render may be a builtin renderer name (string), a callable,
        or a dict mapping per-page render keys to renderers; a dict yields a
        dispatcher that falls back to the markmin renderer.
        """
        if isinstance(self.settings.render, basestring):
            VAR_391 = getattr(self, "%s_render" % self.settings.render)
        elif callable(self.settings.render):
            VAR_391 = self.settings.render
        elif isinstance(self.settings.render, dict):
            def FUNC_174(VAR_212):
                # Per-page dispatch: page.render selects the renderer.
                if VAR_212.render:
                    if VAR_212.render in self.settings.render.keys():
                        VAR_472 = self.settings.render[VAR_212.render]
                    else:
                        VAR_472 = getattr(self, "%s_render" % VAR_212.render)
                else:
                    VAR_472 = self.markmin_render
                return VAR_472(VAR_212)
            VAR_391 = FUNC_174
        else:
            raise ValueError(
                "Invalid VAR_158 type %s" % type(self.settings.render))
        return VAR_391
    def __init__(self, VAR_63, VAR_157=None, VAR_158='markmin',
                 VAR_159=False, VAR_160='',
                 VAR_161=False, VAR_163=None,
                 VAR_164=None, VAR_165=None, VAR_126=True,
                 VAR_104=None, VAR_105=None, VAR_167=None):
        """Configure the wiki: settings, renderer, schema and triggers.

        Defines the wiki_page / wiki_tag / wiki_media tables (with any
        extra fields from auth settings), installs tag-sync callbacks, and
        auto-enrolls the first authenticated user as a wiki_editor when no
        group list was supplied.
        """
        VAR_222 = self.settings = VAR_63.settings.wiki
        # NOTE(review): the string below sits after the first statement, so
        # it is a no-op expression rather than the method docstring.
        """
        Args:
            VAR_158:
                - "markmin"
                - "html"
                - `<VAR_105>` : Sets VAR_9 custom VAR_158 VAR_105
                - `dict(VAR_303=<VAR_105>, markmin=...)`: dict(...) allows
                   multiple custom VAR_158 functions
                - "multiple" : Is the same as `{}`. It enables per-VAR_168
                   formats using builtins
        """
        # Known render formats; custom dict keys extend this set.
        VAR_328 = set(['markmin', 'html'])
        VAR_329 = False
        if VAR_158 == "multiple":
            VAR_158 = {}
        if isinstance(VAR_158, dict):
            [VAR_328.add(VAR_199) for VAR_199 in VAR_158]
            VAR_329 = True
        VAR_222.render = VAR_158
        VAR_330 = VAR_222.manage_permissions = VAR_159
        VAR_222.force_prefix = VAR_160
        VAR_222.restrict_search = VAR_161
        VAR_222.extra = VAR_163 or {}
        VAR_222.menu_groups = VAR_164
        VAR_222.templates = VAR_165
        VAR_222.controller = VAR_104
        VAR_222.function = VAR_105
        VAR_222.groups = list(VAR_63.user_groups.values()) \
            if VAR_167 is None else VAR_167
        VAR_101 = VAR_63.db
        self.env = VAR_157 or {}
        self.env['component'] = CLASS_8.component
        self.auth = VAR_63
        self.wiki_menu_items = None
        # Per-user slug prefix (e.g. to sandbox authors to their own pages).
        if self.auth.user:
            self.settings.force_prefix = VAR_160 % self.auth.user
        else:
            self.settings.force_prefix = VAR_160
        self.host = VAR_263.request.env.http_host
        # Table specs: (name, {'args': fields, 'vars': define_table kwargs}).
        VAR_331 = [
            ('wiki_page', {
                'args': [
                    VAR_1('slug',
                          VAR_279=[IS_SLUG(),
                                   IS_NOT_IN_DB(VAR_101, 'wiki_page.slug')],
                          writable=False),
                    VAR_1('title', length=255, unique=True),
                    VAR_1('body', 'text', notnull=True),
                    VAR_1('tags', 'list:string'),
                    VAR_1('can_read', 'list:string',
                          writable=VAR_330,
                          readable=VAR_330,
                          VAR_5=[CLASS_8.everybody]),
                    VAR_1('can_edit', 'list:string',
                          writable=VAR_330, readable=VAR_330,
                          VAR_5=[CLASS_8.everybody]),
                    VAR_1('changelog'),
                    VAR_1('html', 'text',
                          compute=self.get_renderer(),
                          readable=False, writable=False),
                    VAR_1('render', VAR_5="markmin",
                          readable=VAR_329,
                          writable=VAR_329,
                          VAR_279=IS_EMPTY_OR(
                              IS_IN_SET(VAR_328))),
                    VAR_63.signature],
                'vars': {'format': '%(VAR_441)s', 'migrate': VAR_126}}),
            ('wiki_tag', {
                'args': [
                    VAR_1('name'),
                    VAR_1('wiki_page', 'reference wiki_page'),
                    VAR_63.signature],
                'vars':{'format': '%(VAR_441)s', 'migrate': VAR_126}}),
            ('wiki_media', {
                'args': [
                    VAR_1('wiki_page', 'reference wiki_page'),
                    VAR_1('title', VAR_91=True),
                    VAR_1('filename', 'upload', VAR_91=True),
                    VAR_63.signature],
                'vars': {'format': '%(VAR_441)s', 'migrate': VAR_126}}),
            ]
        for VAR_199, VAR_179 in VAR_331:
            VAR_11 = []
            if VAR_199 not in VAR_101.tables():
                # Prepend any user-configured extra fields for this table.
                VAR_360 = VAR_63.settings.extra_fields
                if VAR_360:
                    if VAR_199 in VAR_360:
                        if VAR_360[VAR_199]:
                            for VAR_177 in VAR_360[VAR_199]:
                                VAR_11.append(VAR_177)
                VAR_11 += VAR_179['args']
                VAR_101.define_table(VAR_199, *VAR_11, **VAR_179['vars'])
        if self.settings.templates is None and not self.settings.manage_permissions:
            self.settings.templates = \
                VAR_101.wiki_page.tags.contains('template') & VAR_101.wiki_page.can_read.contains('everybody')
        def FUNC_169(VAR_212, VAR_215, VAR_101=db):
            # After-insert hook: mirror the page's tags into wiki_tag.
            # NOTE(review): "tag.strip()" references an undefined lowercase
            # name — presumably the mangled form of VAR_440; confirm.
            for VAR_440 in VAR_212.tags or []:
                VAR_440 = tag.strip().lower()
                if VAR_440:
                    VAR_101.wiki_tag.insert(VAR_148=VAR_440, wiki_page=VAR_215)
        def FUNC_170(VAR_332, VAR_212, VAR_101=db):
            # After-update hook: rebuild the wiki_tag rows for this page.
            VAR_212 = VAR_332.select(VAR_175=(0, 1)).first()
            VAR_101(VAR_101.wiki_tag.wiki_page == VAR_212.id).delete()
            for VAR_440 in VAR_212.tags or []:
                VAR_440 = tag.strip().lower()
                if VAR_440:
                    VAR_101.wiki_tag.insert(VAR_148=VAR_440, wiki_page=VAR_212.id)
        VAR_101.wiki_page._after_insert.append(FUNC_169)
        VAR_101.wiki_page._after_update.append(FUNC_170)
        # Bootstrap: make the current user a wiki_editor when the group
        # list came straight from their memberships and lacks the role.
        if (VAR_63.user and
                check_credentials(VAR_263.request, gae_login=False) and
                'wiki_editor' not in VAR_63.user_groups.values() and
                self.settings.groups == list(VAR_63.user_groups.values())):
            VAR_405 = VAR_101.auth_group(VAR_146='wiki_editor')
            VAR_406 = VAR_405.id if VAR_405 else VAR_101.auth_group.insert(
                VAR_146='wiki_editor')
            VAR_63.add_membership(VAR_406)
        VAR_222.lock_keys = True
    def FUNC_72(self, VAR_212=None):
        """Abort the request with HTTP 401 (visitor lacks permission)."""
        raise HTTP(401)
def FUNC_135(self, VAR_212):
if 'everybody' in VAR_212.can_read or not self.settings.manage_permissions:
return True
elif self.auth.user:
VAR_167 = self.settings.groups
if ('wiki_editor' in VAR_167 or
set(VAR_167).intersection(set(VAR_212.can_read + VAR_212.can_edit)) or
VAR_212.created_by == self.auth.user.id):
return True
return False
    def FUNC_136(self, VAR_212=None):
        """True when the current user may edit VAR_212 (or, when VAR_212 is
        None, create new pages); redirects to login when not authenticated."""
        if not self.auth.user:
            redirect(self.auth.settings.login_url)
        VAR_167 = self.settings.groups
        # wiki_editor may edit anything; wiki_author may create; otherwise
        # require group grant on the page or page ownership.
        return ('wiki_editor' in VAR_167 or
                (VAR_212 is None and 'wiki_author' in VAR_167) or
                VAR_212 is not None and (set(VAR_167).intersection(set(VAR_212.can_edit)) or
                                         VAR_212.created_by == self.auth.user.id))
def FUNC_137(self):
if not self.auth.user:
return False
VAR_167 = self.settings.groups
return 'wiki_editor' in VAR_167
    def FUNC_138(self):
        """Search is open to all visitors (filtering happens per page)."""
        return True
def FUNC_139(self):
if self.auth.user:
if self.settings.menu_groups is None:
return True
else:
VAR_167 = self.settings.groups
if any(VAR_445 in self.settings.menu_groups for VAR_445 in VAR_167):
return True
return False
    def FUNC_140(self):
        """Build the wiki menu once and append it to the response menu."""
        if (not self.wiki_menu_items and self.settings.controller and self.settings.function):
            self.wiki_menu_items = self.menu(self.settings.controller,
                                             self.settings.function)
            VAR_263.response.menu += self.wiki_menu_items
    def __call__(self):
        """Wiki front controller: route the first request arg.

        Numeric arg -> media download; '_xxx' args -> admin actions
        (edit/create/pages/search/...); anything else -> read that slug.
        """
        VAR_56 = VAR_263.request
        VAR_222 = self.settings
        VAR_222.controller = VAR_222.controller or VAR_56.controller
        VAR_222.function = VAR_222.function or VAR_56.function
        self.automenu()
        VAR_299 = VAR_56.args(0) or 'index'
        if VAR_299 and VAR_299.isdigit():
            return self.media(int(VAR_299))
        elif not VAR_299 or not VAR_299.startswith('_'):
            return self.read(VAR_299)
        elif VAR_299 == '_edit':
            return self.edit(VAR_56.args(1) or 'index', VAR_56.args(2) or 0)
        elif VAR_299 == '_editmedia':
            return self.editmedia(VAR_56.args(1) or 'index')
        elif VAR_299 == '_create':
            return self.create()
        elif VAR_299 == '_pages':
            return self.pages()
        elif VAR_299 == '_search':
            return self.search()
        elif VAR_299 == '_recent':
            # Paginated list of pages created by a given author.
            VAR_473 = int(VAR_56.vars.page or 0)
            VAR_173 = self.auth.db.wiki_page.created_by == VAR_56.args(
                1, cast=int)
            # NOTE(review): "VAR_173=query" passes an undefined lowercase
            # name — presumably the mangled form of VAR_173=VAR_173; confirm.
            return self.search(VAR_173=query,
                               VAR_174=~self.auth.db.wiki_page.created_on,
                               VAR_175=(VAR_473 * self.rows_page,
                                        (VAR_473 + 1) * self.rows_page),
                               )
        elif VAR_299 == '_cloud':
            return self.cloud()
        elif VAR_299 == '_preview':
            return self.preview(self.get_renderer())
def FUNC_141(self, VAR_212):
if not self.can_read(VAR_212):
VAR_407 = (VAR_212.body or '').replace('\r', '')
VAR_408 = [VAR_446 for VAR_446 in VAR_407.split('\VAR_181\n') if not VAR_446.startswith('#') and VAR_446.strip()]
if VAR_408:
return VAR_408[0]
return ''
def FUNC_142(self, VAR_82):
return (VAR_82 or '').replace('://HOSTNAME', '://%s' % self.host)
    def FUNC_88(self, VAR_156, VAR_166=False):
        """Serve a wiki page by slug in the current request's extension.

        '_cloud'/'_search' are routed to their handlers; html requests get
        a rendered dict, 'load' requests get raw html, other extensions get
        the source body. NOTE(review): several keyword values below
        (slug, page, content) are undefined lowercase names — presumably
        mangled self-references (e.g. VAR_156=VAR_156); confirm upstream.
        """
        if VAR_156 in '_cloud':
            return self.cloud()
        elif VAR_156 in '_search':
            return self.search()
        VAR_212 = self.auth.db.wiki_page(VAR_156=slug)
        if VAR_212 and (not self.can_read(VAR_212)):
            return self.not_authorized(VAR_212)
        if VAR_263.request.extension == 'html':
            if not VAR_212:
                # Unknown slug: offer a creation link instead of a 404.
                VAR_12 = URL(VAR_11=('_create', VAR_156))
                return dict(VAR_229=A('Create VAR_212 "%s"' % VAR_156, _href=VAR_12, _class="btn"))
            else:
                # Use the precomputed html unless a force-render was asked.
                VAR_303 = VAR_212.html if not VAR_166 else self.get_renderer()(VAR_212)
                VAR_229 = XML(self.fix_hostname(VAR_303))
                return dict(VAR_441=VAR_212.title,
                            VAR_156=VAR_212.slug,
                            VAR_212=page,
                            VAR_229=content,
                            VAR_211=VAR_212.tags,
                            created_on=VAR_212.created_on,
                            modified_on=VAR_212.modified_on)
        elif VAR_263.request.extension == 'load':
            return self.fix_hostname(VAR_212.html) if VAR_212 else ''
        else:
            if not VAR_212:
                raise HTTP(404)
            else:
                return dict(VAR_441=VAR_212.title,
                            VAR_156=VAR_212.slug,
                            VAR_212=page,
                            VAR_229=VAR_212.body,
                            VAR_211=VAR_212.tags,
                            created_on=VAR_212.created_on,
                            modified_on=VAR_212.modified_on)
    def FUNC_143(self, VAR_156, VAR_214=0):
        """Edit (or create) the wiki page named by slug VAR_156.

        VAR_214 optionally names a template page id whose body seeds a new
        page. Builds the SQLFORM plus the client-side preview/media script.
        NOTE(review): "VAR_156=slug" passes an undefined lowercase name —
        presumably mangled VAR_156=VAR_156; confirm upstream.
        """
        VAR_63 = self.auth
        VAR_101 = VAR_63.db
        VAR_212 = VAR_101.wiki_page(VAR_156=slug)
        if not self.can_edit(VAR_212):
            return self.not_authorized(VAR_212)
        # Derive a default title from the slug ('my-page' -> 'My Page').
        VAR_333 = ' '.join(VAR_14.capitalize() for VAR_14 in VAR_156.split('-'))
        if not VAR_212:
            # New page: enforce the per-author slug prefix unless a manager.
            if not (self.can_manage() or
                    VAR_156.startswith(self.settings.force_prefix)):
                VAR_263.session.flash = 'slug must have "%s" prefix' \
                    % self.settings.force_prefix
                redirect(URL(VAR_11=('_create')))
            VAR_101.wiki_page.can_read.default = [CLASS_8.everybody]
            VAR_101.wiki_page.can_edit.default = [VAR_63.user_group_role()]
            VAR_101.wiki_page.title.default = VAR_333
            VAR_101.wiki_page.slug.default = VAR_156
            if VAR_156 == 'wiki-menu':
                VAR_101.wiki_page.body.default = \
                    '- Menu Item > @////index\VAR_181- - Submenu > http://web2py.com'
            else:
                # Seed from the selected template page when one was given.
                VAR_101.wiki_page.body.default = VAR_101(VAR_101.wiki_page.id == VAR_214).select(VAR_101.wiki_page.body)[0].body \
                    if int(VAR_214) > 0 else '## %VAR_278\VAR_181\npage content' % VAR_333
        VAR_96 = VAR_263.request.post_vars
        if VAR_96.body:
            # Store host-independent URLs; expanded again on render.
            VAR_96.body = VAR_96.body.replace('://%s' % self.host, '://HOSTNAME')
        VAR_7 = SQLFORM(VAR_101.wiki_page, VAR_212, VAR_170=True,
                        VAR_273='table2cols', showid=False).process()
        if VAR_7.deleted:
            VAR_263.session.flash = 'page deleted'
            redirect(URL())
        elif VAR_7.accepted:
            VAR_263.session.flash = 'page created'
            redirect(URL(VAR_11=VAR_156))
        # Client-side preview + media-browser toggles injected next to the
        # edit form (template interpolated with the preview/media URLs).
        VAR_334 = """
        jQuery(VAR_105() {
            if (!jQuery('#wiki_page_body').length) return;
            var pagecontent = jQuery('#wiki_page_body');
            pagecontent.css('font-family',
                            'Monaco,Menlo,Consolas,"Courier New",monospace');
            var prevbutton = jQuery('<button class="btn nopreview">Preview</button>');
            var VAR_217 = jQuery('<div VAR_215="preview"></div>').hide();
            var previewmedia = jQuery('<div VAR_215="previewmedia"></div>');
            var VAR_7 = pagecontent.closest('form');
            VAR_217.insertBefore(VAR_7);
            prevbutton.insertBefore(VAR_7);
            if(%(link_media)VAR_278) {
              var mediabutton = jQuery('<button class="btn nopreview">Media</button>');
              mediabutton.insertBefore(VAR_7);
              previewmedia.insertBefore(VAR_7);
              mediabutton.click(VAR_105() {
                if (mediabutton.hasClass('nopreview')) {
                  web2py_component('%(urlmedia)s', 'previewmedia');
                } else {
                  previewmedia.empty();
                }
                mediabutton.toggleClass('nopreview');
              });
            }
            prevbutton.click(VAR_105(e) {
                e.preventDefault();
                if (prevbutton.hasClass('nopreview')) {
                    prevbutton.addClass('preview').removeClass(
                        'nopreview').html('Edit Source');
                    try{var wiki_render = jQuery('#wiki_page_render').val()}
                    catch(e){var wiki_render = null;}
                    web2py_ajax_page('post', \
                        '%(VAR_12)s', {VAR_82: jQuery('#wiki_page_body').val(), \
                                       VAR_158: wiki_render}, 'preview');
                    VAR_7.fadeOut('fast', VAR_105() {VAR_217.fadeIn()});
                } else {
                    prevbutton.addClass(
                        'nopreview').removeClass('preview').html('Preview');
                    VAR_217.fadeOut('fast', VAR_105() {VAR_7.fadeIn()});
                }
            })
        })
        """ % dict(VAR_12=URL(VAR_11=('_preview', VAR_156)), link_media=('true' if VAR_212 else 'false'),
                   urlmedia=URL(extension='load',
                                VAR_11=('_editmedia', VAR_156),
                                VAR_96=dict(embedded=1)))
        return dict(VAR_229=TAG[''](VAR_7, SCRIPT(VAR_334)))
    def FUNC_144(self, VAR_156):
        """Manage the media attached to the wiki page named by VAR_156.

        Renders an SQLFORM.grid over wiki_media; in embedded mode the grid
        offers 'copy into source' links and hides create/delete actions.
        NOTE(review): several keyword values below (slug, links, FUNC_96,
        FUNC_87, content) are undefined names — presumably mangled
        self-references of the keyword's own target; confirm upstream.
        """
        VAR_63 = self.auth
        VAR_101 = VAR_63.db
        VAR_212 = VAR_101.wiki_page(VAR_156=slug)
        if not (VAR_212 and self.can_edit(VAR_212)):
            return self.not_authorized(VAR_212)
        # Display media rows as embeddable '@////id/slug.ext' references.
        self.auth.db.wiki_media.id.represent = lambda VAR_215, VAR_266: \
            VAR_215 if not VAR_266.filename else \
            SPAN('@////%i/%VAR_278.%s' % (VAR_215, IS_SLUG.urlify(VAR_266.title.split('.')[0]), VAR_266.filename.split('.')[-1]))
        self.auth.db.wiki_media.wiki_page.default = VAR_212.id
        self.auth.db.wiki_media.wiki_page.writable = False
        VAR_335 = []
        VAR_336 = True
        VAR_337 = True
        if VAR_263.request.vars.embedded:
            # Embedded in the edit view: offer click-to-insert links.
            VAR_334 = "var VAR_14 = jQuery('#wiki_page_body'); VAR_14.val(VAR_14.val() + jQuery('%s').text()); return false;"
            VAR_409 = self.auth.db.wiki_media.id.represent
            VAR_336 = False
            VAR_337 = False
            VAR_335 = [lambda VAR_266: A('copy into source', _href='#', _onclick=VAR_334 % (VAR_409(VAR_266.id, VAR_266)))]
        VAR_229 = SQLFORM.grid(
            self.auth.db.wiki_media.wiki_page == VAR_212.id,
            VAR_174=self.auth.db.wiki_media.title,
            VAR_335=links,
            VAR_336=FUNC_96,
            VAR_337=FUNC_87,
            VAR_11=['_editmedia', VAR_156],
            user_signature=False)
        return dict(VAR_229=content)
    def VAR_337(self):
        """Show the create-page form (slug plus optional template page).

        On accept, redirects to the edit view seeded from the chosen
        template (0 means no template).
        """
        if not self.can_edit():
            return self.not_authorized()
        VAR_101 = self.auth.db
        VAR_338 = VAR_101(VAR_101.wiki_page.id > 0).select(VAR_101.wiki_page.id, VAR_101.wiki_page.slug)
        VAR_61 = [OPTION(VAR_266.slug, _value=VAR_266.id) for VAR_266 in VAR_338]
        VAR_61.insert(0, OPTION('', _value=''))
        VAR_133 = [VAR_1("slug", VAR_5=VAR_263.request.args(1) or
                         self.settings.force_prefix,
                         VAR_279=(IS_SLUG(), IS_NOT_IN_DB(VAR_101, VAR_101.wiki_page.slug))), ]
        if self.settings.templates:
            VAR_133.append(
                VAR_1("from_template", "reference wiki_page",
                      VAR_279=IS_EMPTY_OR(IS_IN_DB(VAR_101(self.settings.templates), VAR_101.wiki_page._id, '%(VAR_156)s')),
                      VAR_62=VAR_263.T("Choose Template or empty for new Page")))
        VAR_7 = SQLFORM.factory(*VAR_133, **dict(_class="well"))
        VAR_7.element("[type=submit]").attributes["_value"] = \
            VAR_263.T("Create Page from Slug")
        if VAR_7.process().accepted:
            VAR_7.vars.from_template = 0 if not VAR_7.vars.from_template else VAR_7.vars.from_template
            redirect(URL(VAR_11=('_edit', VAR_7.vars.slug, VAR_7.vars.from_template or 0)))  # added param
        return dict(VAR_229=VAR_7)
    def VAR_414(self):
        """Render the manager-only grid listing all wiki pages with
        per-row edit/media links.

        NOTE(review): "dict(VAR_229=content)" references an undefined
        lowercase name — presumably the mangled form of VAR_229; confirm.
        """
        if not self.can_manage():
            return self.not_authorized()
        self.auth.db.wiki_page.slug.represent = lambda VAR_156, VAR_266: SPAN(
            '@////%s' % VAR_156)
        self.auth.db.wiki_page.title.represent = lambda VAR_441, VAR_266: \
            A(VAR_441, _href=URL(VAR_11=VAR_266.slug))
        VAR_339 = self.auth.db.wiki_page
        VAR_229 = SQLFORM.grid(
            VAR_339,
            VAR_133=[VAR_339.slug,
                     VAR_339.title, VAR_339.tags,
                     VAR_339.can_read, VAR_339.can_edit],
            VAR_335=[
                lambda VAR_266:
                A('edit', _href=URL(VAR_11=('_edit', VAR_266.slug)), _class='btn'),
                lambda VAR_266:
                A('media', _href=URL(VAR_11=('_editmedia', VAR_266.slug)), _class='btn')],
            details=False, editable=False, VAR_170=False, VAR_337=False,
            VAR_174=self.auth.db.wiki_page.title,
            VAR_11=['_pages'],
            user_signature=False)
        return dict(VAR_229=content)
    def VAR_340(self, VAR_215):
        """Stream the wiki media file with id VAR_215 (404 when missing).

        When permission management is on, the owning page's read rights are
        enforced first. The response is marked privately cacheable and the
        session is discarded to drop the cookie.
        """
        VAR_56, VAR_244, VAR_101 = VAR_263.request, VAR_263.response, self.auth.db
        VAR_340 = VAR_101.wiki_media(VAR_215)
        if VAR_340:
            if self.settings.manage_permissions:
                VAR_212 = VAR_101.wiki_page(VAR_340.wiki_page)
                if not self.can_read(VAR_212):
                    return self.not_authorized(VAR_212)
            VAR_56.args = [VAR_340.filename]
            VAR_410 = VAR_244.download(VAR_56, VAR_101)
            VAR_263.session.forget()  # get rid of the cookie
            VAR_244.headers['Last-Modified'] = \
                VAR_56.utcnow.strftime("%VAR_9, %VAR_27 %VAR_13 %Y %H:%M:%S GMT")
            if 'Content-Disposition' in VAR_244.headers:
                del VAR_244.headers['Content-Disposition']
            VAR_244.headers['Pragma'] = 'cache'
            VAR_244.headers['Cache-Control'] = 'private'
            return VAR_410
        else:
            raise HTTP(404)
def VAR_342(self, VAR_104='default', VAR_105='index'):
VAR_101 = self.auth.db
VAR_56 = VAR_263.request
VAR_341 = VAR_101.wiki_page(VAR_156='wiki-menu')
VAR_342 = []
if VAR_341:
VAR_411 = {'': VAR_342}
VAR_412 = re.compile('[\VAR_391\VAR_181\VAR_445]*(?P<VAR_200>(\VAR_278*\-\s*)+)(?P<VAR_441>\w.*?)\VAR_278+\>\s+(?P<VAR_276>\S+)')
for match in VAR_412.finditer(self.fix_hostname(VAR_341.body)):
VAR_200 = match.group('base').replace(' ', '')
VAR_441 = match.group('title')
VAR_276 = match.group('link')
VAR_442 = None
if VAR_276.startswith('@'):
VAR_250 = VAR_276[2:].split('/')
if len(VAR_250) > 3:
VAR_442 = VAR_250[3]
VAR_276 = URL(VAR_9=VAR_250[0] or None, VAR_14=VAR_250[1] or VAR_104,
VAR_10=VAR_250[2] or VAR_105, VAR_11=VAR_250[3:])
VAR_443 = VAR_411.get(VAR_200[1:], VAR_411[''])
VAR_444 = []
VAR_411[VAR_200] = VAR_444
VAR_443.append((VAR_263.T(VAR_441),
VAR_56.args(0) == VAR_442,
VAR_276, VAR_444))
if self.can_see_menu():
VAR_413 = []
VAR_342.append((VAR_263.T('[CLASS_8]'), None, None, VAR_413))
if URL() == URL(VAR_104, VAR_105):
if not str(VAR_56.args(0)).startswith('_'):
VAR_156 = VAR_56.args(0) or 'index'
VAR_118 = 1
elif VAR_56.args(0) == '_edit':
VAR_156 = VAR_56.args(1) or 'index'
VAR_118 = 2
elif VAR_56.args(0) == '_editmedia':
VAR_156 = VAR_56.args(1) or 'index'
VAR_118 = 3
else:
VAR_118 = 0
if VAR_118 in (2, 3):
VAR_413.append((VAR_263.T('View Page'), None,
URL(VAR_104, VAR_105, VAR_11=VAR_156)))
if VAR_118 in (1, 3):
VAR_413.append((VAR_263.T('Edit Page'), None,
URL(VAR_104, VAR_105, VAR_11=('_edit', VAR_156))))
if VAR_118 in (1, 2):
VAR_413.append((VAR_263.T('Edit Page Media'), None,
URL(VAR_104, VAR_105, VAR_11=('_editmedia', VAR_156))))
VAR_413.append((VAR_263.T('Create New Page'), None,
URL(VAR_104, VAR_105, VAR_11=('_create'))))
if self.can_manage():
VAR_413.append((VAR_263.T('Manage Pages'), None,
URL(VAR_104, VAR_105, VAR_11=('_pages'))))
VAR_413.append((VAR_263.T('Edit Menu'), None,
URL(VAR_104, VAR_105, VAR_11=('_edit', 'wiki-menu'))))
VAR_413.append((VAR_263.T('Search Pages'), None,
URL(VAR_104, VAR_105, VAR_11=('_search'))))
return VAR_342
def FUNC_94(self, VAR_211=None, VAR_173=None, VAR_216=True, VAR_217=True,
VAR_175=(0, 100), VAR_174=None):
if not self.can_search():
return self.not_authorized()
VAR_56 = VAR_263.request
VAR_229 = CAT()
if VAR_211 is None and VAR_173 is None:
VAR_7 = FORM(INPUT(_name='q', VAR_279=IS_NOT_EMPTY(),
VAR_179=VAR_56.vars.q),
INPUT(_type="submit", _value=VAR_263.T('Search')),
_method='GET')
VAR_229.append(DIV(VAR_7, _class='w2p_wiki_form'))
if VAR_56.vars.q:
VAR_211 = [v.strip() for v in VAR_56.vars.q.split(',')]
VAR_211 = [v.lower() for v in VAR_211 if v]
if VAR_211 or VAR_173 is not None:
VAR_101 = self.auth.db
VAR_343 = VAR_101.wiki_tag.wiki_page.count()
VAR_133 = [VAR_101.wiki_page.id, VAR_101.wiki_page.slug,
VAR_101.wiki_page.title, VAR_101.wiki_page.tags,
VAR_101.wiki_page.can_read, VAR_101.wiki_page.can_edit]
if VAR_217:
VAR_133.append(VAR_101.wiki_page.body)
if VAR_173 is None:
VAR_173 = (VAR_101.wiki_page.id == VAR_101.wiki_tag.wiki_page) &\
(VAR_101.wiki_tag.name.belongs(VAR_211))
VAR_173 = VAR_173 | VAR_101.wiki_page.title.contains(VAR_56.vars.q)
if self.settings.restrict_search and not self.can_manage():
VAR_173 = VAR_173 & (VAR_101.wiki_page.created_by == self.auth.user_id)
VAR_414 = VAR_101(VAR_173).select(VAR_343,
*VAR_133, **dict(VAR_174=orderby or ~VAR_343,
groupby=reduce(lambda VAR_9, VAR_13: VAR_9 | VAR_13, VAR_133),
distinct=True,
VAR_175=limitby))
if VAR_56.extension in ('html', 'load'):
if not VAR_414:
VAR_229.append(DIV(VAR_263.T("No results"),
_class='w2p_wiki_form'))
def VAR_276(VAR_445):
return A(VAR_445, _href=URL(VAR_11='_search', VAR_96=dict(q=VAR_445)))
VAR_250 = [DIV(H3(A(VAR_446.wiki_page.title, _href=URL(
VAR_11=VAR_446.wiki_page.slug))),
MARKMIN(self.first_paragraph(VAR_446.wiki_page))
if VAR_217 else '',
DIV(_class='w2p_wiki_tags',
*[VAR_276(VAR_445.strip()) for VAR_445 in
VAR_446.wiki_page.tags or [] if VAR_445.strip()]),
_class='w2p_wiki_search_item')
for VAR_446 in VAR_414]
VAR_229.append(DIV(_class='w2p_wiki_pages', *VAR_250))
else:
VAR_216 = False
VAR_229 = [VAR_446.wiki_page.as_dict() for VAR_446 in VAR_414]
elif VAR_216:
VAR_229.append(self.cloud()['content'])
if VAR_56.extension == 'load':
return VAR_229
return dict(VAR_229=content)
def VAR_216(self):
VAR_101 = self.auth.db
VAR_343 = VAR_101.wiki_tag.wiki_page.count(distinct=True)
VAR_344 = VAR_101(VAR_101.wiki_tag).select(
VAR_101.wiki_tag.name, VAR_343,
distinct=True,
groupby=VAR_101.wiki_tag.name,
VAR_174=~VAR_343, VAR_175=(0, 20))
if VAR_344:
VAR_9, VAR_13 = VAR_344[0](VAR_343), VAR_344[-1](VAR_343)
def VAR_15(VAR_14):
VAR_415 = 'padding:0 0.2em;line-height:%.2fem;font-VAR_416:%.2fem'
VAR_416 = (1.5 * (VAR_14 - VAR_13) / max(VAR_9 - VAR_13, 1) + 1.3)
return VAR_415 % (1.3, VAR_416)
VAR_250 = []
for VAR_305 in VAR_344:
VAR_250.append(A(VAR_305.wiki_tag.name,
_style=VAR_15(VAR_305(VAR_343)),
_href=URL(VAR_11='_search',
VAR_96=dict(q=VAR_305.wiki_tag.name))))
VAR_250.append(' ')
return dict(VAR_229=DIV(_class='w2p_cloud', *VAR_250))
def VAR_217(self, VAR_158):
VAR_56 = VAR_263.request
if 'render' not in VAR_56.post_vars:
VAR_56.post_vars.render = None
return VAR_158(VAR_56.post_vars)
class CLASS_9(object):
def __init__(
self,
VAR_94,
VAR_218,
VAR_219={}
):
self.config = configparser.ConfigParser(VAR_219)
self.config.read(VAR_94)
if not self.config.has_section(VAR_218):
self.config.add_section(VAR_218)
self.section = VAR_218
self.filename = VAR_94
def FUNC_88(self):
if not(isinstance(VAR_263.session['settings_%s' % self.section], dict)):
VAR_222 = dict(self.config.items(self.section))
else:
VAR_222 = VAR_263.session['settings_%s' % self.section]
return VAR_222
def FUNC_150(self, VAR_61):
for option, VAR_179 in VAR_61:
self.config.set(self.section, option, VAR_179)
try:
self.config.write(open(self.filename, 'w'))
VAR_224 = True
except:
VAR_263.session['settings_%s' % self.section] = dict(self.config.items(self.section))
VAR_224 = False
return VAR_224
if __name__ == '__main__':
import .doctest
doctest.testmod()
|
import .base64
from functools import .reduce
from gluon._compat import .pickle, thread, urllib2, Cookie, StringIO, urlencode
from gluon._compat import .configparser, MIMEBase, MIMEMultipart, MIMEText, Header
from gluon._compat import Encoders, Charset, long, urllib_quote, iteritems
from gluon._compat import .to_bytes, to_native, add_charset, string_types
from gluon._compat import .charset_QP, basestring, unicodeT, to_unicode
from gluon._compat import .urllib2, urlopen
import .datetime
import .logging
import .sys
import glob
import .os
import .re
import .time
import .fnmatch
import .traceback
import .smtplib
import email.utils
import .random
import hmac
import hashlib
import .json
from email import .message_from_string
from gluon.authapi import .AuthAPI
from gluon.contenttype import .contenttype
from gluon.storage import Storage, StorageList, Settings, Messages
from gluon.utils import web2py_uuid, compare
from gluon.fileutils import .read_file, check_credentials
from gluon import *
from gluon.contrib.autolinks import expand_one
from gluon.contrib.markmin.markmin2html import .replace_at_urls
from gluon.contrib.markmin.markmin2html import .replace_autolinks
from gluon.contrib.markmin.markmin2html import .replace_components
from pydal.objects import Row, Set, Query
import gluon.serializers as serializers
VAR_0 = DAL.Table
VAR_1 = DAL.Field
__all__ = ['Mail', 'Auth', 'Recaptcha2', 'Crud', 'Service', 'Wiki',
'PluginManager', 'fetch', 'geocode', 'reverse_geocode', 'prettydate']
VAR_2 = logging.getLogger("web2py")
VAR_3 = lambda: None
def FUNC_0(VAR_4, VAR_5=None):
VAR_11 = VAR_265.request.args
if VAR_4 < 0 and len(VAR_11) >= -VAR_4:
return VAR_11[VAR_4]
elif VAR_4 >= 0 and len(VAR_11) > VAR_4:
return VAR_11[VAR_4]
else:
return VAR_5
def VAR_27(VAR_6, VAR_7, VAR_8=None):
if VAR_6:
if VAR_8 and isinstance(VAR_6, dict):
VAR_6 = actions.get(VAR_8, [])
if not isinstance(VAR_6, (list, tuple)):
VAR_6 = [actions]
[VAR_114(VAR_7) for VAR_114 in VAR_6]
def FUNC_2(*VAR_9):
VAR_14 = []
for VAR_307 in VAR_9:
if isinstance(VAR_307, (list, tuple)):
VAR_14 = b + list(VAR_307)
else:
VAR_14.append(VAR_307)
return VAR_14
def FUNC_3(VAR_10, *VAR_11):
if callable(VAR_10):
redirect(VAR_10(*VAR_11))
else:
redirect(VAR_10)
def FUNC_4(VAR_12, VAR_7):
if VAR_12:
VAR_12 = VAR_12.replace('[VAR_216]', str(VAR_7.vars.id))
if VAR_12[0] == '/' or VAR_12[:4] == 'http':
return VAR_12
return URL(VAR_12)
VAR_13 = re.compile(r"^(\w+)?[:]?(/$|//.*|/\\.*|[~]/.*)")
def FUNC_5(VAR_12):
VAR_32 = VAR_265.request.env.http_host
print(VAR_32)
if not VAR_12:
return None
if VAR_13.match(VAR_12):
VAR_221 = VAR_12.split('/')
if len(VAR_221) > 2 and VAR_221[2] == VAR_32:
return VAR_12
return None
return VAR_12
class CLASS_0(object):
class CLASS_10(MIMEBase):
def __init__(
self,
VAR_83,
VAR_96=None,
VAR_222=None,
VAR_223=None,
VAR_44='utf-8'):
if isinstance(VAR_83, str):
if VAR_96 is None:
VAR_96 = VAR_419.path.basename(VAR_83)
VAR_83 = read_file(VAR_83, 'rb')
else:
if VAR_96 is None:
raise Exception('Missing VAR_418 name')
VAR_83 = payload.read()
VAR_96 = filename.encode(VAR_44)
if VAR_223 is None:
VAR_223 = contenttype(VAR_96)
self.my_filename = VAR_96
self.my_payload = VAR_83
MIMEBase.__init__(self, *VAR_223.split('/', 1))
self.set_payload(VAR_83)
self['Content-Disposition'] = Header('attachment; VAR_96="%s"' % to_native(VAR_96, VAR_44), 'utf-8')
if VAR_222 is not None:
self['Content-Id'] = '<%VAR_280>' % to_native(VAR_222, VAR_44)
Encoders.encode_base64(self)
def __init__(self, VAR_33=None, VAR_34=None, VAR_35=None, VAR_36=True):
VAR_224 = self.settings = Settings()
VAR_224.server = VAR_33
VAR_224.sender = VAR_34
VAR_224.login = VAR_35
VAR_224.tls = VAR_36
VAR_224.timeout = 5 # seconds
VAR_224.hostname = None
VAR_224.ssl = False
VAR_224.cipher_type = None
VAR_224.gpg_home = None
VAR_224.sign = True
VAR_224.sign_passphrase = None
VAR_224.encrypt = True
VAR_224.x509_sign_keyfile = None
VAR_224.x509_sign_certfile = None
VAR_224.x509_sign_chainfile = None
VAR_224.x509_nocerts = False
VAR_224.x509_crypt_certfiles = None
VAR_224.debug = False
VAR_224.lock_keys = True
self.result = {}
self.error = None
def FUNC_14(self,
VAR_37,
VAR_38='[no VAR_38]',
VAR_39='[no VAR_39]',
VAR_40=None,
VAR_41=None,
VAR_42=None,
VAR_43=None,
VAR_34=None,
VAR_44='utf-8',
VAR_45=False,
VAR_19={},
VAR_46=None,
VAR_47=None,
VAR_48=None,
VAR_49=None,
VAR_50=None,
VAR_51=None,
VAR_52=None,
VAR_53=None,
VAR_54=None,
VAR_55=None
):
add_charset('utf-8', charset_QP, charset_QP, 'utf-8')
def FUNC_151(VAR_200):
if [VAR_15 for VAR_15 in VAR_200 if 32 > ord(VAR_15) or ord(VAR_15) > 127]:
return Header(VAR_200.encode('utf-8'), 'utf-8')
else:
return VAR_200
def FUNC_152(VAR_214):
if VAR_45:
VAR_214 = FUNC_151(VAR_214)
return VAR_214
VAR_34 = VAR_34 or self.settings.sender
if not isinstance(self.settings.server, str):
raise Exception('Server VAR_23 not specified')
if not isinstance(VAR_34, str):
raise Exception('Sender VAR_23 not specified')
if not VAR_45 and VAR_40:
VAR_347 = MIMEMultipart('mixed')
elif VAR_45:
if not isinstance(VAR_39, basestring):
VAR_39 = VAR_39.read()
if isinstance(VAR_39, unicodeT):
VAR_214 = VAR_39.encode('utf-8')
elif not VAR_44 == 'utf-8':
VAR_214 = VAR_39.decode(VAR_44).encode('utf-8')
else:
VAR_214 = VAR_39
VAR_347 = MIMEText(VAR_214)
if VAR_37:
if not isinstance(VAR_37, (list, tuple)):
VAR_37 = [to]
else:
raise Exception('Target receiver VAR_23 not specified')
if VAR_43:
if not isinstance(VAR_43, (list, tuple)):
VAR_43 = [reply_to]
if VAR_41:
if not isinstance(VAR_41, (list, tuple)):
VAR_41 = [cc]
if VAR_42:
if not isinstance(VAR_42, (list, tuple)):
VAR_42 = [bcc]
if VAR_39 is None:
VAR_214 = VAR_305 = None
elif isinstance(VAR_39, (list, tuple)):
VAR_214, VAR_305 = VAR_39
elif VAR_39.strip().startswith('<html') and \
VAR_39.strip().endswith('</VAR_305>'):
VAR_214 = self.settings.server == 'gae' and VAR_39 or None
VAR_305 = VAR_39
else:
VAR_214 = VAR_39
VAR_305 = None
if (VAR_214 is not None or VAR_305 is not None) and (not VAR_45):
if VAR_214 is not None:
if not isinstance(VAR_214, basestring):
VAR_214 = VAR_214.read()
if isinstance(VAR_214, unicodeT):
VAR_214 = VAR_214.encode('utf-8')
elif not VAR_44 == 'utf-8':
VAR_214 = VAR_214.decode(VAR_44).encode('utf-8')
if VAR_305 is not None:
if not isinstance(VAR_305, basestring):
VAR_305 = html.read()
if isinstance(VAR_305, unicodeT):
VAR_305 = html.encode('utf-8')
elif not VAR_44 == 'utf-8':
VAR_305 = html.decode(VAR_44).encode('utf-8')
if VAR_214 is not None and VAR_305:
VAR_418 = MIMEMultipart('alternative')
VAR_418.attach(MIMEText(VAR_214, _charset='utf-8'))
VAR_418.attach(MIMEText(VAR_305, 'html', _charset='utf-8'))
elif VAR_214 is not None:
VAR_418 = MIMEText(VAR_214, _charset='utf-8')
elif VAR_305:
VAR_418 = MIMEText(VAR_305, 'html', _charset='utf-8')
if VAR_40:
VAR_347.attach(VAR_418)
else:
VAR_347 = VAR_418
if (VAR_40 is None) or VAR_45:
pass
elif isinstance(VAR_40, (list, tuple)):
for VAR_418 in VAR_40:
VAR_347.attach(VAR_418)
else:
VAR_347.attach(VAR_40)
VAR_40 = [VAR_40]
VAR_47 = cipher_type or self.settings.cipher_type
VAR_48 = VAR_48 if VAR_48 is not None else self.settings.sign
VAR_49 = sign_passphrase or self.settings.sign_passphrase
VAR_50 = VAR_50 if VAR_50 is not None else self.settings.encrypt
if VAR_47 == 'gpg':
if self.settings.gpg_home:
import .os
VAR_419.environ['GNUPGHOME'] = self.settings.gpg_home
if not VAR_48 and not VAR_50:
self.error = "No VAR_48 and no VAR_50 is set but VAR_422 type VAR_37 gpg"
return False
from pyme import .core, errors
from pyme.constants.sig import .mode
if VAR_48:
import .string
core.check_version(None)
VAR_420 = VAR_82.replace(VAR_347.as_string(), '\n', '\VAR_392\n')
VAR_421 = core.Data(VAR_420)
VAR_242 = core.Data()
VAR_15 = core.Context()
VAR_15.set_armor(1)
VAR_15.signers_clear()
for sigkey in VAR_15.op_keylist_all(VAR_34, 1):
if sigkey.can_sign:
VAR_15.signers_add(sigkey)
if not VAR_15.signers_enum(0):
self.error = 'No VAR_200 for signing [%VAR_280]' % VAR_34
return False
VAR_15.set_passphrase_cb(lambda VAR_31, y, z: VAR_49)
try:
VAR_15.op_sign(VAR_421, VAR_242, VAR_118.DETACH)
VAR_242.seek(0, 0)
VAR_83 = MIMEMultipart('signed',
boundary=None,
_subparts=None,
**dict(micalg="pgp-sha1",
protocol="application/pgp-signature"))
payload.attach(VAR_347)
VAR_447 = MIMEBase("application", 'pgp-signature')
VAR_447.set_payload(VAR_242.read())
VAR_83.attach(VAR_447)
VAR_347 = VAR_83
except errors.GPGMEError as ex:
self.error = "GPG VAR_395: %s" % ex.getstring()
return False
if VAR_50:
core.check_version(None)
VAR_421 = core.Data(VAR_347.as_string())
VAR_422 = core.Data()
VAR_15 = core.Context()
VAR_15.set_armor(1)
VAR_423 = []
VAR_424 = VAR_37[:]
if VAR_41:
VAR_424.extend(VAR_41)
if VAR_42:
VAR_424.extend(VAR_42)
for addr in VAR_424:
VAR_15.op_keylist_start(addr, 0)
VAR_392 = VAR_15.op_keylist_next()
if VAR_392 is None:
self.error = 'No VAR_200 for [%VAR_280]' % addr
return False
VAR_423.append(VAR_392)
try:
VAR_15.op_encrypt(VAR_423, 1, VAR_421, VAR_422)
cipher.seek(0, 0)
VAR_83 = MIMEMultipart('encrypted',
boundary=None,
_subparts=None,
**dict(protocol="application/pgp-encrypted"))
VAR_447 = MIMEBase("application", 'pgp-encrypted')
VAR_447.set_payload("Version: 1\VAR_392\n")
VAR_83.attach(VAR_447)
VAR_447 = MIMEBase("application", 'octet-stream')
VAR_447.set_payload(VAR_422.read())
VAR_83.attach(VAR_447)
except errors.GPGMEError as ex:
self.error = "GPG VAR_395: %s" % ex.getstring()
return False
elif VAR_47 == 'x509':
if not VAR_48 and not VAR_50:
self.error = "No VAR_48 and no VAR_50 is set but VAR_422 type VAR_37 x509"
return False
import .os
VAR_51 = x509_sign_keyfile or self.settings.x509_sign_keyfile
VAR_52 = x509_sign_chainfile or self.settings.x509_sign_chainfile
VAR_53 = x509_sign_certfile or self.settings.x509_sign_certfile or \
VAR_51 or self.settings.x509_sign_certfile
VAR_54 = x509_crypt_certfiles or self.settings.x509_crypt_certfiles
VAR_55 = x509_nocerts or\
self.settings.x509_nocerts
try:
from M2Crypto import BIO, SMIME, X509
except Exception as e:
self.error = "Can't load M2Crypto module"
return False
VAR_425 = BIO.MemoryBuffer(VAR_347.as_string())
VAR_280 = SMIME.SMIME()
if VAR_48:
try:
VAR_460 = BIO.openfile(VAR_51)\
if VAR_419.path.isfile(VAR_51)\
else BIO.MemoryBuffer(VAR_51)
VAR_461 = BIO.openfile(VAR_53)\
if VAR_419.path.isfile(VAR_53)\
else BIO.MemoryBuffer(VAR_53)
VAR_280.load_key_bio(VAR_460, VAR_461,
VAR_27=lambda VAR_31: VAR_49)
if VAR_52:
VAR_463 = X509.X509_Stack()
VAR_467 = X509.load_cert(VAR_52)\
if VAR_419.path.isfile(VAR_52)\
else X509.load_cert_string(VAR_52)
VAR_463.push(VAR_467)
VAR_280.set_x509_stack(VAR_463)
except Exception as e:
self.error = "Something went wrong on certificate / private VAR_200 loading: <%VAR_280>" % str(e)
return False
try:
if VAR_55:
VAR_468 = SMIME.PKCS7_NOCERTS
else:
VAR_468 = 0
if not VAR_50:
VAR_468 += SMIME.PKCS7_DETACHED
VAR_462 = VAR_280.sign(VAR_425, VAR_468=flags)
VAR_425 = BIO.MemoryBuffer(VAR_347.as_string(
)) # Recreate coz VAR_48() has consumed it.
except Exception as e:
self.error = "Something went wrong on signing: <%VAR_280> %s" % (
str(e), str(VAR_468))
return False
if VAR_50:
try:
VAR_463 = X509.X509_Stack()
if not isinstance(VAR_54, (list, tuple)):
VAR_54 = [x509_crypt_certfiles]
for crypt_certfile in VAR_54:
VAR_469 = X509.load_cert(crypt_certfile)\
if VAR_419.path.isfile(crypt_certfile)\
else X509.load_cert_string(crypt_certfile)
VAR_463.push(VAR_469)
VAR_280.set_x509_stack(VAR_463)
VAR_280.set_cipher(SMIME.Cipher('des_ede3_cbc'))
VAR_464 = BIO.MemoryBuffer()
if VAR_48:
VAR_280.write(VAR_464, VAR_462)
else:
VAR_464.write(VAR_347.as_string())
VAR_462 = VAR_280.encrypt(VAR_464)
except Exception as e:
self.error = "Something went wrong on encrypting: <%VAR_280>" % str(e)
return False
VAR_426 = BIO.MemoryBuffer()
if VAR_50:
VAR_280.write(VAR_426, VAR_462)
else:
if VAR_48:
VAR_280.write(VAR_426, VAR_462, VAR_425, SMIME.PKCS7_DETACHED)
else:
VAR_426.write('\VAR_392\n')
VAR_426.write(VAR_347.as_string())
VAR_426.close()
VAR_427 = str(VAR_426.read())
VAR_83 = message_from_string(VAR_427)
else:
VAR_83 = VAR_347
if VAR_46:
VAR_83['From'] = FUNC_152(to_unicode(VAR_46, VAR_44))
else:
VAR_83['From'] = FUNC_152(to_unicode(VAR_34, VAR_44))
VAR_225 = VAR_37[:]
if VAR_37:
VAR_83['To'] = FUNC_152(to_unicode(', '.join(VAR_37), VAR_44))
if VAR_43:
VAR_83['Reply-To'] = FUNC_152(to_unicode(', '.join(VAR_43), VAR_44))
if VAR_41:
VAR_83['Cc'] = FUNC_152(to_unicode(', '.join(VAR_41), VAR_44))
VAR_37.extend(VAR_41)
if VAR_42:
VAR_37.extend(VAR_42)
VAR_83['Subject'] = FUNC_152(to_unicode(VAR_38, VAR_44))
VAR_83['Date'] = email.utils.formatdate()
for VAR_348, v in iteritems(VAR_19):
VAR_83[VAR_348] = FUNC_152(to_unicode(v, VAR_44))
VAR_226 = {}
try:
if self.settings.server == 'logging':
VAR_428 = 'email not sent\VAR_182%VAR_280\nFrom: %VAR_280\nTo: %VAR_280\nSubject: %VAR_280\VAR_182\VAR_182%VAR_280\VAR_182%VAR_280\n' % \
('-' * 40, VAR_34, ', '.join(VAR_37), VAR_38, VAR_214 or VAR_305, '-' * 40)
VAR_2.warning(VAR_428)
elif self.settings.server.startswith('logging:'):
VAR_428 = 'email not sent\VAR_182%VAR_280\nFrom: %VAR_280\nTo: %VAR_280\nSubject: %VAR_280\VAR_182\VAR_182%VAR_280\VAR_182%VAR_280\n' % \
('-' * 40, VAR_34, ', '.join(VAR_37), VAR_38, VAR_214 or VAR_305, '-' * 40)
open(self.settings.server[8:], 'a').write(VAR_428)
elif self.settings.server == 'gae':
VAR_465 = dict()
if VAR_41:
VAR_465['cc'] = VAR_41
if VAR_42:
VAR_465['bcc'] = VAR_42
if VAR_43:
VAR_465['reply_to'] = VAR_43
from google.appengine.api import .mail
VAR_40 = VAR_40 and [mail.Attachment(
VAR_9.my_filename,
VAR_9.my_payload,
VAR_222='<VAR_418-%VAR_280>' % VAR_348
) for VAR_348, VAR_9 in enumerate(VAR_40) if not VAR_45]
if VAR_40:
VAR_226 = mail.send_mail(
VAR_34=sender, VAR_37=VAR_225,
VAR_38=to_unicode(VAR_38, VAR_44),
VAR_84=to_unicode(VAR_214 or '', VAR_44),
VAR_305=html,
VAR_40=attachments, **VAR_465)
elif VAR_305 and (not VAR_45):
VAR_226 = mail.send_mail(
VAR_34=sender, VAR_37=VAR_225,
VAR_38=to_unicode(VAR_38, VAR_44), VAR_84=to_unicode(VAR_214 or '', VAR_44), VAR_305=html, **VAR_465)
else:
VAR_226 = mail.send_mail(
VAR_34=sender, VAR_37=VAR_225,
VAR_38=to_unicode(VAR_38, VAR_44), VAR_84=to_unicode(VAR_214 or '', VAR_44), **VAR_465)
elif self.settings.server == 'aws':
import .boto3
from botocore.exceptions import ClientError
VAR_470 = boto3.client('ses')
try:
VAR_45 = {'Data': VAR_83.as_string()}
VAR_246 = VAR_470.send_raw_email(RawMessage=VAR_45,
Source=VAR_34,
Destinations=VAR_37)
return True
except ClientError as e:
return False
else:
VAR_471 = self.settings.server.split(':')
VAR_352 = dict(timeout=self.settings.timeout)
VAR_472 = smtplib.SMTP_SSL if self.settings.ssl else smtplib.SMTP
VAR_33 = VAR_472(*VAR_471, **VAR_352)
try:
if self.settings.tls and not self.settings.ssl:
VAR_33.ehlo(self.settings.hostname)
VAR_33.starttls()
VAR_33.ehlo(self.settings.hostname)
if self.settings.login:
VAR_33.login(*self.settings.login.split(':', 1))
VAR_226 = VAR_33.sendmail(VAR_34, VAR_37, VAR_83.as_string())
finally:
try:
VAR_33.quit()
except smtplib.SMTPException:
try:
VAR_33.close()
except Exception:
pass
except Exception as e:
VAR_2.warning('Mail.send failure:%s' % e)
self.result = VAR_226
self.error = e
return False
self.result = VAR_226
self.error = None
return True
class CLASS_1(DIV):
VAR_56 = 'https://www.google.com/recaptcha/api.js'
VAR_57 = 'https://www.google.com/recaptcha/api/siteverify'
def __init__(self,
VAR_58=None,
VAR_59='',
VAR_60='',
VAR_61='invalid',
VAR_62='Verify:',
VAR_63=None,
VAR_64='',
):
VAR_58 = VAR_58 or VAR_265.request
self.request_vars = VAR_58 and VAR_58.vars or VAR_265.request.vars
self.remote_addr = VAR_58.env.remote_addr
self.public_key = VAR_59
self.private_key = VAR_60
self.errors = Storage()
self.error_message = VAR_61
self.components = []
self.attributes = {}
self.label = VAR_62
self.options = VAR_63 or {}
self.comment = VAR_64
def FUNC_15(self):
VAR_227 = self.request_vars.pop('g-recaptcha-response', None)
VAR_228 = self.remote_addr
if not VAR_227:
self.errors['captcha'] = self.error_message
return False
VAR_229 = urlencode({
'secret': self.private_key,
'remoteip': VAR_228,
'response': VAR_227,
}).encode('utf-8')
VAR_58 = urllib2.Request(
VAR_12=self.VERIFY_SERVER,
VAR_18=to_bytes(VAR_229),
VAR_19={'Content-type': 'application/VAR_31-www-VAR_7-urlencoded',
'User-agent': 'reCAPTCHA Python'})
VAR_230 = urlopen(VAR_58)
VAR_231 = VAR_230.read()
VAR_230.close()
try:
VAR_349 = FUNC_98.loads(to_native(VAR_231))
except:
self.errors['captcha'] = self.error_message
return False
if VAR_349.get('success', False):
self.request_vars.captcha = ''
return True
else:
self.errors['captcha'] = self.error_message
return False
def VAR_405(self):
VAR_232 = self.API_URI
VAR_233 = self.options.pop('hl', None)
if VAR_233:
VAR_232 = self.API_URI + '?VAR_233=%s' % VAR_233
VAR_59 = self.public_key
self.options['sitekey'] = VAR_59
VAR_234 = DIV(
SCRIPT(_src=VAR_232, _async='', _defer=''),
DIV(_class="g-recaptcha", VAR_18=self.options),
TAG.noscript(XML("""
<div VAR_16="width: 302px; height: 352px;">
<div VAR_16="width: 302px; height: 352px; VAR_4: relative;">
<div VAR_16="width: 302px; height: 352px; VAR_4: absolute;">
<iframe src="https://www.google.com/recaptcha/api/fallback?VAR_348=%(VAR_59)s"
frameborder="0" scrolling="no"
VAR_16="width: 302px; height:352px; border-VAR_16: none;">
</iframe>
</div>
<div VAR_16="width: 250px; height: 80px; VAR_4: absolute; border-VAR_16: none;
bottom: 21px; left: 25px; margin: 0px; padding: 0px; right: 25px;">
<textarea VAR_216="g-recaptcha-response" VAR_149="g-recaptcha-response"
class="g-recaptcha-response"
VAR_16="width: 250px; height: 80px; border: 1px solid #c1c1c1;
margin: 0px; padding: 0px; resize: none;" VAR_180="">
</textarea>
</div>
</div>
</div>""" % dict(VAR_59=public_key))
)
)
if not self.errors.captcha:
return XML(VAR_234).xml()
else:
VAR_234.append(DIV(self.errors['captcha'], _class='error'))
return XML(VAR_234).xml()
def FUNC_6(VAR_7, VAR_9, VAR_14, VAR_15, VAR_16, VAR_17, VAR_4=-1):
if VAR_16 == "divs":
VAR_7[0].insert(VAR_4, DIV(DIV(LABEL(VAR_9), _class='w2p_fl'),
DIV(VAR_14, _class='w2p_fw'),
DIV(VAR_15, _class='w2p_fc'),
VAR_17=_id))
elif VAR_16 == "table2cols":
VAR_7[0].insert(VAR_4, TR(TD(LABEL(VAR_9), _class='w2p_fl'),
TD(VAR_15, _class='w2p_fc')))
VAR_7[0].insert(VAR_4 + 1, TR(TD(VAR_14, _class='w2p_fw'),
_colspan=2, VAR_17=_id))
elif VAR_16 == "ul":
VAR_7[0].insert(VAR_4, LI(DIV(LABEL(VAR_9), _class='w2p_fl'),
DIV(VAR_14, _class='w2p_fw'),
DIV(VAR_15, _class='w2p_fc'),
VAR_17=_id))
elif VAR_16 == "bootstrap":
VAR_7[0].insert(VAR_4, DIV(LABEL(VAR_9, _class='control-label'),
DIV(VAR_14, SPAN(VAR_15, _class='inline-help'),
_class='controls'),
_class='control-group', VAR_17=_id))
elif VAR_16 in ("bootstrap3_inline", "bootstrap4_inline"):
VAR_7[0].insert(VAR_4, DIV(LABEL(VAR_9, _class='control-VAR_62 col-sm-3'),
DIV(VAR_14, SPAN(VAR_15, _class='help-block'),
_class='col-sm-9'),
_class='form-VAR_406 row', VAR_17=_id))
elif VAR_16 in ("bootstrap3_stacked", "bootstrap4_stacked"):
VAR_7[0].insert(VAR_4, DIV(LABEL(VAR_9, _class='control-label'),
VAR_14, SPAN(VAR_15, _class='help-block'),
_class='form-VAR_406 row', VAR_17=_id))
else:
VAR_7[0].insert(VAR_4, TR(TD(LABEL(VAR_9), _class='w2p_fl'),
TD(VAR_14, _class='w2p_fw'),
TD(VAR_15, _class='w2p_fc'), VAR_17=_id))
class CLASS_2(object):
def __init__(self,
VAR_65,
VAR_66,
VAR_67='HS256',
VAR_68=True,
VAR_69=30,
VAR_70=60 * 5,
VAR_71=True,
VAR_72=60 * 60,
VAR_73='Bearer',
VAR_74=None,
VAR_75='username',
VAR_76='password',
VAR_77='Login required',
VAR_78=None,
VAR_79=None,
VAR_80=None,
VAR_81=4 * 1024,
):
self.secret_key = VAR_66
self.auth = VAR_65
self.algorithm = VAR_67
if self.algorithm not in ('HS256', 'HS384', 'HS512'):
raise NotImplementedError('Algorithm %VAR_280 not allowed' % VAR_67)
self.verify_expiration = VAR_68
self.leeway = VAR_69
self.expiration = VAR_70
self.allow_refresh = VAR_71
self.refresh_expiration_delta = VAR_72
self.header_prefix = VAR_73
self.jwt_add_header = VAR_74 or {}
VAR_235 = {'alg': self.algorithm, 'typ': 'JWT'}
for VAR_348, v in iteritems(self.jwt_add_header):
VAR_235[VAR_348] = v
self.cached_b64h = self.jwt_b64e(FUNC_98.dumps(VAR_235))
VAR_236 = {
'HS256': hashlib.sha256,
'HS384': hashlib.sha384,
'HS512': hashlib.sha512
}
self.digestmod = VAR_236[VAR_67]
self.user_param = VAR_75
self.pass_param = VAR_76
self.realm = VAR_77
self.salt = VAR_78
self.additional_payload = VAR_79
self.before_authorization = VAR_80
self.max_header_length = VAR_81
self.recvd_token = None
@staticmethod
def FUNC_17(VAR_82):
VAR_82 = to_bytes(VAR_82)
return base64.urlsafe_b64encode(VAR_82).strip(b'=')
@staticmethod
def FUNC_18(VAR_82):
VAR_82 = to_bytes(VAR_82, 'ascii', 'ignore')
return base64.urlsafe_b64decode(VAR_82 + b'=' * (-len(VAR_82) % 4))
def FUNC_19(self, VAR_83):
VAR_86 = to_bytes(self.secret_key)
if self.salt:
if callable(self.salt):
VAR_86 = "%VAR_280$%s" % (VAR_86, self.salt(VAR_83))
else:
VAR_86 = "%VAR_280$%s" % (VAR_86, self.salt)
if isinstance(VAR_86, unicodeT):
VAR_86 = secret.encode('ascii', 'ignore')
VAR_237 = self.cached_b64h
VAR_238 = self.jwt_b64e(serializers.json(VAR_83))
VAR_239 = VAR_237 + b'.' + VAR_238
VAR_240 = hmac.new(VAR_200=VAR_86, msg=VAR_239, digestmod=self.digestmod)
VAR_241 = self.jwt_b64e(VAR_240.digest())
return to_native(VAR_239 + b'.' + VAR_241)
def FUNC_20(self, VAR_84, VAR_85, VAR_86):
VAR_240 = hmac.new(VAR_200=VAR_86, msg=VAR_84, digestmod=self.digestmod)
return compare(self.jwt_b64e(VAR_240.digest()), VAR_85)
def FUNC_21(self, VAR_87):
VAR_87 = to_bytes(VAR_87, 'utf-8', 'strict')
VAR_84, VAR_242 = VAR_87.rsplit(b'.', 1)
VAR_237, VAR_243 = VAR_84.split(b'.', 1)
if VAR_237 != self.cached_b64h:
raise HTTP(400, 'Invalid JWT Header')
VAR_86 = self.secret_key
VAR_91 = serializers.loads_json(to_native(self.jwt_b64d(VAR_243)))
if self.salt:
if callable(self.salt):
VAR_86 = "%VAR_280$%s" % (VAR_86, self.salt(VAR_91))
else:
VAR_86 = "%VAR_280$%s" % (VAR_86, self.salt)
VAR_86 = to_bytes(VAR_86, 'ascii', 'ignore')
if not self.verify_signature(VAR_84, VAR_242, VAR_86):
raise HTTP(400, 'Token VAR_85 is invalid')
if self.verify_expiration:
VAR_194 = time.mktime(datetime.datetime.utcnow().timetuple())
if VAR_91['exp'] + self.leeway < VAR_194:
raise HTTP(400, 'Token is expired')
if callable(self.before_authorization):
self.before_authorization(VAR_91)
return VAR_91
def FUNC_22(self, VAR_88):
VAR_194 = time.mktime(datetime.datetime.utcnow().timetuple())
VAR_244 = VAR_194 + self.expiration
VAR_83 = dict(
VAR_104=VAR_88['hmac_key'],
user_groups=VAR_88['user_groups'],
VAR_142=VAR_88['user'].as_dict(),
iat=VAR_194,
exp=VAR_244
)
return VAR_83
def FUNC_23(self, VAR_89):
VAR_194 = time.mktime(datetime.datetime.utcnow().timetuple())
if self.verify_expiration:
VAR_350 = VAR_89['exp']
if VAR_350 + self.leeway < VAR_194:
raise HTTP(400, 'Token already expired')
VAR_245 = VAR_89.get('orig_iat') or VAR_89['iat']
if VAR_245 + self.refresh_expiration_delta < VAR_194:
raise HTTP(400, 'Token issued too long ago')
VAR_244 = VAR_194 + self.expiration
VAR_89.update(
VAR_245=orig_iat,
iat=VAR_194,
exp=VAR_244,
VAR_104=web2py_uuid()
)
self.alter_payload(VAR_89)
return VAR_89
def FUNC_24(self, VAR_83):
if self.additional_payload:
if callable(self.additional_payload):
VAR_83 = self.additional_payload(VAR_83)
elif isinstance(self.additional_payload, dict):
VAR_83.update(self.additional_payload)
return VAR_83
def FUNC_25(self, VAR_90='_token'):
VAR_58 = VAR_265.request
VAR_246 = VAR_265.response
VAR_140 = VAR_265.session
VAR_140.forget(VAR_246)
VAR_247 = None
VAR_248 = None
VAR_87 = None
try:
VAR_87 = self.recvd_token or self.get_jwt_token_from_request(VAR_90)
except HTTP:
pass
if VAR_87:
if not self.allow_refresh:
raise HTTP(403, 'Refreshing VAR_87 is not allowed')
VAR_91 = self.load_token(VAR_87)
VAR_351 = self.refresh_token(VAR_91)
VAR_248 = {'token': self.generate_token(VAR_351)}
elif self.user_param in VAR_58.vars and self.pass_param in VAR_58.vars:
VAR_124 = VAR_58.vars[self.user_param]
VAR_132 = VAR_58.vars[self.pass_param]
VAR_247 = self.auth.login_bare(VAR_124, VAR_132)
else:
VAR_247 = self.auth.user
self.auth.login_user(VAR_247)
if VAR_247:
VAR_83 = self.serialize_auth_session(VAR_140.auth)
self.alter_payload(VAR_83)
VAR_248 = {'token': self.generate_token(VAR_83)}
elif VAR_248 is None:
raise HTTP(401,
'Not Authorized - need VAR_37 be logged in, VAR_37 pass VAR_9 VAR_87 '
'for refresh or VAR_124 and VAR_132 for login',
**{'WWW-Authenticate': 'JWT VAR_77="%s"' % self.realm})
VAR_246.headers['Content-Type'] = 'application/json'
return serializers.json(VAR_248)
def FUNC_26(self, VAR_91):
self.auth.user = Storage(VAR_91['user'])
self.auth.user_groups = VAR_91['user_groups']
self.auth.hmac_key = VAR_91['hmac_key']
def FUNC_27(self, VAR_90='_token'):
VAR_87 = None
VAR_249 = VAR_265.request.env.http_authorization
if VAR_249:
VAR_221 = VAR_249.split()
if VAR_221[0].lower() != self.header_prefix.lower():
raise HTTP(400, 'Invalid JWT header')
elif len(VAR_221) == 1:
raise HTTP(400, 'Invalid JWT header, missing token')
elif len(VAR_221) > 2:
raise HTTP(400, 'Invalid JWT header, VAR_87 contains spaces')
VAR_87 = VAR_221[1]
else:
VAR_87 = VAR_265.request.vars.get(VAR_90)
if VAR_87 is None:
raise HTTP(400, 'JWT header not found and JWT parameter {} missing in request'.format(VAR_90))
self.recvd_token = VAR_87
return VAR_87
def FUNC_28(self, VAR_92=None, VAR_93=True, VAR_68=True, VAR_90='_token'):
def FUNC_153(VAR_114):
def VAR_10(*VAR_11, **VAR_352):
try:
VAR_87 = self.get_jwt_token_from_request(VAR_90=token_param)
except HTTP as e:
if VAR_93:
raise e
VAR_87 = None
if VAR_87 and len(VAR_87) < self.max_header_length:
VAR_448 = self.verify_expiration
try:
self.verify_expiration = VAR_68
VAR_91 = self.load_token(VAR_87)
except ValueError:
raise HTTP(400, 'Invalid JWT header, wrong VAR_87 format')
finally:
self.verify_expiration = VAR_448
self.inject_token(VAR_91)
return VAR_114(*VAR_11, **VAR_352)
VAR_10.__doc__ = VAR_114.__doc__
VAR_10.__name__ = VAR_114.__name__
VAR_10.__dict__.update(VAR_114.__dict__)
return VAR_10
return FUNC_153
class CLASS_3(AuthAPI):
# Default settings for the Auth subclass; extends AuthAPI.default_settings.
VAR_94 = dict(AuthAPI.default_settings,
              allow_basic_login=False,
              allow_basic_login_only=False,
              allow_delete_accounts=False,
              alternate_requires_registration=False,
              auth_manager_role=None,
              auth_two_factor_enabled=False,
              auth_two_factor_tries_left=3,
              bulk_register_enabled=False,
              VAR_234=None,
              cas_maps=None,
              client_side=True,
              VAR_275=None,
              hideerror=False,
              label_separator=None,
              login_after_password_change=True,
              login_after_registration=False,
              login_captcha=None,
              login_specify_error=False,
              long_expiration=3600 * 30 * 24,  # one month
              VAR_103=None,
              manager_actions={},
              VAR_272=False,
              on_failed_authentication=lambda VAR_31: redirect(VAR_31),
              pre_registration_div=None,
              prevent_open_redirect_attacks=True,
              prevent_password_reset_attacks=True,
              profile_fields=None,
              register_captcha=None,
              register_fields=None,
              register_verify_password=True,
              remember_me_form=True,
              reset_password_requires_verification=False,
              retrieve_password_captcha=None,
              retrieve_username_captcha=None,
              showid=False,
              FUNC_34=None,
              table_cas_name='auth_cas',
              table_event=None,
              VAR_382=None,
              VAR_284=None,
              table_permission=None,
              table_token_name='auth_token',
              VAR_256=None,
              two_factor_authentication_group=None,
              VAR_129=['email'],
              VAR_289=Settings()
              )
# Default user-facing messages; extends AuthAPI.default_messages.
VAR_95 = dict(AuthAPI.default_messages,
              access_denied='Insufficient privileges',
              bulk_invite_body='You have been invited VAR_37 join %(site)VAR_280, click %(VAR_278)VAR_280 VAR_37 complete '
                               'the process',
              bulk_invite_subject='Invitation VAR_37 join %(site)s',
              delete_label='Check VAR_37 delete',
              email_sent='Email sent',
              email_verified='Email verified',
              function_disabled='Function disabled',
              impersonate_log='User %(VAR_216)VAR_280 is impersonating %(other_id)s',
              invalid_reset_password='Invalid reset password',
              invalid_two_factor_code='Incorrect VAR_311. {0} more attempt(VAR_280) remaining.',
              is_empty="Cannot be empty",
              label_client_ip='Client IP',
              label_description='Description',
              label_email='E-mail',
              label_first_name='First name',
              label_group_id='Group ID',
              label_last_name='Last name',
              label_name='Name',
              label_origin='Origin',
              label_password='Password',
              label_record_id='Record ID',
              label_registration_id='Registration identifier',
              label_registration_key='Registration key',
              label_remember_me="Remember me (for 30 days)",
              label_reset_password_key='Reset Password key',
              label_role='Role',
              label_table_name='Object or VAR_154 name',
              label_time_stamp='Timestamp',
              label_two_factor='Authentication code',
              label_user_id='User ID',
              label_username='Username',
              login_button='Log In',
              login_disabled='Login disabled by administrator',
              new_password='New password',
              new_password_sent='A new VAR_132 was emailed VAR_37 you',
              old_password='Old password',
              password_change_button='Change password',
              password_reset_button='Request reset password',
              profile_save_button='Apply changes',
              register_button='Sign Up',
              FUNC_61='Click on the VAR_278 %(VAR_278)VAR_280 VAR_37 reset your password',
              reset_password_log='User %(VAR_216)VAR_280 Password reset',
              reset_password_subject='Password reset',
              FUNC_64='Your VAR_132 is: %(VAR_132)s',
              retrieve_password_log='User %(VAR_216)VAR_280 Password retrieved',
              retrieve_password_subject='Password retrieve',
              retrieve_two_factor_code='Your temporary VAR_35 VAR_311 is {0}',
              retrieve_two_factor_code_subject='Two-step Login Authentication Code',
              FUNC_54='Your VAR_124 is: %(VAR_124)s',
              retrieve_username_log='User %(VAR_216)VAR_280 Username retrieved',
              retrieve_username_subject='Username retrieve',
              submit_button='Submit',
              two_factor_comment='This VAR_311 was emailed VAR_37 you and is VAR_93 for VAR_35.',
              unable_send_email='Unable VAR_37 FUNC_14 email',
              username_sent='Your VAR_124 was emailed VAR_37 you',
              FUNC_53='Welcome %(VAR_124)VAR_280! Click on the VAR_278 %(VAR_278)VAR_280 VAR_37 FUNC_161 your email',
              verify_email_log='User %(VAR_216)VAR_280 Verification email sent',
              verify_email_subject='Email verification',
              verify_password='Verify Password',
              verify_password_comment='please input your VAR_132 again'
              )
"""
Class for authentication, authorization, VAR_147 based access control.
Includes:
- registration and FUNC_66
- VAR_35 and FUNC_50
- VAR_124 and VAR_132 retrieval
- event logging
- VAR_147 creation and assignment
- VAR_142 defined VAR_406/VAR_147 based VAR_287
Args:
VAR_101: is there for legacy but unused (awful)
VAR_102: has VAR_37 be the database where VAR_37 VAR_339 VAR_119 for authentication
VAR_103: `CLASS_0(...)` or None (no VAR_103) or True (make VAR_9 VAR_103)
VAR_104: can be VAR_9 VAR_104 or VAR_104=CLASS_3.get_or_create_key()
VAR_105: (where is the VAR_142 VAR_114?)
VAR_107: (delegate authentication VAR_37 the URL, CAS2)
Authentication Example::
from gluon.contrib.utils import *
mail=CLASS_0()
mail.settings.server='smtp.gmail.com:587'
mail.settings.sender='you@somewhere.com'
mail.settings.login='username:password'
VAR_65=CLASS_3(VAR_102)
VAR_65.settings.mailer=mail
VAR_65.define_tables()
def authentication():
return dict(VAR_7=VAR_65())
Exposes:
- `http://.../{application}/{VAR_105}/authentication/VAR_35`
- `http://.../{application}/{VAR_105}/authentication/FUNC_50`
- `http://.../{application}/{VAR_105}/authentication/FUNC_52`
- `http://.../{application}/{VAR_105}/authentication/FUNC_53`
- `http://.../{application}/{VAR_105}/authentication/FUNC_54`
- `http://.../{application}/{VAR_105}/authentication/FUNC_64`
- `http://.../{application}/{VAR_105}/authentication/FUNC_61`
- `http://.../{application}/{VAR_105}/authentication/FUNC_66`
- `http://.../{application}/{VAR_105}/authentication/FUNC_65`
On registration VAR_9 VAR_406 with VAR_147=new_user.id is created
and VAR_142 is given VAR_286 of this VAR_406.
You can VAR_339 VAR_9 VAR_406 with::
VAR_148=VAR_65.add_group('Manager', 'can access the manage action')
VAR_65.add_permission(VAR_148, 'access VAR_37 manage')
Here "access VAR_37 manage" is just VAR_9 VAR_142 defined VAR_82.
You can give access VAR_37 VAR_9 VAR_142::
VAR_65.add_membership(VAR_148, VAR_144)
If VAR_142 VAR_216 is omitted, the logged in VAR_142 is assumed
Then you can decorate any VAR_114::
@VAR_65.requires_permission('access VAR_37 manage')
def manage():
return dict()
You can restrict VAR_9 VAR_287 VAR_37 VAR_9 specific VAR_154::
VAR_65.add_permission(VAR_148, 'edit', VAR_102.sometable)
@VAR_65.requires_permission('edit', VAR_102.sometable)
Or VAR_37 VAR_9 specific VAR_169::
VAR_65.add_permission(VAR_148, 'edit', VAR_102.sometable, 45)
@VAR_65.requires_permission('edit', VAR_102.sometable, 45)
If authorization is not granted calls::
VAR_65.settings.on_failed_authorization
Other VAR_63::
VAR_65.settings.mailer=None
VAR_65.settings.expiration=3600 # seconds
...
...
"""
@staticmethod
def FUNC_29(VAR_96=None, VAR_97='sha512'):
    """Return the persisted HMAC key, creating one if the key file is absent.

    VAR_96: path of the key file (defaults to <app>/private/auth.key).
    VAR_97: digest algorithm name used as the key prefix for new keys.
    """
    VAR_58 = VAR_265.request
    if not VAR_96:
        VAR_96 = VAR_419.path.join(VAR_58.folder, 'private', 'auth.key')
    if VAR_419.path.exists(VAR_96):
        # fixed: use context managers so the file handles are always closed
        with open(VAR_96, 'r') as key_file:
            VAR_200 = key_file.read().strip()
    else:
        VAR_200 = VAR_97 + ':' + web2py_uuid()
        with open(VAR_96, 'w') as key_file:
            key_file.write(VAR_200)
    return VAR_200
def VAR_12(self, VAR_10=None, VAR_11=None, VAR_98=None, VAR_99=False):
    """Build a URL to an action of the auth controller.

    VAR_10: function name, VAR_11: args, VAR_98: vars, VAR_99: scheme flag.
    """
    if VAR_11 is None:
        VAR_11 = []
    if VAR_98 is None:
        VAR_98 = {}
    VAR_32 = VAR_99 and self.settings.host
    # fixed: the call previously forwarded dangling names
    # (FUNC_118, args, vars, scheme, host) instead of the locals above
    return URL(VAR_15=self.settings.controller,
               VAR_10=VAR_10, VAR_11=VAR_11, VAR_98=VAR_98, VAR_99=VAR_99, VAR_32=VAR_32)
def FUNC_31(self):
    # URL of the current request, preserving its args and GET vars.
    return URL(VAR_11=VAR_265.request.args, VAR_98=VAR_265.request.get_vars)
def FUNC_32(self, VAR_32, VAR_100=None):
    """Validate/choose a host name.

    With a host given, it must match one of the fnmatch patterns in
    VAR_100 (when provided) or HTTP 403 is raised.  Without a host, the
    first pattern is used, falling back to 'localhost'.
    """
    if not VAR_32:
        return VAR_100[0] if VAR_100 else 'localhost'
    if VAR_100 and not any(fnmatch.fnmatch(VAR_32, pattern) for pattern in VAR_100):
        raise HTTP(403, "Invalid Hostname")
    return VAR_32
def __init__(self, VAR_101=None, VAR_102=None, VAR_103=True,
             VAR_104=None, VAR_105='default', VAR_106='user',
             VAR_107=None, VAR_85=True, VAR_108=False,
             VAR_109=True, VAR_110=None,
             VAR_111=None, VAR_112=None, VAR_100=None):
    """Initialize the Auth object: restore the session user, build default
    settings/messages, and wire up the optional JWT handler.

    Fixed several dangling names left in the settings construction
    (url_index, cas_provider, controller, function, mailer, hmac_key,
    host) which would have raised NameError; each now forwards the
    corresponding parameter/local.
    """
    # legacy calling convention: first positional argument may be the DAL
    if not VAR_102 and VAR_101 and isinstance(VAR_101, DAL):
        VAR_102 = VAR_101
    self.db = VAR_102
    self.environment = VAR_265
    self.csrf_prevention = VAR_109
    VAR_58 = VAR_265.request
    VAR_140 = VAR_265.session
    VAR_65 = VAR_140.auth
    self.user_groups = VAR_65 and VAR_65.user_groups or {}
    if VAR_108:
        VAR_58.requires_https()
    VAR_194 = VAR_58.now
    if VAR_65:
        VAR_353 = datetime.timedelta(days=0, seconds=VAR_65.expiration)
        if VAR_65.last_visit and VAR_65.last_visit + VAR_353 > VAR_194:
            self.user = VAR_65.user
            # throttle session writes: refresh last_visit at most every
            # expiration/10 seconds
            if (VAR_194 - VAR_65.last_visit).seconds > (VAR_65.expiration // 10):
                VAR_65.last_visit = VAR_194
        else:
            # session expired: drop auth and renew the session id
            self.user = None
            if VAR_140.auth:
                del VAR_140.auth
            VAR_140.renew(clear_session=True)
    else:
        self.user = None
        if VAR_140.auth:
            del VAR_140.auth
    VAR_111 = VAR_111 or URL(VAR_105, 'index')  # fixed dangling `url_index`
    VAR_250 = URL(VAR_105, VAR_106, VAR_11='login',
                  extension=VAR_110)
    VAR_224 = self.settings = Settings()
    VAR_224.update(CLASS_3.default_settings)
    VAR_32 = self.select_host(VAR_58.env.http_host, VAR_100)
    VAR_224.update(
        cas_domains=[VAR_32],
        VAR_125=False,
        VAR_107=VAR_107,  # fixed dangling `cas_provider`
        cas_actions=dict(VAR_35='login',
                         VAR_294='validate',
                         servicevalidate='serviceValidate',
                         proxyvalidate='proxyValidate',
                         FUNC_50='logout'),
        cas_create_user=True,
        VAR_361={},
        actions_disabled=[],
        VAR_105=VAR_105,  # fixed dangling `controller`
        VAR_106=VAR_106,  # fixed dangling `function`
        login_url=VAR_250,
        logged_url=URL(VAR_105, VAR_106, VAR_11='profile'),
        download_url=URL(VAR_105, 'download'),
        VAR_103=(VAR_103 is True) and CLASS_0() or VAR_103,  # fixed dangling `mailer`
        on_failed_authorization=URL(VAR_105, VAR_106, VAR_11='not_authorized'),
        login_next=VAR_111,
        login_onvalidation=[],
        login_onaccept=[],
        login_onfail=[],
        login_methods=[self],
        login_form=self,
        VAR_354=VAR_111,
        logout_onlogout=None,
        register_next=VAR_111,
        register_onvalidation=[],
        register_onaccept=[],
        verify_email_next=VAR_250,
        verify_email_onaccept=[],
        profile_next=VAR_111,
        profile_onvalidation=[],
        profile_onaccept=[],
        retrieve_username_next=VAR_111,
        retrieve_password_next=VAR_111,
        request_reset_password_next=VAR_250,
        reset_password_next=VAR_111,
        change_password_next=VAR_111,
        change_password_onvalidation=[],
        change_password_onaccept=[],
        retrieve_password_onvalidation=[],
        request_reset_password_onvalidation=[],
        request_reset_password_onaccept=[],
        reset_password_onvalidation=[],
        reset_password_onaccept=[],
        VAR_104=VAR_104,  # fixed dangling `hmac_key`
        VAR_275=VAR_265.response.formstyle,
        label_separator=VAR_265.response.form_label_separator,
        VAR_431=[],
        two_factor_onvalidation=[],
        VAR_32=VAR_32,  # fixed dangling `host`
    )
    VAR_224.lock_keys = True
    VAR_251 = self.messages = Messages(VAR_265.T)
    VAR_251.update(CLASS_3.default_messages)
    VAR_251.update(ajax_failed_authentication=
                   DIV(H4('NOT AUTHORIZED'),
                       'Please ',
                       A('login',
                         _href=self.settings.login_url +
                         ('?_next=' + urllib_quote(VAR_265.request.env.http_web2py_component_location))
                         if VAR_265.request.env.http_web2py_component_location else ''),
                       ' VAR_37 view this VAR_231.',
                       _class='not-authorized alert alert-block'))
    VAR_251.lock_keys = True
    VAR_246 = VAR_265.response
    if VAR_65 and VAR_65.remember_me:
        VAR_246.session_cookie_expires = VAR_65.expiration
    if VAR_85:
        self.define_signature()
    else:
        self.signature = None
    # optional JWT support (only when keyword settings were supplied)
    self.jwt_handler = VAR_112 and CLASS_2(self, **VAR_112)
def FUNC_33(self):
    """Return the sanitized `_next` redirect target from the request, or None."""
    VAR_134 = VAR_265.request.vars._next
    if isinstance(VAR_134, (list, tuple)):
        # fixed: was `next[0]`, which indexed the builtin `next`
        VAR_134 = VAR_134[0]
    if VAR_134 and self.settings.prevent_open_redirect_attacks:
        return FUNC_5(VAR_134)
    return VAR_134 or None
def FUNC_34(self):
    # Accessor for the CAS table (name taken from settings.table_cas_name).
    return self.db[self.settings.table_cas_name]
def VAR_279(self):
    # Accessor for the token table (name taken from settings.table_token_name).
    return self.db[self.settings.table_token_name]
def FUNC_36(self, *VAR_9, **VAR_14):
    # Raise an HTTP response exception built from the given arguments.
    raise HTTP(*VAR_9, **VAR_14)
def __call__(self):
    """Dispatch the current request to the matching auth action.

    With no args, redirect to login; disabled actions 404.  Known action
    names are called as methods; 'cas' requests are routed to the CAS
    login/validate/logout handlers (v1/v2/v3 protocols).
    """
    VAR_58 = VAR_265.request
    VAR_11 = VAR_58.args
    if not VAR_11:
        redirect(self.url(VAR_11='login', VAR_98=VAR_58.vars))
    elif VAR_11[0] in self.settings.actions_disabled:
        raise HTTP(404)
    if VAR_11[0] in ('login', 'logout', 'register', 'verify_email',
                     'retrieve_username', 'retrieve_password',
                     'reset_password', 'request_reset_password',
                     'change_password', 'profile', 'groups',
                     'impersonate', 'not_authorized', 'confirm_registration',
                     'bulk_register', 'manage_tokens', 'jwt'):
        # impersonate takes the target user id as a second arg
        if len(VAR_58.args) >= 2 and VAR_11[0] == 'impersonate':
            return getattr(self, VAR_11[0])(VAR_58.args[1])
        else:
            return getattr(self, VAR_11[0])()
    elif VAR_11[0] == 'cas' and not self.settings.cas_provider:
        # acting as a CAS server for other applications
        if VAR_11(1) == self.settings.cas_actions['login']:
            return self.cas_login(VAR_138=2)
        elif VAR_11(1) == self.settings.cas_actions['validate']:
            return self.cas_validate(VAR_138=1)
        elif VAR_11(1) == self.settings.cas_actions['servicevalidate']:
            return self.cas_validate(VAR_138=2, VAR_139=False)
        elif VAR_11(1) == self.settings.cas_actions['proxyvalidate']:
            return self.cas_validate(VAR_138=2, VAR_139=True)
        elif (VAR_11(1) == 'p3'
              and VAR_11(2) == self.settings.cas_actions['servicevalidate']):
            return self.cas_validate(VAR_138=3, VAR_139=False)
        elif (VAR_11(1) == 'p3'
              and VAR_11(2) == self.settings.cas_actions['proxyvalidate']):
            return self.cas_validate(VAR_138=3, VAR_139=True)
        elif VAR_11(1) == self.settings.cas_actions['logout']:
            return self.logout(VAR_134=VAR_58.vars.service or VAR_3)
    else:
        raise HTTP(404)
def FUNC_37(self, VAR_113='Welcome', VAR_114=None,
            VAR_115=(' [ ', ' | ', ' ] '), VAR_116=VAR_3,
            VAR_117=VAR_3, VAR_118='default'):
    """Build the auth navigation bar in one of several modes
    ('default', 'asmenu', 'dropdown', 'bare'); returns self.bar.

    Fixed: the nofollow link helper wrote to a dangling name `b`
    (NameError) instead of its own kwargs dict.
    """
    VAR_252 = []  # Hold all VAR_344 VAR_252 in VAR_9 list
    self.bar = ''  # The final
    VAR_29 = VAR_265.T
    VAR_117 = [] if not VAR_117 else VAR_117
    if not VAR_114:
        VAR_114 = self.url(self.settings.function)
    VAR_58 = VAR_265.request
    if URL() == VAR_114:
        VAR_134 = ''
    else:
        VAR_134 = '?_next=' + urllib_quote(URL(VAR_11=VAR_58.args,
                                               VAR_98=VAR_58.get_vars))
    VAR_253 = lambda VAR_106: \
        '%VAR_280/%VAR_280%s' % (VAR_114, VAR_106, VAR_134 if VAR_117 is VAR_3 or VAR_106 in VAR_117 else '')
    if isinstance(VAR_113, str):
        VAR_113 = VAR_29(VAR_113)
    if VAR_113:
        VAR_113 = VAR_113.strip() + ' '

    def FUNC_154(*VAR_9, **VAR_14):
        # anchor with rel=nofollow; fixed: previously assigned into
        # dangling name `b` instead of VAR_14
        VAR_14['_rel'] = 'nofollow'
        return A(*VAR_9, **VAR_14)

    if self.user_id:  # User is logged in
        VAR_354 = self.settings.logout_next
        VAR_252.append({'name': VAR_29('Log Out'),
                        'href': '%VAR_280/FUNC_50?_next=%s' % (VAR_114, urllib_quote(VAR_354)),
                        'icon': 'icon-off'})
        if 'profile' not in self.settings.actions_disabled:
            VAR_252.append({'name': VAR_29('Profile'), 'href': VAR_253('profile'),
                            'icon': 'icon-user'})
        if 'change_password' not in self.settings.actions_disabled:
            VAR_252.append({'name': VAR_29('Password'),
                            'href': VAR_253('change_password'),
                            'icon': 'icon-lock'})
        if VAR_116 is VAR_3:
            VAR_116 = '%(first_name)s'
        if callable(VAR_116):
            VAR_116 = VAR_116(self.user)
        elif ((isinstance(VAR_116, str) or
               type(VAR_116).__name__ == 'lazyT') and
              re.search(r'%\(.+\)s', VAR_116)):
            VAR_116 = VAR_116 % self.user
        if not VAR_116:
            VAR_116 = ''
    else:  # User is not logged in
        VAR_252.append({'name': VAR_29('Log In'), 'href': VAR_253('login'),
                        'icon': 'icon-off'})
        if 'register' not in self.settings.actions_disabled:
            VAR_252.append({'name': VAR_29('Sign Up'), 'href': VAR_253('register'),
                            'icon': 'icon-user'})
        if 'request_reset_password' not in self.settings.actions_disabled:
            VAR_252.append({'name': VAR_29('Lost VAR_132?'),
                            'href': VAR_253('request_reset_password'),
                            'icon': 'icon-lock'})
        if self.settings.use_username and 'retrieve_username' not in self.settings.actions_disabled:
            VAR_252.append({'name': VAR_29('Forgot VAR_124?'),
                            'href': VAR_253('retrieve_username'),
                            'icon': 'icon-edit'})

    def VAR_344():  # For inclusion in MENU
        self.bar = [(VAR_252[0]['name'], False, VAR_252[0]['href'], [])]
        del VAR_252[0]
        for VAR_307 in VAR_252:
            self.bar[0][3].append((VAR_307['name'], False, VAR_307['href']))

    def FUNC_155():  # Default web2py scaffolding
        def FUNC_172(VAR_355): return VAR_355 + ' ' + VAR_355.replace('icon', 'glyphicon')
        self.bar = UL(LI(FUNC_154(I(_class=FUNC_172('icon ' + VAR_252[0]['icon'])),
                                  ' ' + VAR_252[0]['name'],
                                  _href=VAR_252[0]['href'])), _class='dropdown-menu')
        del VAR_252[0]
        for VAR_307 in VAR_252:
            self.bar.insert(-1, LI(FUNC_154(I(_class=FUNC_172('icon ' + VAR_307['icon'])),
                                            ' ' + VAR_307['name'],
                                            _href=VAR_307['href'])))
        self.bar.insert(-1, LI('', _class='divider'))
        if self.user_id:
            self.bar = LI(FUNC_154(VAR_113, VAR_116,
                                   _href='#', _class="dropdown-toggle",
                                   VAR_18={'toggle': 'dropdown'}),
                          self.bar, _class='dropdown')
        else:
            self.bar = LI(FUNC_154(VAR_29('Log In'),
                                   _href='#', _class="dropdown-toggle",
                                   VAR_18={'toggle': 'dropdown'}), self.bar,
                          _class='dropdown')

    def VAR_356():
        # expose only prefix/user and the href of each item
        VAR_356 = {'prefix': VAR_113, 'user': VAR_116 if self.user_id else None}
        for i in VAR_252:
            if i['name'] == VAR_29('Log In'):
                VAR_348 = 'login'
            elif i['name'] == VAR_29('Sign Up'):
                VAR_348 = 'register'
            elif i['name'] == VAR_29('Lost VAR_132?'):
                VAR_348 = 'request_reset_password'
            elif i['name'] == VAR_29('Forgot VAR_124?'):
                VAR_348 = 'retrieve_username'
            elif i['name'] == VAR_29('Log Out'):
                VAR_348 = 'logout'
            elif i['name'] == VAR_29('Profile'):
                VAR_348 = 'profile'
            elif i['name'] == VAR_29('Password'):
                VAR_348 = 'change_password'
            VAR_356[VAR_348] = i['href']
        self.bar = VAR_356

    VAR_63 = {'asmenu': VAR_344,
              'dropdown': FUNC_155,
              'bare': VAR_356
              }  # Define custom modes.
    if VAR_118 in VAR_63 and callable(VAR_63[VAR_118]):
        VAR_63[VAR_118]()
    else:
        VAR_357, VAR_358, VAR_359 = VAR_115
        if self.user_id:
            self.bar = SPAN(VAR_113, VAR_116, VAR_357,
                            FUNC_154(VAR_252[0]['name'],
                                     _href=VAR_252[0]['href']), VAR_359,
                            _class='auth_navbar')
        else:
            self.bar = SPAN(VAR_357, FUNC_154(VAR_252[0]['name'],
                                              _href=VAR_252[0]['href']), VAR_359,
                            _class='auth_navbar')
        for VAR_307 in VAR_252[1:]:
            self.bar.insert(-1, VAR_358)
            self.bar.insert(-1, FUNC_154(VAR_307['name'], _href=VAR_307['href']))
    return self.bar
def FUNC_38(self,
            VAR_119,
            VAR_120=None,
            VAR_121='%(VAR_8)s_archive',
            VAR_122='current_record',
            VAR_123=None):
    """Enable record versioning on every table in VAR_119 that has an 'id'
    and 'modified_on' field and no existing current-record field.

    Fixed: the body previously read dangling names (current_record_label,
    archive_db, current_record) instead of the parameters.
    """
    VAR_123 = VAR_123 or VAR_265.T(
        VAR_122.replace('_', ' ').title())
    for VAR_154 in VAR_119:
        VAR_360 = VAR_154.fields()
        if 'id' in VAR_360 and 'modified_on' in VAR_360 and VAR_122 not in VAR_360:
            VAR_154._enable_record_versioning(VAR_120=VAR_120,
                                              archive_name=VAR_121,
                                              VAR_122=VAR_122,
                                              VAR_123=VAR_123)
def FUNC_39(self, VAR_124=None, VAR_85=None, VAR_125=False,
            VAR_126=None, VAR_127=None):
    """Define the auth tables (delegating the core ones to the parent),
    plus the CAS and token tables when enabled; configure the CAS client
    login form when a cas_provider is set.  Returns self.

    Fixed dangling names: `fake_migrate` (twice), `actions`, `maps`.
    """
    VAR_102 = self.db
    if VAR_126 is None:
        VAR_126 = VAR_102._migrate
    if VAR_127 is None:
        VAR_127 = VAR_102._fake_migrate
    VAR_224 = self.settings
    VAR_224.enable_tokens = VAR_125
    VAR_254 = \
        super(CLASS_3, self).define_tables(VAR_124, VAR_85, VAR_126, VAR_127)._table_signature_list
    VAR_194 = VAR_265.request.now
    VAR_255 = 'reference %s' % VAR_224.table_user_name
    if VAR_224.cas_domains:
        if VAR_224.table_cas_name not in VAR_102.tables:
            VAR_102.define_table(
                VAR_224.table_cas_name,
                VAR_1('user_id', VAR_255, VAR_5=None,
                      VAR_62=self.messages.label_user_id),
                VAR_1('created_on', 'datetime', VAR_5=VAR_194),
                VAR_1('service', VAR_281=IS_URL()),
                VAR_1('ticket'),
                VAR_1('renew', 'boolean', VAR_5=False),
                *VAR_224.extra_fields.get(VAR_224.table_cas_name, []),
                **dict(
                    VAR_126=self._get_migrate(
                        VAR_224.table_cas_name, VAR_126),
                    VAR_127=VAR_127))
    if VAR_224.enable_tokens:
        VAR_361 = VAR_224.extra_fields.get(
            VAR_224.table_token_name, []) + VAR_254
        if VAR_224.table_token_name not in VAR_102.tables:
            VAR_102.define_table(
                VAR_224.table_token_name,
                VAR_1('user_id', VAR_255, VAR_5=None,
                      VAR_62=self.messages.label_user_id),
                VAR_1('expires_on', 'datetime', VAR_5=datetime.datetime(2999, 12, 31)),
                VAR_1('token', writable=False, VAR_5=web2py_uuid, unique=True),
                *VAR_361,
                **dict(VAR_126=self._get_migrate(VAR_224.table_token_name, VAR_126),
                       VAR_127=VAR_127))
    if not VAR_102._lazy_tables:
        VAR_224.table_user = VAR_102[VAR_224.table_user_name]
        VAR_224.table_group = VAR_102[VAR_224.table_group_name]
        VAR_224.table_membership = VAR_102[VAR_224.table_membership_name]
        VAR_224.table_permission = VAR_102[VAR_224.table_permission_name]
        VAR_224.table_event = VAR_102[VAR_224.table_event_name]
        if VAR_224.cas_domains:
            VAR_224.table_cas = VAR_102[VAR_224.table_cas_name]
    if VAR_224.cas_provider:  # THIS IS NOT LAZY
        # acting as a CAS client: local account actions are disabled
        VAR_224.actions_disabled = \
            ['profile', 'register', 'change_password',
             'request_reset_password', 'retrieve_username']
        from gluon.contrib.login_methods.cas_auth import CasAuth
        VAR_362 = VAR_224.cas_maps
        if not VAR_362:
            VAR_256 = self.table_user()
            VAR_362 = dict((VAR_149, lambda v, VAR_182=VAR_149: v.get(VAR_182, None)) for VAR_149 in
                           VAR_256.fields if VAR_149 != 'id'
                           and VAR_256[VAR_149].readable)
            VAR_362['registration_id'] = \
                lambda v, VAR_447=VAR_224.cas_provider: '%VAR_280/%s' % (VAR_447, v['user'])
        VAR_6 = [VAR_224.cas_actions['login'],
                 VAR_224.cas_actions['servicevalidate'],
                 VAR_224.cas_actions['logout']]
        VAR_224.login_form = CasAuth(
            casversion=2,
            urlbase=VAR_224.cas_provider,
            VAR_6=VAR_6,
            VAR_362=VAR_362)
    return self
def FUNC_40(self, VAR_128, VAR_129=['email'],
            VAR_35=True, VAR_130=True):
    """Look up the user matching VAR_128 (keyed on registration_id,
    username, email or the configured login userfield); update it when
    VAR_130 allows, create it when permitted, and optionally mark it as
    the logged-in user (VAR_35).  Returns the user row or None.

    Fixed: the registration_id backfill previously wrote into a dangling
    name `FUNC_119` instead of VAR_128.
    """
    VAR_256 = self.table_user()
    VAR_257 = self.settings.cas_create_user
    VAR_142 = None
    VAR_258 = []
    VAR_259 = ['registration_id', 'username', 'email']
    if self.settings.login_userfield:
        VAR_259.append(self.settings.login_userfield)
    for VAR_386 in VAR_259:
        if VAR_386 in VAR_256.fields() and \
                VAR_128.get(VAR_386, None):
            VAR_258.append(VAR_386)
            VAR_180 = VAR_128[VAR_386]
            VAR_142 = VAR_256(**{VAR_386: VAR_180})
            if VAR_142:
                break
    if not VAR_258:
        return None
    if 'registration_id' not in VAR_128:
        VAR_128['registration_id'] = VAR_128[VAR_258[0]]  # fixed dangling `FUNC_119`
    if 'registration_id' in VAR_258 \
            and VAR_142 \
            and VAR_142.registration_id \
            and ('registration_id' not in VAR_128 or VAR_142.registration_id != str(VAR_128['registration_id'])):
        VAR_142 = None  # THINK MORE ABOUT THIS? DO WE TRUST OPENID PROVIDER?
    if VAR_142:
        if not VAR_130:
            return None
        VAR_363 = dict(registration_id=VAR_128['registration_id'])
        for VAR_200 in VAR_129:
            if VAR_200 in VAR_128:
                VAR_363[VAR_200] = VAR_128[VAR_200]
        VAR_142.update_record(**VAR_363)
    elif VAR_258:
        if VAR_257 is False:
            self.logout(VAR_134=None, VAR_141=None, VAR_137=None)
            raise HTTP(403, "Forbidden. User need VAR_37 be created first.")
        if 'first_name' not in VAR_128 and 'first_name' in VAR_256.fields:
            VAR_449 = VAR_128.get('email', 'anonymous').split('@')[0]
            VAR_128['first_name'] = VAR_128.get('username', VAR_449)
        VAR_98 = VAR_256._filter_fields(VAR_128)
        VAR_144 = VAR_256.insert(**VAR_98)
        VAR_142 = VAR_256[VAR_144]
        if self.settings.create_user_groups:
            VAR_148 = self.add_group(self.settings.create_user_groups % VAR_142)
            self.add_membership(VAR_148, VAR_144)
        if self.settings.everybody_group_id:
            self.add_membership(self.settings.everybody_group_id, VAR_144)
        if VAR_35:
            self.user = VAR_142
        if self.settings.register_onaccept:
            VAR_27(self.settings.register_onaccept, Storage(VAR_98=VAR_142))
    return VAR_142
def VAR_260(self, VAR_131=False):
    """HTTP Basic authentication support.

    Returns a triple (basic_login_allowed, header_present, login_result);
    when VAR_131 (a realm or True/callable) is given, failures raise 401.
    """
    if not self.settings.allow_basic_login:
        return (False, False, False)
    VAR_260 = VAR_265.request.env.http_authorization
    if VAR_131:
        if callable(VAR_131):
            VAR_131 = VAR_131()
        elif isinstance(VAR_131, string_types):
            VAR_450 = to_unicode(VAR_131)
        elif VAR_131 is True:
            VAR_450 = '' + VAR_265.request.application
        # NOTE(review): when VAR_131 was callable, VAR_450 is never assigned
        # before being used below -- looks like a latent NameError; confirm.
        VAR_364 = HTTP(401, 'Not Authorized', **{'WWW-Authenticate': 'Basic VAR_77="' + VAR_450 + '"'})
    if not VAR_260 or not VAR_260[:6].lower() == 'basic ':
        if VAR_131:
            raise VAR_364
        return (True, False, False)
    # credentials are base64("user:password") after the 'Basic ' prefix
    (VAR_124, VAR_208, VAR_132) = base64.b64decode(VAR_260[6:]).partition(b':')
    VAR_261 = VAR_208 and self.login_bare(VAR_124, VAR_132)
    if not VAR_261 and VAR_131:
        raise VAR_364
    return (True, True, VAR_261)
def FUNC_42(self):
    # Collect the pieces needed by the *_bare login/register helpers.
    VAR_256 = self.table_user()
    # NOTE(review): the expression below looks garbled -- as written the
    # ternary condition is `self.settings.login_userfield or ('username'
    # in VAR_256.fields)`; confirm the intended userfield selection.
    VAR_262 = self.settings.login_userfield or ('username' \
        if self.settings.login_userfield or 'username' \
        in VAR_256.fields else 'email')
    VAR_263 = self.settings.password_field
    return Storage({'table_user': VAR_256,
                    'userfield': VAR_262,
                    'passfield': VAR_263})
def FUNC_43(self, VAR_124, VAR_132):
    """Programmatic (form-less) login; returns the user row or False."""
    VAR_224 = self._get_login_settings()
    VAR_142 = VAR_224.table_user(**{VAR_224.userfield: VAR_124})
    if VAR_142 and VAR_142.get(VAR_224.passfield, False):
        # run the password through the field validator (hashing) before compare
        VAR_132 = VAR_224.table_user[
            VAR_224.passfield].validate(VAR_132)[0]
        if ((VAR_142.registration_key is None or
             not VAR_142.registration_key.strip()) and
                VAR_132 == VAR_142[VAR_224.passfield]):
            self.login_user(VAR_142)
            return VAR_142
    else:
        # fall back to alternate login methods (e.g. LDAP) when configured
        for login_method in self.settings.login_methods:
            if login_method != self and login_method(VAR_124, VAR_132):
                self.user = VAR_142
                return VAR_142
    return False
def FUNC_44(self, **VAR_133):
    """Programmatic (form-less) registration; returns the user row or False."""
    VAR_224 = self._get_login_settings()
    if VAR_133.get(VAR_224.passfield):
        # validate (and hash) the supplied password
        VAR_133[VAR_224.passfield] = \
            VAR_224.table_user[VAR_224.passfield].validate(VAR_133[VAR_224.passfield], None)[0]
    if not VAR_133.get(VAR_224.userfield):
        raise ValueError('register_bare: VAR_262 not provided or invalid')
    VAR_142 = self.get_or_create_user(VAR_133, VAR_35=False, VAR_130=False,
                                      VAR_129=self.settings.update_fields)
    if not VAR_142:
        return False
    return VAR_142
def FUNC_45(self,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            VAR_138=2,
            ):
    """CAS server login endpoint: validate the requesting service, issue a
    service ticket for a logged-in user, otherwise fall through to the
    regular login flow.

    Fixed dangling names: `ticket` in the insert call and `onaccept` in
    the inner accept callback.
    """
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    VAR_140 = VAR_265.session
    VAR_102, VAR_154 = self.db, self.table_cas()
    VAR_140._cas_service = VAR_58.vars.service or VAR_140._cas_service
    if VAR_58.env.http_host not in self.settings.cas_domains or \
            not VAR_140._cas_service:
        raise HTTP(403, 'not authorized')

    def FUNC_157(VAR_264=False):
        # reuse an existing ticket for this service/user or mint a new one
        VAR_268 = VAR_154(VAR_365=VAR_140._cas_service, VAR_144=self.user.id)
        if VAR_268:
            VAR_266 = VAR_268.ticket
        else:
            VAR_266 = 'ST-' + web2py_uuid()
            VAR_154.insert(VAR_365=VAR_140._cas_service,
                           VAR_144=self.user.id,
                           VAR_266=VAR_266,  # fixed dangling `ticket`
                           created_on=VAR_58.now,
                           VAR_267=VAR_264)
        VAR_365 = VAR_140._cas_service
        VAR_366 = '&' if '?' in VAR_365 else '?'
        del VAR_140._cas_service
        if 'warn' in VAR_58.vars and not VAR_264:
            VAR_246.headers[
                'refresh'] = "5;URL=%s" % VAR_365 + VAR_366 + "ticket=" + VAR_266
            return A("Continue VAR_37 %s" % VAR_365,
                     _href=VAR_365 + VAR_366 + "ticket=" + VAR_266)
        else:
            redirect(VAR_365 + VAR_366 + "ticket=" + VAR_266)

    if self.is_logged_in() and 'renew' not in VAR_58.vars:
        return FUNC_157()
    elif not self.is_logged_in() and 'gateway' in VAR_58.vars:
        redirect(VAR_140._cas_service)

    def FUNC_158(VAR_7, VAR_136=VAR_136):  # fixed dangling `onaccept`
        if VAR_136 is not VAR_3:
            VAR_136(VAR_7)
        return FUNC_157(VAR_264=True)
    return self.login(VAR_134, VAR_135, FUNC_158, VAR_137)
def FUNC_46(self, VAR_138=2, VAR_139=False):
    """CAS server ticket-validation endpoint (protocol v1/v2/v3); always
    responds by raising HTTP(200) with the protocol-appropriate body.

    Fixed dangling names: `ticket` in the row lookup and `renew` in the
    renew-flag xor test.
    """
    VAR_58 = VAR_265.request
    VAR_102, VAR_154 = self.db, self.table_cas()
    VAR_265.response.headers['Content-Type'] = 'text'
    VAR_266 = VAR_58.vars.ticket
    VAR_267 = 'renew' in VAR_58.vars
    VAR_268 = VAR_154(VAR_266=VAR_266)  # fixed dangling `ticket`
    VAR_269 = False
    if VAR_268:
        VAR_262 = self.settings.login_userfield or 'username' \
            if 'username' in VAR_154.fields else 'email'
        # service tickets are single-use: delete on successful validation
        if VAR_266[0:3] == 'ST-' and \
                not ((VAR_268.renew and VAR_267) ^ VAR_267):  # fixed dangling `renew`
            VAR_142 = self.table_user()(VAR_268.user_id)
            VAR_268.delete_record()
            VAR_269 = True

    def FUNC_159(VAR_84):
        # wrap the body in a cas:serviceResponse XML envelope
        VAR_367 = to_native(TAG['cas:serviceResponse'](
            VAR_84, **{'_xmlns:cas': 'http://www.yale.edu/tp/cas'}).xml())
        return '<?VAR_405 VAR_138="1.0" VAR_44="UTF-8"?>\n' + VAR_367

    if VAR_269:
        if VAR_138 == 1:
            VAR_39 = 'yes\VAR_182%s' % VAR_142[VAR_262]
        elif VAR_138 == 3:
            VAR_124 = VAR_142.get('username', VAR_142[VAR_262])
            VAR_39 = FUNC_159(
                TAG['cas:authenticationSuccess'](
                    TAG['cas:user'](VAR_124),
                    TAG['cas:attributes'](
                        *[TAG['cas:' + VAR_178.name](VAR_142[VAR_178.name])
                          for VAR_178 in self.table_user()
                          if VAR_178.readable])))
        else:  # assume VAR_138 2
            VAR_124 = VAR_142.get('username', VAR_142[VAR_262])
            VAR_39 = FUNC_159(
                TAG['cas:authenticationSuccess'](
                    TAG['cas:user'](VAR_124),
                    *[TAG['cas:' + VAR_178.name](VAR_142[VAR_178.name])
                      for VAR_178 in self.table_user()
                      if VAR_178.readable]))
    else:
        if VAR_138 == 1:
            VAR_39 = 'no\n'
        elif VAR_268:
            VAR_39 = FUNC_159(TAG['cas:authenticationFailure']())
        else:
            VAR_39 = FUNC_159(
                TAG['cas:authenticationFailure'](
                    'Ticket %VAR_280 not recognized' % VAR_266,
                    _code='INVALID TICKET'))
    raise HTTP(200, VAR_39)
def FUNC_47(self, VAR_140):
    """Reset all two-factor authentication state on the session VAR_140,
    restoring the retry budget from the configured default."""
    for cleared in ('auth_two_factor_user', 'auth_two_factor'):
        setattr(VAR_140, cleared, None)
    VAR_140.auth_two_factor_enabled = False
    VAR_140.auth_two_factor_tries_left = self.settings.auth_two_factor_tries_left
def FUNC_48(self, VAR_134, VAR_140):
    # If already logged in, consume the stored _auth_next (when it matches
    # VAR_134) and redirect straight to VAR_134.
    if self.is_logged_in():
        if VAR_134 == VAR_140._auth_next:
            del VAR_140._auth_next
        redirect(VAR_134, client_side=self.settings.client_side)
def VAR_35(self,
VAR_134=VAR_3,
VAR_135=VAR_3,
VAR_136=VAR_3,
VAR_137=VAR_3,
):
VAR_224 = self.settings
VAR_58 = VAR_265.request
VAR_246 = VAR_265.response
VAR_140 = VAR_265.session
VAR_270 = self.get_vars_next()
if VAR_270:
VAR_140._auth_next = VAR_270
elif VAR_140._auth_next:
VAR_270 = VAR_140._auth_next
if VAR_134 is VAR_3:
VAR_134 = VAR_224.login_next
if callable(VAR_134):
VAR_134 = VAR_134()
VAR_368 = VAR_270
if VAR_368:
VAR_429 = VAR_368.split('://')
if VAR_429[0].lower() in ['http', 'https', 'ftp']:
VAR_451 = VAR_368.split('//', 1)[-1].split('/')[0]
if VAR_451 in VAR_224.cas_domains:
VAR_134 = VAR_368
else:
VAR_134 = VAR_368
self.when_is_logged_in_bypass_next_in_url(VAR_134=next, VAR_140=session)
VAR_256 = self.table_user()
if 'username' in VAR_256.fields or \
not VAR_224.login_email_validate:
VAR_369 = IS_NOT_EMPTY(VAR_61=self.messages.is_empty)
if not VAR_224.username_case_sensitive:
VAR_369 = [IS_LOWER(), VAR_369]
else:
VAR_369 = IS_EMAIL(VAR_61=self.messages.invalid_email)
if not VAR_224.email_case_sensitive:
VAR_369 = [IS_LOWER(), VAR_369]
VAR_263 = VAR_224.password_field
try:
VAR_256[VAR_263].requires[-1].min_length = 0
except:
pass
if VAR_135 is VAR_3:
VAR_135 = VAR_224.login_onvalidation
if VAR_136 is VAR_3:
VAR_136 = VAR_224.login_onaccept
if VAR_137 is VAR_3:
VAR_137 = self.messages['login_log']
VAR_271 = VAR_224.login_onfail
VAR_142 = None # VAR_5
VAR_272 = False
if self.settings.login_userfield:
VAR_124 = self.settings.login_userfield
else:
if 'username' in VAR_256.fields:
VAR_124 = 'username'
else:
VAR_124 = 'email'
if self.settings.multi_login:
VAR_272 = True
VAR_273 = VAR_256[VAR_124].requires
VAR_256[VAR_124].requires = VAR_369
if VAR_140.auth_two_factor_enabled and VAR_140.auth_two_factor_tries_left < 1:
VAR_142 = None
VAR_370 = False
self._reset_two_factor_auth(VAR_140)
redirect(VAR_134, client_side=VAR_224.client_side)
if VAR_140.auth_two_factor_user is None:
if VAR_224.remember_me_form:
VAR_361 = [
VAR_1('remember_me', 'boolean', VAR_5=False,
VAR_62=self.messages.label_remember_me)]
else:
VAR_361 = []
if VAR_224.login_form == self:
VAR_7 = SQLFORM(VAR_256,
VAR_133=[VAR_124, VAR_263],
hidden=dict(_next=VAR_134),
showid=VAR_224.showid,
submit_button=self.messages.login_button,
delete_label=self.messages.delete_label,
VAR_275=VAR_224.formstyle,
separator=VAR_224.label_separator,
VAR_361=extra_fields,
)
VAR_234 = VAR_224.login_captcha or \
(VAR_224.login_captcha is not False and VAR_224.captcha)
if VAR_234:
FUNC_6(VAR_7, VAR_234.label, VAR_234, VAR_234.comment,
VAR_224.formstyle, 'captcha__row')
VAR_370 = False
VAR_430 = self.messages.invalid_user
if VAR_7.accepts(VAR_58, VAR_140 if self.csrf_prevention else None,
VAR_172='login', dbio=False,
VAR_135=onvalidation,
hideerror=VAR_224.hideerror):
VAR_370 = True
VAR_452 = VAR_7.vars[VAR_124]
if VAR_272 and '@' in VAR_452:
VAR_142 = VAR_256(email=VAR_452)
else:
VAR_142 = VAR_256(**{VAR_124: VAR_452})
if VAR_142:
VAR_430 = self.messages.invalid_password
VAR_466 = VAR_142
if (VAR_466.registration_key or '').startswith('pending'):
VAR_246.flash = self.messages.registration_pending
return VAR_7
elif VAR_466.registration_key in ('disabled', 'blocked'):
VAR_246.flash = self.messages.login_disabled
return VAR_7
elif (VAR_466.registration_key is not None and VAR_466.registration_key.strip()):
VAR_246.flash = \
self.messages.registration_verifying
return VAR_7
VAR_142 = None
for login_method in VAR_224.login_methods:
if login_method != self and \
login_method(VAR_58.vars[VAR_124],
VAR_58.vars[VAR_263]):
if self not in VAR_224.login_methods:
VAR_7.vars[VAR_263] = None
VAR_142 = self.get_or_create_user(
VAR_7.vars, VAR_224.update_fields)
break
if not VAR_142:
if VAR_224.login_methods[0] == self:
if VAR_7.vars.get(VAR_263, '') == VAR_466[VAR_263]:
VAR_142 = VAR_466
else:
if not VAR_224.alternate_requires_registration:
for login_method in VAR_224.login_methods:
if login_method != self and \
login_method(VAR_58.vars[VAR_124],
VAR_58.vars[VAR_263]):
if self not in VAR_224.login_methods:
VAR_7.vars[VAR_263] = None
VAR_142 = self.get_or_create_user(
VAR_7.vars, VAR_224.update_fields)
break
if not VAR_142:
self.log_event(self.messages['login_failed_log'],
VAR_58.post_vars)
VAR_140.flash = VAR_430 if self.settings.login_specify_error else self.messages.invalid_login
VAR_27(VAR_271, None)
redirect(self.url(VAR_11=VAR_58.args, VAR_98=VAR_58.get_vars),client_side=VAR_224.client_side)
else: # use VAR_9 central authentication VAR_33
VAR_371 = VAR_224.login_form
VAR_372 = VAR_371.get_user()
if VAR_372:
cas_user[VAR_263] = None
VAR_142 = self.get_or_create_user(
VAR_256._filter_fields(VAR_372),
VAR_224.update_fields)
elif hasattr(VAR_371, 'login_form'):
return VAR_371.login_form()
else:
VAR_134 = self.url(VAR_224.function, VAR_11='login')
redirect(VAR_371.login_url(VAR_134),
client_side=VAR_224.client_side)
if VAR_142 and self.settings.auth_two_factor_enabled is True:
VAR_140.auth_two_factor_enabled = True
elif VAR_142 and self.settings.two_factor_authentication_group:
VAR_147 = self.settings.two_factor_authentication_group
VAR_140.auth_two_factor_enabled = self.has_membership(VAR_144=VAR_142.id, VAR_147=role)
if VAR_140.auth_two_factor_enabled:
VAR_7 = SQLFORM.factory(
VAR_1('authentication_code',
VAR_62=self.messages.label_two_factor,
VAR_93=True,
VAR_64=self.messages.two_factor_comment),
hidden=dict(_next=VAR_134),
VAR_275=VAR_224.formstyle,
separator=VAR_224.label_separator
)
VAR_370 = False
if VAR_140.auth_two_factor_user is None and VAR_142 is not None:
VAR_140.auth_two_factor_user = VAR_142 # store the validated VAR_142 and associate with this VAR_140
VAR_140.auth_two_factor = random.randint(100000, 999999)
VAR_140.auth_two_factor_tries_left = self.settings.auth_two_factor_tries_left
VAR_431 = self.settings.two_factor_methods
if not VAR_431:
self.settings.mailer.send(
VAR_37=VAR_142.email,
VAR_38=self.messages.retrieve_two_factor_code_subject,
VAR_39=self.messages.retrieve_two_factor_code.format(VAR_140.auth_two_factor))
else:
for two_factor_method in VAR_431:
try:
VAR_140.auth_two_factor = two_factor_method(VAR_142, VAR_140.auth_two_factor)
except:
pass
else:
break
if VAR_7.accepts(VAR_58, VAR_140 if self.csrf_prevention else None,
VAR_172='login', dbio=False,
VAR_135=onvalidation,
hideerror=VAR_224.hideerror):
VAR_370 = True
"""
The lists is executed after VAR_7 validation for each of the corresponding VAR_114.
For example, in your model:
In your models copy and paste:
VAR_65.settings.extra_fields['auth_user'] = [
VAR_1('motp_secret', 'password', length=512, VAR_5='', VAR_62='MOTP Secret'),
VAR_1('motp_pin', 'string', length=128, VAR_5='', VAR_62='MOTP PIN')]
OFFSET = 60 # Be sure is the same in your OTP Client
def _set_two_factor(VAR_142, auth_two_factor):
return None
def verify_otp(VAR_142, otp):
import .time
from hashlib import .md5
epoch_time = int(time.time())
time_start = int(str(epoch_time - OFFSET)[:-1])
time_end = int(str(epoch_time + OFFSET)[:-1])
for VAR_446 in range(time_start - 1, time_end + 1):
to_hash = str(VAR_446) + VAR_142.motp_secret + VAR_142.motp_pin
hash = md5(to_hash).hexdigest()[:6]
if otp == hash:
return hash
VAR_65.settings.auth_two_factor_enabled = True
VAR_65.messages.two_factor_comment = "Verify your OTP Client for the VAR_311."
VAR_65.settings.two_factor_methods = [lambda VAR_142,
auth_two_factor: _set_two_factor(VAR_142, auth_two_factor)]
VAR_65.settings.two_factor_onvalidation = [lambda VAR_142, otp: verify_otp(VAR_142, otp)]
"""
if self.settings.two_factor_onvalidation:
for two_factor_onvalidation in self.settings.two_factor_onvalidation:
try:
VAR_140.auth_two_factor = \
two_factor_onvalidation(VAR_140.auth_two_factor_user, VAR_7.vars['authentication_code'])
except:
pass
else:
break
if VAR_7.vars['authentication_code'] == str(VAR_140.auth_two_factor):
if VAR_142 is None or VAR_142 == VAR_140.auth_two_factor_user:
VAR_142 = VAR_140.auth_two_factor_user
elif VAR_142 != VAR_140.auth_two_factor_user:
VAR_142 = None
self._reset_two_factor_auth(VAR_140)
else:
VAR_140.auth_two_factor_tries_left -= 1
if VAR_140.auth_two_factor_enabled and VAR_140.auth_two_factor_tries_left < 1:
VAR_142 = None
VAR_370 = False
self._reset_two_factor_auth(VAR_140)
redirect(VAR_134, client_side=VAR_224.client_side)
VAR_246.flash = self.messages.invalid_two_factor_code.format(VAR_140.auth_two_factor_tries_left)
return VAR_7
else:
return VAR_7
if VAR_142:
VAR_142 = Row(VAR_256._filter_fields(VAR_142, VAR_216=True))
self.login_user(VAR_142)
VAR_140.auth.expiration = \
VAR_58.post_vars.remember_me and \
VAR_224.long_expiration or \
VAR_224.expiration
VAR_140.auth.remember_me = 'remember_me' in VAR_58.post_vars
self.log_event(VAR_137, VAR_142)
VAR_140.flash = self.messages.logged_in
if VAR_224.login_form == self:
if VAR_370:
VAR_27(VAR_136, VAR_7)
if VAR_134 == VAR_140._auth_next:
VAR_140._auth_next = None
VAR_134 = FUNC_4(VAR_134, VAR_7)
redirect(VAR_134, client_side=VAR_224.client_side)
VAR_256[VAR_124].requires = VAR_273
return VAR_7
elif VAR_142:
VAR_27(VAR_136, None)
if VAR_134 == VAR_140._auth_next:
del VAR_140._auth_next
redirect(VAR_134, client_side=VAR_224.client_side)
def FUNC_50(self, VAR_134=VAR_3, VAR_141=VAR_3, VAR_137=VAR_3):
    # Log the current user out: reset two-factor state, run the on-logout
    # callback, log the event, delegate to an alternate login_form (e.g. CAS)
    # if one is configured, clear session auth, optionally renew the session,
    # then redirect to VAR_134 ("next") if it is not None.
    self._reset_two_factor_auth(VAR_265.session)
    if VAR_134 is VAR_3:
        VAR_134 = self.get_vars_next() or self.settings.logout_next
    if VAR_141 is VAR_3:
        VAR_141 = self.settings.logout_onlogout
    if VAR_141:
        onlogout(self.user)  # NOTE(review): `onlogout` is undefined here; presumably should call VAR_141 (obfuscation artifact) — verify against upstream
    if VAR_137 is VAR_3:
        VAR_137 = self.messages['logout_log']
    if self.user:
        self.log_event(VAR_137, self.user)
    if self.settings.login_form != self:
        VAR_371 = self.settings.login_form
        VAR_372 = VAR_371.get_user()
        if VAR_372:
            VAR_134 = VAR_371.logout_url(VAR_134)
    VAR_265.session.auth = None
    self.user = None
    if self.settings.renew_session_onlogout:
        VAR_265.session.renew(clear_session=not self.settings.keep_session_onlogout)
    VAR_265.session.flash = self.messages.logged_out
    if VAR_134 is not None:
        redirect(VAR_134)
def FUNC_51(self):
    # Bare logout: no redirect, no on-logout callback, no event logging.
    self.logout(VAR_134=None, VAR_141=None, VAR_137=None)
def FUNC_52(self,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            ):
    # User registration form. Builds an SQLFORM over the user table,
    # enforces uniqueness of the login field, optionally adds a password
    # confirmation field and captcha, and on acceptance creates groups,
    # sends a verification email, and/or logs the new user in, depending
    # on settings. Parameters are "next" URL, onvalidation, onaccept, log
    # message; VAR_3 is the "use default from settings" sentinel.
    VAR_256 = self.table_user()
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    VAR_140 = VAR_265.session
    if self.is_logged_in():
        # Already authenticated users cannot register again.
        redirect(self.settings.logged_url,
                 client_side=self.settings.client_side)
    if VAR_134 is VAR_3:
        VAR_134 = self.get_vars_next() or self.settings.register_next
    if VAR_135 is VAR_3:
        VAR_135 = self.settings.register_onvalidation
    if VAR_136 is VAR_3:
        VAR_136 = self.settings.register_onaccept
    if VAR_137 is VAR_3:
        VAR_137 = self.messages['register_log']
    VAR_256 = self.table_user()
    # Pick the login field: explicit setting, else 'username' if present, else 'email'.
    if self.settings.login_userfield:
        VAR_124 = self.settings.login_userfield
    elif 'username' in VAR_256.fields:
        VAR_124 = 'username'
    else:
        VAR_124 = 'email'
    # Ensure an IS_NOT_IN_DB validator guards the login field's uniqueness,
    # without duplicating one that is already installed.
    VAR_274 = IS_NOT_IN_DB(self.db, VAR_256[VAR_124])
    if not VAR_256[VAR_124].requires:
        VAR_256[VAR_124].requires = VAR_274
    elif isinstance(VAR_256[VAR_124].requires, (list, tuple)):
        if not any([isinstance(validator, IS_NOT_IN_DB) for validator in
                    VAR_256[VAR_124].requires]):
            if isinstance(VAR_256[VAR_124].requires, list):
                VAR_256[VAR_124].requires.append(VAR_274)
            else:
                VAR_256[VAR_124].requires += (VAR_274, )
    elif not isinstance(VAR_256[VAR_124].requires, IS_NOT_IN_DB):
        VAR_256[VAR_124].requires = [VAR_256[VAR_124].requires,
                                     VAR_274]
    VAR_263 = self.settings.password_field
    VAR_275 = self.settings.formstyle
    try:  # Make sure we have our original minimum length as other VAR_65 forms change it
        VAR_256[VAR_263].requires[-1].min_length = self.settings.password_min_length
    except:
        pass
    if self.settings.register_verify_password:
        # Insert a "confirm password" extra field right after the password field.
        if self.settings.register_fields is None:
            self.settings.register_fields = [VAR_10.name for VAR_10 in VAR_256 if VAR_10.writable and not VAR_10.compute]
        VAR_348 = self.settings.register_fields.index(VAR_263)
        self.settings.register_fields.insert(VAR_348 + 1, "password_two")
        VAR_361 = [
            VAR_1("password_two", "password",
                  VAR_281=IS_EQUAL_TO(VAR_58.post_vars.get(VAR_263, None),
                                      VAR_61=self.messages.mismatched_password),
                  VAR_62=VAR_265.T("Confirm Password"))]
    else:
        VAR_361 = []
    VAR_7 = SQLFORM(VAR_256,
                    VAR_133=self.settings.register_fields,
                    hidden=dict(_next=VAR_134),
                    showid=self.settings.showid,
                    submit_button=self.messages.register_button,
                    delete_label=self.messages.delete_label,
                    VAR_275=formstyle,  # NOTE(review): `formstyle` undefined; local is VAR_275 (obfuscation artifact)
                    separator=self.settings.label_separator,
                    VAR_361=extra_fields  # NOTE(review): `extra_fields` undefined; local is VAR_361 (obfuscation artifact)
                    )
    VAR_234 = self.settings.register_captcha or self.settings.captcha
    if VAR_234:
        FUNC_6(VAR_7, VAR_234.label, VAR_234,
               captcha.comment, self.settings.formstyle, 'captcha__row')  # NOTE(review): `captcha` undefined; local is VAR_234 — verify
    if self.settings.pre_registration_div:
        FUNC_6(VAR_7, '',
               DIV(VAR_17="pre-reg", *self.settings.pre_registration_div),
               '', VAR_275, '')
    # Generate the registration key; a 'pending-' prefix marks accounts
    # awaiting admin approval.
    VAR_200 = web2py_uuid()
    if self.settings.registration_requires_approval:
        VAR_200 = 'pending-' + VAR_200
    VAR_256.registration_key.default = VAR_200
    if VAR_7.accepts(VAR_58, VAR_140 if self.csrf_prevention else None,
                     VAR_172='register',
                     VAR_135=onvalidation,  # NOTE(review): `onvalidation` undefined; local is VAR_135 (obfuscation artifact)
                     hideerror=self.settings.hideerror):
        VAR_373 = self.messages.group_description % VAR_7.vars
        if self.settings.create_user_groups:
            VAR_148 = self.add_group(self.settings.create_user_groups % VAR_7.vars, VAR_373)
            self.add_membership(VAR_148, VAR_7.vars.id)
        if self.settings.everybody_group_id:
            self.add_membership(self.settings.everybody_group_id, VAR_7.vars.id)
        if self.settings.registration_requires_verification:
            VAR_278 = self.url(
                self.settings.function, VAR_11=('verify_email', VAR_200), VAR_99=True)
            VAR_28 = dict(VAR_7.vars)
            VAR_28.update(dict(VAR_200=key, VAR_278=FUNC_173, VAR_124=VAR_7.vars[VAR_124],  # NOTE(review): `key`/`FUNC_173` undefined; presumably VAR_200/VAR_278 — verify
                               firstname=VAR_7.vars['firstname'],
                               lastname=VAR_7.vars['lastname']))
            if not (self.settings.mailer and self.settings.mailer.send(
                    VAR_37=VAR_7.vars.email,
                    VAR_38=self.messages.verify_email_subject,
                    VAR_39=self.messages.verify_email % VAR_28)):
                # Email failed: undo the insert so no unverifiable account remains.
                self.db.rollback()
                VAR_246.flash = self.messages.unable_send_email
                return VAR_7
            VAR_140.flash = self.messages.email_sent
        if self.settings.registration_requires_approval and \
           not self.settings.registration_requires_verification:
            VAR_256[VAR_7.vars.id] = dict(registration_key='pending')
            VAR_140.flash = self.messages.registration_pending
        elif (not self.settings.registration_requires_verification or self.settings.login_after_registration):
            if not self.settings.registration_requires_verification:
                VAR_256[VAR_7.vars.id] = dict(registration_key='')
            VAR_140.flash = self.messages.registration_successful
            VAR_142 = VAR_256(**{VAR_124: VAR_7.vars[VAR_124]})
            self.login_user(VAR_142)
            VAR_140.flash = self.messages.logged_in
        self.log_event(VAR_137, VAR_7.vars)
        VAR_27(VAR_136, VAR_7)
        if not VAR_134:
            VAR_134 = self.url(VAR_11=VAR_58.args)
        else:
            VAR_134 = FUNC_4(VAR_134, VAR_7)
        redirect(VAR_134, client_side=self.settings.client_side)
    return VAR_7
def FUNC_53(self,
            VAR_134=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            ):
    # Email-verification action: look up the user by the registration key in
    # the last URL arg; mark the account verified (or 'pending' if admin
    # approval is also required), log the event, and redirect.
    VAR_200 = FUNC_0(-1)  # key taken from the request (last URL arg) — see FUNC_0
    VAR_256 = self.table_user()
    VAR_142 = VAR_256(registration_key=VAR_200)
    if not VAR_142:
        redirect(self.settings.login_url)
    if self.settings.registration_requires_approval:
        VAR_142.update_record(registration_key='pending')
        VAR_265.session.flash = self.messages.registration_pending
    else:
        VAR_142.update_record(registration_key='')
        VAR_265.session.flash = self.messages.email_verified
    # Keep the in-session copy of the user in sync with the new key.
    if VAR_265.session.auth and VAR_265.session.auth.user:
        VAR_265.session.auth.user.registration_key = VAR_142.registration_key
    if VAR_137 is VAR_3:
        VAR_137 = self.messages['verify_email_log']
    if VAR_134 is VAR_3:
        VAR_134 = self.settings.verify_email_next
    if VAR_136 is VAR_3:
        VAR_136 = self.settings.verify_email_onaccept
    self.log_event(VAR_137, VAR_142)
    VAR_27(VAR_136, VAR_142)
    redirect(VAR_134)
def FUNC_54(self,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            ):
    # "Retrieve username" form: asks for an email address and emails back all
    # usernames registered under it. 404s if the user table has no username
    # field; disabled when no mailer is configured.
    VAR_256 = self.table_user()
    if 'username' not in VAR_256.fields:
        raise HTTP(404)
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    VAR_140 = VAR_265.session
    VAR_234 = self.settings.retrieve_username_captcha or \
        (self.settings.retrieve_username_captcha is not False and self.settings.captcha)
    if not self.settings.mailer:
        VAR_246.flash = self.messages.function_disabled
        return ''
    if VAR_134 is VAR_3:
        VAR_134 = self.get_vars_next() or self.settings.retrieve_username_next
    if VAR_135 is VAR_3:
        VAR_135 = self.settings.retrieve_username_onvalidation
    if VAR_136 is VAR_3:
        VAR_136 = self.settings.retrieve_username_onaccept
    if VAR_137 is VAR_3:
        VAR_137 = self.messages['retrieve_username_log']
    # Temporarily replace the email validators with a DB-membership check;
    # restored at the end of the function.
    VAR_273 = VAR_256.email.requires
    VAR_256.email.requires = [IS_IN_DB(self.db, VAR_256.email,
                                       VAR_61=self.messages.invalid_email)]
    VAR_7 = SQLFORM(VAR_256,
                    VAR_133=['email'],
                    hidden=dict(_next=VAR_134),
                    showid=self.settings.showid,
                    submit_button=self.messages.submit_button,
                    delete_label=self.messages.delete_label,
                    VAR_275=self.settings.formstyle,
                    separator=self.settings.label_separator
                    )
    if VAR_234:
        FUNC_6(VAR_7, VAR_234.label, VAR_234,
               captcha.comment, self.settings.formstyle, 'captcha__row')  # NOTE(review): `captcha` undefined; local is VAR_234 — verify
    if VAR_7.accepts(VAR_58, VAR_140 if self.csrf_prevention else None,
                     VAR_172='retrieve_username', dbio=False,
                     VAR_135=onvalidation, hideerror=self.settings.hideerror):  # NOTE(review): `onvalidation` undefined; local is VAR_135
        VAR_374 = VAR_256._db(VAR_256.email == VAR_7.vars.email).select()
        if not VAR_374:
            VAR_265.session.flash = \
                self.messages.invalid_email
            redirect(self.url(VAR_11=VAR_58.args))
        VAR_124 = ', '.join(u.username for u in VAR_374)
        self.settings.mailer.send(VAR_37=VAR_7.vars.email,
                                  VAR_38=self.messages.retrieve_username_subject,
                                  VAR_39=self.messages.retrieve_username % dict(VAR_124=username))  # NOTE(review): `username` undefined; local is VAR_124
        VAR_140.flash = self.messages.email_sent
        for VAR_142 in VAR_374:
            self.log_event(VAR_137, VAR_142)
        VAR_27(VAR_136, VAR_7)
        if not VAR_134:
            VAR_134 = self.url(VAR_11=VAR_58.args)
        else:
            VAR_134 = FUNC_4(VAR_134, VAR_7)
        redirect(VAR_134)
    VAR_256.email.requires = VAR_273
    return VAR_7
def FUNC_55(self):
    # Generate a 12-character random password: three rounds of one lowercase,
    # one uppercase, one digit and one symbol each, then shuffled.
    # NOTE(review): `random` is not cryptographically secure; the stdlib
    # `secrets` module would be preferable for password generation — confirm
    # against upstream before changing.
    import .string  # NOTE(review): `import .string` is a SyntaxError; obfuscation artifact of `import string`
    import .random  # NOTE(review): likewise, artifact of `import random`
    VAR_132 = ''
    VAR_276 = r'!#$*'
    for i in range(0, 3):
        VAR_132 += random.choice(VAR_82.ascii_lowercase)  # NOTE(review): VAR_82 is presumably the `string` module — verify
        VAR_132 += random.choice(VAR_82.ascii_uppercase)
        VAR_132 += random.choice(VAR_82.digits)
        VAR_132 += random.choice(VAR_276)
    return ''.join(random.sample(VAR_132, len(VAR_132)))
def FUNC_56(self,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            ):
    # Deprecated "retrieve password" flow: generates a NEW random password,
    # stores its hash on the user record, and emails the plaintext to the
    # user. Refuses pending/disabled/blocked accounts. Requires a mailer.
    VAR_256 = self.table_user()
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    VAR_140 = VAR_265.session
    if not self.settings.mailer:
        VAR_246.flash = self.messages.function_disabled
        return ''
    if VAR_134 is VAR_3:
        VAR_134 = self.get_vars_next() or self.settings.retrieve_password_next
    if VAR_135 is VAR_3:
        VAR_135 = self.settings.retrieve_password_onvalidation
    if VAR_136 is VAR_3:
        VAR_136 = self.settings.retrieve_password_onaccept
    if VAR_137 is VAR_3:
        VAR_137 = self.messages['retrieve_password_log']
    # Swap in a DB-membership validator for email; restored before return.
    VAR_273 = VAR_256.email.requires
    VAR_256.email.requires = [IS_IN_DB(self.db, VAR_256.email,
                                       VAR_61=self.messages.invalid_email)]
    VAR_7 = SQLFORM(VAR_256,
                    VAR_133=['email'],
                    hidden=dict(_next=VAR_134),
                    showid=self.settings.showid,
                    submit_button=self.messages.submit_button,
                    delete_label=self.messages.delete_label,
                    VAR_275=self.settings.formstyle,
                    separator=self.settings.label_separator
                    )
    if VAR_7.accepts(VAR_58, VAR_140 if self.csrf_prevention else None,
                     VAR_172='retrieve_password', dbio=False,
                     VAR_135=onvalidation, hideerror=self.settings.hideerror):  # NOTE(review): `onvalidation` undefined; local is VAR_135
        VAR_142 = VAR_256(email=VAR_7.vars.email)
        if not VAR_142:
            VAR_265.session.flash = \
                self.messages.invalid_email
            redirect(self.url(VAR_11=VAR_58.args))
        VAR_200 = VAR_142.registration_key
        if VAR_200 in ('pending', 'disabled', 'blocked') or (VAR_200 or '').startswith('pending'):
            VAR_265.session.flash = \
                self.messages.registration_pending
            redirect(self.url(VAR_11=VAR_58.args))
        VAR_132 = self.random_password()
        VAR_263 = self.settings.password_field
        VAR_28 = {
            # Run the new password through the field validators (CRYPT) so
            # only the hash is stored.
            VAR_263: str(VAR_256[VAR_263].validate(VAR_132)[0]),
            'registration_key': ''
        }
        VAR_142.update_record(**VAR_28)
        if self.settings.mailer and \
           self.settings.mailer.send(VAR_37=VAR_7.vars.email,
                                     VAR_38=self.messages.retrieve_password_subject,
                                     VAR_39=self.messages.retrieve_password % dict(VAR_132=password)):  # NOTE(review): `password` undefined; local is VAR_132
            VAR_140.flash = self.messages.email_sent
        else:
            VAR_140.flash = self.messages.unable_send_email
        self.log_event(VAR_137, VAR_142)
        VAR_27(VAR_136, VAR_7)
        if not VAR_134:
            VAR_134 = self.url(VAR_11=VAR_58.args)
        else:
            VAR_134 = FUNC_4(VAR_134, VAR_7)
        redirect(VAR_134)
    VAR_256.email.requires = VAR_273
    return VAR_7
def FUNC_57(self,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            ):
    # Confirm-registration action reached from an emailed key. Validates the
    # key (timestamp prefix must be < 24h old), then shows a form to set the
    # user's name and password, clears the registration/reset keys, and
    # optionally logs the user in.
    VAR_256 = self.table_user()
    VAR_58 = VAR_265.request
    VAR_140 = VAR_265.session
    if VAR_134 is VAR_3:
        VAR_134 = self.get_vars_next() or self.settings.reset_password_next
    if self.settings.prevent_password_reset_attacks:
        # Stash the key in the session and redirect to a key-free URL so the
        # key never appears in referer headers / logs.
        VAR_200 = VAR_58.vars.key
        if not VAR_200 and len(VAR_58.args) > 1:
            VAR_200 = VAR_58.args[-1]
        if VAR_200:
            VAR_140._reset_password_key = VAR_200
            if VAR_134:
                VAR_453 = {'_next': VAR_134}
            else:
                VAR_453 = {}
            redirect(self.url(VAR_11='confirm_registration',
                              VAR_98=VAR_453))
        else:
            VAR_200 = VAR_140._reset_password_key
    else:
        VAR_200 = VAR_58.vars.key or FUNC_0(-1)
    try:
        # Key format is '<unix-timestamp>-<uuid>'; expire after 24 hours.
        VAR_375 = int(VAR_200.split('-')[0])
        if time.time() - VAR_375 > 60 * 60 * 24:
            raise Exception
        VAR_142 = VAR_256(VAR_277=VAR_200)
        if not VAR_142:
            raise Exception
    except Exception as e:
        VAR_140.flash = self.messages.invalid_reset_password
        redirect(VAR_134, client_side=self.settings.client_side)
    VAR_263 = self.settings.password_field
    VAR_7 = SQLFORM.factory(
        VAR_1('first_name',
              VAR_62='First Name',
              VAR_93=True),
        VAR_1('last_name',
              VAR_62='Last Name',
              VAR_93=True),
        VAR_1('new_password', 'password',
              VAR_62=self.messages.new_password,
              VAR_281=self.table_user()[VAR_263].requires),
        VAR_1('new_password2', 'password',
              VAR_62=self.messages.verify_password,
              VAR_281=[IS_EXPR('value==%s' % repr(VAR_58.vars.new_password),
                               self.messages.mismatched_password)]),
        submit_button='Confirm Registration',
        hidden=dict(_next=VAR_134),
        VAR_275=self.settings.formstyle,
        separator=self.settings.label_separator
    )
    if VAR_7.process().accepted:
        VAR_142.update_record(
            **{VAR_263: str(VAR_7.vars.new_password),
               'first_name': str(VAR_7.vars.first_name),
               'last_name': str(VAR_7.vars.last_name),
               'registration_key': '',   # account is now active
               'reset_password_key': ''})
        VAR_140.flash = self.messages.password_changed
        if self.settings.login_after_password_change:
            self.login_user(VAR_142)
        redirect(VAR_134, client_side=self.settings.client_side)
    return VAR_7
def FUNC_58(self, VAR_38, VAR_84, VAR_142):
    # Email a confirm-registration link to VAR_142. VAR_38 is the subject
    # template, VAR_84 the body template; both are %-formatted with the user
    # fields plus the key/link/site. Returns True on successful send.
    VAR_277 = str(int(time.time())) + '-' + web2py_uuid()  # timestamped key for 24h expiry
    VAR_278 = self.url(self.settings.function,
                       VAR_11=('confirm_registration',), VAR_98={'key': VAR_277},
                       VAR_99=True)
    VAR_28 = dict(VAR_142)
    VAR_28.update(dict(VAR_200=VAR_277, VAR_278=FUNC_173, site=VAR_265.request.env.http_host))  # NOTE(review): `FUNC_173` undefined; presumably VAR_278 (obfuscation artifact)
    if self.settings.mailer and self.settings.mailer.send(
            VAR_37=VAR_142.email,
            VAR_38=subject % VAR_28,  # NOTE(review): `subject` undefined; parameter is VAR_38
            VAR_39=VAR_84 % VAR_28):
        VAR_142.update_record(VAR_277=reset_password_key)  # NOTE(review): `reset_password_key` undefined; local is VAR_277
        return True
    return False
def FUNC_59(self, VAR_143=100):
    # Bulk-invite form: extracts up to VAR_143 email addresses from a text
    # box, registers each unknown address bare, emails each an invitation,
    # and reports sent/failed/already-existing addresses.
    if not self.user:
        redirect(self.settings.login_url)
    if not self.settings.bulk_register_enabled:
        return HTTP(404)
    VAR_7 = SQLFORM.factory(
        VAR_1('subject', 'string', VAR_5=self.messages.bulk_invite_subject, VAR_281=IS_NOT_EMPTY()),
        VAR_1('emails', 'text', VAR_281=IS_NOT_EMPTY()),
        VAR_1('message', 'text', VAR_5=self.messages.bulk_invite_body, VAR_281=IS_NOT_EMPTY()),
        VAR_275=self.settings.formstyle)
    if VAR_7.process().accepted:
        # NOTE(review): the regex literal contains '\VAR_280' — an obfuscation
        # artifact of '\s' in the original email-matching pattern; verify.
        VAR_376 = re.compile('[^\VAR_280\'"@<>,;:]+\@[^\VAR_280\'"@<>,;:]+').findall(VAR_7.vars.emails)
        VAR_377 = []  # sent
        VAR_378 = []  # failed
        VAR_379 = []  # already registered
        for email in VAR_376[:VAR_143]:
            if self.table_user()(email=email):
                VAR_379.append(email)
            else:
                VAR_142 = self.register_bare(email=email)
                if self.email_registration(VAR_7.vars.subject, VAR_7.vars.message, VAR_142):
                    VAR_377.append(email)
                else:
                    VAR_378.append(email)
        # Addresses beyond the cap are reported as failed.
        VAR_378 += VAR_376[VAR_143:]
        VAR_7 = DIV(H4('Emails sent'), UL(*[A(VAR_31, _href='mailto:' + VAR_31) for VAR_31 in VAR_377]),
                    H4('Emails failed'), UL(*[A(VAR_31, _href='mailto:' + VAR_31) for VAR_31 in VAR_378]),
                    H4('Emails existing'), UL(*[A(VAR_31, _href='mailto:' + VAR_31) for VAR_31 in VAR_379]))
    return VAR_7
def FUNC_60(self):
    # Self-service API-token management grid for the logged-in user; token
    # values are hidden while creating a new row.
    if not self.user:
        redirect(self.settings.login_url)
    VAR_279 = self.table_token()
    VAR_279.user_id.writable = False
    VAR_279.user_id.default = self.user.id
    VAR_279.token.writable = False
    if VAR_265.request.args(1) == 'new':
        VAR_279.token.readable = False
    VAR_7 = SQLFORM.grid(VAR_279, VAR_11=['manage_tokens'])
    return VAR_7
def FUNC_61(self,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            ):
    # Reset-password action reached from an emailed key. Validates the key
    # (timestamp < 24h, matching user, account not pending/blocked), then
    # shows a new-password form and clears both keys on success.
    VAR_256 = self.table_user()
    VAR_58 = VAR_265.request
    VAR_140 = VAR_265.session
    if VAR_134 is VAR_3:
        VAR_134 = self.get_vars_next() or self.settings.reset_password_next
    if self.settings.prevent_password_reset_attacks:
        # Move the key into the session and redirect to a key-free URL.
        VAR_200 = VAR_58.vars.key
        if VAR_200:
            VAR_140._reset_password_key = VAR_200
            redirect(self.url(VAR_11='reset_password'))
        else:
            VAR_200 = VAR_140._reset_password_key
    else:
        VAR_200 = VAR_58.vars.key
    try:
        # Key format '<unix-timestamp>-<uuid>'; expires after 24 hours.
        VAR_375 = int(VAR_200.split('-')[0])
        if time.time() - VAR_375 > 60 * 60 * 24:
            raise Exception
        VAR_142 = VAR_256(VAR_277=VAR_200)
        if not VAR_142:
            raise Exception
    except Exception:
        VAR_140.flash = self.messages.invalid_reset_password
        redirect(VAR_134, client_side=self.settings.client_side)
    VAR_200 = VAR_142.registration_key
    if VAR_200 in ('pending', 'disabled', 'blocked') or (VAR_200 or '').startswith('pending'):
        VAR_140.flash = self.messages.registration_pending
        redirect(VAR_134, client_side=self.settings.client_side)
    if VAR_135 is VAR_3:
        VAR_135 = self.settings.reset_password_onvalidation
    if VAR_136 is VAR_3:
        VAR_136 = self.settings.reset_password_onaccept
    VAR_263 = self.settings.password_field
    VAR_7 = SQLFORM.factory(
        VAR_1('new_password', 'password',
              VAR_62=self.messages.new_password,
              VAR_281=self.table_user()[VAR_263].requires),
        VAR_1('new_password2', 'password',
              VAR_62=self.messages.verify_password,
              VAR_281=[IS_EXPR('value==%s' % repr(VAR_58.vars.new_password),
                               self.messages.mismatched_password)]),
        submit_button=self.messages.password_reset_button,
        hidden=dict(_next=VAR_134),
        VAR_275=self.settings.formstyle,
        separator=self.settings.label_separator
    )
    if VAR_7.accepts(VAR_58, VAR_140, VAR_135=onvalidation,  # NOTE(review): `onvalidation` undefined; local is VAR_135
                     hideerror=self.settings.hideerror):
        VAR_142.update_record(
            **{VAR_263: str(VAR_7.vars.new_password),
               'registration_key': '',
               'reset_password_key': ''})
        VAR_140.flash = self.messages.password_changed
        if self.settings.login_after_password_change:
            self.login_user(VAR_142)
        VAR_27(VAR_136, VAR_7)
        redirect(VAR_134, client_side=self.settings.client_side)
    return VAR_7
def FUNC_62(self,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            ):
    # Request-password-reset form: user enters email (or username), and a
    # reset link is emailed via email_reset_password(). Requires a mailer.
    VAR_256 = self.table_user()
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    VAR_140 = VAR_265.session
    VAR_234 = self.settings.retrieve_password_captcha or \
        (self.settings.retrieve_password_captcha is not False and self.settings.captcha)
    if VAR_134 is VAR_3:
        VAR_134 = self.get_vars_next() or self.settings.request_reset_password_next
    if not self.settings.mailer:
        VAR_246.flash = self.messages.function_disabled
        return ''
    if VAR_135 is VAR_3:
        VAR_135 = self.settings.request_reset_password_onvalidation
    if VAR_136 is VAR_3:
        VAR_136 = self.settings.request_reset_password_onaccept
    if VAR_137 is VAR_3:
        VAR_137 = self.messages['reset_password_log']
    # NOTE(review): due to conditional-expression precedence this parses as
    # `(login_userfield or 'username') if (login_userfield or ('username' in fields)) else 'email'`,
    # which is truthy whenever 'username' is in fields even if login_userfield
    # is unset — a known upstream precedence quirk; verify intent.
    VAR_262 = self.settings.login_userfield or 'username' \
        if self.settings.login_userfield or 'username' \
        in VAR_256.fields else 'email'
    if VAR_262 == 'email':
        VAR_256.email.requires = [
            IS_EMAIL(VAR_61=self.messages.invalid_email),
            IS_IN_DB(self.db, VAR_256.email,
                     VAR_61=self.messages.invalid_email)]
        if not self.settings.email_case_sensitive:
            VAR_256.email.requires.insert(0, IS_LOWER())
    elif VAR_262 == 'username':
        VAR_256.username.requires = [
            IS_IN_DB(self.db, VAR_256.username,
                     VAR_61=self.messages.invalid_username)]
        if not self.settings.username_case_sensitive:
            VAR_256.username.requires.insert(0, IS_LOWER())
    VAR_7 = SQLFORM(VAR_256,
                    VAR_133=[VAR_262],
                    hidden=dict(_next=VAR_134),
                    showid=self.settings.showid,
                    submit_button=self.messages.password_reset_button,
                    delete_label=self.messages.delete_label,
                    VAR_275=self.settings.formstyle,
                    separator=self.settings.label_separator
                    )
    if VAR_234:
        FUNC_6(VAR_7, VAR_234.label, VAR_234,
               captcha.comment, self.settings.formstyle, 'captcha__row')  # NOTE(review): `captcha` undefined; local is VAR_234
    if VAR_7.accepts(VAR_58, VAR_140 if self.csrf_prevention else None,
                     VAR_172='reset_password', dbio=False,
                     VAR_135=onvalidation,  # NOTE(review): `onvalidation` undefined; local is VAR_135
                     hideerror=self.settings.hideerror):
        VAR_142 = VAR_256(**{VAR_262: VAR_7.vars.get(VAR_262)})
        VAR_200 = VAR_142.registration_key  # NOTE(review): read before the `not VAR_142` guard below — AttributeError if no match; verify
        if not VAR_142:
            VAR_140.flash = self.messages['invalid_%s' % VAR_262]
            redirect(self.url(VAR_11=VAR_58.args),
                     client_side=self.settings.client_side)
        elif VAR_200 in ('pending', 'disabled', 'blocked') or (VAR_200 or '').startswith('pending'):
            VAR_140.flash = self.messages.registration_pending
            redirect(self.url(VAR_11=VAR_58.args),
                     client_side=self.settings.client_side)
        if self.email_reset_password(VAR_142):
            VAR_140.flash = self.messages.email_sent
        else:
            VAR_140.flash = self.messages.unable_send_email
        self.log_event(VAR_137, VAR_142)
        VAR_27(VAR_136, VAR_7)
        if not VAR_134:
            VAR_134 = self.url(VAR_11=VAR_58.args)
        else:
            VAR_134 = FUNC_4(VAR_134, VAR_7)
        redirect(VAR_134, client_side=self.settings.client_side)
    return VAR_7
def FUNC_63(self, VAR_142):
    # Email a reset-password link to VAR_142 and store the timestamped key on
    # the record. Returns True if the mail was sent successfully.
    VAR_277 = str(int(time.time())) + '-' + web2py_uuid()  # '<timestamp>-<uuid>' enables 24h expiry check
    VAR_278 = self.url(self.settings.function,
                       VAR_11=('reset_password',), VAR_98={'key': VAR_277},
                       VAR_99=True)
    VAR_28 = dict(VAR_142)
    VAR_28.update(dict(VAR_200=VAR_277, VAR_278=FUNC_173))  # NOTE(review): `FUNC_173` undefined; presumably VAR_278 (obfuscation artifact)
    if self.settings.mailer and self.settings.mailer.send(
            VAR_37=VAR_142.email,
            VAR_38=self.messages.reset_password_subject,
            VAR_39=self.messages.reset_password % VAR_28):
        VAR_142.update_record(VAR_277=reset_password_key)  # NOTE(review): `reset_password_key` undefined; local is VAR_277
        return True
    return False
def FUNC_64(self,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            ):
    # Dispatcher: use the email-verified reset flow when verification is
    # required, otherwise fall back to the deprecated direct-reset flow.
    if self.settings.reset_password_requires_verification:
        return self.request_reset_password(VAR_134, VAR_135, VAR_136, VAR_137)
    else:
        return self.reset_password_deprecated(VAR_134, VAR_135, VAR_136, VAR_137)
def FUNC_65(self,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            ):
    # Change-password form for the logged-in user: verifies the old password
    # (via the CRYPT validator, minimum length relaxed), then stores the new
    # one. Delegates to an alternate login form's URL if one is configured.
    if not self.is_logged_in():
        redirect(self.settings.login_url,
                 client_side=self.settings.client_side)
    if self.settings.login_form != self:
        VAR_371 = self.settings.login_form
        if hasattr(VAR_371, 'change_password_url'):
            VAR_134 = VAR_371.change_password_url(VAR_134)
            if VAR_134 is not None:
                redirect(VAR_134)
    VAR_102 = self.db
    VAR_256 = self.table_user()
    VAR_280 = VAR_102(VAR_256.id == self.user.id)
    VAR_58 = VAR_265.request
    VAR_140 = VAR_265.session
    if VAR_134 is VAR_3:
        VAR_134 = self.get_vars_next() or self.settings.change_password_next
    if VAR_135 is VAR_3:
        VAR_135 = self.settings.change_password_onvalidation
    if VAR_136 is VAR_3:
        VAR_136 = self.settings.change_password_onaccept
    if VAR_137 is VAR_3:
        VAR_137 = self.messages['change_password_log']
    VAR_263 = self.settings.password_field
    VAR_281 = VAR_256[VAR_263].requires
    if not isinstance(VAR_281, (list, tuple)):
        VAR_281 = [FUNC_73]  # NOTE(review): `FUNC_73` undefined; presumably [VAR_281] (obfuscation artifact)
    VAR_281 = [VAR_446 for VAR_446 in VAR_281 if isinstance(VAR_446, CRYPT)]
    if VAR_281:
        VAR_281[0] = CRYPT(**VAR_281[0].__dict__)  # Copy the existing CRYPT VAR_173
        VAR_281[0].min_length = 0  # But do not enforce minimum length for the old VAR_132
    VAR_7 = SQLFORM.factory(
        VAR_1('old_password', 'password', VAR_281=FUNC_73,  # NOTE(review): `FUNC_73` undefined; presumably VAR_281 (the CRYPT-only list built above)
              VAR_62=self.messages.old_password),
        VAR_1('new_password', 'password',
              VAR_62=self.messages.new_password,
              VAR_281=VAR_256[VAR_263].requires),
        VAR_1('new_password2', 'password',
              VAR_62=self.messages.verify_password,
              VAR_281=[IS_EXPR('value==%s' % repr(VAR_58.vars.new_password),
                               self.messages.mismatched_password)]),
        submit_button=self.messages.password_change_button,
        hidden=dict(_next=VAR_134),
        VAR_275=self.settings.formstyle,
        separator=self.settings.label_separator
    )
    if VAR_7.accepts(VAR_58, VAR_140,
                     VAR_172='change_password',
                     VAR_135=onvalidation,  # NOTE(review): `onvalidation` undefined; local is VAR_135
                     hideerror=self.settings.hideerror):
        VAR_380 = VAR_280.select(VAR_176=(0, 1), orderby_on_limitby=False).first()
        # The CRYPT validator has already hashed the submitted old password,
        # so equality with the stored hash verifies it.
        if not VAR_7.vars['old_password'] == VAR_380[VAR_263]:
            VAR_7.errors['old_password'] = self.messages.invalid_password
        else:
            VAR_28 = {VAR_263: str(VAR_7.vars.new_password)}
            VAR_280.update(**VAR_28)
            VAR_140.flash = self.messages.password_changed
            self.log_event(VAR_137, self.user)
            VAR_27(VAR_136, VAR_7)
            if not VAR_134:
                VAR_134 = self.url(VAR_11=VAR_58.args)
            else:
                VAR_134 = FUNC_4(VAR_134, VAR_7)
            redirect(VAR_134, client_side=self.settings.client_side)
    return VAR_7
def FUNC_66(self,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            ):
    # Profile edit form for the logged-in user. Password and email are made
    # read-only; on accept the session copy of the user is refreshed (via a
    # full reload when computed fields exist), and deleting the record logs
    # the user out.
    VAR_256 = self.table_user()
    if not self.is_logged_in():
        redirect(self.settings.login_url,
                 client_side=self.settings.client_side)
    VAR_263 = self.settings.password_field
    VAR_256[VAR_263].writable = False
    VAR_256['email'].writable = False
    VAR_58 = VAR_265.request
    VAR_140 = VAR_265.session
    if VAR_134 is VAR_3:
        VAR_134 = self.get_vars_next() or self.settings.profile_next
    if VAR_135 is VAR_3:
        VAR_135 = self.settings.profile_onvalidation
    if VAR_136 is VAR_3:
        VAR_136 = self.settings.profile_onaccept
    if VAR_137 is VAR_3:
        VAR_137 = self.messages['profile_log']
    VAR_7 = SQLFORM(
        VAR_256,
        self.user.id,
        VAR_133=self.settings.profile_fields,
        hidden=dict(_next=VAR_134),
        showid=self.settings.showid,
        submit_button=self.messages.profile_save_button,
        delete_label=self.messages.delete_label,
        upload=self.settings.download_url,
        VAR_275=self.settings.formstyle,
        separator=self.settings.label_separator,
        VAR_171=self.settings.allow_delete_accounts,
    )
    if VAR_7.accepts(VAR_58, VAR_140,
                     VAR_172='profile',
                     VAR_135=onvalidation,  # NOTE(review): `onvalidation` undefined; local is VAR_135 (obfuscation artifact)
                     hideerror=self.settings.hideerror):
        VAR_361 = self.settings.extra_fields.get(self.settings.table_user_name, [])
        if not VAR_7.deleted:
            if any(VAR_10.compute for VAR_10 in VAR_361):
                # Computed fields: re-read the record so computed values are current.
                VAR_142 = VAR_256[self.user.id]
                self._update_session_user(VAR_142)
                self.update_groups()
            else:
                self.user.update(VAR_256._filter_fields(VAR_7.vars))
        VAR_140.flash = self.messages.profile_updated
        self.log_event(VAR_137, self.user)
        VAR_27(VAR_136, VAR_7)
        if VAR_7.deleted:
            return self.logout()
        if not VAR_134:
            VAR_134 = self.url(VAR_11=VAR_58.args)
        else:
            VAR_134 = FUNC_4(VAR_134, VAR_7)
        redirect(VAR_134, client_side=self.settings.client_side)
    return VAR_7
def FUNC_67(self):
    # Run the configured login_onaccept callback(s) with a Storage that
    # mimics a form (vars = current user).
    VAR_136 = self.settings.login_onaccept
    if VAR_136:
        VAR_7 = Storage(dict(VAR_98=self.user))
        if not isinstance(VAR_136, (list, tuple)):
            VAR_136 = [onaccept]  # NOTE(review): `onaccept` undefined; presumably [VAR_136] (obfuscation artifact)
        for VAR_27 in VAR_136:
            VAR_27(VAR_7)
def VAR_112(self):
    # JWT endpoint: 401 if no JWT handler is configured, otherwise emit the
    # token via an HTTP 200 response (raised, web2py-style).
    if not self.jwt_handler:
        raise HTTP(401, "Not authorized")
    else:
        VAR_381 = self.jwt_handler.jwt_token_manager()
        raise HTTP(200, VAR_381, cookies=None, **VAR_265.response.headers)
def FUNC_69(self):
    # True when the session is an impersonation session (see FUNC_70).
    return self.is_logged_in() and 'impersonator' in VAR_265.session.auth
def FUNC_70(self, VAR_144=VAR_3):
    # Impersonate another user (requires 'impersonate' permission on the
    # target). The real session is pickled into auth.impersonator so passing
    # 0/'0' restores it. Returns a selection form, a read-only view of the
    # impersonated user, or None.
    VAR_58 = VAR_265.request
    VAR_140 = VAR_265.session
    VAR_65 = VAR_140.auth
    VAR_256 = self.table_user()
    if not self.is_logged_in():
        raise HTTP(401, "Not Authorized")
    VAR_282 = VAR_65.user.id  # id of the real (impersonating) user
    VAR_283 = VAR_144
    VAR_142 = None
    if VAR_144 is VAR_3:
        VAR_144 = VAR_265.request.post_vars.user_id
    if VAR_144 and VAR_144 != self.user.id and VAR_144 != '0':
        if not self.has_permission('impersonate',
                                   self.table_user(),
                                   VAR_144):
            raise HTTP(403, "Forbidden")
        VAR_142 = VAR_256(VAR_144)
        if not VAR_142:
            raise HTTP(401, "Not Authorized")
        # Save the current session so it can be restored later.
        VAR_65.impersonator = pickle.dumps(VAR_140, pickle.HIGHEST_PROTOCOL)
        VAR_65.user.update(
            VAR_256._filter_fields(VAR_142, True))
        self.user = VAR_65.user
        self.update_groups()
        VAR_137 = self.messages['impersonate_log']
        self.log_event(VAR_137, dict(VAR_216=VAR_282, other_id=VAR_65.user.id))
        self.run_login_onaccept()
    elif VAR_144 in (0, '0'):
        # End impersonation: restore the pickled original session.
        if self.is_impersonating():
            VAR_140.clear()
            VAR_140.update(pickle.loads(VAR_65.impersonator))
            self.user = VAR_140.auth.user
            self.update_groups()
            self.run_login_onaccept()
        return None
    if VAR_283 is VAR_3 and not VAR_58.post_vars:
        return SQLFORM.factory(VAR_1('user_id', 'integer'))
    elif not VAR_142:
        return None
    else:
        return SQLFORM(VAR_256, VAR_142.id, readonly=True)
def VAR_168(self):
    # Render an HTML TABLE listing the groups (role, id, description) the
    # logged-in user belongs to; None when the user has no memberships.
    if not self.is_logged_in():
        redirect(self.settings.login_url)
    VAR_284 = self.table_membership()
    VAR_285 = self.db(
        VAR_284.user_id == self.user.id).select()
    VAR_154 = TABLE()
    for VAR_286 in VAR_285:
        VAR_382 = self.table_group()
        VAR_168 = self.db(VAR_382.id == VAR_286.group_id).select()
        if VAR_168:
            VAR_406 = VAR_168[0]
            VAR_154.append(TR(H3(VAR_406.role, '(%VAR_280)' % VAR_406.id)))  # NOTE(review): '%VAR_280' is an obfuscation artifact of '%s' in the format string
            VAR_154.append(TR(P(VAR_406.description)))
    if not VAR_285:
        return None
    return VAR_154
def FUNC_72(self):
    # Default on-failed-authorization handler: 403 for AJAX requests,
    # otherwise return the access-denied message for rendering.
    if VAR_265.request.ajax:
        raise HTTP(403, 'ACCESS DENIED')
    return self.messages.access_denied
def FUNC_28(self, VAR_92=None):
    # Delegate to the JWT handler's allows_jwt decorator; 401 if no handler.
    if not self.jwt_handler:
        raise HTTP(401, "Not authorized")
    else:
        return self.jwt_handler.allows_jwt(VAR_92=otherwise)  # NOTE(review): `otherwise` undefined; parameter is VAR_92 (obfuscation artifact)
def VAR_281(self, VAR_145, VAR_146=True, VAR_92=None):
    # Decorator factory: wrap an action so it first enforces login (when
    # VAR_146) and then the condition VAR_145 (value or callable). On
    # failure it raises 401/403, redirects, or invokes the configured
    # failed-auth/authorization handlers; VAR_92 ("otherwise") overrides
    # the unauthenticated behavior.
    def FUNC_153(VAR_114):
        def VAR_10(*VAR_9, **VAR_14):
            # Try HTTP basic auth first; fall back to the session user.
            VAR_432, VAR_433, VAR_142 = self.basic()
            VAR_142 = VAR_142 or self.user
            VAR_434 = VAR_146
            if callable(VAR_434):
                login_required = VAR_434()  # NOTE(review): result assigned to undefined `login_required` instead of VAR_434 — the callable's result is discarded; verify against upstream
            if VAR_434:
                if not VAR_142:
                    if VAR_265.request.ajax:
                        raise HTTP(401, self.messages.ajax_failed_authentication)
                    elif VAR_92 is not None:
                        if callable(VAR_92):
                            return VAR_92()
                        redirect(VAR_92)
                    elif self.settings.allow_basic_login_only or \
                            VAR_433 or VAR_265.request.is_restful:
                        raise HTTP(403, "Not authorized")
                    else:
                        VAR_134 = self.here()
                        VAR_265.session.flash = VAR_265.response.flash
                        return FUNC_3(self.settings.on_failed_authentication,
                                      self.settings.login_url + '?_next=' + urllib_quote(VAR_134))
            if callable(VAR_145):
                VAR_454 = VAR_145()
            else:
                VAR_454 = VAR_145
            if not VAR_454:
                VAR_265.session.flash = self.messages.access_denied
                return FUNC_3(
                    self.settings.on_failed_authorization)
            return VAR_114(*VAR_9, **VAR_14)
        # Preserve the wrapped function's metadata (pre-functools.wraps style).
        VAR_10.__doc__ = VAR_114.__doc__
        VAR_10.__name__ = VAR_114.__name__
        VAR_10.__dict__.update(VAR_114.__dict__)
        return VAR_10
    return FUNC_153
def VAR_146(self, VAR_92=None):
    # Decorator requiring only that a user is logged in.
    return self.requires(True, VAR_92=otherwise)  # NOTE(review): `otherwise` undefined; parameter is VAR_92 (obfuscation artifact)
def FUNC_75(self, VAR_92=None):
    # Like requires_login, but first tries to authenticate via an API token
    # (from the X-Web2py-User-Token header or the _token request var) when
    # token auth is enabled.
    if self.settings.enable_tokens is True:
        VAR_142 = None
        VAR_58 = VAR_265.request
        VAR_87 = VAR_58.env.http_web2py_user_token or VAR_58.vars._token
        VAR_279 = self.table_token()
        VAR_256 = self.table_user()
        from gluon.settings import global_settings
        if global_settings.web2py_runtime_gae:
            # GAE datastore path: no join support, look up token then user.
            VAR_268 = VAR_279(VAR_87=token)  # NOTE(review): `token` undefined; local is VAR_87 (obfuscation artifact)
            if VAR_268:
                VAR_142 = VAR_256(VAR_268.user_id)
        else:
            VAR_268 = self.db(VAR_279.token == VAR_87)(VAR_256.id == VAR_279.user_id).select().first()
            if VAR_268:
                VAR_142 = VAR_268[VAR_256._tablename]
        if VAR_142:
            self.login_user(VAR_142)
    return self.requires(True, VAR_92=otherwise)  # NOTE(review): `otherwise` undefined; parameter is VAR_92
def FUNC_76(self, VAR_147=None, VAR_148=None, VAR_92=None):
    # Decorator requiring membership in a group (by role VAR_147 or id VAR_148).
    # NOTE(review): `group_id`, `role` and `otherwise` below are undefined —
    # obfuscation artifacts of the parameters VAR_148/VAR_147/VAR_92; verify.
    def FUNC_160(self=self, VAR_148=group_id, VAR_147=role):
        return self.has_membership(VAR_148=group_id, VAR_147=role)
    return self.requires(FUNC_160, VAR_92=otherwise)
def FUNC_77(self, VAR_149, VAR_150='', VAR_151=0,
            VAR_92=None):
    # Decorator requiring permission VAR_149 on table VAR_150 / record VAR_151.
    # NOTE(review): `name`, `table_name`, `record_id`, `otherwise` are undefined —
    # obfuscation artifacts of VAR_149/VAR_150/VAR_151/VAR_92; verify.
    def FUNC_84(self=self, VAR_149=name, VAR_150=table_name, VAR_151=record_id):
        return self.has_permission(VAR_149, VAR_150, VAR_151)
    return self.requires(FUNC_84, VAR_92=otherwise)
def FUNC_78(self, VAR_92=None, VAR_152=True, VAR_153=True):
    # Decorator: require a valid digital signature on the current URL.
    # BUG FIX: the closure referenced the undefined name `hash_vars`
    # and hard-coded True for the extension flag instead of passing the
    # captured parameters VAR_152 / VAR_153 through.
    def FUNC_161():
        return URL.verify(VAR_265.request, user_signature=True, VAR_152=VAR_152, VAR_153=VAR_153)
    return self.requires(FUNC_161, VAR_92)
def FUNC_79(self, VAR_149, VAR_154, VAR_144=None):
    """Build a DAL query selecting only the rows of VAR_154 for which
    the user VAR_144 (default: current user) has permission VAR_149.

    Accepts a Table, a table name, or a Set/Query (in which case the
    restriction is applied recursively to every table involved).
    """
    if not VAR_144:
        VAR_144 = self.user_id
    VAR_102 = self.db
    if isinstance(VAR_154, str) and VAR_154 in self.db.tables():
        VAR_154 = self.db[VAR_154]
    elif isinstance(VAR_154, (Set, Query)):
        if isinstance(VAR_154, Set):
            VAR_455 = VAR_154.query
        else:
            VAR_455 = VAR_154
        VAR_435 = VAR_102._adapter.tables(VAR_455)
        for VAR_8 in VAR_435:
            # BUG FIX: recursed through the stale pre-rename name
            # `accessible_query` and passed the undefined `user_id`.
            VAR_455 &= self.FUNC_79(VAR_149, VAR_8, VAR_144=VAR_144)
        return VAR_455
    if not isinstance(VAR_154, str) and \
            self.has_permission(VAR_149, VAR_154, 0, VAR_144):
        return VAR_154.id > 0
    VAR_286 = self.table_membership()
    VAR_287 = self.table_permission()
    # Rows permitted through the user's group memberships.
    VAR_174 = VAR_154.id.belongs(
        VAR_102(VAR_286.user_id == VAR_144)
        (VAR_286.group_id == VAR_287.group_id)
        (VAR_287.name == VAR_149)
        (VAR_287.table_name == VAR_154)
        ._select(VAR_287.record_id))
    if self.settings.everybody_group_id:
        # Rows granted to the "everybody" group as well.
        VAR_174 |= VAR_154.id.belongs(
            VAR_102(VAR_287.group_id == self.settings.everybody_group_id)
            (VAR_287.name == VAR_149)
            (VAR_287.table_name == VAR_154)
            ._select(VAR_287.record_id))
    return VAR_174
@staticmethod
def FUNC_80(VAR_7,
            VAR_155=None,
            VAR_122='current_record',
            VAR_156=False,
            VAR_133=None):
    """Archive the record referenced by form VAR_7 into the archive
    table VAR_155 (auto-defined as '<table>_archive' when omitted).

    VAR_122 names the back-reference field; VAR_156 archives the
    submitted (current) values instead of the stored record; VAR_133 is
    an optional dict of extra field values.  Returns the new archive
    record id, or None when there is nothing to archive.
    """
    if not VAR_156 and not VAR_7.record:
        return None
    VAR_154 = VAR_7.table
    if not VAR_155:
        VAR_383 = '%s_archive' % VAR_154
        if VAR_383 not in VAR_154._db:
            # Clone the source table, dropping unique constraints so
            # multiple versions of one record can coexist.
            VAR_154._db.define_table(
                VAR_383,
                VAR_1(VAR_122, VAR_154),
                *[VAR_178.clone(unique=False) for VAR_178 in VAR_154])
        VAR_155 = VAR_154._db[VAR_383]
    VAR_288 = {VAR_122: VAR_7.vars.id}
    for VAR_386 in VAR_155.fields:
        if VAR_386 not in ['id', VAR_122]:
            if VAR_156 and VAR_386 in VAR_7.vars:
                VAR_288[VAR_386] = VAR_7.vars[VAR_386]
            elif VAR_7.record and VAR_386 in VAR_7.record:
                VAR_288[VAR_386] = VAR_7.record[VAR_386]
    if VAR_133:
        VAR_288.update(VAR_133)
    VAR_216 = VAR_155.insert(**VAR_288)
    return VAR_216
def VAR_289(self,
            VAR_157=None,
            VAR_158=None,
            VAR_159='markmin',
            VAR_160=False,
            VAR_161='',
            VAR_162=False,
            VAR_163=True,
            VAR_164=None,
            VAR_165=None,
            VAR_166=None,
            VAR_126=True,
            VAR_105=None,
            VAR_106=None,
            VAR_167=False,
            VAR_168=None):
    """Lazily instantiate the wiki (CLASS_8) and, when VAR_163, return
    either the rendered page VAR_157 or the wiki dispatcher's result.

    BUG FIX: every keyword argument in the CLASS_8(...) call referenced
    the undefined pre-rename parameter names (render, env, extra, ...);
    they now forward the actual VAR_* parameters.
    """
    if VAR_105 and VAR_106:
        VAR_163 = False
    if not hasattr(self, '_wiki'):
        self._wiki = CLASS_8(self, VAR_159=VAR_159,
                             VAR_160=VAR_160,
                             VAR_161=VAR_161,
                             VAR_162=VAR_162,
                             VAR_158=VAR_158, VAR_164=VAR_164 or {},
                             VAR_165=VAR_165,
                             VAR_166=VAR_166,
                             VAR_126=VAR_126,
                             VAR_105=VAR_105,
                             VAR_106=VAR_106,
                             VAR_168=VAR_168)
    else:
        # Already instantiated: refresh the extras and environment only.
        self._wiki.settings.extra = VAR_164 or {}
        self._wiki.env.update(VAR_158 or {})
    VAR_289 = None
    if VAR_163:
        if VAR_157:
            VAR_289 = self._wiki.read(VAR_157, VAR_167)
            if isinstance(VAR_289, dict) and 'content' in VAR_289:
                VAR_289 = VAR_289['content']
        else:
            VAR_289 = self._wiki()
        if isinstance(VAR_289, basestring):
            VAR_289 = XML(VAR_289)
    return VAR_289
def FUNC_82(self):
    # Inject the wiki-generated menu into the response, but only when a
    # wiki was instantiated with an explicit controller and function.
    if (hasattr(self, "_wiki") and
            self._wiki.settings.controller and
            self._wiki.settings.function):
        self._wiki.automenu()
class CLASS_4(object):  # pragma: no cover
    """CRUD helper: create/read/update/delete/select/search actions."""
    # Default flash and audit-log messages.
    # BUG FIX: the %-format placeholders were corrupted by the renamer
    # ('%(VAR_216)VAR_280' is not a valid conversion); restored to
    # '%(id)s', matching the `id` key of form.vars passed to the logger.
    # Also restored the corrupted word in 'Check to delete'.
    VAR_95 = dict(
        submit_button='Submit',
        delete_label='Check to delete',
        record_created='Record Created',
        record_updated='Record Updated',
        record_deleted='Record Deleted',
        update_log='Record %(id)s updated',
        create_log='Record %(id)s created',
        read_log='Record %(id)s read',
        delete_log='Record %(id)s deleted',
    )
def VAR_12(self, VAR_10=None, VAR_11=None, VAR_98=None):
    """Build a URL for this CRUD controller.
    BUG FIX: the URL() call referenced the undefined names
    FUNC_118/args/vars; use the actual parameters."""
    if VAR_11 is None:
        VAR_11 = []
    if VAR_98 is None:
        VAR_98 = {}
    return URL(VAR_15=self.settings.controller, VAR_10=VAR_10, VAR_11=VAR_11, VAR_98=VAR_98)
def __init__(self, VAR_101, VAR_102=None, VAR_105='default'):
    """CRUD constructor: VAR_101/VAR_102 supply environment and DAL
    (the DAL may be passed first for convenience); VAR_105 is the
    controller name used when building URLs."""
    self.db = VAR_102
    if not VAR_102 and VAR_101 and isinstance(VAR_101, DAL):
        self.db = VAR_101
    elif not VAR_102:
        raise SyntaxError("must pass VAR_102 as first or second argument")
    self.environment = VAR_265
    VAR_224 = self.settings = Settings()
    VAR_224.auth = None
    VAR_224.logger = None
    VAR_224.create_next = None
    VAR_224.update_next = None
    VAR_224.controller = VAR_105
    # BUG FIX: self.url was renamed to VAR_12.
    VAR_224.delete_next = self.VAR_12()
    VAR_224.download_url = self.VAR_12('download')
    VAR_224.create_onvalidation = StorageList()
    VAR_224.update_onvalidation = StorageList()
    VAR_224.delete_onvalidation = StorageList()
    VAR_224.create_onaccept = StorageList()
    VAR_224.update_onaccept = StorageList()
    VAR_224.update_ondelete = StorageList()
    VAR_224.delete_onaccept = StorageList()
    VAR_224.update_deletable = True
    VAR_224.showid = False
    VAR_224.keepvalues = False
    VAR_224.create_captcha = None
    VAR_224.update_captcha = None
    VAR_224.captcha = None
    VAR_224.formstyle = 'table3cols'
    VAR_224.label_separator = ': '
    VAR_224.hideerror = False
    VAR_224.detect_record_change = True
    VAR_224.hmac_key = None
    VAR_224.lock_keys = True
    VAR_251 = self.messages = Messages(VAR_265.T)
    # BUG FIX: the class attribute default_messages was renamed VAR_95.
    VAR_251.update(CLASS_4.VAR_95)
    VAR_251.lock_keys = True
def __call__(self):
    """Dispatch request.args to the matching CRUD action
    (tables/create/select/search/read/update/delete).

    BUG FIX: every dispatched method was called by its pre-rename name
    (tables, create, select, search, read, update, delete, url); they
    now call the actual VAR_119/VAR_339/FUNC_91/FUNC_94/FUNC_88/
    FUNC_86/FUNC_89/VAR_12 definitions."""
    VAR_11 = VAR_265.request.args
    if len(VAR_11) < 1:
        raise HTTP(404)
    elif VAR_11[0] == 'tables':
        return self.VAR_119()
    elif len(VAR_11) > 1 and not VAR_11(1) in self.db.tables:
        raise HTTP(404)
    VAR_154 = self.db[VAR_11(1)]
    if VAR_11[0] == 'create':
        return self.VAR_339(VAR_154)
    elif VAR_11[0] == 'select':
        return self.FUNC_91(VAR_154, linkto=self.VAR_12(VAR_11='read'))
    elif VAR_11[0] == 'search':
        VAR_7, VAR_291 = self.FUNC_94(VAR_154, linkto=self.VAR_12(VAR_11='read'))
        return DIV(VAR_7, SQLTABLE(VAR_291))
    elif VAR_11[0] == 'read':
        return self.FUNC_88(VAR_154, VAR_11(2))
    elif VAR_11[0] == 'update':
        return self.FUNC_86(VAR_154, VAR_11(2))
    elif VAR_11[0] == 'delete':
        return self.FUNC_89(VAR_154, VAR_11(2))
    else:
        raise HTTP(404)
def FUNC_83(self, VAR_39, VAR_98):
    # Forward an audit event to the configured logger (no-op when no
    # logger is attached).
    if self.settings.logger:
        self.settings.logger.log_event(VAR_39, VAR_98, origin='crud')
def FUNC_84(self, VAR_149, VAR_154, VAR_169=0):
    """Check permission VAR_149 on VAR_154/record via the attached
    auth object; always True when no auth is configured."""
    if not self.settings.auth:
        return True
    # VAR_169 may be a record (has .id) or a bare id value.
    # FIX: narrowed the bare `except:` to AttributeError so unrelated
    # exceptions (e.g. KeyboardInterrupt) are not silently swallowed.
    try:
        VAR_151 = VAR_169.id
    except AttributeError:
        VAR_151 = VAR_169
    return self.settings.auth.has_permission(VAR_149, str(VAR_154), VAR_151)
def VAR_119(self):
    # Index page: an HTML table of links, one per database table,
    # each pointing at that table's 'select' action.
    # BUG FIX: self.url was renamed to VAR_12.
    return TABLE(*[TR(A(VAR_149,
                        _href=self.VAR_12(VAR_11=('select', VAR_149))))
                   for VAR_149 in self.db.tables])
@staticmethod
def FUNC_80(VAR_7, VAR_155=None, VAR_122='current_record'):
    # Delegate archiving to the Auth helper.
    # BUG FIX: referenced the stale names `archive`/`archive_table`/
    # `current_record`; CLASS_3's archive method is FUNC_80 and the
    # keyword values are the actual parameters.
    return CLASS_3.FUNC_80(VAR_7, VAR_155=VAR_155,
                           VAR_122=VAR_122)
def FUNC_86(self,
            VAR_154,
            VAR_169,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_170=VAR_3,
            VAR_137=VAR_3,
            VAR_39=VAR_3,
            VAR_171=VAR_3,
            VAR_172=VAR_3,
            **VAR_173
            ):
    """Update (or, with VAR_169 falsy, create) a record of VAR_154 via
    an SQLFORM.  VAR_3 is the "use configured default" sentinel for the
    next-URL, callbacks, log message, flash message, deletability and
    form name.  Sets self.accepted / self.deleted as side effects."""
    if not (isinstance(VAR_154, VAR_0) or VAR_154 in self.db.tables) \
            or (isinstance(VAR_169, str) and not str(VAR_169).isdigit()):
        raise HTTP(404)
    if not isinstance(VAR_154, VAR_0):
        VAR_154 = self.db[VAR_154]
    try:
        VAR_151 = VAR_169.id
    except:
        VAR_151 = VAR_169 or 0
    # BUG FIX (both checks): self.has_permission was renamed FUNC_84.
    if VAR_151 and not self.FUNC_84('update', VAR_154, VAR_151):
        redirect(self.settings.auth.settings.on_failed_authorization)
    if not VAR_151 and not self.FUNC_84('create', VAR_154, VAR_151):
        redirect(self.settings.auth.settings.on_failed_authorization)
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    VAR_140 = VAR_265.session
    if VAR_58.extension == 'json' and VAR_58.vars.json:
        # NOTE(review): FUNC_98 must resolve to a module-level JSON
        # parser (it is also a CLASS_5 method name) — confirm.
        VAR_58.vars.update(FUNC_98.loads(VAR_58.vars.json))
    if VAR_134 is VAR_3:
        VAR_134 = FUNC_5(VAR_58.get_vars._next) \
            or FUNC_5(VAR_58.post_vars._next) \
            or self.settings.update_next
    if VAR_135 is VAR_3:
        VAR_135 = self.settings.update_onvalidation
    if VAR_136 is VAR_3:
        VAR_136 = self.settings.update_onaccept
    if VAR_170 is VAR_3:
        VAR_170 = self.settings.update_ondelete
    if VAR_137 is VAR_3:
        VAR_137 = self.messages['update_log']
    if VAR_171 is VAR_3:
        VAR_171 = self.settings.update_deletable
    if VAR_39 is VAR_3:
        VAR_39 = self.messages.record_updated
    if 'hidden' not in VAR_173:
        # BUG FIX: assigned through the undefined name `attributes`.
        VAR_173['hidden'] = {}
    VAR_173['hidden']['_next'] = VAR_134
    VAR_7 = SQLFORM(
        VAR_154,
        VAR_169,
        showid=self.settings.showid,
        submit_button=self.messages.submit_button,
        delete_label=self.messages.delete_label,
        VAR_171=VAR_171,  # BUG FIX: value was the undefined `deletable`
        upload=self.settings.download_url,
        VAR_275=self.settings.formstyle,
        separator=self.settings.label_separator,
        **VAR_173  # contains hidden
    )
    self.accepted = False
    self.deleted = False
    VAR_234 = self.settings.update_captcha or self.settings.captcha
    if VAR_169 and VAR_234:
        FUNC_6(VAR_7, VAR_234.label, VAR_234, VAR_234.comment, self.settings.formstyle, 'captcha__row')
    VAR_234 = self.settings.create_captcha or self.settings.captcha
    if not VAR_169 and VAR_234:
        FUNC_6(VAR_7, VAR_234.label, VAR_234, VAR_234.comment, self.settings.formstyle, 'captcha__row')
    if VAR_58.extension not in ('html', 'load'):
        (VAR_384, VAR_385) = (None, None)
    else:
        # BUG FIX: restored the corrupted '%s/%s' format string.
        (VAR_384, VAR_385) = (
            VAR_140, '%s/%s' % (VAR_154._tablename, VAR_7.record_id))
    if VAR_172 is not VAR_3:
        VAR_385 = VAR_172
    VAR_290 = self.settings.keepvalues
    if VAR_58.vars.delete_this_record:
        VAR_290 = False
    if isinstance(VAR_135, StorageList):
        # BUG FIX: called .get on the undefined name `onvalidation`.
        VAR_135 = VAR_135.get(VAR_154._tablename, [])
    # BUG FIX: keyword values referenced the undefined names
    # `onvalidation`/`keepvalues`.
    if VAR_7.accepts(VAR_58, VAR_384, VAR_172=VAR_385,
                     VAR_135=VAR_135, VAR_290=VAR_290,
                     hideerror=self.settings.hideerror,
                     detect_record_change=self.settings.detect_record_change):
        self.accepted = True
        VAR_246.flash = VAR_39
        if VAR_137:
            # BUG FIX: self.log_event was renamed FUNC_83.
            self.FUNC_83(VAR_137, VAR_7.vars)
        if VAR_58.vars.delete_this_record:
            self.deleted = True
            VAR_39 = self.messages.record_deleted
            VAR_27(VAR_170, VAR_7, VAR_154._tablename)
        VAR_246.flash = VAR_39
        VAR_27(VAR_136, VAR_7, VAR_154._tablename)
        if VAR_58.extension not in ('html', 'load'):
            raise HTTP(200, 'RECORD CREATED/UPDATED')
        if isinstance(VAR_134, (list, tuple)):  # fix issue with 2.6
            # BUG FIX: indexed the undefined name `next`.
            VAR_134 = VAR_134[0]
        if VAR_134:  # Only redirect when explicit
            VAR_134 = FUNC_4(VAR_134, VAR_7)
            VAR_140.flash = VAR_246.flash
            redirect(VAR_134)
    elif VAR_58.extension not in ('html', 'load'):
        raise HTTP(401, serializers.json(dict(errors=VAR_7.errors)))
    return VAR_7
def VAR_339(self,
            VAR_154,
            VAR_134=VAR_3,
            VAR_135=VAR_3,
            VAR_136=VAR_3,
            VAR_137=VAR_3,
            VAR_39=VAR_3,
            VAR_172=VAR_3,
            **VAR_173
            ):
    """Create form: thin wrapper over the update method (FUNC_86) with
    record=None, the create_* defaults, and deletion disabled."""
    if VAR_134 is VAR_3:
        VAR_134 = self.settings.create_next
    if VAR_135 is VAR_3:
        VAR_135 = self.settings.create_onvalidation
    if VAR_136 is VAR_3:
        VAR_136 = self.settings.create_onaccept
    if VAR_137 is VAR_3:
        VAR_137 = self.messages['create_log']
    if VAR_39 is VAR_3:
        VAR_39 = self.messages.record_created
    # BUG FIX: self.update and every keyword value referenced the
    # stale pre-rename names (update/next/onvalidation/onaccept/log/
    # message/formname).
    return self.FUNC_86(VAR_154,
                        None,
                        VAR_134=VAR_134,
                        VAR_135=VAR_135,
                        VAR_136=VAR_136,
                        VAR_137=VAR_137,
                        VAR_39=VAR_39,
                        VAR_171=False,
                        VAR_172=VAR_172,
                        **VAR_173
                        )
def FUNC_88(self, VAR_154, VAR_169):
    """Read-only form for one record; for non-HTML requests return the
    readable fields as a dict instead of a form."""
    if not (isinstance(VAR_154, VAR_0) or VAR_154 in self.db.tables) \
            or (isinstance(VAR_169, str) and not str(VAR_169).isdigit()):
        raise HTTP(404)
    if not isinstance(VAR_154, VAR_0):
        VAR_154 = self.db[VAR_154]
    # BUG FIX: self.has_permission was renamed FUNC_84.
    if not self.FUNC_84('read', VAR_154, VAR_169):
        redirect(self.settings.auth.settings.on_failed_authorization)
    VAR_7 = SQLFORM(
        VAR_154,
        VAR_169,
        readonly=True,
        comments=False,
        upload=self.settings.download_url,
        showid=self.settings.showid,
        VAR_275=self.settings.formstyle,
        separator=self.settings.label_separator
    )
    if VAR_265.request.extension not in ('html', 'load'):
        return VAR_154._filter_fields(VAR_7.record, VAR_216=True)
    return VAR_7
def FUNC_89(self,
            VAR_154,
            VAR_151,
            VAR_134=VAR_3,
            VAR_39=VAR_3,
            ):
    """Delete record VAR_151 from VAR_154 (running the delete
    callbacks), flash VAR_39 and redirect to VAR_134."""
    if not (isinstance(VAR_154, VAR_0) or VAR_154 in self.db.tables):
        raise HTTP(404)
    if not isinstance(VAR_154, VAR_0):
        VAR_154 = self.db[VAR_154]
    # BUG FIX: self.has_permission was renamed FUNC_84.
    if not self.FUNC_84('delete', VAR_154, VAR_151):
        redirect(self.settings.auth.settings.on_failed_authorization)
    VAR_58 = VAR_265.request
    VAR_140 = VAR_265.session
    if VAR_134 is VAR_3:
        VAR_134 = FUNC_5(VAR_58.get_vars._next) \
            or FUNC_5(VAR_58.post_vars._next) \
            or self.settings.delete_next
    if VAR_39 is VAR_3:
        VAR_39 = self.messages.record_deleted
    VAR_169 = VAR_154[VAR_151]
    if VAR_169:
        VAR_27(self.settings.delete_onvalidation, VAR_169)
        del VAR_154[VAR_151]
        VAR_27(self.settings.delete_onaccept, VAR_169, VAR_154._tablename)
    VAR_140.flash = VAR_39
    redirect(VAR_134)
def VAR_291(self,
            VAR_154,
            VAR_174=None,
            VAR_133=None,
            VAR_175=None,
            VAR_176=None,
            ):
    """Select the rows of VAR_154 matching VAR_174 (default: all) with
    an optional field list, orderby and limitby."""
    if not (isinstance(VAR_154, VAR_0) or VAR_154 in self.db.tables):
        raise HTTP(404)
    # BUG FIX: self.has_permission was renamed FUNC_84.
    if not self.FUNC_84('select', VAR_154):
        redirect(self.settings.auth.settings.on_failed_authorization)
    if not isinstance(VAR_154, VAR_0):
        VAR_154 = self.db[VAR_154]
    if not VAR_174:
        VAR_174 = VAR_154.id > 0
    if not VAR_133:
        VAR_133 = [VAR_178 for VAR_178 in VAR_154 if VAR_178.readable]
    else:
        VAR_133 = [VAR_154[VAR_10] if isinstance(VAR_10, str) else VAR_10 for VAR_10 in VAR_133]
    # BUG FIX: keyword values referenced undefined `orderby`/`limitby`.
    VAR_291 = self.db(VAR_174).select(*VAR_133, **dict(VAR_175=VAR_175,
                                                       VAR_176=VAR_176))
    return VAR_291
def FUNC_91(self,
            VAR_154,
            VAR_174=None,
            VAR_133=None,
            VAR_175=None,
            VAR_176=None,
            VAR_19=None,
            **VAR_177
            ):
    """Render the matching rows as an SQLTABLE (or a plain list for
    non-HTML requests); return None when nothing matches."""
    VAR_19 = VAR_19 or {}
    # BUG FIX: self.rows was renamed VAR_291.
    VAR_291 = self.VAR_291(VAR_154, VAR_174, VAR_133, VAR_175, VAR_176)
    if not VAR_291:
        return None  # Nicer than an empty table.
    if 'upload' not in VAR_177:
        # BUG FIX: assigned through the undefined name `attr`, and
        # self.url was renamed VAR_12.
        VAR_177['upload'] = self.VAR_12('download')
    if VAR_265.request.extension not in ('html', 'load'):
        return VAR_291.as_list()
    if not VAR_19:
        if isinstance(VAR_154, str):
            VAR_154 = self.db[VAR_154]
        # BUG FIX: the comprehension read the undefined name `k`.
        VAR_19 = dict((str(VAR_348), VAR_348.label) for VAR_348 in VAR_154)
    # BUG FIX: keyword value referenced the undefined name `headers`.
    return SQLTABLE(VAR_291, VAR_19=VAR_19, **VAR_177)
def FUNC_92(self, VAR_178):
    # For a reference field, return the referenced table's string
    # `_format` template stripped of its leading '%(' and trailing ')s';
    # otherwise fall back to the field name.
    VAR_292 = VAR_178._db[VAR_178.type[10:]]
    VAR_293 = VAR_292.get('_format', None)
    if VAR_293 and isinstance(VAR_293, str):
        return VAR_293[2:-2]
    return VAR_178.name
def FUNC_93(self, VAR_178, VAR_179, VAR_180, VAR_181=False):
    """Translate (field, operator-name, value) into a DAL query — or,
    when VAR_181 is set (reference fields), into a row-filter callable.
    Returns None for an unknown operator or when building fails."""
    try:
        if VAR_181:
            # BUG FIX: self.get_format was renamed FUNC_92.
            VAR_293 = self.FUNC_92(VAR_178)
        if VAR_179 == 'equals':
            if not VAR_181:
                return VAR_178 == VAR_180
            else:
                return lambda VAR_268: VAR_268[VAR_178.name][VAR_293] == VAR_180
        elif VAR_179 == 'not equal':
            if not VAR_181:
                return VAR_178 != VAR_180
            else:
                return lambda VAR_268: VAR_268[VAR_178.name][VAR_293] != VAR_180
        elif VAR_179 == 'greater than':
            if not VAR_181:
                return VAR_178 > VAR_180
            else:
                return lambda VAR_268: VAR_268[VAR_178.name][VAR_293] > VAR_180
        elif VAR_179 == 'less than':
            if not VAR_181:
                return VAR_178 < VAR_180
            else:
                return lambda VAR_268: VAR_268[VAR_178.name][VAR_293] < VAR_180
        elif VAR_179 == 'starts with':
            if not VAR_181:
                return VAR_178.like(VAR_180 + '%')
            else:
                return lambda VAR_268: str(VAR_268[VAR_178.name][VAR_293]).startswith(VAR_180)
        elif VAR_179 == 'ends with':
            if not VAR_181:
                return VAR_178.like('%' + VAR_180)
            else:
                return lambda VAR_268: str(VAR_268[VAR_178.name][VAR_293]).endswith(VAR_180)
        elif VAR_179 == 'contains':
            if not VAR_181:
                return VAR_178.like('%' + VAR_180 + '%')
            else:
                return lambda VAR_268: VAR_180 in VAR_268[VAR_178.name][VAR_293]
    except:
        return None
def FUNC_94(self, *VAR_119, **VAR_11):
    """Build a search form over VAR_119[0] (a table) and run the
    composed query; returns (form, rows-or-None)."""
    VAR_154 = VAR_119[0]
    VAR_133 = VAR_11.get('fields', VAR_154.fields)
    VAR_294 = VAR_11.get('validate', True)
    VAR_58 = VAR_265.request
    VAR_102 = self.db
    if not (isinstance(VAR_154, VAR_0) or VAR_154 in VAR_102.tables):
        raise HTTP(404)
    VAR_173 = {}
    for VAR_200 in ('orderby', 'groupby', 'left', 'distinct', 'limitby', 'cache'):
        if VAR_200 in VAR_11:
            VAR_173[VAR_200] = VAR_11[VAR_200]
    VAR_295 = TABLE()
    VAR_296 = []
    VAR_181 = []
    VAR_297 = []
    VAR_298 = VAR_11.get('showall', False)
    if VAR_298:
        VAR_296 = VAR_133
    VAR_299 = VAR_11.get('chkall', False)
    if VAR_299:
        for VAR_10 in VAR_133:
            VAR_58.vars['chk%s' % VAR_10] = 'on'
    VAR_300 = VAR_11.get('queries', [])
    VAR_301 = VAR_11.get('zero', '')
    if not VAR_300:
        # BUG FIX: this default list was assigned to the stale name
        # `ops`, leaving VAR_300 empty (the operator dropdown would
        # contain only the zero option).
        VAR_300 = ['equals', 'not equal', 'greater than',
                   'less than', 'starts with',
                   'ends with', 'contains']
        VAR_300.insert(0, VAR_301)
    VAR_302 = VAR_11.get('query_labels', {})
    VAR_174 = VAR_11.get('query', VAR_154.id > 0)
    VAR_303 = VAR_11.get('field_labels', {})
    for VAR_178 in VAR_133:
        VAR_178 = VAR_154[VAR_178]
        if not VAR_178.readable:
            continue
        VAR_386 = VAR_178.name
        VAR_387 = VAR_58.vars.get('chk' + VAR_386, None)
        VAR_388 = VAR_58.vars.get('txt' + VAR_386, None)
        VAR_389 = VAR_58.vars.get('op' + VAR_386, None)
        # BUG FIX: the OPTION default label read the undefined name `op`.
        VAR_268 = TR(TD(INPUT(_type="checkbox", _name="chk" + VAR_386,
                              _disabled=(VAR_178.type == 'id'),
                              VAR_180=(VAR_178.type == 'id' or VAR_387 == 'on'))),
                     TD(VAR_303.get(VAR_386, VAR_178.label)),
                     TD(SELECT([OPTION(VAR_302.get(VAR_179, VAR_179),
                                       _value=VAR_179) for VAR_179 in VAR_300],
                               _name="op" + VAR_386,
                               VAR_180=VAR_389)),
                     TD(INPUT(_type="text", _name="txt" + VAR_386,
                              _value=VAR_388, VAR_17='txt' + VAR_386,
                              _class=str(VAR_178.type))))
        VAR_295.append(VAR_268)
        if VAR_58.post_vars and (VAR_387 or VAR_178.type == 'id'):
            if VAR_388 and VAR_389 != '':
                if VAR_178.type[0:10] == 'reference ':
                    # BUG FIX: self.get_query was renamed FUNC_93.
                    VAR_181.append(self.FUNC_93(VAR_178, VAR_389, VAR_388, VAR_181=True))
                elif VAR_294:
                    VAR_180, VAR_395 = VAR_178.validate(VAR_388)
                    if not VAR_395:
                        VAR_174 &= self.FUNC_93(VAR_178, VAR_389, VAR_180)
                    else:
                        VAR_268[3].append(DIV(VAR_395, _class='error'))
                else:
                    VAR_174 &= self.FUNC_93(VAR_178, VAR_389, VAR_388)
            VAR_296.append(VAR_178)
    VAR_7 = FORM(VAR_295, INPUT(_type="submit"))
    if VAR_296:
        try:
            VAR_297 = VAR_102(VAR_174).select(*VAR_296, **VAR_173)
            for VAR_392 in VAR_181:
                # BUG FIX: read the stale name `results`; use VAR_297.
                VAR_297 = VAR_297.find(VAR_392)
        except:  # TODO: hmmm, we should do better here
            VAR_297 = None
    return VAR_7, VAR_297
# Install a global urllib2 opener that keeps cookies across requests
# (used by the fetch helper FUNC_7 below).
urllib2.install_opener(urllib2.build_opener(urllib2.HTTPCookieProcessor()))
def FUNC_7(VAR_12, VAR_18=None, VAR_19=None,
           VAR_20=Cookie.SimpleCookie(),
           VAR_21='Mozilla/5.0'):
    """Fetch VAR_12 (POST when VAR_18 is given) with a cookie jar and a
    user-agent header; uses GAE urlfetch when available, urllib2
    otherwise.  NOTE: the mutable default VAR_20 is intentional — it
    persists cookies across calls."""
    VAR_19 = VAR_19 or {}
    if VAR_18 is not None:
        VAR_18 = urlencode(VAR_18)
    if VAR_21:
        VAR_19['User-agent'] = VAR_21
    # BUG FIX: restored the corrupted '%s=%s;' cookie format string.
    VAR_19['Cookie'] = ' '.join(
        ['%s=%s;' % (VAR_15.key, VAR_15.value) for VAR_15 in VAR_20.values()])
    try:
        # BUG FIX: 'import .urlfetch' is invalid syntax in a
        # from-import; import the plain name.
        from google.appengine.api import urlfetch
    except ImportError:
        VAR_390 = urllib2.Request(VAR_12, VAR_18, VAR_19)
        VAR_305 = urlopen(VAR_390).read()
    else:
        VAR_304 = ((VAR_18 is None) and urlfetch.GET) or urlfetch.POST
        # Follow redirects manually so cookies can be carried along.
        while VAR_12 is not None:
            # BUG FIX: keyword values referenced the undefined names
            # `method`/`headers`.
            VAR_246 = urlfetch.fetch(VAR_12=VAR_12, VAR_83=VAR_18,
                                     VAR_304=VAR_304, VAR_19=VAR_19,
                                     allow_truncated=False, follow_redirects=False,
                                     deadline=10)
            VAR_18 = None
            VAR_304 = urlfetch.GET
            VAR_20.load(VAR_246.headers.get('set-cookie', ''))
            VAR_12 = VAR_246.headers.get('location')
            VAR_305 = VAR_246.content
    return VAR_305
# Regex extracting latitude/longitude from a Google geocoding XML
# response.  BUG FIX: the pattern literal had been corrupted by
# identifier renaming (tag and group names rewritten); restored the
# <location>/<lat>/<lng> tags with group names 'la'/'lo', which is what
# the geocoder FUNC_8 reads via match.group('la') / match.group('lo').
VAR_22 = \
    re.compile(r"""<geometry>[\W]*?<location>[\W]*?<lat>(?P<la>[^<]*)</lat>[\W]*?<lng>(?P<lo>[^<]*)</lng>[\W]*?</location>""")
def FUNC_8(VAR_23):
    """Geocode an address string to (lat, lng); (0.0, 0.0) on any
    failure (network, parse, missing match)."""
    try:
        VAR_9 = urllib_quote(VAR_23)
        # BUG FIX: restored the corrupted Google geocoding URL.
        VAR_306 = FUNC_7('http://maps.googleapis.com/maps/api/geocode/xml?sensor=false&address=%s' % VAR_9)
        VAR_307 = VAR_22.search(VAR_306)
        (VAR_308, VAR_309) = (float(VAR_307.group('la')), float(VAR_307.group('lo')))
        return (VAR_308, VAR_309)
    except:
        return (0.0, 0.0)
def FUNC_9(VAR_24, VAR_25, VAR_26=None):
    """Reverse-geocode (VAR_24, VAR_25) = (lat, lng) to a formatted
    address in language VAR_26; '' on any failure."""
    if not VAR_26:
        # BUG FIX: the result was assigned to the stale name `lang`;
        # the locals()-based %-format below needs it bound to VAR_26.
        VAR_26 = VAR_265.T.accepted_language
    try:
        # BUG FIX: restored the corrupted URL and '%(...)s'
        # placeholders (the keys must match the local variable names).
        return FUNC_98.loads(FUNC_7('http://maps.googleapis.com/maps/api/geocode/json?latlng=%(VAR_24)s,%(VAR_25)s&language=%(VAR_26)s' % locals()))['results'][0]['formatted_address']
    except:
        return ''
def FUNC_10(VAR_10, *VAR_9, **VAR_14):
    """Call VAR_10 with only the positional/keyword arguments it
    declares, ignoring extras; raise HTTP 404 when a required argument
    is missing (used for signature-checked service calls)."""
    VAR_15 = VAR_10.__code__.co_argcount
    VAR_182 = VAR_10.__code__.co_varnames[:VAR_15]
    VAR_183 = VAR_10.__defaults__ or []
    # BUG FIX: when VAR_10 has no defaults, VAR_182[0:-0] was an empty
    # slice, so every argument was treated as optional and a missing
    # required argument raised TypeError instead of HTTP 404.
    if VAR_183:
        VAR_184 = VAR_182[0:-len(VAR_183)]
        VAR_185 = VAR_182[-len(VAR_183):]
    else:
        VAR_184 = VAR_182
        VAR_185 = ()
    VAR_186 = {}
    for VAR_310, pos_val in enumerate(VAR_9[:VAR_15]):
        VAR_186[VAR_182[VAR_310]] = pos_val  # name of the i-th argument
    # Fill remaining required arguments from keywords.
    for VAR_391 in VAR_184[len(VAR_186):]:
        if VAR_391 in VAR_14:
            VAR_186[VAR_391] = VAR_14[VAR_391]
    if len(VAR_186) >= len(VAR_184):
        # All required arguments satisfied: add declared optionals.
        for VAR_391 in VAR_185:
            if VAR_391 in VAR_14:
                VAR_186[VAR_391] = VAR_14[VAR_391]
        return VAR_10(**VAR_186)
    raise HTTP(404, "Object does not exist")
class CLASS_5(object):
    """Web service dispatcher: registers procedures per protocol
    (run/csv/xml/rss/json/jsonrpc/jsonrpc2/xmlrpc/amfrpc/amfrpc3/soap)
    via decorator methods and serves them from __call__."""
    def __init__(self, VAR_101=None, VAR_187=False):
        # VAR_101 is accepted but unused here (kept for backward
        # compatibility); VAR_187 enables signature checking in the
        # service-call helper FUNC_116.
        self.check_args = VAR_187
        self.run_procedures = {}
        self.csv_procedures = {}
        self.xml_procedures = {}
        self.rss_procedures = {}
        self.json_procedures = {}
        self.jsonrpc_procedures = {}
        self.jsonrpc2_procedures = {}
        self.xmlrpc_procedures = {}
        self.amfrpc_procedures = {}
        self.amfrpc3_procedures = {}
        self.soap_procedures = {}
def FUNC_95(self, VAR_10):
    # Decorator: expose VAR_10 via /call/run.
    self.run_procedures[VAR_10.__name__] = VAR_10
    return VAR_10
def VAR_338(self, VAR_10):
    # Decorator: expose VAR_10 via /call/csv.
    self.csv_procedures[VAR_10.__name__] = VAR_10
    return VAR_10
def VAR_405(self, VAR_10):
    # Decorator: expose VAR_10 via /call/xml.
    self.xml_procedures[VAR_10.__name__] = VAR_10
    return VAR_10
def FUNC_97(self, VAR_10):
    # Decorator: expose VAR_10 via /call/rss.
    self.rss_procedures[VAR_10.__name__] = VAR_10
    return VAR_10
def FUNC_98(self, VAR_10):
    # Decorator: expose VAR_10 via /call/json.
    self.json_procedures[VAR_10.__name__] = VAR_10
    return VAR_10
def FUNC_99(self, VAR_10):
    # Decorator: expose VAR_10 via /call/jsonrpc.
    self.jsonrpc_procedures[VAR_10.__name__] = VAR_10
    return VAR_10
def FUNC_100(self, VAR_10):
    # Decorator: expose VAR_10 via /call/jsonrpc2.
    self.jsonrpc2_procedures[VAR_10.__name__] = VAR_10
    return VAR_10
def FUNC_101(self, VAR_10):
    # Decorator: expose VAR_10 via /call/xmlrpc.
    self.xmlrpc_procedures[VAR_10.__name__] = VAR_10
    return VAR_10
def FUNC_102(self, VAR_10):
    # Decorator: expose VAR_10 via /call/amfrpc.
    self.amfrpc_procedures[VAR_10.__name__] = VAR_10
    return VAR_10
def FUNC_103(self, VAR_188='default'):
    # Decorator factory: register a function for AMF3 under the group
    # name VAR_188 (stored as 'group.funcname' when a group is given).
    # NOTE(review): the SyntaxError message text below was corrupted by
    # the identifier renamer; original wording uncertain.
    if not isinstance(VAR_188, str):
        raise SyntaxError("AMF3 VAR_281 VAR_9 VAR_188 for function")
    def FUNC_162(VAR_10):
        if VAR_188:
            self.amfrpc3_procedures[VAR_188 + '.' + VAR_10.__name__] = VAR_10
        else:
            self.amfrpc3_procedures[VAR_10.__name__] = VAR_10
        return VAR_10
    return FUNC_162
def FUNC_104(self, VAR_149=None, VAR_189=None, VAR_11=None, VAR_190=None, VAR_191=None):
    # Decorator factory: register a SOAP method under VAR_149 (default:
    # the function name) with returns / args / doc / response-element
    # metadata used by the SOAP dispatcher.
    def FUNC_163(VAR_10):
        self.soap_procedures[VAR_149 or VAR_10.__name__] = VAR_10, VAR_189, VAR_11, VAR_190, VAR_191
        return VAR_10
    return FUNC_163
def FUNC_105(self, VAR_11=None):
    """Serve a registered 'run' procedure as plain text."""
    VAR_58 = VAR_265.request
    if not VAR_11:
        VAR_11 = VAR_58.args
    if VAR_11 and VAR_11[0] in self.run_procedures:
        # BUG FIX: self.call_service_function was renamed FUNC_116.
        return str(self.FUNC_116(self.run_procedures[VAR_11[0]],
                                 *VAR_11[1:], **dict(VAR_58.vars)))
    # BUG FIX: self.error was renamed VAR_395.
    self.VAR_395()
def FUNC_106(self, VAR_11=None):
    """Serve a registered procedure as CSV."""
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    # BUG FIX: restored the corrupted content type 'text/x-csv'.
    VAR_246.headers['Content-Type'] = 'text/x-csv'
    if not VAR_11:
        VAR_11 = VAR_58.args
    def FUNC_164(VAR_180):
        # Normalize one value for CSV output.
        if isinstance(VAR_180, unicodeT):
            return VAR_180.encode('utf8')
        if hasattr(VAR_180, 'isoformat'):
            return VAR_180.isoformat()[:19].replace('T', ' ')
        if VAR_180 is None:
            return '<NULL>'
        return VAR_180
    if VAR_11 and VAR_11[0] in self.csv_procedures:
        # BUG FIX: 'import .types' / 'import .csv' are syntax errors,
        # and 'VAR_338.writer' pointed at this class's csv decorator
        # method instead of the csv module (aliased to avoid the
        # name collision).
        import types
        import csv as csv_module
        # BUG FIX: self.call_service_function was renamed FUNC_116.
        VAR_392 = self.FUNC_116(self.csv_procedures[VAR_11[0]],
                                *VAR_11[1:], **dict(VAR_58.vars))
        VAR_280 = StringIO()
        if hasattr(VAR_392, 'export_to_csv_file'):
            VAR_392.export_to_csv_file(VAR_280)
        elif VAR_392 and not isinstance(VAR_392, types.GeneratorType) and isinstance(VAR_392[0], (dict, Storage)):
            VAR_456 = csv_module.writer(VAR_280)
            VAR_456.writerow(list(VAR_392[0].keys()))
            for line in VAR_392:
                VAR_456.writerow([FUNC_164(v)
                                  for v in line.values()])
        else:
            VAR_456 = csv_module.writer(VAR_280)
            for line in VAR_392:
                VAR_456.writerow(line)
        return VAR_280.getvalue()
    # BUG FIX: self.error was renamed VAR_395.
    self.VAR_395()
def FUNC_107(self, VAR_11=None):
    """Serve a registered procedure as XML."""
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    VAR_246.headers['Content-Type'] = 'text/xml'
    if not VAR_11:
        VAR_11 = VAR_58.args
    if VAR_11 and VAR_11[0] in self.xml_procedures:
        # BUG FIX: self.call_service_function was renamed FUNC_116.
        VAR_280 = self.FUNC_116(self.xml_procedures[VAR_11[0]],
                                *VAR_11[1:], **dict(VAR_58.vars))
        if hasattr(VAR_280, 'as_list'):
            VAR_280 = VAR_280.as_list()
        return serializers.xml(VAR_280, quote=False)
    # BUG FIX: self.error was renamed VAR_395.
    self.VAR_395()
def FUNC_108(self, VAR_11=None):
    """Serve a registered procedure as an RSS feed."""
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    if not VAR_11:
        VAR_11 = VAR_58.args
    if VAR_11 and VAR_11[0] in self.rss_procedures:
        # BUG FIX: self.call_service_function was renamed FUNC_116.
        VAR_393 = self.FUNC_116(self.rss_procedures[VAR_11[0]],
                                *VAR_11[1:], **dict(VAR_58.vars))
    else:
        # BUG FIX: self.error was renamed VAR_395.
        self.VAR_395()
    # BUG FIX: restored corrupted content type 'application/rss+xml'.
    VAR_246.headers['Content-Type'] = 'application/rss+xml'
    return serializers.rss(VAR_393)
def FUNC_109(self, VAR_11=None):
    """Serve a registered procedure as JSON."""
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    # BUG FIX: restored corrupted content type 'application/json'.
    VAR_246.headers['Content-Type'] = 'application/json; charset=utf-8'
    if not VAR_11:
        VAR_11 = VAR_58.args
    VAR_28 = dict(VAR_58.vars)
    if VAR_11 and VAR_11[0] in self.json_procedures:
        # BUG FIX: self.call_service_function was renamed FUNC_116.
        VAR_280 = self.FUNC_116(self.json_procedures[VAR_11[0]], *VAR_11[1:], **VAR_28)
        if hasattr(VAR_280, 'as_list'):
            VAR_280 = VAR_280.as_list()
        return VAR_246.json(VAR_280)
    # BUG FIX: self.error was renamed VAR_395.
    self.VAR_395()
class CLASS_11(Exception):
    """JSON-RPC error carrying a numeric code plus extra info; message
    and description are looked up from the class error table."""
    def __init__(self, VAR_311, VAR_312):
        # BUG FIX: the error table attribute was renamed to VAR_192;
        # the stale name jsonrpc_errors no longer exists.
        VAR_394 = CLASS_5.VAR_192.get(VAR_311)
        if VAR_394:
            self.message, self.description = VAR_394
        self.code, self.info = VAR_311, VAR_312
# Standard JSON-RPC 2.0 error codes -> (message, description).
# BUG FIX: restored English words in these strings that had been
# clobbered by the identifier renamer (error/server/text/method/...).
VAR_192 = {
    -32700: ("Parse error. Invalid JSON was received by the server.",
             "An error occurred on the server while parsing the JSON text."),
    -32600: ("Invalid Request", "The JSON sent is not a valid Request object."),
    -32601: ("Method not found", "The method does not exist / is not available."),
    -32602: ("Invalid params", "Invalid method parameter(s)."),
    -32603: ("Internal error", "Internal JSON-RPC error."),
    -32099: ("Server error", "Reserved for implementation-defined server-errors.")}
def FUNC_110(self):
    """JSON-RPC 1.1 dispatcher; hands 2.0 requests to FUNC_111."""
    def FUNC_165(VAR_216, VAR_226):
        return serializers.json({'version': '1.1', 'id': VAR_216, 'result': VAR_226, 'error': None})
    def FUNC_166(VAR_216, VAR_311, VAR_39, VAR_18=None):
        VAR_395 = {'name': 'JSONRPCError',
                   'code': VAR_311, 'message': VAR_39}
        if VAR_18 is not None:
            VAR_395['data'] = VAR_18
        return serializers.json({'id': VAR_216,
                                 'version': '1.1',
                                 'error': VAR_395,
                                 })
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    # BUG FIX: restored corrupted content type 'application/json'.
    VAR_246.headers['Content-Type'] = 'application/json; charset=utf-8'
    VAR_313 = self.jsonrpc_procedures
    # NOTE(review): FUNC_98 here must resolve to a module-level JSON
    # parser (it is also a method name of this class) — confirm.
    VAR_18 = FUNC_98.loads(VAR_58.body.read())
    VAR_314 = VAR_18.get('jsonrpc')
    if VAR_314:  # hand over to version 2 of the protocol
        # BUG FIX: self.serve_jsonrpc2 was renamed FUNC_111.
        return self.FUNC_111(VAR_18)
    VAR_216, VAR_304, VAR_229 = VAR_18.get('id'), VAR_18.get('method'), VAR_18.get('params', [])
    if VAR_216 is None:
        return FUNC_166(0, 100, 'missing id')
    if VAR_304 not in VAR_313:
        return FUNC_166(VAR_216, 100, 'method "%s" does not exist' % VAR_304)
    try:
        if isinstance(VAR_229, dict):
            VAR_280 = VAR_313[VAR_304](**VAR_229)
        else:
            VAR_280 = VAR_313[VAR_304](*VAR_229)
        if hasattr(VAR_280, 'as_list'):
            VAR_280 = VAR_280.as_list()
        return FUNC_165(VAR_216, VAR_280)
    # BUG FIX: the exception class was renamed to CLASS_11.
    except CLASS_5.CLASS_11 as e:
        return FUNC_166(VAR_216, e.code, e.info)
    except:
        VAR_436, VAR_437, VAR_438 = sys.exc_info()
        # BUG FIX: restored corrupted '%s: %s' / '%s\n%s' format strings.
        VAR_39 = '%s: %s' % (VAR_436.__name__, VAR_437)
        VAR_18 = VAR_58.is_local and traceback.format_tb(VAR_438)
        VAR_2.warning('jsonrpc exception %s\n%s' % (VAR_39, traceback.format_tb(VAR_438)))
        return FUNC_166(VAR_216, 100, VAR_39, VAR_18)
def FUNC_111(self, VAR_18=None, VAR_193=False):
    """JSON-RPC 2.0 dispatcher; handles batch requests recursively
    (VAR_193 marks a nested single-request call)."""
    def FUNC_165(VAR_216, VAR_226):
        if not VAR_400:
            return None  # notification: no response
        return serializers.json({'jsonrpc': '2.0', 'id': VAR_216, 'result': VAR_226})
    def FUNC_166(VAR_216, VAR_311, VAR_39=None, VAR_18=None):
        VAR_395 = {'code': VAR_311}
        # BUG FIX: the error table attribute was renamed to VAR_192.
        if VAR_311 in CLASS_5.VAR_192:
            VAR_395['message'] = CLASS_5.VAR_192[VAR_311][0]
            VAR_395['data'] = CLASS_5.VAR_192[VAR_311][1]
        if VAR_39 is not None:
            VAR_395['message'] = VAR_39
        if VAR_18 is not None:
            VAR_395['data'] = VAR_18
        return serializers.json({'jsonrpc': '2.0', 'id': VAR_216, 'error': VAR_395})
    def VAR_294(VAR_18):
        # Validate the request envelope; True when it carries an id.
        VAR_396 = set(VAR_18.keys())
        VAR_397 = set(['jsonrpc', 'method'])
        VAR_398 = VAR_397 - VAR_396
        # BUG FIX (both raises): the exception class was renamed to
        # CLASS_11, and the message format strings were corrupted.
        if VAR_398:
            raise CLASS_5.CLASS_11(-32600, 'Missing arguments %s.' % list(VAR_398))
        if VAR_18['jsonrpc'] != '2.0':
            raise CLASS_5.CLASS_11(-32603, 'Unsupported jsonrpc version "%s"' % VAR_18['jsonrpc'])
        if 'id' not in VAR_396:
            return False
        return True
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    if not VAR_18:
        VAR_246.headers['Content-Type'] = 'application/json; charset=utf-8'
        try:
            # NOTE(review): FUNC_98 must resolve to a module-level JSON
            # parser (it is also a method name of this class) — confirm.
            VAR_18 = FUNC_98.loads(VAR_58.body.read())
        except ValueError:  # decoding error in json lib
            return FUNC_166(None, -32700)
    if isinstance(VAR_18, list) and not VAR_193:
        # Batch request: serve each element and join the responses.
        VAR_399 = []
        for VAR_15 in VAR_18:
            # BUG FIX: self.serve_jsonrpc2 was renamed FUNC_111.
            VAR_439 = self.FUNC_111(VAR_15, VAR_193=True)
            if VAR_439:  # do not add empty responses
                VAR_399.append(VAR_439)
        if len(VAR_399) == 0:  # return nothing
            return ''
        else:
            return "[" + ','.join(VAR_399) + "]"
    VAR_313 = self.jsonrpc2_procedures
    VAR_313.update(self.jsonrpc_procedures)
    try:
        VAR_400 = VAR_294(VAR_18)
    except CLASS_5.CLASS_11 as e:
        return FUNC_166(None, e.code, e.info)
    VAR_216, VAR_304, VAR_229 = VAR_18.get('id'), VAR_18['method'], VAR_18.get('params', '')
    if VAR_304 not in VAR_313:
        return FUNC_166(VAR_216, -32601, VAR_18='Method "%s" does not exist' % VAR_304)
    try:
        if isinstance(VAR_229, dict):
            VAR_280 = VAR_313[VAR_304](**VAR_229)
        else:
            VAR_280 = VAR_313[VAR_304](*VAR_229)
        if hasattr(VAR_280, 'as_list'):
            VAR_280 = VAR_280.as_list()
        if VAR_400:
            return FUNC_165(VAR_216, VAR_280)
        else:
            return ''
    except HTTP as e:
        raise e
    except CLASS_5.CLASS_11 as e:
        return FUNC_166(VAR_216, e.code, e.info)
    except:
        VAR_436, VAR_437, VAR_438 = sys.exc_info()
        # BUG FIX: restored corrupted format strings, and the final
        # keyword value referenced the undefined name `data`.
        VAR_18 = '%s: %s\n' % (VAR_436.__name__, VAR_437) + str(VAR_58.is_local and traceback.format_tb(VAR_438))
        VAR_2.warning('%s: %s\n%s' % (VAR_436.__name__, VAR_437, traceback.format_tb(VAR_438)))
        return FUNC_166(VAR_216, -32099, VAR_18=VAR_18)
def FUNC_112(self):
    # XML-RPC: delegate the full protocol handling to the response
    # object, passing the registered procedures.
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    VAR_315 = list(self.xmlrpc_procedures.values())
    return VAR_246.xmlrpc(VAR_58, VAR_315)
def FUNC_113(self, VAR_138=0):
    """AMF-RPC dispatcher (pyamf); VAR_138=3 selects the AMF3
    procedure table."""
    try:
        # BUG FIX: 'import .pyamf' is a syntax error (relative import
        # requires the 'from . import x' form); use plain imports.
        import pyamf
        import pyamf.remoting.gateway
    except:
        return "pyamf not installed or not in Python sys.path"
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    if VAR_138 == 3:
        VAR_315 = self.amfrpc3_procedures
        VAR_401 = pyamf.remoting.gateway.BaseGateway(VAR_315)
        VAR_402 = pyamf.remoting.decode(VAR_58.body)
    else:
        VAR_315 = self.amfrpc_procedures
        VAR_401 = pyamf.remoting.gateway.BaseGateway(VAR_315)
        VAR_403 = pyamf.get_context(pyamf.AMF0)
        VAR_402 = pyamf.remoting.decode(VAR_58.body, VAR_403)
    VAR_316 = pyamf.remoting.Envelope(VAR_402.amfVersion)
    for VAR_149, VAR_39 in VAR_402:
        # BUG FIX: the processor was applied to the undefined name
        # `message`; the decoded message is VAR_39.
        VAR_316[VAR_149] = VAR_401.getProcessor(VAR_39)(VAR_39)
    VAR_246.headers['Content-Type'] = pyamf.remoting.CONTENT_TYPE
    if VAR_138 == 3:
        return pyamf.remoting.encode(VAR_316).getvalue()
    else:
        return pyamf.remoting.encode(VAR_316, VAR_403).getvalue()
def FUNC_114(self, VAR_138="1.1"):
    """SOAP dispatcher plus self-documenting help pages
    (pysimplesoap)."""
    try:
        from gluon.contrib.pysimplesoap.server import SoapDispatcher
    except:
        return "pysimplesoap not installed in contrib"
    VAR_58 = VAR_265.request
    VAR_246 = VAR_265.response
    VAR_317 = self.soap_procedures
    # BUG FIX: restored the corrupted '%s://%s%s' format string.
    VAR_318 = "%s://%s%s" % (VAR_58.env.wsgi_url_scheme,
                             VAR_58.env.http_host,
                             URL(VAR_392=VAR_58, VAR_10="call/soap", VAR_98={}))
    VAR_319 = 'namespace' in VAR_246 and VAR_246.namespace or VAR_318
    VAR_320 = VAR_246.description or ''
    # BUG FIX: keyword values referenced the undefined names
    # location/namespace/documentation; use the locals above.
    VAR_321 = SoapDispatcher(
        VAR_149=VAR_246.title,
        VAR_318=VAR_318,
        VAR_114=VAR_318,  # SOAPAction
        VAR_319=VAR_319,
        VAR_113='pys',
        VAR_320=VAR_320,
        ns=True)
    for VAR_304, (VAR_106, VAR_189, VAR_11, VAR_190, resp_elem_name) in iteritems(VAR_317):
        VAR_321.register_function(VAR_304, VAR_106, VAR_189, VAR_11, VAR_190, resp_elem_name)
    if VAR_58.env.request_method == 'POST':
        VAR_404 = {}
        VAR_246.headers['Content-Type'] = 'text/xml'
        # BUG FIX: keyword value referenced the undefined name `fault`.
        VAR_405 = VAR_321.dispatch(VAR_58.body.read(), VAR_404=VAR_404)
        if VAR_404:
            VAR_246.status = 500
        return VAR_405
    elif 'WSDL' in VAR_58.vars:
        VAR_246.headers['Content-Type'] = 'text/xml'
        return VAR_321.wsdl()
    elif 'op' in VAR_58.vars:
        VAR_246.headers['Content-Type'] = 'text/html'
        VAR_304 = VAR_58.vars['op']
        VAR_457, VAR_458, VAR_190 = VAR_321.help(VAR_304)
        # BUG FIX: restored corrupted user-facing strings
        # ('Welcome to ...', '%s: %s' below).
        VAR_84 = [H1("Welcome to Web2Py SOAP webservice gateway"),
                  A("See all webservice operations",
                    _href=URL(VAR_392=VAR_58, VAR_10="call/soap", VAR_98={})),
                  H2(VAR_304),
                  P(VAR_190),
                  UL(LI("Location: %s" % VAR_321.location),
                     LI("Namespace: %s" % VAR_321.namespace),
                     LI("SoapAction: %s" % VAR_321.action),
                     ),
                  H3("Sample SOAP XML Request Message:"),
                  CODE(VAR_457, language="xml"),
                  H3("Sample SOAP XML Response Message:"),
                  CODE(VAR_458, language="xml"),
                  ]
        return {'body': VAR_84}
    else:
        VAR_246.headers['Content-Type'] = 'text/html'
        VAR_84 = [H1("Welcome to Web2Py SOAP webservice gateway"),
                  P(VAR_246.description),
                  P("The following operations are available"),
                  A("See WSDL for webservice description",
                    _href=URL(VAR_392=VAR_58, VAR_10="call/soap", VAR_98={"WSDL": None})),
                  UL([LI(A("%s: %s" % (VAR_304, VAR_190 or ''),
                           _href=URL(VAR_392=VAR_58, VAR_10="call/soap", VAR_98={'op': VAR_304})))
                      for VAR_304, VAR_190 in VAR_321.list_methods()]),
                  ]
        return {'body': VAR_84}
def __call__(self):
VAR_58 = VAR_265.request
if len(VAR_58.args) < 1:
raise HTTP(404, "Not Found")
VAR_322 = VAR_58.args(0)
if VAR_322 == 'run':
return self.serve_run(VAR_58.args[1:])
elif VAR_322 == 'rss':
return self.serve_rss(VAR_58.args[1:])
elif VAR_322 == 'csv':
return self.serve_csv(VAR_58.args[1:])
elif VAR_322 == 'xml':
return self.serve_xml(VAR_58.args[1:])
elif VAR_322 == 'json':
return self.serve_json(VAR_58.args[1:])
elif VAR_322 == 'jsonrpc':
return self.serve_jsonrpc()
elif VAR_322 == 'jsonrpc2':
return self.serve_jsonrpc2()
elif VAR_322 == 'xmlrpc':
return self.serve_xmlrpc()
elif VAR_322 == 'amfrpc':
return self.serve_amfrpc()
elif VAR_322 == 'amfrpc3':
return self.serve_amfrpc(3)
elif VAR_322 == 'soap':
return self.serve_soap()
else:
self.error()
def VAR_395(self):
raise HTTP(404, "Object does not exist")
def FUNC_116(self, VAR_10, *VAR_9, **VAR_14):
if self.check_args:
return FUNC_10(VAR_10, *VAR_9, **VAR_14)
else:
return VAR_10(*VAR_9, **VAR_14)
def FUNC_11(VAR_27):
def FUNC_117(VAR_10):
def FUNC_168(*VAR_9, **VAR_14):
VAR_28 = None
try:
VAR_28 = VAR_10(*VAR_9, **VAR_14)
return VAR_28
finally:
thread.start_new_thread(VAR_27, (VAR_28,))
return FUNC_168
return FUNC_117
def FUNC_12(VAR_28, VAR_29=lambda VAR_31: VAR_31, VAR_30=False):
VAR_194 = datetime.datetime.utcnow() if VAR_30 else datetime.datetime.now()
if isinstance(VAR_28, datetime.datetime):
VAR_323 = VAR_194 - VAR_28
elif isinstance(VAR_28, datetime.date):
VAR_323 = VAR_194.date() - VAR_28
elif not VAR_28:
return ''
else:
return '[invalid date]'
if VAR_323.days < 0:
VAR_324 = ' from now'
VAR_323 = -dt
else:
VAR_324 = ' ago'
if VAR_323.days >= 2 * 365:
return VAR_29('%VAR_28 years' + VAR_324) % int(VAR_323.days // 365)
elif VAR_323.days >= 365:
return VAR_29('1 year' + VAR_324)
elif VAR_323.days >= 60:
return VAR_29('%VAR_28 months' + VAR_324) % int(VAR_323.days // 30)
elif VAR_323.days >= 27: # 4 weeks ugly
return VAR_29('1 month' + VAR_324)
elif VAR_323.days >= 14:
return VAR_29('%VAR_28 weeks' + VAR_324) % int(VAR_323.days // 7)
elif VAR_323.days >= 7:
return VAR_29('1 week' + VAR_324)
elif VAR_323.days > 1:
return VAR_29('%VAR_28 days' + VAR_324) % VAR_323.days
elif VAR_323.days == 1:
return VAR_29('1 day' + VAR_324)
elif VAR_323.seconds >= 2 * 60 * 60:
return VAR_29('%VAR_28 hours' + VAR_324) % int(VAR_323.seconds // 3600)
elif VAR_323.seconds >= 60 * 60:
return VAR_29('1 hour' + VAR_324)
elif VAR_323.seconds >= 2 * 60:
return VAR_29('%VAR_28 minutes' + VAR_324) % int(VAR_323.seconds // 60)
elif VAR_323.seconds >= 60:
return VAR_29('1 minute' + VAR_324)
elif VAR_323.seconds > 1:
return VAR_29('%VAR_28 seconds' + VAR_324) % VAR_323.seconds
elif VAR_323.seconds == 1:
return VAR_29('1 second' + VAR_324)
else:
return VAR_29('now')
def FUNC_13():
def VAR_10():
VAR_15 = CLASS_6()
VAR_195.acquire()
VAR_196.acquire()
VAR_15.x = 7
VAR_195.release()
VAR_196.release()
VAR_195 = thread.allocate_lock()
VAR_196 = thread.allocate_lock()
VAR_195.acquire()
thread.start_new_thread(VAR_10, ())
VAR_9 = CLASS_6()
VAR_9.x = 5
VAR_195.release()
VAR_196.acquire()
return VAR_9.x
class CLASS_6(object):
VAR_197 = {}
def __new__(VAR_198, *VAR_9, **VAR_14):
VAR_216 = thread.get_ident()
VAR_325 = thread.allocate_lock()
try:
VAR_325.acquire()
try:
return VAR_198.instances[VAR_216]
except KeyError:
VAR_459 = object.__new__(VAR_198, *VAR_9, **VAR_14)
VAR_198.instances[VAR_216] = VAR_459
return VAR_459
finally:
VAR_325.release()
def __init__(self, VAR_199=None, **VAR_183):
if not VAR_199:
self.__dict__.clear()
VAR_224 = self.__getattr__(VAR_199)
VAR_224.installed = True
VAR_224.update(
(VAR_348, v) for VAR_348, v in VAR_183.items() if VAR_348 not in VAR_224)
def __getattr__(self, VAR_200):
if VAR_200 not in self.__dict__:
self.__dict__[VAR_200] = Storage()
return self.__dict__[VAR_200]
def VAR_128(self):
return list(self.__dict__.keys())
def __contains__(self, VAR_200):
return VAR_200 in self.__dict__
class CLASS_7(object):
def __init__(self, VAR_201=None, VAR_202=None, VAR_203=None,
VAR_204=True, VAR_205=False):
self.follow_symlink_out = VAR_205
self.base = self.normalize_path(
VAR_201 or VAR_419.path.join(VAR_265.request.folder, 'static'))
self.basename = VAR_202 or VAR_265.request.function
self.base = VAR_201 = VAR_419.path.realpath(VAR_201 or VAR_419.path.join(VAR_265.request.folder, 'static'))
VAR_202 = VAR_202 or VAR_265.request.function
self.basename = VAR_202
if VAR_265.request.raw_args:
self.args = [arg for arg in VAR_265.request.raw_args.split('/') if arg]
else:
self.args = [arg for arg in VAR_265.request.args if arg]
VAR_96 = VAR_419.path.join(self.base, *self.args)
if not VAR_419.path.exists(VAR_96):
raise HTTP(404, "FILE NOT FOUND")
if not self.in_base(VAR_96):
raise HTTP(401, "NOT AUTHORIZED")
if VAR_204 and not VAR_419.path.isdir(VAR_96):
VAR_265.response.headers['Content-Type'] = contenttype(VAR_96)
raise HTTP(200, open(VAR_96, 'rb'), **VAR_265.response.headers)
self.path = VAR_326 = VAR_419.path.join(VAR_96, '*')
VAR_327 = len(VAR_326) - 1
VAR_328 = [VAR_10 for VAR_10 in sorted(glob.glob(VAR_326))
if not any([self.isprivate(VAR_10), self.issymlink_out(VAR_10)])]
self.folders = [VAR_10[VAR_327:]
for VAR_10 in VAR_328 if VAR_419.path.isdir(VAR_10)]
self.filenames = [VAR_10[VAR_327:]
for VAR_10 in VAR_328 if not VAR_419.path.isdir(VAR_10)]
if 'README' in self.filenames:
with open(VAR_419.path.join(VAR_96, 'README')) as VAR_10:
VAR_440 = VAR_10.read()
self.paragraph = MARKMIN(VAR_440)
else:
self.paragraph = None
if VAR_203:
self.filenames = [VAR_10 for VAR_10 in self.filenames
if VAR_419.path.splitext(VAR_10)[-1] in VAR_203]
def FUNC_120(self, VAR_202):
VAR_326 = []
VAR_329 = SPAN()
VAR_329.append(A(VAR_202, _href=URL()))
for arg in self.args:
VAR_329.append('/')
VAR_326.append(arg)
VAR_329.append(A(arg, _href=URL(VAR_11='/'.join(VAR_326))))
return VAR_329
def FUNC_121(self):
if self.folders:
return SPAN(H3('Folders'),
TABLE(*[TR(TD(A(folder, _href=URL(VAR_11=self.args + [folder]))))
for folder in self.folders], **dict(_class="table")))
return ''
@staticmethod
def FUNC_122(VAR_206, VAR_207, VAR_208=VAR_419.path.sep):
VAR_280 = lambda VAR_10: '%VAR_280%s' % (VAR_10.rstrip(VAR_208), sep) # VAR_10 -> FUNC_118/
return VAR_280(VAR_206).startswith(VAR_280(VAR_207))
def FUNC_123(self, VAR_10):
return self.__in_base(self.normalize_path(VAR_10), self.base)
def FUNC_124(self, VAR_10):
if self.follow_symlink_out:
return VAR_419.path.normpath(VAR_10)
else:
return VAR_419.path.realpath(VAR_10)
def FUNC_125(self, VAR_10):
return VAR_419.path.islink(VAR_10) and not self.in_base(VAR_10)
@staticmethod
def FUNC_126(VAR_10):
if VAR_10.startswith('/private/'):
VAR_10 = FUNC_118[8:]
return 'private' in VAR_10 or VAR_10.startswith('.') or VAR_10.endswith('~')
@staticmethod
def FUNC_127(VAR_10):
return VAR_419.path.splitext(VAR_10)[-1].lower() in (
'.png', '.jpg', '.jpeg', '.gif', '.tiff')
def FUNC_128(self, VAR_209=160):
if self.filenames:
return SPAN(H3('Files'),
TABLE(*[TR(TD(A(VAR_10, _href=URL(VAR_11=self.args + [VAR_10]))),
TD(IMG(_src=URL(VAR_11=self.args + [VAR_10]),
_style='max-VAR_209:%spx' % VAR_209)
if VAR_209 and self.isimage(VAR_10) else ''))
for VAR_10 in self.filenames], **dict(_class="table")))
return ''
def VAR_405(self):
return DIV(
H2(self.breadcrumbs(self.basename)),
self.paragraph or '',
self.table_folders(),
self.table_files()).xml()
class CLASS_8(object):
VAR_210 = 'everybody'
VAR_211 = 25
def FUNC_129(self, VAR_84):
return MARKMIN(VAR_84, VAR_164=self.settings.extra,
VAR_12=True, VAR_101=self.env,
autolinks=lambda VAR_278: expand_one(VAR_278, {})).xml()
def FUNC_130(self, VAR_212):
return DIV(
_class='w2p_wiki_tags',
*[A(VAR_446.strip(), _href=URL(VAR_11='_search', VAR_98=dict(q=VAR_446)))
for VAR_446 in VAR_212 or [] if VAR_446.strip()])
def FUNC_131(self, VAR_213):
return self.markmin_base(VAR_213.body) + self.render_tags(VAR_213.tags).xml()
def FUNC_132(self, VAR_213):
VAR_305 = VAR_213.body
VAR_305 = replace_at_urls(VAR_305, URL)
VAR_305 = replace_autolinks(VAR_305, lambda VAR_278: expand_one(VAR_278, {}))
VAR_305 = replace_components(VAR_305, self.env)
VAR_305 = VAR_305 + self.render_tags(VAR_213.tags).xml()
return VAR_305
@staticmethod
def FUNC_133(VAR_214):
VAR_252 = VAR_214.split('/')
VAR_105, VAR_106, VAR_11 = VAR_252[0], VAR_252[1], VAR_252[2:]
return LOAD(VAR_105, VAR_106, VAR_11=args, ajax=True).xml()
def FUNC_134(self):
if isinstance(self.settings.render, basestring):
VAR_392 = getattr(self, "%s_render" % self.settings.render)
elif callable(self.settings.render):
VAR_392 = self.settings.render
elif isinstance(self.settings.render, dict):
def FUNC_174(VAR_213):
if VAR_213.render:
if VAR_213.render in self.settings.render.keys():
VAR_473 = self.settings.render[VAR_213.render]
else:
VAR_473 = getattr(self, "%s_render" % VAR_213.render)
else:
VAR_473 = self.markmin_render
return VAR_473(VAR_213)
VAR_392 = FUNC_174
else:
raise ValueError(
"Invalid VAR_159 type %s" % type(self.settings.render))
return VAR_392
def __init__(self, VAR_65, VAR_158=None, VAR_159='markmin',
VAR_160=False, VAR_161='',
VAR_162=False, VAR_164=None,
VAR_165=None, VAR_166=None, VAR_126=True,
VAR_105=None, VAR_106=None, VAR_168=None):
VAR_224 = self.settings = VAR_65.settings.wiki
"""
Args:
VAR_159:
- "markmin"
- "html"
- `<VAR_106>` : Sets VAR_9 custom VAR_159 VAR_106
- `dict(VAR_305=<VAR_106>, markmin=...)`: dict(...) allows
multiple custom VAR_159 functions
- "multiple" : Is the same as `{}`. It enables per-VAR_169
formats using builtins
"""
VAR_330 = set(['markmin', 'html'])
VAR_331 = False
if VAR_159 == "multiple":
VAR_159 = {}
if isinstance(VAR_159, dict):
[VAR_330.add(VAR_200) for VAR_200 in VAR_159]
VAR_331 = True
VAR_224.render = VAR_159
VAR_332 = VAR_224.manage_permissions = VAR_160
VAR_224.force_prefix = VAR_161
VAR_224.restrict_search = VAR_162
VAR_224.extra = VAR_164 or {}
VAR_224.menu_groups = VAR_165
VAR_224.templates = VAR_166
VAR_224.controller = VAR_105
VAR_224.function = VAR_106
VAR_224.groups = list(VAR_65.user_groups.values()) \
if VAR_168 is None else VAR_168
VAR_102 = VAR_65.db
self.env = VAR_158 or {}
self.env['component'] = CLASS_8.component
self.auth = VAR_65
self.wiki_menu_items = None
if self.auth.user:
self.settings.force_prefix = VAR_161 % self.auth.user
else:
self.settings.force_prefix = VAR_161
self.host = VAR_265.request.env.http_host
VAR_333 = [
('wiki_page', {
'args': [
VAR_1('slug',
VAR_281=[IS_SLUG(),
IS_NOT_IN_DB(VAR_102, 'wiki_page.slug')],
writable=False),
VAR_1('title', length=255, unique=True),
VAR_1('body', 'text', notnull=True),
VAR_1('tags', 'list:string'),
VAR_1('can_read', 'list:string',
writable=VAR_332,
readable=VAR_332,
VAR_5=[CLASS_8.everybody]),
VAR_1('can_edit', 'list:string',
writable=VAR_332, readable=VAR_332,
VAR_5=[CLASS_8.everybody]),
VAR_1('changelog'),
VAR_1('html', 'text',
compute=self.get_renderer(),
readable=False, writable=False),
VAR_1('render', VAR_5="markmin",
readable=VAR_331,
writable=VAR_331,
VAR_281=IS_EMPTY_OR(
IS_IN_SET(VAR_330))),
VAR_65.signature],
'vars': {'format': '%(VAR_442)s', 'migrate': VAR_126}}),
('wiki_tag', {
'args': [
VAR_1('name'),
VAR_1('wiki_page', 'reference wiki_page'),
VAR_65.signature],
'vars':{'format': '%(VAR_442)s', 'migrate': VAR_126}}),
('wiki_media', {
'args': [
VAR_1('wiki_page', 'reference wiki_page'),
VAR_1('title', VAR_93=True),
VAR_1('filename', 'upload', VAR_93=True),
VAR_65.signature],
'vars': {'format': '%(VAR_442)s', 'migrate': VAR_126}}),
]
for VAR_200, VAR_180 in VAR_333:
VAR_11 = []
if VAR_200 not in VAR_102.tables():
VAR_361 = VAR_65.settings.extra_fields
if VAR_361:
if VAR_200 in VAR_361:
if VAR_361[VAR_200]:
for VAR_178 in VAR_361[VAR_200]:
VAR_11.append(VAR_178)
VAR_11 += VAR_180['args']
VAR_102.define_table(VAR_200, *VAR_11, **VAR_180['vars'])
if self.settings.templates is None and not self.settings.manage_permissions:
self.settings.templates = \
VAR_102.wiki_page.tags.contains('template') & VAR_102.wiki_page.can_read.contains('everybody')
def FUNC_169(VAR_213, VAR_216, VAR_102=db):
for VAR_441 in VAR_213.tags or []:
VAR_441 = tag.strip().lower()
if VAR_441:
VAR_102.wiki_tag.insert(VAR_149=VAR_441, wiki_page=VAR_216)
def FUNC_170(VAR_334, VAR_213, VAR_102=db):
VAR_213 = VAR_334.select(VAR_176=(0, 1)).first()
VAR_102(VAR_102.wiki_tag.wiki_page == VAR_213.id).delete()
for VAR_441 in VAR_213.tags or []:
VAR_441 = tag.strip().lower()
if VAR_441:
VAR_102.wiki_tag.insert(VAR_149=VAR_441, wiki_page=VAR_213.id)
VAR_102.wiki_page._after_insert.append(FUNC_169)
VAR_102.wiki_page._after_update.append(FUNC_170)
if (VAR_65.user and
check_credentials(VAR_265.request, gae_login=False) and
'wiki_editor' not in VAR_65.user_groups.values() and
self.settings.groups == list(VAR_65.user_groups.values())):
VAR_406 = VAR_102.auth_group(VAR_147='wiki_editor')
VAR_407 = VAR_406.id if VAR_406 else VAR_102.auth_group.insert(
VAR_147='wiki_editor')
VAR_65.add_membership(VAR_407)
VAR_224.lock_keys = True
def FUNC_72(self, VAR_213=None):
raise HTTP(401)
def FUNC_135(self, VAR_213):
if 'everybody' in VAR_213.can_read or not self.settings.manage_permissions:
return True
elif self.auth.user:
VAR_168 = self.settings.groups
if ('wiki_editor' in VAR_168 or
set(VAR_168).intersection(set(VAR_213.can_read + VAR_213.can_edit)) or
VAR_213.created_by == self.auth.user.id):
return True
return False
def FUNC_136(self, VAR_213=None):
if not self.auth.user:
redirect(self.auth.settings.login_url)
VAR_168 = self.settings.groups
return ('wiki_editor' in VAR_168 or
(VAR_213 is None and 'wiki_author' in VAR_168) or
VAR_213 is not None and (set(VAR_168).intersection(set(VAR_213.can_edit)) or
VAR_213.created_by == self.auth.user.id))
def FUNC_137(self):
if not self.auth.user:
return False
VAR_168 = self.settings.groups
return 'wiki_editor' in VAR_168
def FUNC_138(self):
return True
def FUNC_139(self):
if self.auth.user:
if self.settings.menu_groups is None:
return True
else:
VAR_168 = self.settings.groups
if any(VAR_446 in self.settings.menu_groups for VAR_446 in VAR_168):
return True
return False
def FUNC_140(self):
if (not self.wiki_menu_items and self.settings.controller and self.settings.function):
self.wiki_menu_items = self.menu(self.settings.controller,
self.settings.function)
VAR_265.response.menu += self.wiki_menu_items
def __call__(self):
VAR_58 = VAR_265.request
VAR_224 = self.settings
VAR_224.controller = VAR_224.controller or VAR_58.controller
VAR_224.function = VAR_224.function or VAR_58.function
self.automenu()
VAR_301 = VAR_58.args(0) or 'index'
if VAR_301 and VAR_301.isdigit():
return self.media(int(VAR_301))
elif not VAR_301 or not VAR_301.startswith('_'):
return self.read(VAR_301)
elif VAR_301 == '_edit':
return self.edit(VAR_58.args(1) or 'index', VAR_58.args(2) or 0)
elif VAR_301 == '_editmedia':
return self.editmedia(VAR_58.args(1) or 'index')
elif VAR_301 == '_create':
return self.create()
elif VAR_301 == '_pages':
return self.pages()
elif VAR_301 == '_search':
return self.search()
elif VAR_301 == '_recent':
VAR_474 = int(VAR_58.vars.page or 0)
VAR_174 = self.auth.db.wiki_page.created_by == VAR_58.args(
1, cast=int)
return self.search(VAR_174=query,
VAR_175=~self.auth.db.wiki_page.created_on,
VAR_176=(VAR_474 * self.rows_page,
(VAR_474 + 1) * self.rows_page),
)
elif VAR_301 == '_cloud':
return self.cloud()
elif VAR_301 == '_preview':
return self.preview(self.get_renderer())
def FUNC_141(self, VAR_213):
if not self.can_read(VAR_213):
VAR_408 = (VAR_213.body or '').replace('\r', '')
VAR_409 = [VAR_447 for VAR_447 in VAR_408.split('\VAR_182\n') if not VAR_447.startswith('#') and VAR_447.strip()]
if VAR_409:
return VAR_409[0]
return ''
def FUNC_142(self, VAR_84):
return (VAR_84 or '').replace('://HOSTNAME', '://%s' % self.host)
def FUNC_88(self, VAR_157, VAR_167=False):
if VAR_157 in '_cloud':
return self.cloud()
elif VAR_157 in '_search':
return self.search()
VAR_213 = self.auth.db.wiki_page(VAR_157=slug)
if VAR_213 and (not self.can_read(VAR_213)):
return self.not_authorized(VAR_213)
if VAR_265.request.extension == 'html':
if not VAR_213:
VAR_12 = URL(VAR_11=('_create', VAR_157))
return dict(VAR_231=A('Create VAR_213 "%s"' % VAR_157, _href=VAR_12, _class="btn"))
else:
VAR_305 = VAR_213.html if not VAR_167 else self.get_renderer()(VAR_213)
VAR_231 = XML(self.fix_hostname(VAR_305))
return dict(VAR_442=VAR_213.title,
VAR_157=VAR_213.slug,
VAR_213=page,
VAR_231=content,
VAR_212=VAR_213.tags,
created_on=VAR_213.created_on,
modified_on=VAR_213.modified_on)
elif VAR_265.request.extension == 'load':
return self.fix_hostname(VAR_213.html) if VAR_213 else ''
else:
if not VAR_213:
raise HTTP(404)
else:
return dict(VAR_442=VAR_213.title,
VAR_157=VAR_213.slug,
VAR_213=page,
VAR_231=VAR_213.body,
VAR_212=VAR_213.tags,
created_on=VAR_213.created_on,
modified_on=VAR_213.modified_on)
def FUNC_143(self, VAR_157, VAR_215=0):
VAR_65 = self.auth
VAR_102 = VAR_65.db
VAR_213 = VAR_102.wiki_page(VAR_157=slug)
if not self.can_edit(VAR_213):
return self.not_authorized(VAR_213)
VAR_335 = ' '.join(VAR_15.capitalize() for VAR_15 in VAR_157.split('-'))
if not VAR_213:
if not (self.can_manage() or
VAR_157.startswith(self.settings.force_prefix)):
VAR_265.session.flash = 'slug must have "%s" prefix' \
% self.settings.force_prefix
redirect(URL(VAR_11=('_create')))
VAR_102.wiki_page.can_read.default = [CLASS_8.everybody]
VAR_102.wiki_page.can_edit.default = [VAR_65.user_group_role()]
VAR_102.wiki_page.title.default = VAR_335
VAR_102.wiki_page.slug.default = VAR_157
if VAR_157 == 'wiki-menu':
VAR_102.wiki_page.body.default = \
'- Menu Item > @////index\VAR_182- - Submenu > http://web2py.com'
else:
VAR_102.wiki_page.body.default = VAR_102(VAR_102.wiki_page.id == VAR_215).select(VAR_102.wiki_page.body)[0].body \
if int(VAR_215) > 0 else '## %VAR_280\VAR_182\npage content' % VAR_335
VAR_98 = VAR_265.request.post_vars
if VAR_98.body:
VAR_98.body = VAR_98.body.replace('://%s' % self.host, '://HOSTNAME')
VAR_7 = SQLFORM(VAR_102.wiki_page, VAR_213, VAR_171=True,
VAR_275='table2cols', showid=False).process()
if VAR_7.deleted:
VAR_265.session.flash = 'page deleted'
redirect(URL())
elif VAR_7.accepted:
VAR_265.session.flash = 'page created'
redirect(URL(VAR_11=VAR_157))
VAR_336 = """
jQuery(VAR_106() {
if (!jQuery('#wiki_page_body').length) return;
var pagecontent = jQuery('#wiki_page_body');
pagecontent.css('font-family',
'Monaco,Menlo,Consolas,"Courier New",monospace');
var prevbutton = jQuery('<button class="btn nopreview">Preview</button>');
var VAR_218 = jQuery('<div VAR_216="preview"></div>').hide();
var previewmedia = jQuery('<div VAR_216="previewmedia"></div>');
var VAR_7 = pagecontent.closest('form');
VAR_218.insertBefore(VAR_7);
prevbutton.insertBefore(VAR_7);
if(%(link_media)VAR_280) {
var mediabutton = jQuery('<button class="btn nopreview">Media</button>');
mediabutton.insertBefore(VAR_7);
previewmedia.insertBefore(VAR_7);
mediabutton.click(VAR_106() {
if (mediabutton.hasClass('nopreview')) {
web2py_component('%(urlmedia)s', 'previewmedia');
} else {
previewmedia.empty();
}
mediabutton.toggleClass('nopreview');
});
}
prevbutton.click(VAR_106(e) {
e.preventDefault();
if (prevbutton.hasClass('nopreview')) {
prevbutton.addClass('preview').removeClass(
'nopreview').html('Edit Source');
try{var wiki_render = jQuery('#wiki_page_render').val()}
catch(e){var wiki_render = null;}
web2py_ajax_page('post', \
'%(VAR_12)s', {VAR_84: jQuery('#wiki_page_body').val(), \
VAR_159: wiki_render}, 'preview');
VAR_7.fadeOut('fast', VAR_106() {VAR_218.fadeIn()});
} else {
prevbutton.addClass(
'nopreview').removeClass('preview').html('Preview');
VAR_218.fadeOut('fast', VAR_106() {VAR_7.fadeIn()});
}
})
})
""" % dict(VAR_12=URL(VAR_11=('_preview', VAR_157)), link_media=('true' if VAR_213 else 'false'),
urlmedia=URL(extension='load',
VAR_11=('_editmedia', VAR_157),
VAR_98=dict(embedded=1)))
return dict(VAR_231=TAG[''](VAR_7, SCRIPT(VAR_336)))
def FUNC_144(self, VAR_157):
VAR_65 = self.auth
VAR_102 = VAR_65.db
VAR_213 = VAR_102.wiki_page(VAR_157=slug)
if not (VAR_213 and self.can_edit(VAR_213)):
return self.not_authorized(VAR_213)
self.auth.db.wiki_media.id.represent = lambda VAR_216, VAR_268: \
VAR_216 if not VAR_268.filename else \
SPAN('@////%i/%VAR_280.%s' % (VAR_216, IS_SLUG.urlify(VAR_268.title.split('.')[0]), VAR_268.filename.split('.')[-1]))
self.auth.db.wiki_media.wiki_page.default = VAR_213.id
self.auth.db.wiki_media.wiki_page.writable = False
VAR_337 = []
VAR_338 = True
VAR_339 = True
if VAR_265.request.vars.embedded:
VAR_336 = "var VAR_15 = jQuery('#wiki_page_body'); VAR_15.val(VAR_15.val() + jQuery('%s').text()); return false;"
VAR_410 = self.auth.db.wiki_media.id.represent
VAR_338 = False
VAR_339 = False
VAR_337 = [lambda VAR_268: A('copy into source', _href='#', _onclick=VAR_336 % (VAR_410(VAR_268.id, VAR_268)))]
VAR_231 = SQLFORM.grid(
self.auth.db.wiki_media.wiki_page == VAR_213.id,
VAR_175=self.auth.db.wiki_media.title,
VAR_337=links,
VAR_338=FUNC_96,
VAR_339=FUNC_87,
VAR_11=['_editmedia', VAR_157],
user_signature=False)
return dict(VAR_231=content)
def VAR_339(self):
if not self.can_edit():
return self.not_authorized()
VAR_102 = self.auth.db
VAR_340 = VAR_102(VAR_102.wiki_page.id > 0).select(VAR_102.wiki_page.id, VAR_102.wiki_page.slug)
VAR_63 = [OPTION(VAR_268.slug, _value=VAR_268.id) for VAR_268 in VAR_340]
VAR_63.insert(0, OPTION('', _value=''))
VAR_133 = [VAR_1("slug", VAR_5=VAR_265.request.args(1) or
self.settings.force_prefix,
VAR_281=(IS_SLUG(), IS_NOT_IN_DB(VAR_102, VAR_102.wiki_page.slug))), ]
if self.settings.templates:
VAR_133.append(
VAR_1("from_template", "reference wiki_page",
VAR_281=IS_EMPTY_OR(IS_IN_DB(VAR_102(self.settings.templates), VAR_102.wiki_page._id, '%(VAR_157)s')),
VAR_64=VAR_265.T("Choose Template or empty for new Page")))
VAR_7 = SQLFORM.factory(*VAR_133, **dict(_class="well"))
VAR_7.element("[type=submit]").attributes["_value"] = \
VAR_265.T("Create Page from Slug")
if VAR_7.process().accepted:
VAR_7.vars.from_template = 0 if not VAR_7.vars.from_template else VAR_7.vars.from_template
redirect(URL(VAR_11=('_edit', VAR_7.vars.slug, VAR_7.vars.from_template or 0))) # added param
return dict(VAR_231=VAR_7)
def VAR_415(self):
if not self.can_manage():
return self.not_authorized()
self.auth.db.wiki_page.slug.represent = lambda VAR_157, VAR_268: SPAN(
'@////%s' % VAR_157)
self.auth.db.wiki_page.title.represent = lambda VAR_442, VAR_268: \
A(VAR_442, _href=URL(VAR_11=VAR_268.slug))
VAR_341 = self.auth.db.wiki_page
VAR_231 = SQLFORM.grid(
VAR_341,
VAR_133=[VAR_341.slug,
VAR_341.title, VAR_341.tags,
VAR_341.can_read, VAR_341.can_edit],
VAR_337=[
lambda VAR_268:
A('edit', _href=URL(VAR_11=('_edit', VAR_268.slug)), _class='btn'),
lambda VAR_268:
A('media', _href=URL(VAR_11=('_editmedia', VAR_268.slug)), _class='btn')],
details=False, editable=False, VAR_171=False, VAR_339=False,
VAR_175=self.auth.db.wiki_page.title,
VAR_11=['_pages'],
user_signature=False)
return dict(VAR_231=content)
def VAR_342(self, VAR_216):
VAR_58, VAR_246, VAR_102 = VAR_265.request, VAR_265.response, self.auth.db
VAR_342 = VAR_102.wiki_media(VAR_216)
if VAR_342:
if self.settings.manage_permissions:
VAR_213 = VAR_102.wiki_page(VAR_342.wiki_page)
if not self.can_read(VAR_213):
return self.not_authorized(VAR_213)
VAR_58.args = [VAR_342.filename]
VAR_411 = VAR_246.download(VAR_58, VAR_102)
VAR_265.session.forget() # VAR_130 rid of the VAR_20
VAR_246.headers['Last-Modified'] = \
VAR_58.utcnow.strftime("%VAR_9, %VAR_28 %VAR_14 %Y %H:%M:%S GMT")
if 'Content-Disposition' in VAR_246.headers:
del VAR_246.headers['Content-Disposition']
VAR_246.headers['Pragma'] = 'cache'
VAR_246.headers['Cache-Control'] = 'private'
return VAR_411
else:
raise HTTP(404)
def VAR_344(self, VAR_105='default', VAR_106='index'):
VAR_102 = self.auth.db
VAR_58 = VAR_265.request
VAR_343 = VAR_102.wiki_page(VAR_157='wiki-menu')
VAR_344 = []
if VAR_343:
VAR_412 = {'': VAR_344}
VAR_413 = re.compile('[\VAR_392\VAR_182\VAR_446]*(?P<VAR_201>(\VAR_280*\-\s*)+)(?P<VAR_442>\w.*?)\VAR_280+\>\s+(?P<VAR_278>\S+)')
for match in VAR_413.finditer(self.fix_hostname(VAR_343.body)):
VAR_201 = match.group('base').replace(' ', '')
VAR_442 = match.group('title')
VAR_278 = match.group('link')
VAR_443 = None
if VAR_278.startswith('@'):
VAR_252 = VAR_278[2:].split('/')
if len(VAR_252) > 3:
VAR_443 = VAR_252[3]
VAR_278 = URL(VAR_9=VAR_252[0] or None, VAR_15=VAR_252[1] or VAR_105,
VAR_10=VAR_252[2] or VAR_106, VAR_11=VAR_252[3:])
VAR_444 = VAR_412.get(VAR_201[1:], VAR_412[''])
VAR_445 = []
VAR_412[VAR_201] = VAR_445
VAR_444.append((VAR_265.T(VAR_442),
VAR_58.args(0) == VAR_443,
VAR_278, VAR_445))
if self.can_see_menu():
VAR_414 = []
VAR_344.append((VAR_265.T('[CLASS_8]'), None, None, VAR_414))
if URL() == URL(VAR_105, VAR_106):
if not str(VAR_58.args(0)).startswith('_'):
VAR_157 = VAR_58.args(0) or 'index'
VAR_118 = 1
elif VAR_58.args(0) == '_edit':
VAR_157 = VAR_58.args(1) or 'index'
VAR_118 = 2
elif VAR_58.args(0) == '_editmedia':
VAR_157 = VAR_58.args(1) or 'index'
VAR_118 = 3
else:
VAR_118 = 0
if VAR_118 in (2, 3):
VAR_414.append((VAR_265.T('View Page'), None,
URL(VAR_105, VAR_106, VAR_11=VAR_157)))
if VAR_118 in (1, 3):
VAR_414.append((VAR_265.T('Edit Page'), None,
URL(VAR_105, VAR_106, VAR_11=('_edit', VAR_157))))
if VAR_118 in (1, 2):
VAR_414.append((VAR_265.T('Edit Page Media'), None,
URL(VAR_105, VAR_106, VAR_11=('_editmedia', VAR_157))))
VAR_414.append((VAR_265.T('Create New Page'), None,
URL(VAR_105, VAR_106, VAR_11=('_create'))))
if self.can_manage():
VAR_414.append((VAR_265.T('Manage Pages'), None,
URL(VAR_105, VAR_106, VAR_11=('_pages'))))
VAR_414.append((VAR_265.T('Edit Menu'), None,
URL(VAR_105, VAR_106, VAR_11=('_edit', 'wiki-menu'))))
VAR_414.append((VAR_265.T('Search Pages'), None,
URL(VAR_105, VAR_106, VAR_11=('_search'))))
return VAR_344
def FUNC_94(self, VAR_212=None, VAR_174=None, VAR_217=True, VAR_218=True,
VAR_176=(0, 100), VAR_175=None):
if not self.can_search():
return self.not_authorized()
VAR_58 = VAR_265.request
VAR_231 = CAT()
if VAR_212 is None and VAR_174 is None:
VAR_7 = FORM(INPUT(_name='q', VAR_281=IS_NOT_EMPTY(),
VAR_180=VAR_58.vars.q),
INPUT(_type="submit", _value=VAR_265.T('Search')),
_method='GET')
VAR_231.append(DIV(VAR_7, _class='w2p_wiki_form'))
if VAR_58.vars.q:
VAR_212 = [v.strip() for v in VAR_58.vars.q.split(',')]
VAR_212 = [v.lower() for v in VAR_212 if v]
if VAR_212 or VAR_174 is not None:
VAR_102 = self.auth.db
VAR_345 = VAR_102.wiki_tag.wiki_page.count()
VAR_133 = [VAR_102.wiki_page.id, VAR_102.wiki_page.slug,
VAR_102.wiki_page.title, VAR_102.wiki_page.tags,
VAR_102.wiki_page.can_read, VAR_102.wiki_page.can_edit]
if VAR_218:
VAR_133.append(VAR_102.wiki_page.body)
if VAR_174 is None:
VAR_174 = (VAR_102.wiki_page.id == VAR_102.wiki_tag.wiki_page) &\
(VAR_102.wiki_tag.name.belongs(VAR_212))
VAR_174 = VAR_174 | VAR_102.wiki_page.title.contains(VAR_58.vars.q)
if self.settings.restrict_search and not self.can_manage():
VAR_174 = VAR_174 & (VAR_102.wiki_page.created_by == self.auth.user_id)
VAR_415 = VAR_102(VAR_174).select(VAR_345,
*VAR_133, **dict(VAR_175=orderby or ~VAR_345,
groupby=reduce(lambda VAR_9, VAR_14: VAR_9 | VAR_14, VAR_133),
distinct=True,
VAR_176=limitby))
if VAR_58.extension in ('html', 'load'):
if not VAR_415:
VAR_231.append(DIV(VAR_265.T("No results"),
_class='w2p_wiki_form'))
def VAR_278(VAR_446):
return A(VAR_446, _href=URL(VAR_11='_search', VAR_98=dict(q=VAR_446)))
VAR_252 = [DIV(H3(A(VAR_447.wiki_page.title, _href=URL(
VAR_11=VAR_447.wiki_page.slug))),
MARKMIN(self.first_paragraph(VAR_447.wiki_page))
if VAR_218 else '',
DIV(_class='w2p_wiki_tags',
*[VAR_278(VAR_446.strip()) for VAR_446 in
VAR_447.wiki_page.tags or [] if VAR_446.strip()]),
_class='w2p_wiki_search_item')
for VAR_447 in VAR_415]
VAR_231.append(DIV(_class='w2p_wiki_pages', *VAR_252))
else:
VAR_217 = False
VAR_231 = [VAR_447.wiki_page.as_dict() for VAR_447 in VAR_415]
elif VAR_217:
VAR_231.append(self.cloud()['content'])
if VAR_58.extension == 'load':
return VAR_231
return dict(VAR_231=content)
def VAR_217(self):
VAR_102 = self.auth.db
VAR_345 = VAR_102.wiki_tag.wiki_page.count(distinct=True)
VAR_346 = VAR_102(VAR_102.wiki_tag).select(
VAR_102.wiki_tag.name, VAR_345,
distinct=True,
groupby=VAR_102.wiki_tag.name,
VAR_175=~VAR_345, VAR_176=(0, 20))
if VAR_346:
VAR_9, VAR_14 = VAR_346[0](VAR_345), VAR_346[-1](VAR_345)
def VAR_16(VAR_15):
VAR_416 = 'padding:0 0.2em;line-height:%.2fem;font-VAR_417:%.2fem'
VAR_417 = (1.5 * (VAR_15 - VAR_14) / max(VAR_9 - VAR_14, 1) + 1.3)
return VAR_416 % (1.3, VAR_417)
VAR_252 = []
for VAR_307 in VAR_346:
VAR_252.append(A(VAR_307.wiki_tag.name,
_style=VAR_16(VAR_307(VAR_345)),
_href=URL(VAR_11='_search',
VAR_98=dict(q=VAR_307.wiki_tag.name))))
VAR_252.append(' ')
return dict(VAR_231=DIV(_class='w2p_cloud', *VAR_252))
def VAR_218(self, VAR_159):
VAR_58 = VAR_265.request
if 'render' not in VAR_58.post_vars:
VAR_58.post_vars.render = None
return VAR_159(VAR_58.post_vars)
class CLASS_9(object):
def __init__(
self,
VAR_96,
VAR_219,
VAR_220={}
):
self.config = configparser.ConfigParser(VAR_220)
self.config.read(VAR_96)
if not self.config.has_section(VAR_219):
self.config.add_section(VAR_219)
self.section = VAR_219
self.filename = VAR_96
def FUNC_88(self):
if not(isinstance(VAR_265.session['settings_%s' % self.section], dict)):
VAR_224 = dict(self.config.items(self.section))
else:
VAR_224 = VAR_265.session['settings_%s' % self.section]
return VAR_224
def FUNC_150(self, VAR_63):
for option, VAR_180 in VAR_63:
self.config.set(self.section, option, VAR_180)
try:
self.config.write(open(self.filename, 'w'))
VAR_226 = True
except:
VAR_265.session['settings_%s' % self.section] = dict(self.config.items(self.section))
VAR_226 = False
return VAR_226
if __name__ == '__main__':
import .doctest
doctest.testmod()
| [
1,
2,
3,
8,
12,
36,
38,
50,
52,
55,
58,
59,
61,
63,
64,
73,
74,
82,
83,
92,
93,
99,
100,
107,
108,
113,
115,
122,
124,
126,
130,
135,
137,
139,
150,
175,
178,
180,
185,
189,
202,
206,
209,
211,
216,
218,
220,
224,
230,
233,
249,
261,
263,
286,
312,
318,
323,
325,
343,
368,
372,
374,
378,
380,
385,
388,
394,
396,
402,
405,
407,
412,
413,
415,
421,
422,
427,
429,
434,
436,
439,
448,
449,
450,
477,
479,
494,
495,
497,
505,
507,
508,
511,
513,
522,
523,
524,
525,
530,
531,
532,
535,
541,
542,
545,
546,
547,
557,
566,
569,
575,
577,
581,
586,
587,
588,
595,
610,
613,
627,
628,
629,
636,
638,
641,
642,
644,
647,
648,
656,
657,
659,
693,
694,
700,
701,
708,
719,
720,
734,
736,
805,
806,
822,
826,
839,
840,
845,
853,
860,
862,
864,
867,
869,
871,
874,
876,
879,
881,
884,
906,
937,
974,
975,
976,
1011,
1012,
1016,
1054,
1057,
1059,
1061,
1064,
1067,
1069,
1074,
1080,
1082,
1083,
1085,
1135,
1140,
1149,
1165,
1169,
1175,
1186,
1195,
1203,
1204,
1205,
1206,
1217,
1223,
1227,
1238,
1246,
1251,
1255,
1263,
1276,
1297,
1305,
1310,
1328,
1331,
1336,
1338,
1342,
1362,
1364,
1369,
1371,
1372,
1374,
1423,
1489,
1491,
1498,
1500,
1507,
1509,
1517,
1521,
1523,
1533,
1536,
1538,
1541,
1544,
1546,
1548,
1550,
1554,
1556,
1559,
1561,
1564,
1566,
1568,
1570,
1573,
1575,
1576,
1578,
1580,
1592,
1601,
1604,
1623,
1629,
1630,
1643,
1644,
1645,
1646,
1651,
1663,
1664,
1668,
1669,
1728,
1741,
1742,
1745,
1752,
1761,
1764,
1765,
1773,
1776,
1779,
1784,
1786,
1791,
1794,
1796,
1832,
1838,
1841,
1849,
1862,
1866,
1879,
1904,
1910,
1932,
1936,
1939,
1940,
1945,
1977,
1979,
1980,
1982,
1983,
1987,
1990,
2006,
2008,
2010,
2015,
2032,
2034,
2043,
2046,
2049,
2054,
2057,
2060,
2066,
2077,
2082,
2085,
2086,
2087,
2089,
2090,
2092,
2094,
2104,
2143,
2166,
2178,
2194,
2195,
2203,
2212,
2215,
2232,
2236,
2240,
2243,
2250,
2272,
2282,
2298,
2304,
2311,
2312,
2321,
2324,
2340,
2366,
2372,
2384,
2390,
2426,
2436,
2438,
2449,
2463,
2464,
2466,
2471,
2472,
2474,
2487,
2489,
2490,
2501,
2507,
2514,
2516,
2518,
2519,
2532,
2533,
2534,
2535,
2537,
2538,
2542,
2543,
2544,
2546,
2547,
2548,
2549,
2550,
2551,
2552,
2553,
2555,
2562,
2563,
2575,
2582,
2584,
2589,
2591,
2594,
2599,
2612,
2613,
2620,
2626,
2628,
2630,
2633,
2635,
2641,
2649,
2653,
2657,
2666,
2670,
2671,
2672,
2673,
2674,
2675,
2676,
2677,
2678,
2679,
2685,
2696,
2697,
2698,
2700,
2701,
2702,
2707,
2709,
2711,
2717,
2720,
2726,
2732,
2736,
2738,
2739,
2743,
2745,
2746,
2747,
2750,
2762,
2768,
2771,
2780,
2782,
2783,
2784,
2785,
2786,
2789,
2790,
2791,
2792,
2795,
2796,
2797,
2798,
2802,
2803,
2805,
2806,
2810,
2811,
2812,
2818,
2819,
2820,
2823,
2824,
2833,
2834,
2842,
2847,
2851,
2856,
2857,
2858,
2860,
2876,
2884,
2887,
2897,
2913,
2921,
2922,
2936,
2943,
2966,
2971,
2972,
2977,
2981,
3026,
3028,
3037,
3049,
3061,
3072,
3107,
3131,
3143,
3153,
3217,
3227,
3230,
3232,
3235,
3294,
3312,
3321,
3327,
3330,
3348,
3360,
3370,
3373,
3375,
3378,
3398,
3403,
3408,
3435,
3451,
3479,
3517,
3519,
3534,
3545,
3555,
3559,
3560,
3563,
3568,
3572,
3610,
3626,
3636,
3654,
3691,
3700,
3705,
3707,
3709,
3711,
3715,
3717,
3719,
3721,
3723,
3726,
3728,
3730,
3732,
3734,
3736,
3744,
3747,
3752,
3754,
3756,
3758,
3801,
3806,
3823,
3831,
3837,
3842,
3844,
3846,
3852,
3869,
3883,
3885,
3891,
3911,
3919,
3923,
3931,
3935,
3946,
3952,
3955,
3957,
3965,
3992,
4002,
4004,
4006,
4008,
4014,
4016,
4019,
4023,
4027,
4033,
4036,
4039,
4042,
4044,
4049,
4073,
4090,
4093,
4109,
4110,
4111,
4117,
4124,
4131,
4132,
4134,
4146,
4157,
4168,
4193,
4197,
4222,
4226,
4235,
4240,
4245,
4272,
4354,
4365,
4387,
4409,
4437,
4449,
4450,
4462,
4485,
4492,
4534,
4540,
4550,
4611,
4627,
4629,
4630,
4653,
4656,
4661,
4664,
4665,
4675,
4676,
4685,
4686,
4690,
4694,
4696,
4697,
4700,
4701,
4702,
4706,
4708,
4709,
4713,
4715,
4716,
4718,
4719,
4721,
4724,
4736,
4741,
4748,
4750,
4752,
4756,
4761,
4768,
4770,
4772,
4776,
4781,
4788,
4790,
4792,
4796,
4801,
4810,
4812,
4814,
4818,
4823,
4830,
4832,
4834,
4838,
4843,
4850,
4852,
4854,
4858,
4863,
4870,
4872,
4877,
4881,
4886,
4893,
4895,
4897,
4901,
4906,
4913,
4914,
4916,
4918,
4922,
4927,
4934,
4936,
4938,
4942,
4950,
4955,
4962,
4964,
4969,
4973,
4978,
4987,
4994,
5024,
5038,
5051,
5065,
5067,
5073,
5074,
5083,
5087,
5097,
5127,
5129,
5134,
5145,
5149,
5152,
5156,
5159,
5161,
5165,
5172,
5174,
5183,
5184,
5197,
5202,
5226,
5232,
5258,
5267,
5285,
5289,
5291,
5294,
5298,
5318,
5330,
5334,
5344,
5346,
5349,
5351,
5360,
5362,
5391,
5394,
5395,
5396,
5402,
5403,
5407,
5410,
5415,
5429,
5430,
5476,
5477,
5495,
5496,
5499,
5507,
5510,
5513,
5515,
5517,
5519,
5527,
5529,
5553,
5556,
5570,
5578,
5583,
5586,
5589,
5590,
5592,
5598,
5601,
5603,
5607,
5617,
5626,
5631,
5657,
5667,
5674,
5679,
5680,
5681,
5683,
5689,
5695,
5699,
5702,
5706,
5711,
5721,
5728,
5729,
5733,
5738,
5744,
5747,
5750,
5752,
5754,
5758,
5768,
5789,
5795,
5797,
5801,
5809,
5820,
5830,
5836,
5841,
5843,
5886,
5887,
5891,
5900,
5904,
5910,
5920,
5929,
5931,
5932,
5933,
5936,
5947,
5956,
5962,
5965,
5975,
5976,
5977,
5984,
5991,
6020,
6028,
6031,
6067,
6150,
6180,
6199,
6204,
6227,
6229,
6250,
6300,
6303,
6309,
6313,
6352,
6373,
6384,
6397,
6400,
6401,
6405,
6406,
6408,
6421,
6428,
6439,
6443,
4,
5,
6,
7,
8,
9,
10,
11,
110,
111,
112,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
181,
182,
183,
184,
842,
843,
844,
845,
846,
847,
848,
849,
850,
851,
852,
853,
854,
855,
856,
857,
858,
859,
860,
861,
862,
863,
864,
865,
866,
867,
868,
869,
870,
871,
872,
873,
874,
875,
876,
877,
878,
879,
880,
1014,
1015,
1016,
1017,
1018,
1019,
1020,
1021,
1022,
1023,
1024,
1025,
1026,
1027,
1028,
1029,
1030,
1031,
1032,
1033,
1034,
1035,
1036,
1037,
1038,
1039,
1040,
1041,
1042,
1043,
1044,
1045,
1046,
1047,
1048,
1049,
1050,
1051,
1052,
1053,
1054,
1055,
1056,
1057,
1058,
1059,
1060,
1061,
1062,
1063,
1064,
1065,
1066,
1067,
1068,
1069,
1070,
1071,
1072,
1073,
1074,
1075,
1076,
1077,
1078,
1079,
1080,
1081,
1082,
1083,
1084,
4678,
5405,
5406,
5407,
5408,
5409,
5410,
5411,
5412,
5413,
5414,
5415,
5416,
5417,
5418,
5498,
5499,
5500,
5501,
5502,
5503,
5504,
5505,
5506,
5507,
5508,
5509,
5510,
5511,
5512,
5513,
5514,
5515,
5516,
5517,
5518,
5519,
5520,
5521,
5522,
5523,
5524,
5525,
5526,
5527,
5528,
5529,
5530,
5531,
5532,
5533,
5534,
5535,
5536,
5537,
5538,
5539,
5540,
5541,
5542,
5543,
5544,
5545,
5546,
5547,
5548,
5549,
5550,
5551,
5552,
5553,
5554,
187,
188,
189,
190,
191,
192,
193,
194,
195,
196,
197,
198,
199,
200,
201,
202,
203,
204,
205,
206,
207,
208,
209,
210,
211,
212,
213,
214,
215,
216,
217,
218,
219,
220,
221,
222,
223,
224,
225,
226,
227,
228,
229,
230,
231,
232,
310,
311,
312,
313,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
326,
327,
328,
329,
330,
331,
332,
333,
334,
335,
336,
337,
338,
339,
340,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
358,
359,
360,
361,
362,
363,
364,
365,
366,
367,
368,
369,
370,
371,
372,
373,
374,
375,
376,
377,
378,
379,
380,
381,
382,
383,
384,
385,
386,
387,
388,
389,
390,
391,
392,
393,
394,
395,
396,
397,
398,
399,
400,
401,
402,
403,
404,
405,
406,
407,
408,
409,
410,
411,
1143,
1144,
1145,
1146,
1197,
1198,
1199,
1200,
1201,
1202,
1248,
1249,
1250,
1251,
1252,
1253,
1254,
1255,
1256,
1257,
1258,
1259,
1299,
1300,
1301,
1307,
1308,
1309,
1310,
1311,
1312,
1333,
1334,
1335,
1336,
1337,
1338,
1339,
1340,
1341,
1342,
1343,
1606,
1607,
1608,
1609,
1610,
1781,
1782,
1783,
1788,
1789,
1790,
1791,
1792,
1793,
1794,
1795,
1836,
1837,
1838,
1839,
1840,
1841,
1842,
2041,
2042,
2043,
2044,
2045,
2046,
2047,
2048,
2049,
2050,
2051,
2052,
2053,
2054,
2055,
2056,
2057,
2058,
2059,
2060,
2061,
2062,
2063,
2064,
2065,
2066,
2067,
2080,
2081,
2082,
2083,
2084,
2085,
2086,
2087,
2088,
2089,
2090,
2091,
2092,
2093,
2169,
2170,
2171,
2172,
2173,
2234,
2235,
2236,
2237,
2238,
2239,
2240,
2241,
2242,
2243,
2244,
2245,
2246,
2247,
2248,
2249,
2250,
2251,
2284,
2285,
2286,
2306,
2307,
2308,
2309,
2428,
2429,
2430,
2431,
2432,
2440,
2441,
2442,
2443,
2444,
2456,
2457,
2458,
2853,
2854,
2855,
2894,
2895,
2896,
3034,
3035,
3036,
3068,
3069,
3070,
3071,
3150,
3151,
3152,
3224,
3225,
3226,
3296,
3297,
3298,
3314,
3315,
3316,
3367,
3368,
3369,
3442,
3443,
3444,
3552,
3553,
3554,
3633,
3634,
3635,
3702,
3703,
3704,
3705,
3706,
3707,
3708,
3709,
3710,
3711,
3712,
3713,
3714,
3715,
3716,
3717,
3718,
3719,
3720,
3721,
3722,
3723,
3724,
3725,
3726,
3727,
3728,
3729,
3730,
3731,
3732,
3733,
3734,
3735,
3736,
3737,
3738,
3749,
3750,
3751,
3752,
3753,
3754,
3755,
3756,
3757,
3758,
3759,
3803,
3804,
3805,
3825,
3826,
3827,
3839,
3840,
3841,
3887,
3888,
3889,
3913,
3914,
3915,
3916,
3917,
3918,
3926,
3927,
3928,
3929,
3930,
3937,
3938,
3939,
3940,
3941,
3942,
3948,
3949,
3950,
3951,
3952,
3953,
3954,
3955,
3956,
3957,
3958,
3999,
4000,
4001,
4002,
4003,
4004,
4005,
4006,
4007,
4008,
4009,
4010,
4011,
4012,
4013,
4014,
4015,
4016,
4017,
4018,
4019,
4020,
4021,
4022,
4023,
4024,
4025,
4026,
4027,
4028,
4029,
4030,
4031,
4032,
4033,
4034,
4035,
4036,
4037,
4038,
4039,
4040,
4041,
4042,
4043,
4044,
4045,
4046,
4047,
4048,
4049,
4050,
4126,
4148,
4149,
4150,
4151,
4536,
4537,
4538,
4539,
4540,
4541,
4542,
4543,
4544,
4545,
4546,
4547,
4548,
4549,
4550,
4551,
4738,
4739,
4740,
4741,
4742,
4743,
4744,
4745,
4746,
4747,
4748,
4749,
4750,
4751,
4752,
4753,
4758,
4759,
4760,
4761,
4762,
4763,
4764,
4765,
4766,
4767,
4768,
4769,
4770,
4771,
4772,
4773,
4778,
4779,
4780,
4781,
4782,
4783,
4784,
4785,
4786,
4787,
4788,
4789,
4790,
4791,
4792,
4793,
4798,
4799,
4800,
4801,
4802,
4803,
4804,
4805,
4806,
4807,
4808,
4809,
4810,
4811,
4812,
4813,
4814,
4815,
4820,
4821,
4822,
4823,
4824,
4825,
4826,
4827,
4828,
4829,
4830,
4831,
4832,
4833,
4834,
4835,
4840,
4841,
4842,
4843,
4844,
4845,
4846,
4847,
4848,
4849,
4850,
4851,
4852,
4853,
4854,
4855,
4860,
4861,
4862,
4863,
4864,
4865,
4866,
4867,
4868,
4869,
4870,
4871,
4872,
4873,
4874,
4875,
4876,
4877,
4878,
4883,
4884,
4885,
4886,
4887,
4888,
4889,
4890,
4891,
4892,
4893,
4894,
4895,
4896,
4897,
4898,
4903,
4904,
4905,
4906,
4907,
4908,
4909,
4910,
4911,
4912,
4913,
4914,
4915,
4916,
4917,
4918,
4919,
4924,
4925,
4926,
4927,
4928,
4929,
4930,
4931,
4932,
4933,
4934,
4935,
4936,
4937,
4938,
4939,
4952,
4953,
4954,
4955,
4956,
4957,
4958,
4959,
4960,
4961,
4962,
4963,
4964,
4965,
4966,
4967,
4968,
4969,
4970,
4971,
4972,
5332,
5333,
5334,
5335,
5336,
5337,
5338,
5339,
5340,
5341,
5342,
5343,
5344,
5345,
5346,
5347,
5348,
5349,
5350,
5351,
5352,
5353,
5354,
5355,
5356,
5357,
5358,
5359,
5360,
5361,
5595,
5596,
5597,
5598,
5599,
5600,
5601,
5602,
5603,
5604,
5605,
5606,
5607,
5608,
5609,
5610,
5611,
5612,
5613,
5614,
5615,
5616,
5617,
5618,
5677,
5685,
5686,
5687,
5697,
5761,
5762,
5763,
5764,
5979,
1934,
1935,
1936,
1937,
1938,
1939,
1940,
1941,
1942,
1943,
1944,
1945,
1946,
1947,
1948,
1949,
1950,
1951,
1952,
1953,
1954,
1955,
1956,
1957,
1958,
1959,
1960,
1961,
1962,
1963,
1964,
1965,
1966,
1967,
1968,
1969,
1970,
1971,
1972,
1973,
1974,
1975,
1976,
1977,
1978,
1979,
1980,
1981,
1982,
1983,
1984,
1985,
1986,
1987,
1988,
5147,
5148,
5149,
5150,
5151,
5152,
5153,
5154,
5155,
5156,
5157,
5158,
5159,
5160
] | [
1,
2,
3,
8,
12,
36,
38,
50,
52,
55,
58,
59,
61,
63,
64,
73,
74,
82,
83,
92,
93,
99,
100,
107,
109,
111,
112,
123,
124,
129,
131,
138,
140,
142,
146,
151,
153,
155,
166,
191,
194,
196,
201,
205,
218,
222,
225,
227,
232,
234,
236,
240,
246,
249,
265,
277,
279,
302,
328,
334,
339,
341,
359,
384,
388,
390,
394,
396,
401,
404,
410,
412,
418,
421,
423,
428,
429,
431,
437,
438,
443,
445,
450,
452,
455,
464,
465,
466,
493,
495,
510,
511,
513,
521,
523,
524,
527,
529,
538,
539,
540,
541,
546,
547,
548,
551,
557,
558,
561,
562,
563,
573,
582,
585,
591,
593,
597,
602,
603,
604,
611,
626,
629,
643,
644,
645,
652,
654,
657,
658,
660,
663,
664,
672,
673,
675,
709,
710,
716,
717,
724,
735,
736,
750,
752,
821,
822,
838,
842,
855,
856,
861,
869,
876,
878,
880,
883,
885,
887,
890,
892,
895,
897,
900,
922,
953,
990,
991,
992,
1027,
1028,
1032,
1070,
1073,
1075,
1077,
1080,
1083,
1085,
1090,
1096,
1098,
1099,
1101,
1151,
1156,
1165,
1181,
1185,
1191,
1202,
1211,
1219,
1220,
1221,
1222,
1233,
1239,
1243,
1254,
1262,
1267,
1271,
1279,
1292,
1313,
1321,
1326,
1344,
1347,
1352,
1354,
1358,
1378,
1380,
1385,
1387,
1388,
1390,
1439,
1505,
1507,
1514,
1516,
1523,
1525,
1533,
1537,
1539,
1549,
1552,
1554,
1557,
1560,
1562,
1564,
1566,
1570,
1572,
1575,
1577,
1580,
1582,
1584,
1586,
1589,
1591,
1592,
1594,
1596,
1608,
1617,
1620,
1639,
1645,
1646,
1659,
1660,
1661,
1662,
1667,
1679,
1680,
1684,
1685,
1744,
1757,
1758,
1761,
1768,
1776,
1779,
1782,
1787,
1789,
1794,
1797,
1799,
1835,
1841,
1844,
1852,
1865,
1869,
1882,
1907,
1913,
1935,
1939,
1942,
1943,
1948,
1980,
1982,
1983,
1985,
1986,
1990,
1993,
2009,
2011,
2013,
2018,
2035,
2037,
2046,
2049,
2052,
2057,
2060,
2063,
2069,
2080,
2085,
2088,
2089,
2090,
2092,
2093,
2095,
2097,
2107,
2146,
2169,
2181,
2197,
2198,
2206,
2215,
2218,
2235,
2239,
2243,
2246,
2253,
2275,
2285,
2301,
2307,
2314,
2315,
2324,
2327,
2343,
2369,
2375,
2387,
2393,
2429,
2439,
2441,
2452,
2466,
2467,
2469,
2474,
2475,
2477,
2490,
2492,
2493,
2504,
2510,
2517,
2519,
2521,
2522,
2535,
2536,
2537,
2538,
2540,
2541,
2545,
2546,
2547,
2549,
2550,
2551,
2552,
2553,
2554,
2555,
2556,
2558,
2565,
2566,
2578,
2585,
2587,
2592,
2594,
2597,
2602,
2615,
2616,
2623,
2629,
2631,
2633,
2636,
2638,
2644,
2652,
2656,
2660,
2669,
2673,
2674,
2675,
2676,
2677,
2678,
2679,
2680,
2681,
2682,
2688,
2699,
2700,
2701,
2703,
2704,
2705,
2710,
2712,
2714,
2720,
2723,
2729,
2735,
2739,
2741,
2742,
2746,
2748,
2749,
2750,
2753,
2765,
2771,
2774,
2783,
2785,
2786,
2787,
2788,
2789,
2792,
2793,
2794,
2795,
2798,
2799,
2800,
2801,
2805,
2806,
2808,
2809,
2813,
2814,
2815,
2821,
2822,
2823,
2826,
2827,
2836,
2837,
2845,
2850,
2854,
2859,
2860,
2861,
2863,
2879,
2887,
2890,
2900,
2916,
2924,
2925,
2939,
2946,
2969,
2974,
2975,
2980,
2984,
3029,
3031,
3040,
3052,
3064,
3075,
3110,
3134,
3146,
3156,
3220,
3230,
3233,
3235,
3238,
3297,
3315,
3324,
3330,
3333,
3351,
3363,
3373,
3376,
3378,
3381,
3401,
3406,
3411,
3438,
3454,
3482,
3520,
3522,
3537,
3548,
3558,
3562,
3563,
3566,
3571,
3575,
3613,
3629,
3639,
3657,
3694,
3703,
3708,
3710,
3712,
3714,
3718,
3720,
3722,
3724,
3726,
3729,
3731,
3733,
3735,
3737,
3739,
3747,
3750,
3755,
3757,
3759,
3761,
3804,
3809,
3826,
3834,
3840,
3845,
3847,
3849,
3855,
3872,
3886,
3888,
3894,
3914,
3922,
3926,
3934,
3938,
3949,
3955,
3958,
3960,
3968,
3995,
4005,
4007,
4009,
4011,
4017,
4019,
4022,
4026,
4030,
4036,
4039,
4042,
4045,
4047,
4052,
4076,
4093,
4096,
4112,
4113,
4114,
4120,
4127,
4134,
4135,
4137,
4149,
4160,
4171,
4196,
4200,
4225,
4229,
4238,
4243,
4248,
4275,
4357,
4368,
4390,
4412,
4440,
4452,
4453,
4465,
4488,
4495,
4537,
4543,
4553,
4614,
4630,
4632,
4633,
4656,
4659,
4664,
4667,
4668,
4678,
4679,
4688,
4689,
4693,
4697,
4699,
4700,
4703,
4704,
4705,
4709,
4711,
4712,
4716,
4718,
4719,
4721,
4722,
4724,
4727,
4739,
4744,
4751,
4753,
4755,
4759,
4764,
4771,
4773,
4775,
4779,
4784,
4791,
4793,
4795,
4799,
4804,
4813,
4815,
4817,
4821,
4826,
4833,
4835,
4837,
4841,
4846,
4853,
4855,
4857,
4861,
4866,
4873,
4875,
4880,
4884,
4889,
4896,
4898,
4900,
4904,
4909,
4916,
4917,
4919,
4921,
4925,
4930,
4937,
4939,
4941,
4945,
4953,
4958,
4965,
4967,
4972,
4976,
4981,
4990,
4997,
5027,
5041,
5054,
5068,
5070,
5076,
5077,
5086,
5090,
5100,
5130,
5132,
5137,
5148,
5152,
5155,
5159,
5162,
5164,
5168,
5175,
5177,
5186,
5187,
5200,
5205,
5229,
5235,
5261,
5270,
5288,
5292,
5294,
5297,
5301,
5321,
5333,
5337,
5347,
5349,
5352,
5354,
5363,
5365,
5394,
5397,
5398,
5399,
5405,
5406,
5410,
5413,
5418,
5432,
5433,
5479,
5480,
5498,
5499,
5502,
5510,
5513,
5516,
5518,
5520,
5522,
5530,
5532,
5556,
5559,
5573,
5581,
5586,
5589,
5592,
5593,
5595,
5601,
5604,
5606,
5610,
5620,
5629,
5634,
5660,
5670,
5677,
5682,
5683,
5684,
5686,
5692,
5698,
5702,
5705,
5709,
5714,
5724,
5731,
5732,
5736,
5741,
5747,
5750,
5753,
5755,
5757,
5761,
5771,
5792,
5798,
5800,
5804,
5812,
5823,
5833,
5839,
5844,
5846,
5889,
5890,
5894,
5903,
5907,
5913,
5923,
5932,
5934,
5935,
5936,
5939,
5950,
5959,
5965,
5968,
5978,
5979,
5980,
5987,
5994,
6023,
6031,
6034,
6070,
6153,
6183,
6202,
6207,
6230,
6232,
6253,
6303,
6306,
6312,
6316,
6355,
6376,
6387,
6400,
6403,
6404,
6408,
6409,
6411,
6424,
6431,
6442,
6446,
4,
5,
6,
7,
8,
9,
10,
11,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
194,
195,
196,
197,
198,
199,
200,
858,
859,
860,
861,
862,
863,
864,
865,
866,
867,
868,
869,
870,
871,
872,
873,
874,
875,
876,
877,
878,
879,
880,
881,
882,
883,
884,
885,
886,
887,
888,
889,
890,
891,
892,
893,
894,
895,
896,
1030,
1031,
1032,
1033,
1034,
1035,
1036,
1037,
1038,
1039,
1040,
1041,
1042,
1043,
1044,
1045,
1046,
1047,
1048,
1049,
1050,
1051,
1052,
1053,
1054,
1055,
1056,
1057,
1058,
1059,
1060,
1061,
1062,
1063,
1064,
1065,
1066,
1067,
1068,
1069,
1070,
1071,
1072,
1073,
1074,
1075,
1076,
1077,
1078,
1079,
1080,
1081,
1082,
1083,
1084,
1085,
1086,
1087,
1088,
1089,
1090,
1091,
1092,
1093,
1094,
1095,
1096,
1097,
1098,
1099,
1100,
4681,
5408,
5409,
5410,
5411,
5412,
5413,
5414,
5415,
5416,
5417,
5418,
5419,
5420,
5421,
5501,
5502,
5503,
5504,
5505,
5506,
5507,
5508,
5509,
5510,
5511,
5512,
5513,
5514,
5515,
5516,
5517,
5518,
5519,
5520,
5521,
5522,
5523,
5524,
5525,
5526,
5527,
5528,
5529,
5530,
5531,
5532,
5533,
5534,
5535,
5536,
5537,
5538,
5539,
5540,
5541,
5542,
5543,
5544,
5545,
5546,
5547,
5548,
5549,
5550,
5551,
5552,
5553,
5554,
5555,
5556,
5557,
203,
204,
205,
206,
207,
208,
209,
210,
211,
212,
213,
214,
215,
216,
217,
218,
219,
220,
221,
222,
223,
224,
225,
226,
227,
228,
229,
230,
231,
232,
233,
234,
235,
236,
237,
238,
239,
240,
241,
242,
243,
244,
245,
246,
247,
248,
326,
327,
328,
329,
330,
331,
332,
333,
334,
335,
336,
337,
338,
339,
340,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
358,
359,
360,
361,
362,
363,
364,
365,
366,
367,
368,
369,
370,
371,
372,
373,
374,
375,
376,
377,
378,
379,
380,
381,
382,
383,
384,
385,
386,
387,
388,
389,
390,
391,
392,
393,
394,
395,
396,
397,
398,
399,
400,
401,
402,
403,
404,
405,
406,
407,
408,
409,
410,
411,
412,
413,
414,
415,
416,
417,
418,
419,
420,
421,
422,
423,
424,
425,
426,
427,
1159,
1160,
1161,
1162,
1213,
1214,
1215,
1216,
1217,
1218,
1264,
1265,
1266,
1267,
1268,
1269,
1270,
1271,
1272,
1273,
1274,
1275,
1315,
1316,
1317,
1323,
1324,
1325,
1326,
1327,
1328,
1349,
1350,
1351,
1352,
1353,
1354,
1355,
1356,
1357,
1358,
1359,
1622,
1623,
1624,
1625,
1626,
1784,
1785,
1786,
1791,
1792,
1793,
1794,
1795,
1796,
1797,
1798,
1839,
1840,
1841,
1842,
1843,
1844,
1845,
2044,
2045,
2046,
2047,
2048,
2049,
2050,
2051,
2052,
2053,
2054,
2055,
2056,
2057,
2058,
2059,
2060,
2061,
2062,
2063,
2064,
2065,
2066,
2067,
2068,
2069,
2070,
2083,
2084,
2085,
2086,
2087,
2088,
2089,
2090,
2091,
2092,
2093,
2094,
2095,
2096,
2172,
2173,
2174,
2175,
2176,
2237,
2238,
2239,
2240,
2241,
2242,
2243,
2244,
2245,
2246,
2247,
2248,
2249,
2250,
2251,
2252,
2253,
2254,
2287,
2288,
2289,
2309,
2310,
2311,
2312,
2431,
2432,
2433,
2434,
2435,
2443,
2444,
2445,
2446,
2447,
2459,
2460,
2461,
2856,
2857,
2858,
2897,
2898,
2899,
3037,
3038,
3039,
3071,
3072,
3073,
3074,
3153,
3154,
3155,
3227,
3228,
3229,
3299,
3300,
3301,
3317,
3318,
3319,
3370,
3371,
3372,
3445,
3446,
3447,
3555,
3556,
3557,
3636,
3637,
3638,
3705,
3706,
3707,
3708,
3709,
3710,
3711,
3712,
3713,
3714,
3715,
3716,
3717,
3718,
3719,
3720,
3721,
3722,
3723,
3724,
3725,
3726,
3727,
3728,
3729,
3730,
3731,
3732,
3733,
3734,
3735,
3736,
3737,
3738,
3739,
3740,
3741,
3752,
3753,
3754,
3755,
3756,
3757,
3758,
3759,
3760,
3761,
3762,
3806,
3807,
3808,
3828,
3829,
3830,
3842,
3843,
3844,
3890,
3891,
3892,
3916,
3917,
3918,
3919,
3920,
3921,
3929,
3930,
3931,
3932,
3933,
3940,
3941,
3942,
3943,
3944,
3945,
3951,
3952,
3953,
3954,
3955,
3956,
3957,
3958,
3959,
3960,
3961,
4002,
4003,
4004,
4005,
4006,
4007,
4008,
4009,
4010,
4011,
4012,
4013,
4014,
4015,
4016,
4017,
4018,
4019,
4020,
4021,
4022,
4023,
4024,
4025,
4026,
4027,
4028,
4029,
4030,
4031,
4032,
4033,
4034,
4035,
4036,
4037,
4038,
4039,
4040,
4041,
4042,
4043,
4044,
4045,
4046,
4047,
4048,
4049,
4050,
4051,
4052,
4053,
4129,
4151,
4152,
4153,
4154,
4539,
4540,
4541,
4542,
4543,
4544,
4545,
4546,
4547,
4548,
4549,
4550,
4551,
4552,
4553,
4554,
4741,
4742,
4743,
4744,
4745,
4746,
4747,
4748,
4749,
4750,
4751,
4752,
4753,
4754,
4755,
4756,
4761,
4762,
4763,
4764,
4765,
4766,
4767,
4768,
4769,
4770,
4771,
4772,
4773,
4774,
4775,
4776,
4781,
4782,
4783,
4784,
4785,
4786,
4787,
4788,
4789,
4790,
4791,
4792,
4793,
4794,
4795,
4796,
4801,
4802,
4803,
4804,
4805,
4806,
4807,
4808,
4809,
4810,
4811,
4812,
4813,
4814,
4815,
4816,
4817,
4818,
4823,
4824,
4825,
4826,
4827,
4828,
4829,
4830,
4831,
4832,
4833,
4834,
4835,
4836,
4837,
4838,
4843,
4844,
4845,
4846,
4847,
4848,
4849,
4850,
4851,
4852,
4853,
4854,
4855,
4856,
4857,
4858,
4863,
4864,
4865,
4866,
4867,
4868,
4869,
4870,
4871,
4872,
4873,
4874,
4875,
4876,
4877,
4878,
4879,
4880,
4881,
4886,
4887,
4888,
4889,
4890,
4891,
4892,
4893,
4894,
4895,
4896,
4897,
4898,
4899,
4900,
4901,
4906,
4907,
4908,
4909,
4910,
4911,
4912,
4913,
4914,
4915,
4916,
4917,
4918,
4919,
4920,
4921,
4922,
4927,
4928,
4929,
4930,
4931,
4932,
4933,
4934,
4935,
4936,
4937,
4938,
4939,
4940,
4941,
4942,
4955,
4956,
4957,
4958,
4959,
4960,
4961,
4962,
4963,
4964,
4965,
4966,
4967,
4968,
4969,
4970,
4971,
4972,
4973,
4974,
4975,
5335,
5336,
5337,
5338,
5339,
5340,
5341,
5342,
5343,
5344,
5345,
5346,
5347,
5348,
5349,
5350,
5351,
5352,
5353,
5354,
5355,
5356,
5357,
5358,
5359,
5360,
5361,
5362,
5363,
5364,
5598,
5599,
5600,
5601,
5602,
5603,
5604,
5605,
5606,
5607,
5608,
5609,
5610,
5611,
5612,
5613,
5614,
5615,
5616,
5617,
5618,
5619,
5620,
5621,
5680,
5688,
5689,
5690,
5700,
5764,
5765,
5766,
5767,
5982,
1937,
1938,
1939,
1940,
1941,
1942,
1943,
1944,
1945,
1946,
1947,
1948,
1949,
1950,
1951,
1952,
1953,
1954,
1955,
1956,
1957,
1958,
1959,
1960,
1961,
1962,
1963,
1964,
1965,
1966,
1967,
1968,
1969,
1970,
1971,
1972,
1973,
1974,
1975,
1976,
1977,
1978,
1979,
1980,
1981,
1982,
1983,
1984,
1985,
1986,
1987,
1988,
1989,
1990,
1991,
5150,
5151,
5152,
5153,
5154,
5155,
5156,
5157,
5158,
5159,
5160,
5161,
5162,
5163
] |
0CWE-22
| ########################################################################
# File name: xhu.py
# This file is part of: xmpp-http-upload
#
# LICENSE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import contextlib
import errno
import fnmatch
import json
import hashlib
import hmac
import pathlib
import typing
import flask
app = flask.Flask("xmpp-http-upload")
app.config.from_envvar("XMPP_HTTP_UPLOAD_CONFIG")
application = app
if app.config['ENABLE_CORS']:
from flask_cors import CORS
CORS(app)
def sanitized_join(path: str, root: pathlib.Path) -> pathlib.Path:
result = (root / path).absolute()
if not str(result).startswith(str(root) + "/"):
raise ValueError("resulting path is outside root")
return result
def get_paths(base_path: pathlib.Path):
data_file = pathlib.Path(str(base_path) + ".data")
metadata_file = pathlib.Path(str(base_path) + ".meta")
return data_file, metadata_file
def load_metadata(metadata_file):
with metadata_file.open("r") as f:
return json.load(f)
def get_info(path: str, root: pathlib.Path) -> typing.Tuple[
pathlib.Path,
dict]:
dest_path = sanitized_join(
path,
pathlib.Path(app.config["DATA_ROOT"]),
)
data_file, metadata_file = get_paths(dest_path)
return data_file, load_metadata(metadata_file)
@contextlib.contextmanager
def write_file(at: pathlib.Path):
with at.open("xb") as f:
try:
yield f
except: # NOQA
at.unlink()
raise
@app.route("/")
def index():
return flask.Response(
"Welcome to XMPP HTTP Upload. State your business.",
mimetype="text/plain",
)
def stream_file(src, dest, nbytes):
while nbytes > 0:
data = src.read(min(nbytes, 4096))
if not data:
break
dest.write(data)
nbytes -= len(data)
if nbytes > 0:
raise EOFError
@app.route("/<path:path>", methods=["PUT"])
def put_file(path):
try:
dest_path = sanitized_join(
path,
pathlib.Path(app.config["DATA_ROOT"]),
)
except ValueError:
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
verification_key = flask.request.args.get("v", "")
length = int(flask.request.headers.get("Content-Length", 0))
hmac_input = "{} {}".format(path, length).encode("utf-8")
key = app.config["SECRET_KEY"]
mac = hmac.new(key, hmac_input, hashlib.sha256)
digest = mac.hexdigest()
if not hmac.compare_digest(digest, verification_key):
return flask.Response(
"Invalid verification key",
403,
mimetype="text/plain",
)
content_type = flask.request.headers.get(
"Content-Type",
"application/octet-stream",
)
dest_path.parent.mkdir(parents=True, exist_ok=True, mode=0o770)
data_file, metadata_file = get_paths(dest_path)
try:
with write_file(data_file) as fout:
stream_file(flask.request.stream, fout, length)
with metadata_file.open("x") as f:
json.dump(
{
"headers": {"Content-Type": content_type},
},
f,
)
except EOFError:
return flask.Response(
"Bad Request",
400,
mimetype="text/plain",
)
except OSError as exc:
if exc.errno == errno.EEXIST:
return flask.Response(
"Conflict",
409,
mimetype="text/plain",
)
raise
return flask.Response(
"Created",
201,
mimetype="text/plain",
)
def generate_headers(response_headers, metadata_headers):
for key, value in metadata_headers.items():
response_headers[key] = value
content_type = metadata_headers["Content-Type"]
for mimetype_glob in app.config.get("NON_ATTACHMENT_MIME_TYPES", []):
if fnmatch.fnmatch(content_type, mimetype_glob):
break
else:
response_headers["Content-Disposition"] = "attachment"
response_headers["X-Content-Type-Options"] = "nosniff"
response_headers["X-Frame-Options"] = "DENY"
response_headers["Content-Security-Policy"] = "default-src 'none'; frame-ancestors 'none'; sandbox"
@app.route("/<path:path>", methods=["HEAD"])
def head_file(path):
try:
data_file, metadata = get_info(
path,
pathlib.Path(app.config["DATA_ROOT"])
)
stat = data_file.stat()
except (OSError, ValueError):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
response = flask.Response()
response.headers["Content-Length"] = str(stat.st_size)
generate_headers(
response.headers,
metadata["headers"],
)
return response
@app.route("/<path:path>", methods=["GET"])
def get_file(path):
try:
data_file, metadata = get_info(
path,
pathlib.Path(app.config["DATA_ROOT"])
)
except (OSError, ValueError):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
response = flask.make_response(flask.send_file(
str(data_file),
))
generate_headers(
response.headers,
metadata["headers"],
)
return response
| ########################################################################
# File name: xhu.py
# This file is part of: xmpp-http-upload
#
# LICENSE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import contextlib
import errno
import fnmatch
import json
import hashlib
import hmac
import pathlib
import typing
import flask
import werkzeug.exceptions
app = flask.Flask("xmpp-http-upload")
app.config.from_envvar("XMPP_HTTP_UPLOAD_CONFIG")
application = app
if app.config['ENABLE_CORS']:
from flask_cors import CORS
CORS(app)
def get_paths(root: str, sub_path: str) \
-> typing.Tuple[pathlib.Path, pathlib.Path]:
base_path = flask.safe_join(root, sub_path)
data_file = pathlib.Path(base_path + ".data")
metadata_file = pathlib.Path(base_path + ".meta")
return data_file, metadata_file
def load_metadata(metadata_file):
with metadata_file.open("r") as f:
return json.load(f)
def get_info(path: str) -> typing.Tuple[
pathlib.Path,
dict]:
data_file, metadata_file = get_paths(app.config["DATA_ROOT"], path)
return data_file, load_metadata(metadata_file)
@contextlib.contextmanager
def write_file(at: pathlib.Path):
with at.open("xb") as f:
try:
yield f
except: # NOQA
at.unlink()
raise
@app.route("/")
def index():
return flask.Response(
"Welcome to XMPP HTTP Upload. State your business.",
mimetype="text/plain",
)
def stream_file(src, dest, nbytes):
while nbytes > 0:
data = src.read(min(nbytes, 4096))
if not data:
break
dest.write(data)
nbytes -= len(data)
if nbytes > 0:
raise EOFError
@app.route("/<path:path>", methods=["PUT"])
def put_file(path):
try:
data_file, metadata_file = get_paths(app.config["DATA_ROOT"], path)
except werkzeug.exceptions.NotFound:
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
verification_key = flask.request.args.get("v", "")
length = int(flask.request.headers.get("Content-Length", 0))
hmac_input = "{} {}".format(path, length).encode("utf-8")
key = app.config["SECRET_KEY"]
mac = hmac.new(key, hmac_input, hashlib.sha256)
digest = mac.hexdigest()
if not hmac.compare_digest(digest, verification_key):
return flask.Response(
"Invalid verification key",
403,
mimetype="text/plain",
)
content_type = flask.request.headers.get(
"Content-Type",
"application/octet-stream",
)
data_file.parent.mkdir(parents=True, exist_ok=True, mode=0o770)
try:
with write_file(data_file) as fout:
stream_file(flask.request.stream, fout, length)
with metadata_file.open("x") as f:
json.dump(
{
"headers": {"Content-Type": content_type},
},
f,
)
except EOFError:
return flask.Response(
"Bad Request",
400,
mimetype="text/plain",
)
except OSError as exc:
if exc.errno == errno.EEXIST:
return flask.Response(
"Conflict",
409,
mimetype="text/plain",
)
raise
return flask.Response(
"Created",
201,
mimetype="text/plain",
)
def generate_headers(response_headers, metadata_headers):
for key, value in metadata_headers.items():
response_headers[key] = value
content_type = metadata_headers["Content-Type"]
for mimetype_glob in app.config.get("NON_ATTACHMENT_MIME_TYPES", []):
if fnmatch.fnmatch(content_type, mimetype_glob):
break
else:
response_headers["Content-Disposition"] = "attachment"
response_headers["X-Content-Type-Options"] = "nosniff"
response_headers["X-Frame-Options"] = "DENY"
response_headers["Content-Security-Policy"] = "default-src 'none'; frame-ancestors 'none'; sandbox"
@app.route("/<path:path>", methods=["HEAD"])
def head_file(path):
try:
data_file, metadata = get_info(path)
stat = data_file.stat()
except (OSError, werkzeug.exceptions.NotFound):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
response = flask.Response()
response.headers["Content-Length"] = str(stat.st_size)
generate_headers(
response.headers,
metadata["headers"],
)
return response
@app.route("/<path:path>", methods=["GET"])
def get_file(path):
try:
data_file, metadata = get_info(path)
except (OSError, werkzeug.exceptions.NotFound):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
response = flask.make_response(flask.send_file(
str(data_file),
))
generate_headers(
response.headers,
metadata["headers"],
)
return response
| path_disclosure | {
"code": [
"def sanitized_join(path: str, root: pathlib.Path) -> pathlib.Path:",
" result = (root / path).absolute()",
" if not str(result).startswith(str(root) + \"/\"):",
" raise ValueError(\"resulting path is outside root\")",
" return result",
"def get_paths(base_path: pathlib.Path):",
" data_file = pathlib.Path(str(base_path) + \".data\")",
" metadata_file = pathlib.Path(str(base_path) + \".meta\")",
"def get_info(path: str, root: pathlib.Path) -> typing.Tuple[",
" dest_path = sanitized_join(",
" path,",
" pathlib.Path(app.config[\"DATA_ROOT\"]),",
" )",
" data_file, metadata_file = get_paths(dest_path)",
" dest_path = sanitized_join(",
" path,",
" pathlib.Path(app.config[\"DATA_ROOT\"]),",
" )",
" except ValueError:",
" dest_path.parent.mkdir(parents=True, exist_ok=True, mode=0o770)",
" data_file, metadata_file = get_paths(dest_path)",
" data_file, metadata = get_info(",
" path,",
" pathlib.Path(app.config[\"DATA_ROOT\"])",
" )",
" except (OSError, ValueError):",
" data_file, metadata = get_info(",
" path,",
" pathlib.Path(app.config[\"DATA_ROOT\"])",
" )",
" except (OSError, ValueError):"
],
"line_no": [
42,
43,
44,
45,
46,
49,
50,
51,
61,
64,
65,
66,
67,
69,
107,
108,
109,
110,
111,
137,
138,
192,
193,
194,
195,
198,
217,
218,
219,
220,
221
]
} | {
"code": [
"def get_paths(root: str, sub_path: str) \\",
" -> typing.Tuple[pathlib.Path, pathlib.Path]:",
" base_path = flask.safe_join(root, sub_path)",
" data_file = pathlib.Path(base_path + \".data\")",
"def get_info(path: str) -> typing.Tuple[",
" data_file, metadata_file = get_paths(app.config[\"DATA_ROOT\"], path)",
" data_file.parent.mkdir(parents=True, exist_ok=True, mode=0o770)",
" data_file, metadata = get_info(path)",
" except (OSError, werkzeug.exceptions.NotFound):",
" data_file, metadata = get_info(path)",
" except (OSError, werkzeug.exceptions.NotFound):"
],
"line_no": [
43,
44,
45,
46,
57,
98,
125,
179,
182,
201,
202
]
} |
import contextlib
import errno
import fnmatch
import json
import hashlib
import hmac
import .pathlib
import typing
import flask
VAR_0 = flask.Flask("xmpp-http-upload")
VAR_0.config.from_envvar("XMPP_HTTP_UPLOAD_CONFIG")
VAR_1 = VAR_0
if VAR_0.config['ENABLE_CORS']:
from flask_cors import CORS
CORS(VAR_0)
def FUNC_0(VAR_2: str, VAR_3: pathlib.Path) -> pathlib.Path:
VAR_12 = (VAR_3 / VAR_2).absolute()
if not str(VAR_12).startswith(str(VAR_3) + "/"):
raise ValueError("resulting VAR_2 is outside root")
return VAR_12
def FUNC_1(VAR_4: pathlib.Path):
VAR_13 = pathlib.Path(str(VAR_4) + ".data")
VAR_5 = pathlib.Path(str(VAR_4) + ".meta")
return VAR_13, VAR_5
def FUNC_2(VAR_5):
with VAR_5.open("r") as f:
return json.load(f)
def FUNC_3(VAR_2: str, VAR_3: pathlib.Path) -> typing.Tuple[
pathlib.Path,
dict]:
VAR_14 = FUNC_0(
VAR_2,
pathlib.Path(VAR_0.config["DATA_ROOT"]),
)
VAR_13, VAR_5 = FUNC_1(VAR_14)
return VAR_13, FUNC_2(VAR_5)
@contextlib.contextmanager
def FUNC_4(VAR_6: pathlib.Path):
with VAR_6.open("xb") as f:
try:
yield f
except: # NOQA
VAR_6.unlink()
raise
@VAR_0.route("/")
def FUNC_5():
return flask.Response(
"Welcome to XMPP HTTP Upload. State your business.",
mimetype="text/plain",
)
def FUNC_6(VAR_7, VAR_8, VAR_9):
while VAR_9 > 0:
VAR_23 = VAR_7.read(min(VAR_9, 4096))
if not VAR_23:
break
VAR_8.write(VAR_23)
VAR_9 -= len(VAR_23)
if VAR_9 > 0:
raise EOFError
@VAR_0.route("/<VAR_2:path>", methods=["PUT"])
def FUNC_7(VAR_2):
try:
VAR_14 = FUNC_0(
VAR_2,
pathlib.Path(VAR_0.config["DATA_ROOT"]),
)
except ValueError:
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_15 = flask.request.args.get("v", "")
VAR_16 = int(flask.request.headers.get("Content-Length", 0))
VAR_17 = "{} {}".format(VAR_2, VAR_16).encode("utf-8")
VAR_18 = VAR_0.config["SECRET_KEY"]
VAR_19 = hmac.new(VAR_18, VAR_17, hashlib.sha256)
VAR_20 = VAR_19.hexdigest()
if not hmac.compare_digest(VAR_20, VAR_15):
return flask.Response(
"Invalid verification key",
403,
mimetype="text/plain",
)
VAR_21 = flask.request.headers.get(
"Content-Type",
"application/octet-stream",
)
VAR_14.parent.mkdir(parents=True, exist_ok=True, mode=0o770)
VAR_13, VAR_5 = FUNC_1(VAR_14)
try:
with FUNC_4(VAR_13) as fout:
FUNC_6(flask.request.stream, fout, VAR_16)
with VAR_5.open("x") as f:
json.dump(
{
"headers": {"Content-Type": VAR_21},
},
f,
)
except EOFError:
return flask.Response(
"Bad Request",
400,
mimetype="text/plain",
)
except OSError as exc:
if exc.errno == errno.EEXIST:
return flask.Response(
"Conflict",
409,
mimetype="text/plain",
)
raise
return flask.Response(
"Created",
201,
mimetype="text/plain",
)
def FUNC_8(VAR_10, VAR_11):
for VAR_18, value in VAR_11.items():
VAR_10[VAR_18] = value
VAR_21 = VAR_11["Content-Type"]
for mimetype_glob in VAR_0.config.get("NON_ATTACHMENT_MIME_TYPES", []):
if fnmatch.fnmatch(VAR_21, mimetype_glob):
break
else:
VAR_10["Content-Disposition"] = "attachment"
VAR_10["X-Content-Type-Options"] = "nosniff"
VAR_10["X-Frame-Options"] = "DENY"
VAR_10["Content-Security-Policy"] = "default-VAR_7 'none'; frame-ancestors 'none'; sandbox"
@VAR_0.route("/<VAR_2:path>", methods=["HEAD"])
def FUNC_9(VAR_2):
try:
VAR_13, VAR_24 = FUNC_3(
VAR_2,
pathlib.Path(VAR_0.config["DATA_ROOT"])
)
VAR_25 = VAR_13.stat()
except (OSError, ValueError):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_22 = flask.Response()
VAR_22.headers["Content-Length"] = str(VAR_25.st_size)
FUNC_8(
VAR_22.headers,
VAR_24["headers"],
)
return VAR_22
@VAR_0.route("/<VAR_2:path>", methods=["GET"])
def FUNC_10(VAR_2):
try:
VAR_13, VAR_24 = FUNC_3(
VAR_2,
pathlib.Path(VAR_0.config["DATA_ROOT"])
)
except (OSError, ValueError):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_22 = flask.make_response(flask.send_file(
str(VAR_13),
))
FUNC_8(
VAR_22.headers,
VAR_24["headers"],
)
return VAR_22
|
import contextlib
import errno
import fnmatch
import json
import hashlib
import hmac
import .pathlib
import typing
import flask
import werkzeug.exceptions
VAR_0 = flask.Flask("xmpp-http-upload")
VAR_0.config.from_envvar("XMPP_HTTP_UPLOAD_CONFIG")
VAR_1 = VAR_0
if VAR_0.config['ENABLE_CORS']:
from flask_cors import CORS
CORS(VAR_0)
def FUNC_0(VAR_2: str, VAR_3: str) \
-> typing.Tuple[pathlib.Path, pathlib.Path]:
VAR_12 = flask.safe_join(VAR_2, VAR_3)
VAR_13 = pathlib.Path(VAR_12 + ".data")
VAR_4 = pathlib.Path(VAR_12 + ".meta")
return VAR_13, VAR_4
def FUNC_1(VAR_4):
with VAR_4.open("r") as f:
return json.load(f)
def FUNC_2(VAR_5: str) -> typing.Tuple[
pathlib.Path,
dict]:
VAR_13, VAR_4 = FUNC_0(VAR_0.config["DATA_ROOT"], VAR_5)
return VAR_13, FUNC_1(VAR_4)
@contextlib.contextmanager
def FUNC_3(VAR_6: pathlib.Path):
with VAR_6.open("xb") as f:
try:
yield f
except: # NOQA
VAR_6.unlink()
raise
@VAR_0.route("/")
def FUNC_4():
return flask.Response(
"Welcome to XMPP HTTP Upload. State your business.",
mimetype="text/plain",
)
def FUNC_5(VAR_7, VAR_8, VAR_9):
while VAR_9 > 0:
VAR_22 = VAR_7.read(min(VAR_9, 4096))
if not VAR_22:
break
VAR_8.write(VAR_22)
VAR_9 -= len(VAR_22)
if VAR_9 > 0:
raise EOFError
@VAR_0.route("/<VAR_5:path>", methods=["PUT"])
def FUNC_6(VAR_5):
try:
VAR_13, VAR_4 = FUNC_0(VAR_0.config["DATA_ROOT"], VAR_5)
except werkzeug.exceptions.NotFound:
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_14 = flask.request.args.get("v", "")
VAR_15 = int(flask.request.headers.get("Content-Length", 0))
VAR_16 = "{} {}".format(VAR_5, VAR_15).encode("utf-8")
VAR_17 = VAR_0.config["SECRET_KEY"]
VAR_18 = hmac.new(VAR_17, VAR_16, hashlib.sha256)
VAR_19 = VAR_18.hexdigest()
if not hmac.compare_digest(VAR_19, VAR_14):
return flask.Response(
"Invalid verification key",
403,
mimetype="text/plain",
)
VAR_20 = flask.request.headers.get(
"Content-Type",
"application/octet-stream",
)
VAR_13.parent.mkdir(parents=True, exist_ok=True, mode=0o770)
try:
with FUNC_3(VAR_13) as fout:
FUNC_5(flask.request.stream, fout, VAR_15)
with VAR_4.open("x") as f:
json.dump(
{
"headers": {"Content-Type": VAR_20},
},
f,
)
except EOFError:
return flask.Response(
"Bad Request",
400,
mimetype="text/plain",
)
except OSError as exc:
if exc.errno == errno.EEXIST:
return flask.Response(
"Conflict",
409,
mimetype="text/plain",
)
raise
return flask.Response(
"Created",
201,
mimetype="text/plain",
)
def FUNC_7(VAR_10, VAR_11):
for VAR_17, value in VAR_11.items():
VAR_10[VAR_17] = value
VAR_20 = VAR_11["Content-Type"]
for mimetype_glob in VAR_0.config.get("NON_ATTACHMENT_MIME_TYPES", []):
if fnmatch.fnmatch(VAR_20, mimetype_glob):
break
else:
VAR_10["Content-Disposition"] = "attachment"
VAR_10["X-Content-Type-Options"] = "nosniff"
VAR_10["X-Frame-Options"] = "DENY"
VAR_10["Content-Security-Policy"] = "default-VAR_7 'none'; frame-ancestors 'none'; sandbox"
@VAR_0.route("/<VAR_5:path>", methods=["HEAD"])
def FUNC_8(VAR_5):
try:
VAR_13, VAR_23 = FUNC_2(VAR_5)
VAR_24 = VAR_13.stat()
except (OSError, werkzeug.exceptions.NotFound):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_21 = flask.Response()
VAR_21.headers["Content-Length"] = str(VAR_24.st_size)
FUNC_7(
VAR_21.headers,
VAR_23["headers"],
)
return VAR_21
@VAR_0.route("/<VAR_5:path>", methods=["GET"])
def FUNC_9(VAR_5):
try:
VAR_13, VAR_23 = FUNC_2(VAR_5)
except (OSError, werkzeug.exceptions.NotFound):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_21 = flask.make_response(flask.send_file(
str(VAR_13),
))
FUNC_7(
VAR_21.headers,
VAR_23["headers"],
)
return VAR_21
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
30,
32,
36,
40,
41,
47,
48,
52,
54,
55,
59,
60,
68,
70,
72,
73,
82,
83,
90,
91,
99,
102,
103,
117,
124,
131,
136,
139,
143,
165,
171,
172,
176,
183,
187,
188,
196,
204,
212,
213,
227,
236
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
30,
33,
37,
41,
42,
48,
50,
51,
55,
56,
61,
63,
64,
73,
74,
81,
82,
90,
93,
94,
105,
112,
119,
124,
126,
130,
152,
158,
159,
163,
170,
174,
175,
180,
188,
196,
197,
208,
217
] |
1CWE-79
| # -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import path
from synapse.config import ConfigError
from ._base import Config
DEFAULT_CONFIG = """\
# User Consent configuration
#
# for detailed instructions, see
# https://github.com/matrix-org/synapse/blob/master/docs/consent_tracking.md
#
# Parts of this section are required if enabling the 'consent' resource under
# 'listeners', in particular 'template_dir' and 'version'.
#
# 'template_dir' gives the location of the templates for the HTML forms.
# This directory should contain one subdirectory per language (eg, 'en', 'fr'),
# and each language directory should contain the policy document (named as
# '<version>.html') and a success page (success.html).
#
# 'version' specifies the 'current' version of the policy document. It defines
# the version to be served by the consent resource if there is no 'v'
# parameter.
#
# 'server_notice_content', if enabled, will send a user a "Server Notice"
# asking them to consent to the privacy policy. The 'server_notices' section
# must also be configured for this to work. Notices will *not* be sent to
# guest users unless 'send_server_notice_to_guests' is set to true.
#
# 'block_events_error', if set, will block any attempts to send events
# until the user consents to the privacy policy. The value of the setting is
# used as the text of the error.
#
# 'require_at_registration', if enabled, will add a step to the registration
# process, similar to how captcha works. Users will be required to accept the
# policy before their account is created.
#
# 'policy_name' is the display name of the policy users will see when registering
# for an account. Has no effect unless `require_at_registration` is enabled.
# Defaults to "Privacy Policy".
#
#user_consent:
# template_dir: res/templates/privacy
# version: 1.0
# server_notice_content:
# msgtype: m.text
# body: >-
# To continue using this homeserver you must review and agree to the
# terms and conditions at %(consent_uri)s
# send_server_notice_to_guests: true
# block_events_error: >-
# To continue using this homeserver you must review and agree to the
# terms and conditions at %(consent_uri)s
# require_at_registration: false
# policy_name: Privacy Policy
#
"""
class ConsentConfig(Config):
section = "consent"
def __init__(self, *args):
super().__init__(*args)
self.user_consent_version = None
self.user_consent_template_dir = None
self.user_consent_server_notice_content = None
self.user_consent_server_notice_to_guests = False
self.block_events_without_consent_error = None
self.user_consent_at_registration = False
self.user_consent_policy_name = "Privacy Policy"
def read_config(self, config, **kwargs):
consent_config = config.get("user_consent")
self.terms_template = self.read_templates(["terms.html"], autoescape=True)[0]
if consent_config is None:
return
self.user_consent_version = str(consent_config["version"])
self.user_consent_template_dir = self.abspath(consent_config["template_dir"])
if not path.isdir(self.user_consent_template_dir):
raise ConfigError(
"Could not find template directory '%s'"
% (self.user_consent_template_dir,)
)
self.user_consent_server_notice_content = consent_config.get(
"server_notice_content"
)
self.block_events_without_consent_error = consent_config.get(
"block_events_error"
)
self.user_consent_server_notice_to_guests = bool(
consent_config.get("send_server_notice_to_guests", False)
)
self.user_consent_at_registration = bool(
consent_config.get("require_at_registration", False)
)
self.user_consent_policy_name = consent_config.get(
"policy_name", "Privacy Policy"
)
def generate_config_section(self, **kwargs):
return DEFAULT_CONFIG
| # -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import path
from synapse.config import ConfigError
from ._base import Config
DEFAULT_CONFIG = """\
# User Consent configuration
#
# for detailed instructions, see
# https://github.com/matrix-org/synapse/blob/master/docs/consent_tracking.md
#
# Parts of this section are required if enabling the 'consent' resource under
# 'listeners', in particular 'template_dir' and 'version'.
#
# 'template_dir' gives the location of the templates for the HTML forms.
# This directory should contain one subdirectory per language (eg, 'en', 'fr'),
# and each language directory should contain the policy document (named as
# '<version>.html') and a success page (success.html).
#
# 'version' specifies the 'current' version of the policy document. It defines
# the version to be served by the consent resource if there is no 'v'
# parameter.
#
# 'server_notice_content', if enabled, will send a user a "Server Notice"
# asking them to consent to the privacy policy. The 'server_notices' section
# must also be configured for this to work. Notices will *not* be sent to
# guest users unless 'send_server_notice_to_guests' is set to true.
#
# 'block_events_error', if set, will block any attempts to send events
# until the user consents to the privacy policy. The value of the setting is
# used as the text of the error.
#
# 'require_at_registration', if enabled, will add a step to the registration
# process, similar to how captcha works. Users will be required to accept the
# policy before their account is created.
#
# 'policy_name' is the display name of the policy users will see when registering
# for an account. Has no effect unless `require_at_registration` is enabled.
# Defaults to "Privacy Policy".
#
#user_consent:
# template_dir: res/templates/privacy
# version: 1.0
# server_notice_content:
# msgtype: m.text
# body: >-
# To continue using this homeserver you must review and agree to the
# terms and conditions at %(consent_uri)s
# send_server_notice_to_guests: true
# block_events_error: >-
# To continue using this homeserver you must review and agree to the
# terms and conditions at %(consent_uri)s
# require_at_registration: false
# policy_name: Privacy Policy
#
"""
class ConsentConfig(Config):
section = "consent"
def __init__(self, *args):
super().__init__(*args)
self.user_consent_version = None
self.user_consent_template_dir = None
self.user_consent_server_notice_content = None
self.user_consent_server_notice_to_guests = False
self.block_events_without_consent_error = None
self.user_consent_at_registration = False
self.user_consent_policy_name = "Privacy Policy"
def read_config(self, config, **kwargs):
consent_config = config.get("user_consent")
self.terms_template = self.read_template("terms.html")
if consent_config is None:
return
self.user_consent_version = str(consent_config["version"])
self.user_consent_template_dir = self.abspath(consent_config["template_dir"])
if not path.isdir(self.user_consent_template_dir):
raise ConfigError(
"Could not find template directory '%s'"
% (self.user_consent_template_dir,)
)
self.user_consent_server_notice_content = consent_config.get(
"server_notice_content"
)
self.block_events_without_consent_error = consent_config.get(
"block_events_error"
)
self.user_consent_server_notice_to_guests = bool(
consent_config.get("send_server_notice_to_guests", False)
)
self.user_consent_at_registration = bool(
consent_config.get("require_at_registration", False)
)
self.user_consent_policy_name = consent_config.get(
"policy_name", "Privacy Policy"
)
def generate_config_section(self, **kwargs):
return DEFAULT_CONFIG
| xss | {
"code": [
" self.terms_template = self.read_templates([\"terms.html\"], autoescape=True)[0]"
],
"line_no": [
92
]
} | {
"code": [
" self.terms_template = self.read_template(\"terms.html\")"
],
"line_no": [
92
]
} |
from os import path
from synapse.config import ConfigError
from ._base import Config
VAR_0 = """\
"""
class CLASS_0(Config):
VAR_1 = "consent"
def __init__(self, *VAR_2):
super().__init__(*VAR_2)
self.user_consent_version = None
self.user_consent_template_dir = None
self.user_consent_server_notice_content = None
self.user_consent_server_notice_to_guests = False
self.block_events_without_consent_error = None
self.user_consent_at_registration = False
self.user_consent_policy_name = "Privacy Policy"
def FUNC_0(self, VAR_3, **VAR_4):
VAR_5 = VAR_3.get("user_consent")
self.terms_template = self.read_templates(["terms.html"], autoescape=True)[0]
if VAR_5 is None:
return
self.user_consent_version = str(VAR_5["version"])
self.user_consent_template_dir = self.abspath(VAR_5["template_dir"])
if not path.isdir(self.user_consent_template_dir):
raise ConfigError(
"Could not find template directory '%s'"
% (self.user_consent_template_dir,)
)
self.user_consent_server_notice_content = VAR_5.get(
"server_notice_content"
)
self.block_events_without_consent_error = VAR_5.get(
"block_events_error"
)
self.user_consent_server_notice_to_guests = bool(
VAR_5.get("send_server_notice_to_guests", False)
)
self.user_consent_at_registration = bool(
VAR_5.get("require_at_registration", False)
)
self.user_consent_policy_name = VAR_5.get(
"policy_name", "Privacy Policy"
)
def FUNC_1(self, **VAR_4):
return VAR_0
|
from os import path
from synapse.config import ConfigError
from ._base import Config
VAR_0 = """\
"""
class CLASS_0(Config):
VAR_1 = "consent"
def __init__(self, *VAR_2):
super().__init__(*VAR_2)
self.user_consent_version = None
self.user_consent_template_dir = None
self.user_consent_server_notice_content = None
self.user_consent_server_notice_to_guests = False
self.block_events_without_consent_error = None
self.user_consent_at_registration = False
self.user_consent_policy_name = "Privacy Policy"
def FUNC_0(self, VAR_3, **VAR_4):
VAR_5 = VAR_3.get("user_consent")
self.terms_template = self.read_template("terms.html")
if VAR_5 is None:
return
self.user_consent_version = str(VAR_5["version"])
self.user_consent_template_dir = self.abspath(VAR_5["template_dir"])
if not path.isdir(self.user_consent_template_dir):
raise ConfigError(
"Could not find template directory '%s'"
% (self.user_consent_template_dir,)
)
self.user_consent_server_notice_content = VAR_5.get(
"server_notice_content"
)
self.block_events_without_consent_error = VAR_5.get(
"block_events_error"
)
self.user_consent_server_notice_to_guests = bool(
VAR_5.get("send_server_notice_to_guests", False)
)
self.user_consent_at_registration = bool(
VAR_5.get("require_at_registration", False)
)
self.user_consent_policy_name = VAR_5.get(
"policy_name", "Privacy Policy"
)
def FUNC_1(self, **VAR_4):
return VAR_0
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
19,
21,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66,
67,
68,
69,
70,
71,
73,
74,
76,
78,
81,
89,
93,
118,
121
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
19,
21,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66,
67,
68,
69,
70,
71,
73,
74,
76,
78,
81,
89,
93,
118,
121
] |
1CWE-79
| """A cleanup tool for HTML.
Removes unwanted tags and content. See the `Cleaner` class for
details.
"""
import re
import copy
try:
from urlparse import urlsplit
except ImportError:
# Python 3
from urllib.parse import urlsplit
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
unichr
except NameError:
# Python 3
unichr = chr
try:
unicode
except NameError:
# Python 3
unicode = str
try:
bytes
except NameError:
# Python < 2.6
bytes = str
try:
basestring
except NameError:
basestring = (str, bytes)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
# Look at http://code.sixapart.com/trac/livejournal/browser/trunk/cgi-bin/cleanhtml.pl
# Particularly the CSS cleaning; most of the tag cleaning is integrated now
# I have multiple kinds of schemes searched; but should schemes be
# whitelisted instead?
# max height?
# remove images? Also in CSS? background attribute?
# Some way to whitelist object, iframe, etc (e.g., if you want to
# allow *just* embedded YouTube movies)
# Log what was deleted and why?
# style="behavior: ..." might be bad in IE?
# Should we have something for just <meta http-equiv>? That's the worst of the
# metas.
# UTF-7 detections? Example:
# <HEAD><META HTTP-EQUIV="CONTENT-TYPE" CONTENT="text/html; charset=UTF-7"> </HEAD>+ADw-SCRIPT+AD4-alert('XSS');+ADw-/SCRIPT+AD4-
# you don't always have to have the charset set, if the page has no charset
# and there's UTF7-like code in it.
# Look at these tests: http://htmlpurifier.org/live/smoketests/xssAttacks.php
# This is an IE-specific construct you can have in a stylesheet to
# run some Javascript:
_css_javascript_re = re.compile(
r'expression\s*\(.*?\)', re.S|re.I)
# Do I have to worry about @\nimport?
_css_import_re = re.compile(
r'@\s*import', re.I)
# All kinds of schemes besides just javascript: that can cause
# execution:
_is_image_dataurl = re.compile(
r'^data:image/.+;base64', re.I).search
_is_possibly_malicious_scheme = re.compile(
r'(?:javascript|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def _is_javascript_scheme(s):
if _is_image_dataurl(s):
return None
return _is_possibly_malicious_scheme(s)
_substitute_whitespace = re.compile(r'[\s\x00-\x08\x0B\x0C\x0E-\x19]+').sub
# FIXME: should data: be blocked?
# FIXME: check against: http://msdn2.microsoft.com/en-us/library/ms537512.aspx
_conditional_comment_re = re.compile(
r'\[if[\s\n\r]+.*?][\s\n\r]*>', re.I|re.S)
_find_styled_elements = etree.XPath(
"descendant-or-self::*[@style]")
_find_external_links = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class Cleaner(object):
"""
Instances cleans the document of each of the possible offending
elements. The cleaning is controlled by attributes; you can
override attributes in a subclass, or set them in the constructor.
``scripts``:
Removes any ``<script>`` tags.
``javascript``:
Removes any Javascript, like an ``onclick`` attribute. Also removes stylesheets
as they could contain Javascript.
``comments``:
Removes any comments.
``style``:
Removes any style tags.
``inline_style``
Removes any style attributes. Defaults to the value of the ``style`` option.
``links``:
Removes any ``<link>`` tags
``meta``:
Removes any ``<meta>`` tags
``page_structure``:
Structural parts of a page: ``<head>``, ``<html>``, ``<title>``.
``processing_instructions``:
Removes any processing instructions.
``embedded``:
Removes any embedded objects (flash, iframes)
``frames``:
Removes any frame-related tags
``forms``:
Removes any form tags
``annoying_tags``:
Tags that aren't *wrong*, but are annoying. ``<blink>`` and ``<marquee>``
``remove_tags``:
A list of tags to remove. Only the tags will be removed,
their content will get pulled up into the parent tag.
``kill_tags``:
A list of tags to kill. Killing also removes the tag's content,
i.e. the whole subtree, not just the tag itself.
``allow_tags``:
A list of tags to include (default include all).
``remove_unknown_tags``:
Remove any tags that aren't standard parts of HTML.
``safe_attrs_only``:
If true, only include 'safe' attributes (specifically the list
from the feedparser HTML sanitisation web site).
``safe_attrs``:
A set of attribute names to override the default list of attributes
considered 'safe' (when safe_attrs_only=True).
``add_nofollow``:
If true, then any <a> tags will have ``rel="nofollow"`` added to them.
``host_whitelist``:
A list or set of hosts that you can use for embedded content
(for content like ``<object>``, ``<link rel="stylesheet">``, etc).
You can also implement/override the method
``allow_embedded_url(el, url)`` or ``allow_element(el)`` to
implement more complex rules for what can be embedded.
Anything that passes this test will be shown, regardless of
the value of (for instance) ``embedded``.
Note that this parameter might not work as intended if you do not
make the links absolute before doing the cleaning.
Note that you may also need to set ``whitelist_tags``.
``whitelist_tags``:
A set of tags that can be included with ``host_whitelist``.
The default is ``iframe`` and ``embed``; you may wish to
include other tags like ``script``, or you may want to
implement ``allow_embedded_url`` for more control. Set to None to
include all tags.
This modifies the document *in place*.
"""
scripts = True
javascript = True
comments = True
style = False
inline_style = None
links = True
meta = True
page_structure = True
processing_instructions = True
embedded = True
frames = True
forms = True
annoying_tags = True
remove_tags = None
allow_tags = None
kill_tags = None
remove_unknown_tags = True
safe_attrs_only = True
safe_attrs = defs.safe_attrs
add_nofollow = False
host_whitelist = ()
whitelist_tags = set(['iframe', 'embed'])
def __init__(self, **kw):
for name, value in kw.items():
if not hasattr(self, name):
raise TypeError(
"Unknown parameter: %s=%r" % (name, value))
setattr(self, name, value)
if self.inline_style is None and 'inline_style' not in kw:
self.inline_style = self.style
# Used to lookup the primary URL for a given tag that is up for
# removal:
_tag_link_attrs = dict(
script='src',
link='href',
# From: http://java.sun.com/j2se/1.4.2/docs/guide/misc/applet.html
# From what I can tell, both attributes can contain a link:
applet=['code', 'object'],
iframe='src',
embed='src',
layer='src',
# FIXME: there doesn't really seem like a general way to figure out what
# links an <object> tag uses; links often go in <param> tags with values
# that we don't really know. You'd have to have knowledge about specific
# kinds of plugins (probably keyed off classid), and match against those.
##object=?,
# FIXME: not looking at the action currently, because it is more complex
# than than -- if you keep the form, you should keep the form controls.
##form='action',
a='href',
)
def __call__(self, doc):
"""
Cleans the document.
"""
if hasattr(doc, 'getroot'):
# ElementTree instance, instead of an element
doc = doc.getroot()
# convert XHTML to HTML
xhtml_to_html(doc)
# Normalize a case that IE treats <image> like <img>, and that
# can confuse either this step or later steps.
for el in doc.iter('image'):
el.tag = 'img'
if not self.comments:
# Of course, if we were going to kill comments anyway, we don't
# need to worry about this
self.kill_conditional_comments(doc)
kill_tags = set(self.kill_tags or ())
remove_tags = set(self.remove_tags or ())
allow_tags = set(self.allow_tags or ())
if self.scripts:
kill_tags.add('script')
if self.safe_attrs_only:
safe_attrs = set(self.safe_attrs)
for el in doc.iter(etree.Element):
attrib = el.attrib
for aname in attrib.keys():
if aname not in safe_attrs:
del attrib[aname]
if self.javascript:
if not (self.safe_attrs_only and
self.safe_attrs == defs.safe_attrs):
# safe_attrs handles events attributes itself
for el in doc.iter(etree.Element):
attrib = el.attrib
for aname in attrib.keys():
if aname.startswith('on'):
del attrib[aname]
doc.rewrite_links(self._remove_javascript_link,
resolve_base_href=False)
# If we're deleting style then we don't have to remove JS links
# from styles, otherwise...
if not self.inline_style:
for el in _find_styled_elements(doc):
old = el.get('style')
new = _css_javascript_re.sub('', old)
new = _css_import_re.sub('', new)
if self._has_sneaky_javascript(new):
# Something tricky is going on...
del el.attrib['style']
elif new != old:
el.set('style', new)
if not self.style:
for el in list(doc.iter('style')):
if el.get('type', '').lower().strip() == 'text/javascript':
el.drop_tree()
continue
old = el.text or ''
new = _css_javascript_re.sub('', old)
# The imported CSS can do anything; we just can't allow:
new = _css_import_re.sub('', old)
if self._has_sneaky_javascript(new):
# Something tricky is going on...
el.text = '/* deleted */'
elif new != old:
el.text = new
if self.comments or self.processing_instructions:
# FIXME: why either? I feel like there's some obscure reason
# because you can put PIs in comments...? But I've already
# forgotten it
kill_tags.add(etree.Comment)
if self.processing_instructions:
kill_tags.add(etree.ProcessingInstruction)
if self.style:
kill_tags.add('style')
if self.inline_style:
etree.strip_attributes(doc, 'style')
if self.links:
kill_tags.add('link')
elif self.style or self.javascript:
# We must get rid of included stylesheets if Javascript is not
# allowed, as you can put Javascript in them
for el in list(doc.iter('link')):
if 'stylesheet' in el.get('rel', '').lower():
# Note this kills alternate stylesheets as well
if not self.allow_element(el):
el.drop_tree()
if self.meta:
kill_tags.add('meta')
if self.page_structure:
remove_tags.update(('head', 'html', 'title'))
if self.embedded:
# FIXME: is <layer> really embedded?
# We should get rid of any <param> tags not inside <applet>;
# These are not really valid anyway.
for el in list(doc.iter('param')):
found_parent = False
parent = el.getparent()
while parent is not None and parent.tag not in ('applet', 'object'):
parent = parent.getparent()
if parent is None:
el.drop_tree()
kill_tags.update(('applet',))
# The alternate contents that are in an iframe are a good fallback:
remove_tags.update(('iframe', 'embed', 'layer', 'object', 'param'))
if self.frames:
# FIXME: ideally we should look at the frame links, but
# generally frames don't mix properly with an HTML
# fragment anyway.
kill_tags.update(defs.frame_tags)
if self.forms:
remove_tags.add('form')
kill_tags.update(('button', 'input', 'select', 'textarea'))
if self.annoying_tags:
remove_tags.update(('blink', 'marquee'))
_remove = []
_kill = []
for el in doc.iter():
if el.tag in kill_tags:
if self.allow_element(el):
continue
_kill.append(el)
elif el.tag in remove_tags:
if self.allow_element(el):
continue
_remove.append(el)
if _remove and _remove[0] == doc:
# We have to drop the parent-most tag, which we can't
# do. Instead we'll rewrite it:
el = _remove.pop(0)
el.tag = 'div'
el.attrib.clear()
elif _kill and _kill[0] == doc:
# We have to drop the parent-most element, which we can't
# do. Instead we'll clear it:
el = _kill.pop(0)
if el.tag != 'html':
el.tag = 'div'
el.clear()
_kill.reverse() # start with innermost tags
for el in _kill:
el.drop_tree()
for el in _remove:
el.drop_tag()
if self.remove_unknown_tags:
if allow_tags:
raise ValueError(
"It does not make sense to pass in both allow_tags and remove_unknown_tags")
allow_tags = set(defs.tags)
if allow_tags:
bad = []
for el in doc.iter():
if el.tag not in allow_tags:
bad.append(el)
if bad:
if bad[0] is doc:
el = bad.pop(0)
el.tag = 'div'
el.attrib.clear()
for el in bad:
el.drop_tag()
if self.add_nofollow:
for el in _find_external_links(doc):
if not self.allow_follow(el):
rel = el.get('rel')
if rel:
if ('nofollow' in rel
and ' nofollow ' in (' %s ' % rel)):
continue
rel = '%s nofollow' % rel
else:
rel = 'nofollow'
el.set('rel', rel)
def allow_follow(self, anchor):
"""
Override to suppress rel="nofollow" on some anchors.
"""
return False
def allow_element(self, el):
if el.tag not in self._tag_link_attrs:
return False
attr = self._tag_link_attrs[el.tag]
if isinstance(attr, (list, tuple)):
for one_attr in attr:
url = el.get(one_attr)
if not url:
return False
if not self.allow_embedded_url(el, url):
return False
return True
else:
url = el.get(attr)
if not url:
return False
return self.allow_embedded_url(el, url)
def allow_embedded_url(self, el, url):
if (self.whitelist_tags is not None
and el.tag not in self.whitelist_tags):
return False
scheme, netloc, path, query, fragment = urlsplit(url)
netloc = netloc.lower().split(':', 1)[0]
if scheme not in ('http', 'https'):
return False
if netloc in self.host_whitelist:
return True
return False
def kill_conditional_comments(self, doc):
"""
IE conditional comments basically embed HTML that the parser
doesn't normally see. We can't allow anything like that, so
we'll kill any comments that could be conditional.
"""
bad = []
self._kill_elements(
doc, lambda el: _conditional_comment_re.search(el.text),
etree.Comment)
def _kill_elements(self, doc, condition, iterate=None):
bad = []
for el in doc.iter(iterate):
if condition(el):
bad.append(el)
for el in bad:
el.drop_tree()
def _remove_javascript_link(self, link):
# links like "j a v a s c r i p t:" might be interpreted in IE
new = _substitute_whitespace('', link)
if _is_javascript_scheme(new):
# FIXME: should this be None to delete?
return ''
return link
_substitute_comments = re.compile(r'/\*.*?\*/', re.S).sub
def _has_sneaky_javascript(self, style):
"""
Depending on the browser, stuff like ``e x p r e s s i o n(...)``
can get interpreted, or ``expre/* stuff */ssion(...)``. This
checks for attempt to do stuff like this.
Typically the response will be to kill the entire style; if you
have just a bit of Javascript in the style another rule will catch
that and remove only the Javascript from the style; this catches
more sneaky attempts.
"""
style = self._substitute_comments('', style)
style = style.replace('\\', '')
style = _substitute_whitespace('', style)
style = style.lower()
if 'javascript:' in style:
return True
if 'expression(' in style:
return True
return False
def clean_html(self, html):
result_type = type(html)
if isinstance(html, basestring):
doc = fromstring(html)
else:
doc = copy.deepcopy(html)
self(doc)
return _transform_result(result_type, doc)
clean = Cleaner()
clean_html = clean.clean_html
############################################################
## Autolinking
############################################################
_link_regexes = [
re.compile(r'(?P<body>https?://(?P<host>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
# This is conservative, but autolinking can be a bit conservative:
re.compile(r'mailto:(?P<body>[a-z0-9._-]+@(?P<host>[a-z0-9_.-]+[a-z]))', re.I),
]
_avoid_elements = ['textarea', 'pre', 'code', 'head', 'select', 'a']
_avoid_hosts = [
re.compile(r'^localhost', re.I),
re.compile(r'\bexample\.(?:com|org|net)$', re.I),
re.compile(r'^127\.0\.0\.1$'),
]
_avoid_classes = ['nolink']
def autolink(el, link_regexes=_link_regexes,
avoid_elements=_avoid_elements,
avoid_hosts=_avoid_hosts,
avoid_classes=_avoid_classes):
"""
Turn any URLs into links.
It will search for links identified by the given regular
expressions (by default mailto and http(s) links).
It won't link text in an element in avoid_elements, or an element
with a class in avoid_classes. It won't link to anything with a
host that matches one of the regular expressions in avoid_hosts
(default localhost and 127.0.0.1).
If you pass in an element, the element's tail will not be
substituted, only the contents of the element.
"""
if el.tag in avoid_elements:
return
class_name = el.get('class')
if class_name:
class_name = class_name.split()
for match_class in avoid_classes:
if match_class in class_name:
return
for child in list(el):
autolink(child, link_regexes=link_regexes,
avoid_elements=avoid_elements,
avoid_hosts=avoid_hosts,
avoid_classes=avoid_classes)
if child.tail:
text, tail_children = _link_text(
child.tail, link_regexes, avoid_hosts, factory=el.makeelement)
if tail_children:
child.tail = text
index = el.index(child)
el[index+1:index+1] = tail_children
if el.text:
text, pre_children = _link_text(
el.text, link_regexes, avoid_hosts, factory=el.makeelement)
if pre_children:
el.text = text
el[:0] = pre_children
def _link_text(text, link_regexes, avoid_hosts, factory):
leading_text = ''
links = []
last_pos = 0
while 1:
best_match, best_pos = None, None
for regex in link_regexes:
regex_pos = last_pos
while 1:
match = regex.search(text, pos=regex_pos)
if match is None:
break
host = match.group('host')
for host_regex in avoid_hosts:
if host_regex.search(host):
regex_pos = match.end()
break
else:
break
if match is None:
continue
if best_pos is None or match.start() < best_pos:
best_match = match
best_pos = match.start()
if best_match is None:
# No more matches
if links:
assert not links[-1].tail
links[-1].tail = text
else:
assert not leading_text
leading_text = text
break
link = best_match.group(0)
end = best_match.end()
if link.endswith('.') or link.endswith(','):
# These punctuation marks shouldn't end a link
end -= 1
link = link[:-1]
prev_text = text[:best_match.start()]
if links:
assert not links[-1].tail
links[-1].tail = prev_text
else:
assert not leading_text
leading_text = prev_text
anchor = factory('a')
anchor.set('href', link)
body = best_match.group('body')
if not body:
body = link
if body.endswith('.') or body.endswith(','):
body = body[:-1]
anchor.text = body
links.append(anchor)
text = text[end:]
return leading_text, links
def autolink_html(html, *args, **kw):
result_type = type(html)
if isinstance(html, basestring):
doc = fromstring(html)
else:
doc = copy.deepcopy(html)
autolink(doc, *args, **kw)
return _transform_result(result_type, doc)
autolink_html.__doc__ = autolink.__doc__
############################################################
## Word wrapping
############################################################
_avoid_word_break_elements = ['pre', 'textarea', 'code']
_avoid_word_break_classes = ['nobreak']
def word_break(el, max_width=40,
avoid_elements=_avoid_word_break_elements,
avoid_classes=_avoid_word_break_classes,
break_character=unichr(0x200b)):
"""
Breaks any long words found in the body of the text (not attributes).
Doesn't effect any of the tags in avoid_elements, by default
``<textarea>`` and ``<pre>``
Breaks words by inserting ​, which is a unicode character
for Zero Width Space character. This generally takes up no space
in rendering, but does copy as a space, and in monospace contexts
usually takes up space.
See http://www.cs.tut.fi/~jkorpela/html/nobr.html for a discussion
"""
# Character suggestion of ​ comes from:
# http://www.cs.tut.fi/~jkorpela/html/nobr.html
if el.tag in _avoid_word_break_elements:
return
class_name = el.get('class')
if class_name:
dont_break = False
class_name = class_name.split()
for avoid in avoid_classes:
if avoid in class_name:
dont_break = True
break
if dont_break:
return
if el.text:
el.text = _break_text(el.text, max_width, break_character)
for child in el:
word_break(child, max_width=max_width,
avoid_elements=avoid_elements,
avoid_classes=avoid_classes,
break_character=break_character)
if child.tail:
child.tail = _break_text(child.tail, max_width, break_character)
def word_break_html(html, *args, **kw):
result_type = type(html)
doc = fromstring(html)
word_break(doc, *args, **kw)
return _transform_result(result_type, doc)
def _break_text(text, max_width, break_character):
words = text.split()
for word in words:
if len(word) > max_width:
replacement = _insert_break(word, max_width, break_character)
text = text.replace(word, replacement)
return text
_break_prefer_re = re.compile(r'[^a-z]', re.I)
def _insert_break(word, width, break_character):
orig_word = word
result = ''
while len(word) > width:
start = word[:width]
breaks = list(_break_prefer_re.finditer(start))
if breaks:
last_break = breaks[-1]
# Only walk back up to 10 characters to find a nice break:
if last_break.end() > width-10:
# FIXME: should the break character be at the end of the
# chunk, or the beginning of the next chunk?
start = word[:last_break.end()]
result += start + break_character
word = word[len(start):]
result += word
return result
| """A cleanup tool for HTML.
Removes unwanted tags and content. See the `Cleaner` class for
details.
"""
import re
import copy
try:
from urlparse import urlsplit
from urllib import unquote_plus
except ImportError:
# Python 3
from urllib.parse import urlsplit, unquote_plus
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
unichr
except NameError:
# Python 3
unichr = chr
try:
unicode
except NameError:
# Python 3
unicode = str
try:
bytes
except NameError:
# Python < 2.6
bytes = str
try:
basestring
except NameError:
basestring = (str, bytes)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
# Look at http://code.sixapart.com/trac/livejournal/browser/trunk/cgi-bin/cleanhtml.pl
# Particularly the CSS cleaning; most of the tag cleaning is integrated now
# I have multiple kinds of schemes searched; but should schemes be
# whitelisted instead?
# max height?
# remove images? Also in CSS? background attribute?
# Some way to whitelist object, iframe, etc (e.g., if you want to
# allow *just* embedded YouTube movies)
# Log what was deleted and why?
# style="behavior: ..." might be bad in IE?
# Should we have something for just <meta http-equiv>? That's the worst of the
# metas.
# UTF-7 detections? Example:
# <HEAD><META HTTP-EQUIV="CONTENT-TYPE" CONTENT="text/html; charset=UTF-7"> </HEAD>+ADw-SCRIPT+AD4-alert('XSS');+ADw-/SCRIPT+AD4-
# you don't always have to have the charset set, if the page has no charset
# and there's UTF7-like code in it.
# Look at these tests: http://htmlpurifier.org/live/smoketests/xssAttacks.php
# This is an IE-specific construct you can have in a stylesheet to
# run some Javascript:
_css_javascript_re = re.compile(
r'expression\s*\(.*?\)', re.S|re.I)
# Do I have to worry about @\nimport?
_css_import_re = re.compile(
r'@\s*import', re.I)
# All kinds of schemes besides just javascript: that can cause
# execution:
_is_image_dataurl = re.compile(
r'^data:image/.+;base64', re.I).search
_is_possibly_malicious_scheme = re.compile(
r'(?:javascript|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def _is_javascript_scheme(s):
if _is_image_dataurl(s):
return None
return _is_possibly_malicious_scheme(s)
_substitute_whitespace = re.compile(r'[\s\x00-\x08\x0B\x0C\x0E-\x19]+').sub
# FIXME: should data: be blocked?
# FIXME: check against: http://msdn2.microsoft.com/en-us/library/ms537512.aspx
_conditional_comment_re = re.compile(
r'\[if[\s\n\r]+.*?][\s\n\r]*>', re.I|re.S)
_find_styled_elements = etree.XPath(
"descendant-or-self::*[@style]")
_find_external_links = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class Cleaner(object):
"""
Instances cleans the document of each of the possible offending
elements. The cleaning is controlled by attributes; you can
override attributes in a subclass, or set them in the constructor.
``scripts``:
Removes any ``<script>`` tags.
``javascript``:
Removes any Javascript, like an ``onclick`` attribute. Also removes stylesheets
as they could contain Javascript.
``comments``:
Removes any comments.
``style``:
Removes any style tags.
``inline_style``
Removes any style attributes. Defaults to the value of the ``style`` option.
``links``:
Removes any ``<link>`` tags
``meta``:
Removes any ``<meta>`` tags
``page_structure``:
Structural parts of a page: ``<head>``, ``<html>``, ``<title>``.
``processing_instructions``:
Removes any processing instructions.
``embedded``:
Removes any embedded objects (flash, iframes)
``frames``:
Removes any frame-related tags
``forms``:
Removes any form tags
``annoying_tags``:
Tags that aren't *wrong*, but are annoying. ``<blink>`` and ``<marquee>``
``remove_tags``:
A list of tags to remove. Only the tags will be removed,
their content will get pulled up into the parent tag.
``kill_tags``:
A list of tags to kill. Killing also removes the tag's content,
i.e. the whole subtree, not just the tag itself.
``allow_tags``:
A list of tags to include (default include all).
``remove_unknown_tags``:
Remove any tags that aren't standard parts of HTML.
``safe_attrs_only``:
If true, only include 'safe' attributes (specifically the list
from the feedparser HTML sanitisation web site).
``safe_attrs``:
A set of attribute names to override the default list of attributes
considered 'safe' (when safe_attrs_only=True).
``add_nofollow``:
If true, then any <a> tags will have ``rel="nofollow"`` added to them.
``host_whitelist``:
A list or set of hosts that you can use for embedded content
(for content like ``<object>``, ``<link rel="stylesheet">``, etc).
You can also implement/override the method
``allow_embedded_url(el, url)`` or ``allow_element(el)`` to
implement more complex rules for what can be embedded.
Anything that passes this test will be shown, regardless of
the value of (for instance) ``embedded``.
Note that this parameter might not work as intended if you do not
make the links absolute before doing the cleaning.
Note that you may also need to set ``whitelist_tags``.
``whitelist_tags``:
A set of tags that can be included with ``host_whitelist``.
The default is ``iframe`` and ``embed``; you may wish to
include other tags like ``script``, or you may want to
implement ``allow_embedded_url`` for more control. Set to None to
include all tags.
This modifies the document *in place*.
"""
scripts = True
javascript = True
comments = True
style = False
inline_style = None
links = True
meta = True
page_structure = True
processing_instructions = True
embedded = True
frames = True
forms = True
annoying_tags = True
remove_tags = None
allow_tags = None
kill_tags = None
remove_unknown_tags = True
safe_attrs_only = True
safe_attrs = defs.safe_attrs
add_nofollow = False
host_whitelist = ()
whitelist_tags = set(['iframe', 'embed'])
def __init__(self, **kw):
for name, value in kw.items():
if not hasattr(self, name):
raise TypeError(
"Unknown parameter: %s=%r" % (name, value))
setattr(self, name, value)
if self.inline_style is None and 'inline_style' not in kw:
self.inline_style = self.style
# Used to lookup the primary URL for a given tag that is up for
# removal:
_tag_link_attrs = dict(
script='src',
link='href',
# From: http://java.sun.com/j2se/1.4.2/docs/guide/misc/applet.html
# From what I can tell, both attributes can contain a link:
applet=['code', 'object'],
iframe='src',
embed='src',
layer='src',
# FIXME: there doesn't really seem like a general way to figure out what
# links an <object> tag uses; links often go in <param> tags with values
# that we don't really know. You'd have to have knowledge about specific
# kinds of plugins (probably keyed off classid), and match against those.
##object=?,
# FIXME: not looking at the action currently, because it is more complex
# than than -- if you keep the form, you should keep the form controls.
##form='action',
a='href',
)
def __call__(self, doc):
"""
Cleans the document.
"""
if hasattr(doc, 'getroot'):
# ElementTree instance, instead of an element
doc = doc.getroot()
# convert XHTML to HTML
xhtml_to_html(doc)
# Normalize a case that IE treats <image> like <img>, and that
# can confuse either this step or later steps.
for el in doc.iter('image'):
el.tag = 'img'
if not self.comments:
# Of course, if we were going to kill comments anyway, we don't
# need to worry about this
self.kill_conditional_comments(doc)
kill_tags = set(self.kill_tags or ())
remove_tags = set(self.remove_tags or ())
allow_tags = set(self.allow_tags or ())
if self.scripts:
kill_tags.add('script')
if self.safe_attrs_only:
safe_attrs = set(self.safe_attrs)
for el in doc.iter(etree.Element):
attrib = el.attrib
for aname in attrib.keys():
if aname not in safe_attrs:
del attrib[aname]
if self.javascript:
if not (self.safe_attrs_only and
self.safe_attrs == defs.safe_attrs):
# safe_attrs handles events attributes itself
for el in doc.iter(etree.Element):
attrib = el.attrib
for aname in attrib.keys():
if aname.startswith('on'):
del attrib[aname]
doc.rewrite_links(self._remove_javascript_link,
resolve_base_href=False)
# If we're deleting style then we don't have to remove JS links
# from styles, otherwise...
if not self.inline_style:
for el in _find_styled_elements(doc):
old = el.get('style')
new = _css_javascript_re.sub('', old)
new = _css_import_re.sub('', new)
if self._has_sneaky_javascript(new):
# Something tricky is going on...
del el.attrib['style']
elif new != old:
el.set('style', new)
if not self.style:
for el in list(doc.iter('style')):
if el.get('type', '').lower().strip() == 'text/javascript':
el.drop_tree()
continue
old = el.text or ''
new = _css_javascript_re.sub('', old)
# The imported CSS can do anything; we just can't allow:
new = _css_import_re.sub('', old)
if self._has_sneaky_javascript(new):
# Something tricky is going on...
el.text = '/* deleted */'
elif new != old:
el.text = new
if self.comments or self.processing_instructions:
# FIXME: why either? I feel like there's some obscure reason
# because you can put PIs in comments...? But I've already
# forgotten it
kill_tags.add(etree.Comment)
if self.processing_instructions:
kill_tags.add(etree.ProcessingInstruction)
if self.style:
kill_tags.add('style')
if self.inline_style:
etree.strip_attributes(doc, 'style')
if self.links:
kill_tags.add('link')
elif self.style or self.javascript:
# We must get rid of included stylesheets if Javascript is not
# allowed, as you can put Javascript in them
for el in list(doc.iter('link')):
if 'stylesheet' in el.get('rel', '').lower():
# Note this kills alternate stylesheets as well
if not self.allow_element(el):
el.drop_tree()
if self.meta:
kill_tags.add('meta')
if self.page_structure:
remove_tags.update(('head', 'html', 'title'))
if self.embedded:
# FIXME: is <layer> really embedded?
# We should get rid of any <param> tags not inside <applet>;
# These are not really valid anyway.
for el in list(doc.iter('param')):
found_parent = False
parent = el.getparent()
while parent is not None and parent.tag not in ('applet', 'object'):
parent = parent.getparent()
if parent is None:
el.drop_tree()
kill_tags.update(('applet',))
# The alternate contents that are in an iframe are a good fallback:
remove_tags.update(('iframe', 'embed', 'layer', 'object', 'param'))
if self.frames:
# FIXME: ideally we should look at the frame links, but
# generally frames don't mix properly with an HTML
# fragment anyway.
kill_tags.update(defs.frame_tags)
if self.forms:
remove_tags.add('form')
kill_tags.update(('button', 'input', 'select', 'textarea'))
if self.annoying_tags:
remove_tags.update(('blink', 'marquee'))
_remove = []
_kill = []
for el in doc.iter():
if el.tag in kill_tags:
if self.allow_element(el):
continue
_kill.append(el)
elif el.tag in remove_tags:
if self.allow_element(el):
continue
_remove.append(el)
if _remove and _remove[0] == doc:
# We have to drop the parent-most tag, which we can't
# do. Instead we'll rewrite it:
el = _remove.pop(0)
el.tag = 'div'
el.attrib.clear()
elif _kill and _kill[0] == doc:
# We have to drop the parent-most element, which we can't
# do. Instead we'll clear it:
el = _kill.pop(0)
if el.tag != 'html':
el.tag = 'div'
el.clear()
_kill.reverse() # start with innermost tags
for el in _kill:
el.drop_tree()
for el in _remove:
el.drop_tag()
if self.remove_unknown_tags:
if allow_tags:
raise ValueError(
"It does not make sense to pass in both allow_tags and remove_unknown_tags")
allow_tags = set(defs.tags)
if allow_tags:
bad = []
for el in doc.iter():
if el.tag not in allow_tags:
bad.append(el)
if bad:
if bad[0] is doc:
el = bad.pop(0)
el.tag = 'div'
el.attrib.clear()
for el in bad:
el.drop_tag()
if self.add_nofollow:
for el in _find_external_links(doc):
if not self.allow_follow(el):
rel = el.get('rel')
if rel:
if ('nofollow' in rel
and ' nofollow ' in (' %s ' % rel)):
continue
rel = '%s nofollow' % rel
else:
rel = 'nofollow'
el.set('rel', rel)
def allow_follow(self, anchor):
"""
Override to suppress rel="nofollow" on some anchors.
"""
return False
def allow_element(self, el):
if el.tag not in self._tag_link_attrs:
return False
attr = self._tag_link_attrs[el.tag]
if isinstance(attr, (list, tuple)):
for one_attr in attr:
url = el.get(one_attr)
if not url:
return False
if not self.allow_embedded_url(el, url):
return False
return True
else:
url = el.get(attr)
if not url:
return False
return self.allow_embedded_url(el, url)
def allow_embedded_url(self, el, url):
if (self.whitelist_tags is not None
and el.tag not in self.whitelist_tags):
return False
scheme, netloc, path, query, fragment = urlsplit(url)
netloc = netloc.lower().split(':', 1)[0]
if scheme not in ('http', 'https'):
return False
if netloc in self.host_whitelist:
return True
return False
def kill_conditional_comments(self, doc):
"""
IE conditional comments basically embed HTML that the parser
doesn't normally see. We can't allow anything like that, so
we'll kill any comments that could be conditional.
"""
bad = []
self._kill_elements(
doc, lambda el: _conditional_comment_re.search(el.text),
etree.Comment)
def _kill_elements(self, doc, condition, iterate=None):
bad = []
for el in doc.iter(iterate):
if condition(el):
bad.append(el)
for el in bad:
el.drop_tree()
def _remove_javascript_link(self, link):
# links like "j a v a s c r i p t:" might be interpreted in IE
new = _substitute_whitespace('', unquote_plus(link))
if _is_javascript_scheme(new):
# FIXME: should this be None to delete?
return ''
return link
_substitute_comments = re.compile(r'/\*.*?\*/', re.S).sub
def _has_sneaky_javascript(self, style):
"""
Depending on the browser, stuff like ``e x p r e s s i o n(...)``
can get interpreted, or ``expre/* stuff */ssion(...)``. This
checks for attempt to do stuff like this.
Typically the response will be to kill the entire style; if you
have just a bit of Javascript in the style another rule will catch
that and remove only the Javascript from the style; this catches
more sneaky attempts.
"""
style = self._substitute_comments('', style)
style = style.replace('\\', '')
style = _substitute_whitespace('', style)
style = style.lower()
if 'javascript:' in style:
return True
if 'expression(' in style:
return True
return False
def clean_html(self, html):
result_type = type(html)
if isinstance(html, basestring):
doc = fromstring(html)
else:
doc = copy.deepcopy(html)
self(doc)
return _transform_result(result_type, doc)
clean = Cleaner()
clean_html = clean.clean_html
############################################################
## Autolinking
############################################################
_link_regexes = [
re.compile(r'(?P<body>https?://(?P<host>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
# This is conservative, but autolinking can be a bit conservative:
re.compile(r'mailto:(?P<body>[a-z0-9._-]+@(?P<host>[a-z0-9_.-]+[a-z]))', re.I),
]
_avoid_elements = ['textarea', 'pre', 'code', 'head', 'select', 'a']
_avoid_hosts = [
re.compile(r'^localhost', re.I),
re.compile(r'\bexample\.(?:com|org|net)$', re.I),
re.compile(r'^127\.0\.0\.1$'),
]
_avoid_classes = ['nolink']
def autolink(el, link_regexes=_link_regexes,
avoid_elements=_avoid_elements,
avoid_hosts=_avoid_hosts,
avoid_classes=_avoid_classes):
"""
Turn any URLs into links.
It will search for links identified by the given regular
expressions (by default mailto and http(s) links).
It won't link text in an element in avoid_elements, or an element
with a class in avoid_classes. It won't link to anything with a
host that matches one of the regular expressions in avoid_hosts
(default localhost and 127.0.0.1).
If you pass in an element, the element's tail will not be
substituted, only the contents of the element.
"""
if el.tag in avoid_elements:
return
class_name = el.get('class')
if class_name:
class_name = class_name.split()
for match_class in avoid_classes:
if match_class in class_name:
return
for child in list(el):
autolink(child, link_regexes=link_regexes,
avoid_elements=avoid_elements,
avoid_hosts=avoid_hosts,
avoid_classes=avoid_classes)
if child.tail:
text, tail_children = _link_text(
child.tail, link_regexes, avoid_hosts, factory=el.makeelement)
if tail_children:
child.tail = text
index = el.index(child)
el[index+1:index+1] = tail_children
if el.text:
text, pre_children = _link_text(
el.text, link_regexes, avoid_hosts, factory=el.makeelement)
if pre_children:
el.text = text
el[:0] = pre_children
def _link_text(text, link_regexes, avoid_hosts, factory):
leading_text = ''
links = []
last_pos = 0
while 1:
best_match, best_pos = None, None
for regex in link_regexes:
regex_pos = last_pos
while 1:
match = regex.search(text, pos=regex_pos)
if match is None:
break
host = match.group('host')
for host_regex in avoid_hosts:
if host_regex.search(host):
regex_pos = match.end()
break
else:
break
if match is None:
continue
if best_pos is None or match.start() < best_pos:
best_match = match
best_pos = match.start()
if best_match is None:
# No more matches
if links:
assert not links[-1].tail
links[-1].tail = text
else:
assert not leading_text
leading_text = text
break
link = best_match.group(0)
end = best_match.end()
if link.endswith('.') or link.endswith(','):
# These punctuation marks shouldn't end a link
end -= 1
link = link[:-1]
prev_text = text[:best_match.start()]
if links:
assert not links[-1].tail
links[-1].tail = prev_text
else:
assert not leading_text
leading_text = prev_text
anchor = factory('a')
anchor.set('href', link)
body = best_match.group('body')
if not body:
body = link
if body.endswith('.') or body.endswith(','):
body = body[:-1]
anchor.text = body
links.append(anchor)
text = text[end:]
return leading_text, links
def autolink_html(html, *args, **kw):
result_type = type(html)
if isinstance(html, basestring):
doc = fromstring(html)
else:
doc = copy.deepcopy(html)
autolink(doc, *args, **kw)
return _transform_result(result_type, doc)
autolink_html.__doc__ = autolink.__doc__
############################################################
## Word wrapping
############################################################
_avoid_word_break_elements = ['pre', 'textarea', 'code']
_avoid_word_break_classes = ['nobreak']
def word_break(el, max_width=40,
avoid_elements=_avoid_word_break_elements,
avoid_classes=_avoid_word_break_classes,
break_character=unichr(0x200b)):
"""
Breaks any long words found in the body of the text (not attributes).
Doesn't effect any of the tags in avoid_elements, by default
``<textarea>`` and ``<pre>``
Breaks words by inserting ​, which is a unicode character
for Zero Width Space character. This generally takes up no space
in rendering, but does copy as a space, and in monospace contexts
usually takes up space.
See http://www.cs.tut.fi/~jkorpela/html/nobr.html for a discussion
"""
# Character suggestion of ​ comes from:
# http://www.cs.tut.fi/~jkorpela/html/nobr.html
if el.tag in _avoid_word_break_elements:
return
class_name = el.get('class')
if class_name:
dont_break = False
class_name = class_name.split()
for avoid in avoid_classes:
if avoid in class_name:
dont_break = True
break
if dont_break:
return
if el.text:
el.text = _break_text(el.text, max_width, break_character)
for child in el:
word_break(child, max_width=max_width,
avoid_elements=avoid_elements,
avoid_classes=avoid_classes,
break_character=break_character)
if child.tail:
child.tail = _break_text(child.tail, max_width, break_character)
def word_break_html(html, *args, **kw):
result_type = type(html)
doc = fromstring(html)
word_break(doc, *args, **kw)
return _transform_result(result_type, doc)
def _break_text(text, max_width, break_character):
words = text.split()
for word in words:
if len(word) > max_width:
replacement = _insert_break(word, max_width, break_character)
text = text.replace(word, replacement)
return text
_break_prefer_re = re.compile(r'[^a-z]', re.I)
def _insert_break(word, width, break_character):
orig_word = word
result = ''
while len(word) > width:
start = word[:width]
breaks = list(_break_prefer_re.finditer(start))
if breaks:
last_break = breaks[-1]
# Only walk back up to 10 characters to find a nice break:
if last_break.end() > width-10:
# FIXME: should the break character be at the end of the
# chunk, or the beginning of the next chunk?
start = word[:last_break.end()]
result += start + break_character
word = word[len(start):]
result += word
return result
| xss | {
"code": [
" from urllib.parse import urlsplit",
" new = _substitute_whitespace('', link)"
],
"line_no": [
13,
485
]
} | {
"code": [
" from urllib import unquote_plus",
" from urllib.parse import urlsplit, unquote_plus",
" new = _substitute_whitespace('', unquote_plus(link))"
],
"line_no": [
11,
14,
486
]
} |
import re
import copy
try:
from urlparse import .urlsplit
except ImportError:
from urllib.parse import .urlsplit
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
VAR_69
except NameError:
VAR_69 = chr
try:
VAR_70
except NameError:
VAR_70 = str
try:
VAR_71
except NameError:
VAR_71 = str
try:
VAR_72
except NameError:
VAR_72 = (str, VAR_71)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
VAR_0 = re.compile(
r'expression\VAR_4*\(.*?\)', re.S|re.I)
VAR_1 = re.compile(
r'@\VAR_4*import', re.I)
VAR_2 = re.compile(
r'^data:image/.+;base64', re.I).search
VAR_3 = re.compile(
r'(?:VAR_33|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def FUNC_0(VAR_4):
if VAR_2(VAR_4):
return None
return VAR_3(VAR_4)
VAR_5 = re.compile(r'[\VAR_4\x00-\x08\x0B\x0C\x0E-\x19]+').sub
VAR_6 = re.compile(
r'\[if[\VAR_4\n\r]+.*?][\VAR_4\n\r]*>', re.I|re.S)
VAR_7 = etree.XPath(
"descendant-or-self::*[@VAR_35]")
VAR_8 = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class CLASS_0(object):
VAR_32 = True
VAR_33 = True
VAR_34 = True
VAR_35 = False
VAR_36 = None
VAR_37 = True
VAR_38 = True
VAR_39 = True
VAR_40 = True
VAR_41 = True
VAR_42 = True
VAR_43 = True
VAR_44 = True
VAR_45 = None
VAR_46 = None
VAR_47 = None
VAR_48 = True
VAR_49 = True
VAR_50 = defs.safe_attrs
VAR_51 = False
VAR_52 = ()
VAR_53 = set(['iframe', 'embed'])
def __init__(self, **VAR_24):
for name, value in VAR_24.items():
if not hasattr(self, name):
raise TypeError(
"Unknown parameter: %VAR_4=%r" % (name, value))
setattr(self, name, value)
if self.inline_style is None and 'inline_style' not in VAR_24:
self.inline_style = self.style
VAR_54 = dict(
script='src',
VAR_60='href',
applet=['code', 'object'],
iframe='src',
embed='src',
layer='src',
a='href',
)
def __call__(self, VAR_55):
if hasattr(VAR_55, 'getroot'):
VAR_55 = VAR_55.getroot()
xhtml_to_html(VAR_55)
for VAR_15 in VAR_55.iter('image'):
VAR_15.tag = 'img'
if not self.comments:
self.kill_conditional_comments(VAR_55)
VAR_47 = set(self.kill_tags or ())
VAR_45 = set(self.remove_tags or ())
VAR_46 = set(self.allow_tags or ())
if self.scripts:
VAR_47.add('script')
if self.safe_attrs_only:
VAR_50 = set(self.safe_attrs)
for VAR_15 in VAR_55.iter(etree.Element):
VAR_96 = VAR_15.attrib
for aname in VAR_96.keys():
if aname not in VAR_50:
del VAR_96[aname]
if self.javascript:
if not (self.safe_attrs_only and
self.safe_attrs == defs.safe_attrs):
for VAR_15 in VAR_55.iter(etree.Element):
VAR_96 = VAR_15.attrib
for aname in VAR_96.keys():
if aname.startswith('on'):
del VAR_96[aname]
VAR_55.rewrite_links(self._remove_javascript_link,
resolve_base_href=False)
if not self.inline_style:
for VAR_15 in VAR_7(VAR_55):
VAR_102 = VAR_15.get('style')
VAR_82 = VAR_0.sub('', VAR_102)
VAR_82 = VAR_1.sub('', VAR_82)
if self._has_sneaky_javascript(VAR_82):
del VAR_15.attrib['style']
elif VAR_82 != VAR_102:
VAR_15.set('style', VAR_82)
if not self.style:
for VAR_15 in list(VAR_55.iter('style')):
if VAR_15.get('type', '').lower().strip() == 'text/javascript':
VAR_15.drop_tree()
continue
VAR_102 = VAR_15.text or ''
VAR_82 = VAR_0.sub('', VAR_102)
VAR_82 = VAR_1.sub('', VAR_102)
if self._has_sneaky_javascript(VAR_82):
VAR_15.text = '/* deleted */'
elif VAR_82 != VAR_102:
VAR_15.text = VAR_82
if self.comments or self.processing_instructions:
VAR_47.add(etree.Comment)
if self.processing_instructions:
VAR_47.add(etree.ProcessingInstruction)
if self.style:
VAR_47.add('style')
if self.inline_style:
etree.strip_attributes(VAR_55, 'style')
if self.links:
VAR_47.add('link')
elif self.style or self.javascript:
for VAR_15 in list(VAR_55.iter('link')):
if 'stylesheet' in VAR_15.get('rel', '').lower():
if not self.allow_element(VAR_15):
VAR_15.drop_tree()
if self.meta:
VAR_47.add('meta')
if self.page_structure:
VAR_45.update(('head', 'html', 'title'))
if self.embedded:
for VAR_15 in list(VAR_55.iter('param')):
VAR_97 = False
VAR_98 = VAR_15.getparent()
while VAR_98 is not None and VAR_98.tag not in ('applet', 'object'):
VAR_98 = VAR_98.getparent()
if VAR_98 is None:
VAR_15.drop_tree()
VAR_47.update(('applet',))
VAR_45.update(('iframe', 'embed', 'layer', 'object', 'param'))
if self.frames:
VAR_47.update(defs.frame_tags)
if self.forms:
VAR_45.add('form')
VAR_47.update(('button', 'input', 'select', 'textarea'))
if self.annoying_tags:
VAR_45.update(('blink', 'marquee'))
VAR_73 = []
VAR_74 = []
for VAR_15 in VAR_55.iter():
if VAR_15.tag in VAR_47:
if self.allow_element(VAR_15):
continue
VAR_74.append(VAR_15)
elif VAR_15.tag in VAR_45:
if self.allow_element(VAR_15):
continue
VAR_73.append(VAR_15)
if VAR_73 and VAR_73[0] == VAR_55:
VAR_15 = VAR_73.pop(0)
VAR_15.tag = 'div'
VAR_15.attrib.clear()
elif VAR_74 and VAR_74[0] == VAR_55:
VAR_15 = VAR_74.pop(0)
if VAR_15.tag != 'html':
VAR_15.tag = 'div'
VAR_15.clear()
VAR_74.reverse() # VAR_90 with innermost tags
for VAR_15 in VAR_74:
VAR_15.drop_tree()
for VAR_15 in VAR_73:
VAR_15.drop_tag()
if self.remove_unknown_tags:
if VAR_46:
raise ValueError(
"It does not make sense to pass in both VAR_46 and remove_unknown_tags")
VAR_46 = set(defs.tags)
if VAR_46:
VAR_81 = []
for VAR_15 in VAR_55.iter():
if VAR_15.tag not in VAR_46:
VAR_81.append(VAR_15)
if VAR_81:
if VAR_81[0] is VAR_55:
VAR_15 = VAR_81.pop(0)
VAR_15.tag = 'div'
VAR_15.attrib.clear()
for VAR_15 in VAR_81:
VAR_15.drop_tag()
if self.add_nofollow:
for VAR_15 in VAR_8(VAR_55):
if not self.allow_follow(VAR_15):
VAR_103 = VAR_15.get('rel')
if VAR_103:
if ('nofollow' in VAR_103
and ' nofollow ' in (' %VAR_4 ' % VAR_103)):
continue
VAR_103 = '%VAR_4 nofollow' % VAR_103
else:
VAR_103 = 'nofollow'
VAR_15.set('rel', VAR_103)
def FUNC_8(self, VAR_56):
return False
def FUNC_9(self, VAR_15):
if VAR_15.tag not in self._tag_link_attrs:
return False
VAR_75 = self._tag_link_attrs[VAR_15.tag]
if isinstance(VAR_75, (list, tuple)):
for one_attr in VAR_75:
VAR_57 = VAR_15.get(one_attr)
if not VAR_57:
return False
if not self.allow_embedded_url(VAR_15, VAR_57):
return False
return True
else:
VAR_57 = VAR_15.get(VAR_75)
if not VAR_57:
return False
return self.allow_embedded_url(VAR_15, VAR_57)
def FUNC_10(self, VAR_15, VAR_57):
if (self.whitelist_tags is not None
and VAR_15.tag not in self.whitelist_tags):
return False
VAR_76, VAR_77, VAR_78, VAR_79, VAR_80 = urlsplit(VAR_57)
VAR_77 = netloc.lower().split(':', 1)[0]
if VAR_76 not in ('http', 'https'):
return False
if VAR_77 in self.host_whitelist:
return True
return False
def FUNC_11(self, VAR_55):
VAR_81 = []
self._kill_elements(
VAR_55, lambda VAR_15: VAR_6.search(VAR_15.text),
etree.Comment)
def FUNC_12(self, VAR_55, VAR_58, VAR_59=None):
VAR_81 = []
for VAR_15 in VAR_55.iter(VAR_59):
if VAR_58(VAR_15):
VAR_81.append(VAR_15)
for VAR_15 in VAR_81:
VAR_15.drop_tree()
def FUNC_13(self, VAR_60):
VAR_82 = VAR_5('', VAR_60)
if FUNC_0(VAR_82):
return ''
return VAR_60
VAR_61 = re.compile(r'/\*.*?\*/', re.S).sub
def FUNC_14(self, VAR_35):
VAR_35 = self._substitute_comments('', VAR_35)
VAR_35 = VAR_35.replace('\\', '')
VAR_35 = VAR_5('', VAR_35)
VAR_35 = VAR_35.lower()
if 'javascript:' in VAR_35:
return True
if 'expression(' in VAR_35:
return True
return False
def VAR_10(self, VAR_22):
VAR_65 = type(VAR_22)
if isinstance(VAR_22, VAR_72):
VAR_55 = fromstring(VAR_22)
else:
VAR_55 = copy.deepcopy(VAR_22)
self(VAR_55)
return _transform_result(VAR_65, VAR_55)
VAR_9 = CLASS_0()
VAR_10 = VAR_9.clean_html
VAR_11 = [
re.compile(r'(?P<VAR_88>https?://(?P<VAR_101>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
re.compile(r'mailto:(?P<VAR_88>[a-z0-9._-]+@(?P<VAR_101>[a-z0-9_.-]+[a-z]))', re.I),
]
VAR_12 = ['textarea', 'pre', 'code', 'head', 'select', 'a']
VAR_13 = [
re.compile(r'^localhost', re.I),
re.compile(r'\bexample\.(?:com|org|net)$', re.I),
re.compile(r'^127\.0\.0\.1$'),
]
VAR_14 = ['nolink']
def FUNC_1(VAR_15, VAR_16=VAR_11,
VAR_17=VAR_12,
VAR_18=VAR_13,
VAR_19=VAR_14):
if VAR_15.tag in VAR_17:
return
VAR_62 = VAR_15.get('class')
if VAR_62:
VAR_62 = VAR_62.split()
for match_class in VAR_19:
if match_class in VAR_62:
return
for child in list(VAR_15):
FUNC_1(child, VAR_16=link_regexes,
VAR_17=avoid_elements,
VAR_18=avoid_hosts,
VAR_19=avoid_classes)
if child.tail:
VAR_20, VAR_92 = FUNC_2(
child.tail, VAR_16, VAR_18, VAR_21=VAR_15.makeelement)
if VAR_92:
child.tail = VAR_20
VAR_99 = VAR_15.index(child)
VAR_15[VAR_99+1:VAR_99+1] = VAR_92
if VAR_15.text:
VAR_20, VAR_83 = FUNC_2(
VAR_15.text, VAR_16, VAR_18, VAR_21=VAR_15.makeelement)
if VAR_83:
VAR_15.text = VAR_20
VAR_15[:0] = VAR_83
def FUNC_2(VAR_20, VAR_16, VAR_18, VAR_21):
VAR_63 = ''
VAR_37 = []
VAR_64 = 0
while 1:
VAR_84, VAR_85 = None, None
for regex in VAR_16:
VAR_93 = VAR_64
while 1:
VAR_100 = regex.search(VAR_20, pos=VAR_93)
if VAR_100 is None:
break
VAR_101 = VAR_100.group('host')
for host_regex in VAR_18:
if host_regex.search(VAR_101):
VAR_93 = VAR_100.end()
break
else:
break
if VAR_100 is None:
continue
if VAR_85 is None or VAR_100.start() < VAR_85:
VAR_84 = VAR_100
VAR_85 = VAR_100.start()
if VAR_84 is None:
if VAR_37:
assert not VAR_37[-1].tail
VAR_37[-1].tail = VAR_20
else:
assert not VAR_63
leading_text = VAR_20
break
VAR_60 = VAR_84.group(0)
VAR_86 = VAR_84.end()
if VAR_60.endswith('.') or VAR_60.endswith(','):
VAR_86 -= 1
VAR_60 = link[:-1]
VAR_87 = VAR_20[:VAR_84.start()]
if VAR_37:
assert not VAR_37[-1].tail
VAR_37[-1].tail = VAR_87
else:
assert not VAR_63
leading_text = VAR_87
VAR_56 = VAR_21('a')
VAR_56.set('href', VAR_60)
VAR_88 = VAR_84.group('body')
if not VAR_88:
VAR_88 = VAR_60
if VAR_88.endswith('.') or VAR_88.endswith(','):
VAR_88 = body[:-1]
VAR_56.text = VAR_88
VAR_37.append(VAR_56)
VAR_20 = text[VAR_86:]
return VAR_63, VAR_37
def FUNC_3(VAR_22, *VAR_23, **VAR_24):
VAR_65 = type(VAR_22)
if isinstance(VAR_22, VAR_72):
VAR_55 = fromstring(VAR_22)
else:
VAR_55 = copy.deepcopy(VAR_22)
FUNC_1(VAR_55, *VAR_23, **VAR_24)
return _transform_result(VAR_65, VAR_55)
FUNC_3.__doc__ = FUNC_1.__doc__
VAR_25 = ['pre', 'textarea', 'code']
VAR_26 = ['nobreak']
def FUNC_4(VAR_15, VAR_27=40,
VAR_17=VAR_25,
VAR_19=VAR_26,
VAR_28=VAR_69(0x200b)):
if VAR_15.tag in VAR_25:
return
VAR_62 = VAR_15.get('class')
if VAR_62:
VAR_89 = False
VAR_62 = VAR_62.split()
for avoid in VAR_19:
if avoid in VAR_62:
VAR_89 = True
break
if VAR_89:
return
if VAR_15.text:
VAR_15.text = FUNC_6(VAR_15.text, VAR_27, VAR_28)
for child in VAR_15:
FUNC_4(child, VAR_27=max_width,
VAR_17=avoid_elements,
VAR_19=avoid_classes,
VAR_28=break_character)
if child.tail:
child.tail = FUNC_6(child.tail, VAR_27, VAR_28)
def FUNC_5(VAR_22, *VAR_23, **VAR_24):
VAR_65 = type(VAR_22)
VAR_55 = fromstring(VAR_22)
FUNC_4(VAR_55, *VAR_23, **VAR_24)
return _transform_result(VAR_65, VAR_55)
def FUNC_6(VAR_20, VAR_27, VAR_28):
VAR_66 = VAR_20.split()
for VAR_30 in VAR_66:
if len(VAR_30) > VAR_27:
VAR_94 = FUNC_7(VAR_30, VAR_27, VAR_28)
VAR_20 = VAR_20.replace(VAR_30, VAR_94)
return VAR_20
VAR_29 = re.compile(r'[^a-z]', re.I)
def FUNC_7(VAR_30, VAR_31, VAR_28):
VAR_67 = VAR_30
VAR_68 = ''
while len(VAR_30) > VAR_31:
VAR_90 = VAR_30[:VAR_31]
VAR_91 = list(VAR_29.finditer(VAR_90))
if VAR_91:
VAR_95 = VAR_91[-1]
if VAR_95.end() > VAR_31-10:
VAR_90 = VAR_30[:VAR_95.end()]
VAR_68 += VAR_90 + VAR_28
VAR_30 = word[len(VAR_90):]
VAR_68 += VAR_30
return VAR_68
|
import re
import copy
try:
from urlparse import .urlsplit
from urllib import unquote_plus
except ImportError:
from urllib.parse import .urlsplit, unquote_plus
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
VAR_69
except NameError:
VAR_69 = chr
try:
VAR_70
except NameError:
VAR_70 = str
try:
VAR_71
except NameError:
VAR_71 = str
try:
VAR_72
except NameError:
VAR_72 = (str, VAR_71)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
VAR_0 = re.compile(
r'expression\VAR_4*\(.*?\)', re.S|re.I)
VAR_1 = re.compile(
r'@\VAR_4*import', re.I)
VAR_2 = re.compile(
r'^data:image/.+;base64', re.I).search
VAR_3 = re.compile(
r'(?:VAR_33|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def FUNC_0(VAR_4):
if VAR_2(VAR_4):
return None
return VAR_3(VAR_4)
VAR_5 = re.compile(r'[\VAR_4\x00-\x08\x0B\x0C\x0E-\x19]+').sub
VAR_6 = re.compile(
r'\[if[\VAR_4\n\r]+.*?][\VAR_4\n\r]*>', re.I|re.S)
VAR_7 = etree.XPath(
"descendant-or-self::*[@VAR_35]")
VAR_8 = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class CLASS_0(object):
VAR_32 = True
VAR_33 = True
VAR_34 = True
VAR_35 = False
VAR_36 = None
VAR_37 = True
VAR_38 = True
VAR_39 = True
VAR_40 = True
VAR_41 = True
VAR_42 = True
VAR_43 = True
VAR_44 = True
VAR_45 = None
VAR_46 = None
VAR_47 = None
VAR_48 = True
VAR_49 = True
VAR_50 = defs.safe_attrs
VAR_51 = False
VAR_52 = ()
VAR_53 = set(['iframe', 'embed'])
def __init__(self, **VAR_24):
for name, value in VAR_24.items():
if not hasattr(self, name):
raise TypeError(
"Unknown parameter: %VAR_4=%r" % (name, value))
setattr(self, name, value)
if self.inline_style is None and 'inline_style' not in VAR_24:
self.inline_style = self.style
VAR_54 = dict(
script='src',
VAR_60='href',
applet=['code', 'object'],
iframe='src',
embed='src',
layer='src',
a='href',
)
def __call__(self, VAR_55):
if hasattr(VAR_55, 'getroot'):
VAR_55 = VAR_55.getroot()
xhtml_to_html(VAR_55)
for VAR_15 in VAR_55.iter('image'):
VAR_15.tag = 'img'
if not self.comments:
self.kill_conditional_comments(VAR_55)
VAR_47 = set(self.kill_tags or ())
VAR_45 = set(self.remove_tags or ())
VAR_46 = set(self.allow_tags or ())
if self.scripts:
VAR_47.add('script')
if self.safe_attrs_only:
VAR_50 = set(self.safe_attrs)
for VAR_15 in VAR_55.iter(etree.Element):
VAR_96 = VAR_15.attrib
for aname in VAR_96.keys():
if aname not in VAR_50:
del VAR_96[aname]
if self.javascript:
if not (self.safe_attrs_only and
self.safe_attrs == defs.safe_attrs):
for VAR_15 in VAR_55.iter(etree.Element):
VAR_96 = VAR_15.attrib
for aname in VAR_96.keys():
if aname.startswith('on'):
del VAR_96[aname]
VAR_55.rewrite_links(self._remove_javascript_link,
resolve_base_href=False)
if not self.inline_style:
for VAR_15 in VAR_7(VAR_55):
VAR_102 = VAR_15.get('style')
VAR_82 = VAR_0.sub('', VAR_102)
VAR_82 = VAR_1.sub('', VAR_82)
if self._has_sneaky_javascript(VAR_82):
del VAR_15.attrib['style']
elif VAR_82 != VAR_102:
VAR_15.set('style', VAR_82)
if not self.style:
for VAR_15 in list(VAR_55.iter('style')):
if VAR_15.get('type', '').lower().strip() == 'text/javascript':
VAR_15.drop_tree()
continue
VAR_102 = VAR_15.text or ''
VAR_82 = VAR_0.sub('', VAR_102)
VAR_82 = VAR_1.sub('', VAR_102)
if self._has_sneaky_javascript(VAR_82):
VAR_15.text = '/* deleted */'
elif VAR_82 != VAR_102:
VAR_15.text = VAR_82
if self.comments or self.processing_instructions:
VAR_47.add(etree.Comment)
if self.processing_instructions:
VAR_47.add(etree.ProcessingInstruction)
if self.style:
VAR_47.add('style')
if self.inline_style:
etree.strip_attributes(VAR_55, 'style')
if self.links:
VAR_47.add('link')
elif self.style or self.javascript:
for VAR_15 in list(VAR_55.iter('link')):
if 'stylesheet' in VAR_15.get('rel', '').lower():
if not self.allow_element(VAR_15):
VAR_15.drop_tree()
if self.meta:
VAR_47.add('meta')
if self.page_structure:
VAR_45.update(('head', 'html', 'title'))
if self.embedded:
for VAR_15 in list(VAR_55.iter('param')):
VAR_97 = False
VAR_98 = VAR_15.getparent()
while VAR_98 is not None and VAR_98.tag not in ('applet', 'object'):
VAR_98 = VAR_98.getparent()
if VAR_98 is None:
VAR_15.drop_tree()
VAR_47.update(('applet',))
VAR_45.update(('iframe', 'embed', 'layer', 'object', 'param'))
if self.frames:
VAR_47.update(defs.frame_tags)
if self.forms:
VAR_45.add('form')
VAR_47.update(('button', 'input', 'select', 'textarea'))
if self.annoying_tags:
VAR_45.update(('blink', 'marquee'))
VAR_73 = []
VAR_74 = []
for VAR_15 in VAR_55.iter():
if VAR_15.tag in VAR_47:
if self.allow_element(VAR_15):
continue
VAR_74.append(VAR_15)
elif VAR_15.tag in VAR_45:
if self.allow_element(VAR_15):
continue
VAR_73.append(VAR_15)
if VAR_73 and VAR_73[0] == VAR_55:
VAR_15 = VAR_73.pop(0)
VAR_15.tag = 'div'
VAR_15.attrib.clear()
elif VAR_74 and VAR_74[0] == VAR_55:
VAR_15 = VAR_74.pop(0)
if VAR_15.tag != 'html':
VAR_15.tag = 'div'
VAR_15.clear()
VAR_74.reverse() # VAR_90 with innermost tags
for VAR_15 in VAR_74:
VAR_15.drop_tree()
for VAR_15 in VAR_73:
VAR_15.drop_tag()
if self.remove_unknown_tags:
if VAR_46:
raise ValueError(
"It does not make sense to pass in both VAR_46 and remove_unknown_tags")
VAR_46 = set(defs.tags)
if VAR_46:
VAR_81 = []
for VAR_15 in VAR_55.iter():
if VAR_15.tag not in VAR_46:
VAR_81.append(VAR_15)
if VAR_81:
if VAR_81[0] is VAR_55:
VAR_15 = VAR_81.pop(0)
VAR_15.tag = 'div'
VAR_15.attrib.clear()
for VAR_15 in VAR_81:
VAR_15.drop_tag()
if self.add_nofollow:
for VAR_15 in VAR_8(VAR_55):
if not self.allow_follow(VAR_15):
VAR_103 = VAR_15.get('rel')
if VAR_103:
if ('nofollow' in VAR_103
and ' nofollow ' in (' %VAR_4 ' % VAR_103)):
continue
VAR_103 = '%VAR_4 nofollow' % VAR_103
else:
VAR_103 = 'nofollow'
VAR_15.set('rel', VAR_103)
def FUNC_8(self, VAR_56):
return False
def FUNC_9(self, VAR_15):
if VAR_15.tag not in self._tag_link_attrs:
return False
VAR_75 = self._tag_link_attrs[VAR_15.tag]
if isinstance(VAR_75, (list, tuple)):
for one_attr in VAR_75:
VAR_57 = VAR_15.get(one_attr)
if not VAR_57:
return False
if not self.allow_embedded_url(VAR_15, VAR_57):
return False
return True
else:
VAR_57 = VAR_15.get(VAR_75)
if not VAR_57:
return False
return self.allow_embedded_url(VAR_15, VAR_57)
def FUNC_10(self, VAR_15, VAR_57):
if (self.whitelist_tags is not None
and VAR_15.tag not in self.whitelist_tags):
return False
VAR_76, VAR_77, VAR_78, VAR_79, VAR_80 = urlsplit(VAR_57)
VAR_77 = netloc.lower().split(':', 1)[0]
if VAR_76 not in ('http', 'https'):
return False
if VAR_77 in self.host_whitelist:
return True
return False
def FUNC_11(self, VAR_55):
VAR_81 = []
self._kill_elements(
VAR_55, lambda VAR_15: VAR_6.search(VAR_15.text),
etree.Comment)
def FUNC_12(self, VAR_55, VAR_58, VAR_59=None):
VAR_81 = []
for VAR_15 in VAR_55.iter(VAR_59):
if VAR_58(VAR_15):
VAR_81.append(VAR_15)
for VAR_15 in VAR_81:
VAR_15.drop_tree()
def FUNC_13(self, VAR_60):
VAR_82 = VAR_5('', unquote_plus(VAR_60))
if FUNC_0(VAR_82):
return ''
return VAR_60
VAR_61 = re.compile(r'/\*.*?\*/', re.S).sub
def FUNC_14(self, VAR_35):
VAR_35 = self._substitute_comments('', VAR_35)
VAR_35 = VAR_35.replace('\\', '')
VAR_35 = VAR_5('', VAR_35)
VAR_35 = VAR_35.lower()
if 'javascript:' in VAR_35:
return True
if 'expression(' in VAR_35:
return True
return False
def VAR_10(self, VAR_22):
VAR_65 = type(VAR_22)
if isinstance(VAR_22, VAR_72):
VAR_55 = fromstring(VAR_22)
else:
VAR_55 = copy.deepcopy(VAR_22)
self(VAR_55)
return _transform_result(VAR_65, VAR_55)
VAR_9 = CLASS_0()
VAR_10 = VAR_9.clean_html
VAR_11 = [
re.compile(r'(?P<VAR_88>https?://(?P<VAR_101>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
re.compile(r'mailto:(?P<VAR_88>[a-z0-9._-]+@(?P<VAR_101>[a-z0-9_.-]+[a-z]))', re.I),
]
VAR_12 = ['textarea', 'pre', 'code', 'head', 'select', 'a']
VAR_13 = [
re.compile(r'^localhost', re.I),
re.compile(r'\bexample\.(?:com|org|net)$', re.I),
re.compile(r'^127\.0\.0\.1$'),
]
VAR_14 = ['nolink']
def FUNC_1(VAR_15, VAR_16=VAR_11,
VAR_17=VAR_12,
VAR_18=VAR_13,
VAR_19=VAR_14):
if VAR_15.tag in VAR_17:
return
VAR_62 = VAR_15.get('class')
if VAR_62:
VAR_62 = VAR_62.split()
for match_class in VAR_19:
if match_class in VAR_62:
return
for child in list(VAR_15):
FUNC_1(child, VAR_16=link_regexes,
VAR_17=avoid_elements,
VAR_18=avoid_hosts,
VAR_19=avoid_classes)
if child.tail:
VAR_20, VAR_92 = FUNC_2(
child.tail, VAR_16, VAR_18, VAR_21=VAR_15.makeelement)
if VAR_92:
child.tail = VAR_20
VAR_99 = VAR_15.index(child)
VAR_15[VAR_99+1:VAR_99+1] = VAR_92
if VAR_15.text:
VAR_20, VAR_83 = FUNC_2(
VAR_15.text, VAR_16, VAR_18, VAR_21=VAR_15.makeelement)
if VAR_83:
VAR_15.text = VAR_20
VAR_15[:0] = VAR_83
def FUNC_2(VAR_20, VAR_16, VAR_18, VAR_21):
VAR_63 = ''
VAR_37 = []
VAR_64 = 0
while 1:
VAR_84, VAR_85 = None, None
for regex in VAR_16:
VAR_93 = VAR_64
while 1:
VAR_100 = regex.search(VAR_20, pos=VAR_93)
if VAR_100 is None:
break
VAR_101 = VAR_100.group('host')
for host_regex in VAR_18:
if host_regex.search(VAR_101):
VAR_93 = VAR_100.end()
break
else:
break
if VAR_100 is None:
continue
if VAR_85 is None or VAR_100.start() < VAR_85:
VAR_84 = VAR_100
VAR_85 = VAR_100.start()
if VAR_84 is None:
if VAR_37:
assert not VAR_37[-1].tail
VAR_37[-1].tail = VAR_20
else:
assert not VAR_63
leading_text = VAR_20
break
VAR_60 = VAR_84.group(0)
VAR_86 = VAR_84.end()
if VAR_60.endswith('.') or VAR_60.endswith(','):
VAR_86 -= 1
VAR_60 = link[:-1]
VAR_87 = VAR_20[:VAR_84.start()]
if VAR_37:
assert not VAR_37[-1].tail
VAR_37[-1].tail = VAR_87
else:
assert not VAR_63
leading_text = VAR_87
VAR_56 = VAR_21('a')
VAR_56.set('href', VAR_60)
VAR_88 = VAR_84.group('body')
if not VAR_88:
VAR_88 = VAR_60
if VAR_88.endswith('.') or VAR_88.endswith(','):
VAR_88 = body[:-1]
VAR_56.text = VAR_88
VAR_37.append(VAR_56)
VAR_20 = text[VAR_86:]
return VAR_63, VAR_37
def FUNC_3(VAR_22, *VAR_23, **VAR_24):
VAR_65 = type(VAR_22)
if isinstance(VAR_22, VAR_72):
VAR_55 = fromstring(VAR_22)
else:
VAR_55 = copy.deepcopy(VAR_22)
FUNC_1(VAR_55, *VAR_23, **VAR_24)
return _transform_result(VAR_65, VAR_55)
FUNC_3.__doc__ = FUNC_1.__doc__
VAR_25 = ['pre', 'textarea', 'code']
VAR_26 = ['nobreak']
def FUNC_4(VAR_15, VAR_27=40,
VAR_17=VAR_25,
VAR_19=VAR_26,
VAR_28=VAR_69(0x200b)):
if VAR_15.tag in VAR_25:
return
VAR_62 = VAR_15.get('class')
if VAR_62:
VAR_89 = False
VAR_62 = VAR_62.split()
for avoid in VAR_19:
if avoid in VAR_62:
VAR_89 = True
break
if VAR_89:
return
if VAR_15.text:
VAR_15.text = FUNC_6(VAR_15.text, VAR_27, VAR_28)
for child in VAR_15:
FUNC_4(child, VAR_27=max_width,
VAR_17=avoid_elements,
VAR_19=avoid_classes,
VAR_28=break_character)
if child.tail:
child.tail = FUNC_6(child.tail, VAR_27, VAR_28)
def FUNC_5(VAR_22, *VAR_23, **VAR_24):
VAR_65 = type(VAR_22)
VAR_55 = fromstring(VAR_22)
FUNC_4(VAR_55, *VAR_23, **VAR_24)
return _transform_result(VAR_65, VAR_55)
def FUNC_6(VAR_20, VAR_27, VAR_28):
VAR_66 = VAR_20.split()
for VAR_30 in VAR_66:
if len(VAR_30) > VAR_27:
VAR_94 = FUNC_7(VAR_30, VAR_27, VAR_28)
VAR_20 = VAR_20.replace(VAR_30, VAR_94)
return VAR_20
VAR_29 = re.compile(r'[^a-z]', re.I)
def FUNC_7(VAR_30, VAR_31, VAR_28):
VAR_67 = VAR_30
VAR_68 = ''
while len(VAR_30) > VAR_31:
VAR_90 = VAR_30[:VAR_31]
VAR_91 = list(VAR_29.finditer(VAR_90))
if VAR_91:
VAR_95 = VAR_91[-1]
if VAR_95.end() > VAR_31-10:
VAR_90 = VAR_30[:VAR_95.end()]
VAR_68 += VAR_90 + VAR_28
VAR_30 = word[len(VAR_90):]
VAR_68 += VAR_30
return VAR_68
| [
2,
6,
12,
18,
22,
27,
32,
38,
39,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
66,
67,
70,
71,
72,
82,
84,
85,
86,
89,
92,
97,
98,
104,
107,
111,
114,
117,
120,
123,
126,
129,
132,
135,
138,
141,
144,
148,
152,
155,
158,
162,
166,
169,
178,
181,
183,
190,
193,
216,
225,
226,
227,
231,
232,
237,
238,
239,
240,
241,
242,
243,
244,
247,
253,
255,
257,
258,
262,
263,
265,
269,
282,
290,
291,
298,
309,
312,
317,
318,
319,
330,
331,
334,
342,
343,
344,
353,
356,
357,
358,
365,
377,
379,
380,
385,
386,
391,
397,
427,
433,
451,
463,
474,
482,
484,
487,
490,
492,
498,
513,
522,
525,
526,
527,
528,
529,
532,
535,
537,
543,
545,
552,
555,
560,
590,
616,
627,
648,
657,
659,
660,
661,
662,
663,
666,
673,
676,
681,
684,
685,
707,
713,
721,
723,
732,
734,
735,
741,
742,
1,
2,
3,
4,
5,
100,
101,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
112,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
550,
551,
552,
553,
554,
555,
556,
557,
558,
559,
560,
561,
562,
563,
671,
672,
673,
674,
675,
676,
677,
678,
679,
680,
681,
682,
683,
249,
250,
251,
429,
430,
431,
465,
466,
467,
468,
469,
494,
495,
496,
497,
498,
499,
500,
501,
502,
503
] | [
2,
6,
13,
19,
23,
28,
33,
39,
40,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
67,
68,
71,
72,
73,
83,
85,
86,
87,
90,
93,
98,
99,
105,
108,
112,
115,
118,
121,
124,
127,
130,
133,
136,
139,
142,
145,
149,
153,
156,
159,
163,
167,
170,
179,
182,
184,
191,
194,
217,
226,
227,
228,
232,
233,
238,
239,
240,
241,
242,
243,
244,
245,
248,
254,
256,
258,
259,
263,
264,
266,
270,
283,
291,
292,
299,
310,
313,
318,
319,
320,
331,
332,
335,
343,
344,
345,
354,
357,
358,
359,
366,
378,
380,
381,
386,
387,
392,
398,
428,
434,
452,
464,
475,
483,
485,
488,
491,
493,
499,
514,
523,
526,
527,
528,
529,
530,
533,
536,
538,
544,
546,
553,
556,
561,
591,
617,
628,
649,
658,
660,
661,
662,
663,
664,
667,
674,
677,
682,
685,
686,
708,
714,
722,
724,
733,
735,
736,
742,
743,
1,
2,
3,
4,
5,
101,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
112,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
551,
552,
553,
554,
555,
556,
557,
558,
559,
560,
561,
562,
563,
564,
672,
673,
674,
675,
676,
677,
678,
679,
680,
681,
682,
683,
684,
250,
251,
252,
430,
431,
432,
466,
467,
468,
469,
470,
495,
496,
497,
498,
499,
500,
501,
502,
503,
504
] |
2CWE-601
| # -*- coding: utf-8 -*-
from functools import wraps
from django.core.exceptions import PermissionDenied
from django.contrib.auth.views import redirect_to_login
from django.shortcuts import redirect
from spirit.core.conf import settings
def moderator_required(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
user = request.user
if not user.is_authenticated:
return redirect_to_login(next=request.get_full_path(),
login_url=settings.LOGIN_URL)
if not user.st.is_moderator:
raise PermissionDenied
return view_func(request, *args, **kwargs)
return wrapper
def administrator_required(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
user = request.user
if not user.is_authenticated:
return redirect_to_login(next=request.get_full_path(),
login_url=settings.LOGIN_URL)
if not user.st.is_administrator:
raise PermissionDenied
return view_func(request, *args, **kwargs)
return wrapper
def guest_only(view_func):
# TODO: test!
@wraps(view_func)
def wrapper(request, *args, **kwargs):
if request.user.is_authenticated:
return redirect(request.GET.get('next', request.user.st.get_absolute_url()))
return view_func(request, *args, **kwargs)
return wrapper
| # -*- coding: utf-8 -*-
from functools import wraps
from django.core.exceptions import PermissionDenied
from django.contrib.auth.views import redirect_to_login
from spirit.core.conf import settings
from spirit.core.utils.http import safe_redirect
def moderator_required(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
user = request.user
if not user.is_authenticated:
return redirect_to_login(next=request.get_full_path(),
login_url=settings.LOGIN_URL)
if not user.st.is_moderator:
raise PermissionDenied
return view_func(request, *args, **kwargs)
return wrapper
def administrator_required(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
user = request.user
if not user.is_authenticated:
return redirect_to_login(next=request.get_full_path(),
login_url=settings.LOGIN_URL)
if not user.st.is_administrator:
raise PermissionDenied
return view_func(request, *args, **kwargs)
return wrapper
def guest_only(view_func):
# TODO: test!
@wraps(view_func)
def wrapper(request, *args, **kwargs):
if request.user.is_authenticated:
return safe_redirect(request, 'next', request.user.st.get_absolute_url())
return view_func(request, *args, **kwargs)
return wrapper
| open_redirect | {
"code": [
"from django.shortcuts import redirect",
" return redirect(request.GET.get('next', request.user.st.get_absolute_url()))"
],
"line_no": [
7,
51
]
} | {
"code": [
"from spirit.core.utils.http import safe_redirect",
" return safe_redirect(request, 'next', request.user.st.get_absolute_url())"
],
"line_no": [
9,
51
]
} |
from functools import wraps
from django.core.exceptions import PermissionDenied
from django.contrib.auth.views import redirect_to_login
from django.shortcuts import redirect
from spirit.core.conf import settings
def FUNC_0(VAR_0):
@wraps(VAR_0)
def FUNC_3(VAR_1, *VAR_2, **VAR_3):
VAR_4 = VAR_1.user
if not VAR_4.is_authenticated:
return redirect_to_login(next=VAR_1.get_full_path(),
login_url=settings.LOGIN_URL)
if not VAR_4.st.is_moderator:
raise PermissionDenied
return VAR_0(VAR_1, *VAR_2, **VAR_3)
return FUNC_3
def FUNC_1(VAR_0):
@wraps(VAR_0)
def FUNC_3(VAR_1, *VAR_2, **VAR_3):
VAR_4 = VAR_1.user
if not VAR_4.is_authenticated:
return redirect_to_login(next=VAR_1.get_full_path(),
login_url=settings.LOGIN_URL)
if not VAR_4.st.is_administrator:
raise PermissionDenied
return VAR_0(VAR_1, *VAR_2, **VAR_3)
return FUNC_3
def FUNC_2(VAR_0):
@wraps(VAR_0)
def FUNC_3(VAR_1, *VAR_2, **VAR_3):
if VAR_1.user.is_authenticated:
return redirect(VAR_1.GET.get('next', VAR_1.user.st.get_absolute_url()))
return VAR_0(VAR_1, *VAR_2, **VAR_3)
return FUNC_3
|
from functools import wraps
from django.core.exceptions import PermissionDenied
from django.contrib.auth.views import redirect_to_login
from spirit.core.conf import settings
from spirit.core.utils.http import safe_redirect
def FUNC_0(VAR_0):
@wraps(VAR_0)
def FUNC_3(VAR_1, *VAR_2, **VAR_3):
VAR_4 = VAR_1.user
if not VAR_4.is_authenticated:
return redirect_to_login(next=VAR_1.get_full_path(),
login_url=settings.LOGIN_URL)
if not VAR_4.st.is_moderator:
raise PermissionDenied
return VAR_0(VAR_1, *VAR_2, **VAR_3)
return FUNC_3
def FUNC_1(VAR_0):
@wraps(VAR_0)
def FUNC_3(VAR_1, *VAR_2, **VAR_3):
VAR_4 = VAR_1.user
if not VAR_4.is_authenticated:
return redirect_to_login(next=VAR_1.get_full_path(),
login_url=settings.LOGIN_URL)
if not VAR_4.st.is_administrator:
raise PermissionDenied
return VAR_0(VAR_1, *VAR_2, **VAR_3)
return FUNC_3
def FUNC_2(VAR_0):
@wraps(VAR_0)
def FUNC_3(VAR_1, *VAR_2, **VAR_3):
if VAR_1.user.is_authenticated:
return safe_redirect(VAR_1, 'next', VAR_1.user.st.get_absolute_url())
return VAR_0(VAR_1, *VAR_2, **VAR_3)
return FUNC_3
| [
1,
2,
4,
8,
10,
11,
16,
20,
23,
25,
27,
28,
33,
37,
40,
42,
44,
45,
47,
52,
54,
56
] | [
1,
2,
4,
7,
10,
11,
16,
20,
23,
25,
27,
28,
33,
37,
40,
42,
44,
45,
47,
52,
54,
56
] |
0CWE-22
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
import os
import datetime
from xml.sax.saxutils import quoteattr
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from opendiamond.dataretriever.util import ATTR_SUFFIX
BASEURL = 'collection'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local file path, otherwise http.
INDEXDIR = DATAROOT = None
def init(config):
global INDEXDIR, DATAROOT # pylint: disable=global-statement
INDEXDIR = config.indexdir
DATAROOT = config.dataroot
scope_blueprint = Blueprint('diamond_store', __name__)
@scope_blueprint.route('/<gididx>')
@scope_blueprint.route('/<gididx>/limit/<int:limit>')
def get_scope(gididx, limit=None):
index = 'GIDIDX' + gididx.upper()
index = _get_index_absolute_path(index)
# Streaming response:
# http://flask.pocoo.org/docs/0.12/patterns/streaming/
def generate():
num_entries = 0
with open(index, 'r') as f:
for _ in f.readlines():
num_entries += 1
if limit is not None and num_entries >= limit:
break
with open(index, 'r') as f:
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if STYLE:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(num_entries)
count = 0
for path in f.readlines():
path = path.strip()
yield _get_object_element(object_path=path) + '\n'
count += 1
if limit is not None and count >= limit:
break
yield '</objectlist>\n'
headers = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
@scope_blueprint.route('/id/<path:object_path>')
def get_object_id(object_path):
headers = Headers([('Content-Type', 'text/xml')])
return Response(_get_object_element(object_path=object_path),
"200 OK",
headers=headers)
@scope_blueprint.route('/meta/<path:object_path>')
def get_object_meta(object_path):
path = _get_obj_absolute_path(object_path)
attrs = dict()
try:
with DiamondTextAttr(path, 'r') as attributes:
for key, value in attributes:
attrs[key] = value
except IOError:
pass
return jsonify(attrs)
def _get_object_element(object_path):
path = _get_obj_absolute_path(object_path)
if os.path.isfile(path + ATTR_SUFFIX):
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)),
quoteattr(url_for('.get_object_meta', object_path=object_path)))
else:
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)))
def _get_object_src_uri(object_path):
if LOCAL_OBJ_URI:
return 'file://' + _get_obj_absolute_path(object_path)
else:
return url_for('.get_object_src_http', obj_path=object_path)
def _get_obj_absolute_path(obj_path):
return os.path.join(DATAROOT, obj_path)
def _get_index_absolute_path(index):
return os.path.join(INDEXDIR, index)
@scope_blueprint.route('/obj/<path:obj_path>')
def get_object_src_http(obj_path):
path = _get_obj_absolute_path(obj_path)
headers = Headers()
# With add_etags=True, conditional=True
# Flask should be smart enough to do 304 Not Modified
response = send_file(path,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
response.headers.extend(headers)
return response
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
import os
import datetime
from xml.sax.saxutils import quoteattr
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
from opendiamond.dataretriever.util import ATTR_SUFFIX
BASEURL = 'collection'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local file path, otherwise http.
INDEXDIR = DATAROOT = None
def init(config):
global INDEXDIR, DATAROOT # pylint: disable=global-statement
INDEXDIR = config.indexdir
DATAROOT = config.dataroot
scope_blueprint = Blueprint('diamond_store', __name__)
@scope_blueprint.route('/<gididx>')
@scope_blueprint.route('/<gididx>/limit/<int:limit>')
def get_scope(gididx, limit=None):
index = 'GIDIDX' + gididx.upper()
index = _get_index_absolute_path(index)
# Streaming response:
# http://flask.pocoo.org/docs/0.12/patterns/streaming/
def generate():
num_entries = 0
with open(index, 'r') as f:
for _ in f.readlines():
num_entries += 1
if limit is not None and num_entries >= limit:
break
with open(index, 'r') as f:
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if STYLE:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(num_entries)
count = 0
for path in f.readlines():
path = path.strip()
yield _get_object_element(object_path=path) + '\n'
count += 1
if limit is not None and count >= limit:
break
yield '</objectlist>\n'
headers = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
@scope_blueprint.route('/id/<path:object_path>')
def get_object_id(object_path):
headers = Headers([('Content-Type', 'text/xml')])
return Response(_get_object_element(object_path=object_path),
"200 OK",
headers=headers)
@scope_blueprint.route('/meta/<path:object_path>')
def get_object_meta(object_path):
path = _get_obj_absolute_path(object_path)
attrs = dict()
try:
with DiamondTextAttr(path, 'r') as attributes:
for key, value in attributes:
attrs[key] = value
except IOError:
pass
return jsonify(attrs)
def _get_object_element(object_path):
path = _get_obj_absolute_path(object_path)
if os.path.isfile(path + ATTR_SUFFIX):
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)),
quoteattr(url_for('.get_object_meta', object_path=object_path)))
else:
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)))
def _get_object_src_uri(object_path):
if LOCAL_OBJ_URI:
return 'file://' + _get_obj_absolute_path(object_path)
else:
return url_for('.get_object_src_http', obj_path=object_path)
def _get_obj_absolute_path(obj_path):
return safe_join(DATAROOT, obj_path)
def _get_index_absolute_path(index):
return safe_join(INDEXDIR, index)
@scope_blueprint.route('/obj/<path:obj_path>')
def get_object_src_http(obj_path):
path = _get_obj_absolute_path(obj_path)
headers = Headers()
# With add_etags=True, conditional=True
# Flask should be smart enough to do 304 Not Modified
response = send_file(path,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
response.headers.extend(headers)
return response
| path_disclosure | {
"code": [
" return os.path.join(DATAROOT, obj_path)",
" return os.path.join(INDEXDIR, index)"
],
"line_no": [
123,
127
]
} | {
"code": [
" return safe_join(DATAROOT, obj_path)"
],
"line_no": [
124
]
} |
import os
import datetime
from xml.sax.saxutils import quoteattr
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from opendiamond.dataretriever.util import ATTR_SUFFIX
VAR_0 = 'collection'
VAR_1 = False
VAR_2 = True # if true, return local file VAR_13, otherwise http.
VAR_3 = VAR_4 = None
def FUNC_0(VAR_5):
global VAR_3, VAR_4 # pylint: disable=global-statement
VAR_3 = VAR_5.indexdir
VAR_4 = VAR_5.dataroot
VAR_6 = Blueprint('diamond_store', __name__)
@VAR_6.route('/<VAR_7>')
@VAR_6.route('/<VAR_7>/VAR_8/<int:VAR_8>')
def FUNC_1(VAR_7, VAR_8=None):
VAR_11 = 'GIDIDX' + VAR_7.upper()
VAR_11 = FUNC_7(VAR_11)
def FUNC_9():
VAR_16 = 0
with open(VAR_11, 'r') as f:
for _ in f.readlines():
VAR_16 += 1
if VAR_8 is not None and VAR_16 >= VAR_8:
break
with open(VAR_11, 'r') as f:
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist VAR_17="{:d}">\n'.format(VAR_16)
VAR_17 = 0
for VAR_13 in f.readlines():
VAR_13 = path.strip()
yield FUNC_4(VAR_9=VAR_13) + '\n'
VAR_17 += 1
if VAR_8 is not None and VAR_17 >= VAR_8:
break
yield '</objectlist>\n'
VAR_12 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_9()),
status="200 OK",
VAR_12=headers)
@VAR_6.route('/id/<VAR_13:VAR_9>')
def FUNC_2(VAR_9):
VAR_12 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_4(VAR_9=object_path),
"200 OK",
VAR_12=headers)
@VAR_6.route('/meta/<VAR_13:VAR_9>')
def FUNC_3(VAR_9):
VAR_13 = FUNC_6(VAR_9)
VAR_14 = dict()
try:
with DiamondTextAttr(VAR_13, 'r') as attributes:
for VAR_18, value in attributes:
VAR_14[VAR_18] = value
except IOError:
pass
return jsonify(VAR_14)
def FUNC_4(VAR_9):
VAR_13 = FUNC_6(VAR_9)
if os.path.isfile(VAR_13 + ATTR_SUFFIX):
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_9=object_path)),
quoteattr(FUNC_5(VAR_9)),
quoteattr(url_for('.get_object_meta', VAR_9=object_path)))
else:
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_9=object_path)),
quoteattr(FUNC_5(VAR_9)))
def FUNC_5(VAR_9):
if VAR_2:
return 'file://' + FUNC_6(VAR_9)
else:
return url_for('.get_object_src_http', VAR_10=VAR_9)
def FUNC_6(VAR_10):
return os.path.join(VAR_4, VAR_10)
def FUNC_7(VAR_11):
return os.path.join(VAR_3, VAR_11)
@VAR_6.route('/obj/<VAR_13:VAR_10>')
def FUNC_8(VAR_10):
VAR_13 = FUNC_6(VAR_10)
VAR_12 = Headers()
VAR_15 = send_file(VAR_13,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
VAR_15.headers.extend(VAR_12)
return VAR_15
|
import os
import datetime
from xml.sax.saxutils import quoteattr
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
from opendiamond.dataretriever.util import ATTR_SUFFIX
VAR_0 = 'collection'
VAR_1 = False
VAR_2 = True # if true, return local file VAR_13, otherwise http.
VAR_3 = VAR_4 = None
def FUNC_0(VAR_5):
global VAR_3, VAR_4 # pylint: disable=global-statement
VAR_3 = VAR_5.indexdir
VAR_4 = VAR_5.dataroot
VAR_6 = Blueprint('diamond_store', __name__)
@VAR_6.route('/<VAR_7>')
@VAR_6.route('/<VAR_7>/VAR_8/<int:VAR_8>')
def FUNC_1(VAR_7, VAR_8=None):
VAR_11 = 'GIDIDX' + VAR_7.upper()
VAR_11 = FUNC_7(VAR_11)
def FUNC_9():
VAR_16 = 0
with open(VAR_11, 'r') as f:
for _ in f.readlines():
VAR_16 += 1
if VAR_8 is not None and VAR_16 >= VAR_8:
break
with open(VAR_11, 'r') as f:
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist VAR_17="{:d}">\n'.format(VAR_16)
VAR_17 = 0
for VAR_13 in f.readlines():
VAR_13 = path.strip()
yield FUNC_4(VAR_9=VAR_13) + '\n'
VAR_17 += 1
if VAR_8 is not None and VAR_17 >= VAR_8:
break
yield '</objectlist>\n'
VAR_12 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_9()),
status="200 OK",
VAR_12=headers)
@VAR_6.route('/id/<VAR_13:VAR_9>')
def FUNC_2(VAR_9):
VAR_12 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_4(VAR_9=object_path),
"200 OK",
VAR_12=headers)
@VAR_6.route('/meta/<VAR_13:VAR_9>')
def FUNC_3(VAR_9):
VAR_13 = FUNC_6(VAR_9)
VAR_14 = dict()
try:
with DiamondTextAttr(VAR_13, 'r') as attributes:
for VAR_18, value in attributes:
VAR_14[VAR_18] = value
except IOError:
pass
return jsonify(VAR_14)
def FUNC_4(VAR_9):
VAR_13 = FUNC_6(VAR_9)
if os.path.isfile(VAR_13 + ATTR_SUFFIX):
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_9=object_path)),
quoteattr(FUNC_5(VAR_9)),
quoteattr(url_for('.get_object_meta', VAR_9=object_path)))
else:
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_9=object_path)),
quoteattr(FUNC_5(VAR_9)))
def FUNC_5(VAR_9):
if VAR_2:
return 'file://' + FUNC_6(VAR_9)
else:
return url_for('.get_object_src_http', VAR_10=VAR_9)
def FUNC_6(VAR_10):
return safe_join(VAR_4, VAR_10)
def FUNC_7(VAR_11):
return safe_join(VAR_3, VAR_11)
@VAR_6.route('/obj/<VAR_13:VAR_10>')
def FUNC_8(VAR_10):
VAR_13 = FUNC_6(VAR_10)
VAR_12 = Headers()
VAR_15 = send_file(VAR_13,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
VAR_15.headers.extend(VAR_12)
return VAR_15
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
16,
20,
22,
27,
28,
33,
34,
36,
37,
43,
44,
45,
53,
58,
60,
68,
70,
72,
76,
77,
84,
85,
90,
97,
99,
100,
103,
113,
114,
120,
121,
124,
125,
128,
129,
133,
135,
136,
144
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
16,
21,
23,
28,
29,
34,
35,
37,
38,
44,
45,
46,
54,
59,
61,
69,
71,
73,
77,
78,
85,
86,
91,
98,
100,
101,
104,
114,
115,
121,
122,
125,
126,
129,
130,
134,
136,
137,
145
] |
1CWE-79
| """Template filters for Fava.
All functions in this module will be automatically added as template filters.
"""
from __future__ import annotations
import datetime
import os
import re
import unicodedata
from typing import Any
from typing import MutableMapping
from typing import TypeVar
import flask
from beancount.core import compare
from beancount.core import realization
from beancount.core.account import ACCOUNT_RE
from beancount.core.data import Directive
from beancount.core.inventory import Inventory
from beancount.core.number import Decimal
from beancount.core.number import ZERO
from fava.context import g
from fava.core.conversion import cost
from fava.core.conversion import cost_or_value as cost_or_value_without_context
from fava.core.conversion import units
from fava.core.tree import TreeNode
from fava.util.date import Interval
MappingValue = TypeVar("MappingValue")
def remove_keys(
_dict: MutableMapping[str, MappingValue] | None, keys: list[str]
) -> MutableMapping[str, MappingValue]:
"""Remove keys from a dictionary."""
if not _dict:
return {}
new = dict(_dict)
for key in keys:
try:
del new[key]
except KeyError:
pass
return new
def cost_or_value(
inventory: Inventory, date: datetime.date | None = None
) -> Any:
"""Get the cost or value of an inventory."""
return cost_or_value_without_context(
inventory, g.conversion, g.ledger.price_map, date
)
def format_currency(
value: Decimal,
currency: str | None = None,
show_if_zero: bool = False,
invert: bool = False,
) -> str:
"""Format a value using the derived precision for a specified currency."""
if not value and not show_if_zero:
return ""
if value == ZERO:
return g.ledger.format_decimal(ZERO, currency)
if invert:
value = -value
return g.ledger.format_decimal(value, currency)
def format_date(date: datetime.date) -> str:
"""Format a date according to the current interval."""
if g.interval is Interval.YEAR:
return date.strftime("%Y")
if g.interval is Interval.QUARTER:
return f"{date.year}Q{(date.month - 1) // 3 + 1}"
if g.interval is Interval.WEEK:
return date.strftime("%YW%W")
if g.interval is Interval.DAY:
return date.strftime("%Y-%m-%d")
assert g.interval is Interval.MONTH
return date.strftime("%b %Y")
def hash_entry(entry: Directive) -> str:
"""Hash an entry."""
return compare.hash_entry(entry)
def balance_children(account: realization.RealAccount) -> Inventory:
"""Compute the total balance of an account."""
return realization.compute_balance(account)
def get_or_create(
account: realization.RealAccount, account_name: str
) -> realization.RealAccount:
"""Get or create a child account."""
if account.account == account_name:
return account
return realization.get_or_create(account, account_name)
FLAGS_TO_TYPES = {"*": "cleared", "!": "pending"}
def flag_to_type(flag: str) -> str:
"""Names for entry flags."""
return FLAGS_TO_TYPES.get(flag, "other")
def should_show(account: TreeNode) -> bool:
"""Determine whether the account should be shown."""
if not account.balance_children.is_empty() or any(
should_show(a) for a in account.children
):
return True
ledger = g.ledger
filtered = g.filtered
if account.name not in ledger.accounts:
return False
fava_options = ledger.fava_options
if not fava_options.show_closed_accounts and filtered.account_is_closed(
account.name
):
return False
if (
not fava_options.show_accounts_with_zero_balance
and account.balance.is_empty()
):
return False
if (
not fava_options.show_accounts_with_zero_transactions
and not account.has_txns
):
return False
return True
def basename(file_path: str) -> str:
"""Return the basename of a filepath."""
return unicodedata.normalize("NFC", os.path.basename(file_path))
def format_errormsg(message: str) -> str:
"""Match account names in error messages and insert HTML links for them."""
match = re.search(ACCOUNT_RE, message)
if not match:
return message
account = match.group()
url = flask.url_for("account", name=account)
return (
message.replace(account, f'<a href="{url}">{account}</a>')
.replace("for '", "for ")
.replace("': ", ": ")
)
def collapse_account(account_name: str) -> bool:
"""Return true if account should be collapsed."""
collapse_patterns = g.ledger.fava_options.collapse_pattern
return any(pattern.match(account_name) for pattern in collapse_patterns)
FILTERS = [
balance_children,
basename,
collapse_account,
cost,
cost_or_value,
cost_or_value,
flag_to_type,
format_currency,
format_date,
format_errormsg,
get_or_create,
hash_entry,
remove_keys,
should_show,
units,
]
| """Template filters for Fava.
All functions in this module will be automatically added as template filters.
"""
from __future__ import annotations
import datetime
import os
import re
import unicodedata
from typing import Any
from typing import MutableMapping
from typing import TypeVar
from beancount.core import compare
from beancount.core import realization
from beancount.core.account import ACCOUNT_RE
from beancount.core.data import Directive
from beancount.core.inventory import Inventory
from beancount.core.number import Decimal
from beancount.core.number import ZERO
from flask import url_for
from markupsafe import Markup
from fava.context import g
from fava.core.conversion import cost
from fava.core.conversion import cost_or_value as cost_or_value_without_context
from fava.core.conversion import units
from fava.core.tree import TreeNode
from fava.util.date import Interval
MappingValue = TypeVar("MappingValue")
def remove_keys(
_dict: MutableMapping[str, MappingValue] | None, keys: list[str]
) -> MutableMapping[str, MappingValue]:
"""Remove keys from a dictionary."""
if not _dict:
return {}
new = dict(_dict)
for key in keys:
try:
del new[key]
except KeyError:
pass
return new
def cost_or_value(
inventory: Inventory, date: datetime.date | None = None
) -> Any:
"""Get the cost or value of an inventory."""
return cost_or_value_without_context(
inventory, g.conversion, g.ledger.price_map, date
)
def format_currency(
value: Decimal,
currency: str | None = None,
show_if_zero: bool = False,
invert: bool = False,
) -> str:
"""Format a value using the derived precision for a specified currency."""
if not value and not show_if_zero:
return ""
if value == ZERO:
return g.ledger.format_decimal(ZERO, currency)
if invert:
value = -value
return g.ledger.format_decimal(value, currency)
def format_date(date: datetime.date) -> str:
"""Format a date according to the current interval."""
if g.interval is Interval.YEAR:
return date.strftime("%Y")
if g.interval is Interval.QUARTER:
return f"{date.year}Q{(date.month - 1) // 3 + 1}"
if g.interval is Interval.WEEK:
return date.strftime("%YW%W")
if g.interval is Interval.DAY:
return date.strftime("%Y-%m-%d")
assert g.interval is Interval.MONTH
return date.strftime("%b %Y")
def hash_entry(entry: Directive) -> str:
"""Hash an entry."""
return compare.hash_entry(entry)
def balance_children(account: realization.RealAccount) -> Inventory:
"""Compute the total balance of an account."""
return realization.compute_balance(account)
def get_or_create(
account: realization.RealAccount, account_name: str
) -> realization.RealAccount:
"""Get or create a child account."""
if account.account == account_name:
return account
return realization.get_or_create(account, account_name)
FLAGS_TO_TYPES = {"*": "cleared", "!": "pending"}
def flag_to_type(flag: str) -> str:
"""Names for entry flags."""
return FLAGS_TO_TYPES.get(flag, "other")
def should_show(account: TreeNode) -> bool:
"""Determine whether the account should be shown."""
if not account.balance_children.is_empty() or any(
should_show(a) for a in account.children
):
return True
ledger = g.ledger
filtered = g.filtered
if account.name not in ledger.accounts:
return False
fava_options = ledger.fava_options
if not fava_options.show_closed_accounts and filtered.account_is_closed(
account.name
):
return False
if (
not fava_options.show_accounts_with_zero_balance
and account.balance.is_empty()
):
return False
if (
not fava_options.show_accounts_with_zero_transactions
and not account.has_txns
):
return False
return True
def basename(file_path: str) -> str:
"""Return the basename of a filepath."""
return unicodedata.normalize("NFC", os.path.basename(file_path))
def format_errormsg(message: str) -> Markup:
"""Match account names in error messages and insert HTML links for them."""
match = re.search(ACCOUNT_RE, message)
if not match:
return Markup(message)
account = match.group()
url = url_for("account", name=account)
return Markup(
message.replace(account, f'<a href="{url}">{account}</a>')
.replace("for '", "for ")
.replace("': ", ": ")
)
def collapse_account(account_name: str) -> bool:
"""Return true if account should be collapsed."""
collapse_patterns = g.ledger.fava_options.collapse_pattern
return any(pattern.match(account_name) for pattern in collapse_patterns)
FILTERS = [
balance_children,
basename,
collapse_account,
cost,
cost_or_value,
cost_or_value,
flag_to_type,
format_currency,
format_date,
format_errormsg,
get_or_create,
hash_entry,
remove_keys,
should_show,
units,
]
| xss | {
"code": [
"import flask",
"def format_errormsg(message: str) -> str:",
" return message",
" url = flask.url_for(\"account\", name=account)",
" return ("
],
"line_no": [
15,
148,
152,
154,
155
]
} | {
"code": [
"from flask import url_for",
"def format_errormsg(message: str) -> Markup:",
" return Markup(message)",
" url = url_for(\"account\", name=account)",
" return Markup("
],
"line_no": [
22,
149,
153,
155,
156
]
} |
from __future__ import annotations
import .datetime
import os
import re
import unicodedata
from typing import Any
from typing import MutableMapping
from typing import TypeVar
import flask
from beancount.core import compare
from beancount.core import realization
from beancount.core.account import ACCOUNT_RE
from beancount.core.data import Directive
from beancount.core.inventory import Inventory
from beancount.core.number import Decimal
from beancount.core.number import ZERO
from fava.context import g
from fava.core.conversion import cost
from fava.core.conversion import .cost_or_value as cost_or_value_without_context
from fava.core.conversion import units
from fava.core.tree import TreeNode
from fava.util.date import Interval
VAR_0 = TypeVar("MappingValue")
def FUNC_0(
VAR_1: MutableMapping[str, VAR_0] | None, VAR_2: list[str]
) -> MutableMapping[str, VAR_0]:
if not VAR_1:
return {}
VAR_17 = dict(VAR_1)
for key in VAR_2:
try:
del VAR_17[key]
except KeyError:
pass
return VAR_17
def FUNC_1(
VAR_3: Inventory, VAR_4: datetime.date | None = None
) -> Any:
return cost_or_value_without_context(
VAR_3, g.conversion, g.ledger.price_map, VAR_4
)
def FUNC_2(
VAR_5: Decimal,
VAR_6: str | None = None,
VAR_7: bool = False,
VAR_8: bool = False,
) -> str:
if not VAR_5 and not VAR_7:
return ""
if VAR_5 == ZERO:
return g.ledger.format_decimal(ZERO, VAR_6)
if VAR_8:
VAR_5 = -value
return g.ledger.format_decimal(VAR_5, VAR_6)
def FUNC_3(VAR_4: datetime.date) -> str:
if g.interval is Interval.YEAR:
return VAR_4.strftime("%Y")
if g.interval is Interval.QUARTER:
return f"{VAR_4.year}Q{(VAR_4.month - 1) // 3 + 1}"
if g.interval is Interval.WEEK:
return VAR_4.strftime("%YW%W")
if g.interval is Interval.DAY:
return VAR_4.strftime("%Y-%m-%d")
assert g.interval is Interval.MONTH
return VAR_4.strftime("%b %Y")
def FUNC_4(VAR_9: Directive) -> str:
return compare.hash_entry(VAR_9)
def FUNC_5(VAR_10: realization.RealAccount) -> Inventory:
return realization.compute_balance(VAR_10)
def FUNC_6(
VAR_10: realization.RealAccount, VAR_11: str
) -> realization.RealAccount:
if VAR_10.account == VAR_11:
return VAR_10
return realization.get_or_create(VAR_10, VAR_11)
VAR_12 = {"*": "cleared", "!": "pending"}
def FUNC_7(VAR_13: str) -> str:
return VAR_12.get(VAR_13, "other")
def FUNC_8(VAR_10: TreeNode) -> bool:
if not VAR_10.balance_children.is_empty() or any(
FUNC_8(a) for a in VAR_10.children
):
return True
VAR_18 = g.ledger
VAR_19 = g.filtered
if VAR_10.name not in VAR_18.accounts:
return False
VAR_20 = VAR_18.fava_options
if not VAR_20.show_closed_accounts and VAR_19.account_is_closed(
VAR_10.name
):
return False
if (
not VAR_20.show_accounts_with_zero_balance
and VAR_10.balance.is_empty()
):
return False
if (
not VAR_20.show_accounts_with_zero_transactions
and not VAR_10.has_txns
):
return False
return True
def FUNC_9(VAR_14: str) -> str:
return unicodedata.normalize("NFC", os.path.basename(VAR_14))
def FUNC_10(VAR_15: str) -> str:
VAR_21 = re.search(ACCOUNT_RE, VAR_15)
if not VAR_21:
return VAR_15
VAR_10 = VAR_21.group()
VAR_22 = flask.url_for("account", name=VAR_10)
return (
VAR_15.replace(VAR_10, f'<a href="{VAR_22}">{VAR_10}</a>')
.replace("for '", "for ")
.replace("': ", ": ")
)
def FUNC_11(VAR_11: str) -> bool:
VAR_23 = g.ledger.fava_options.collapse_pattern
return any(pattern.match(VAR_11) for pattern in VAR_23)
VAR_16 = [
FUNC_5,
FUNC_9,
FUNC_11,
cost,
FUNC_1,
cost_or_value,
FUNC_7,
FUNC_2,
FUNC_3,
FUNC_10,
FUNC_6,
FUNC_4,
FUNC_0,
FUNC_8,
units,
]
|
from __future__ import annotations
import .datetime
import os
import re
import unicodedata
from typing import Any
from typing import MutableMapping
from typing import TypeVar
from beancount.core import compare
from beancount.core import realization
from beancount.core.account import ACCOUNT_RE
from beancount.core.data import Directive
from beancount.core.inventory import Inventory
from beancount.core.number import Decimal
from beancount.core.number import ZERO
from flask import .url_for
from markupsafe import Markup
from fava.context import g
from fava.core.conversion import cost
from fava.core.conversion import .cost_or_value as cost_or_value_without_context
from fava.core.conversion import units
from fava.core.tree import TreeNode
from fava.util.date import Interval
VAR_0 = TypeVar("MappingValue")
def FUNC_0(
VAR_1: MutableMapping[str, VAR_0] | None, VAR_2: list[str]
) -> MutableMapping[str, VAR_0]:
if not VAR_1:
return {}
VAR_17 = dict(VAR_1)
for key in VAR_2:
try:
del VAR_17[key]
except KeyError:
pass
return VAR_17
def FUNC_1(
VAR_3: Inventory, VAR_4: datetime.date | None = None
) -> Any:
return cost_or_value_without_context(
VAR_3, g.conversion, g.ledger.price_map, VAR_4
)
def FUNC_2(
VAR_5: Decimal,
VAR_6: str | None = None,
VAR_7: bool = False,
VAR_8: bool = False,
) -> str:
if not VAR_5 and not VAR_7:
return ""
if VAR_5 == ZERO:
return g.ledger.format_decimal(ZERO, VAR_6)
if VAR_8:
VAR_5 = -value
return g.ledger.format_decimal(VAR_5, VAR_6)
def FUNC_3(VAR_4: datetime.date) -> str:
if g.interval is Interval.YEAR:
return VAR_4.strftime("%Y")
if g.interval is Interval.QUARTER:
return f"{VAR_4.year}Q{(VAR_4.month - 1) // 3 + 1}"
if g.interval is Interval.WEEK:
return VAR_4.strftime("%YW%W")
if g.interval is Interval.DAY:
return VAR_4.strftime("%Y-%m-%d")
assert g.interval is Interval.MONTH
return VAR_4.strftime("%b %Y")
def FUNC_4(VAR_9: Directive) -> str:
return compare.hash_entry(VAR_9)
def FUNC_5(VAR_10: realization.RealAccount) -> Inventory:
return realization.compute_balance(VAR_10)
def FUNC_6(
VAR_10: realization.RealAccount, VAR_11: str
) -> realization.RealAccount:
if VAR_10.account == VAR_11:
return VAR_10
return realization.get_or_create(VAR_10, VAR_11)
VAR_12 = {"*": "cleared", "!": "pending"}
def FUNC_7(VAR_13: str) -> str:
return VAR_12.get(VAR_13, "other")
def FUNC_8(VAR_10: TreeNode) -> bool:
if not VAR_10.balance_children.is_empty() or any(
FUNC_8(a) for a in VAR_10.children
):
return True
VAR_18 = g.ledger
VAR_19 = g.filtered
if VAR_10.name not in VAR_18.accounts:
return False
VAR_20 = VAR_18.fava_options
if not VAR_20.show_closed_accounts and VAR_19.account_is_closed(
VAR_10.name
):
return False
if (
not VAR_20.show_accounts_with_zero_balance
and VAR_10.balance.is_empty()
):
return False
if (
not VAR_20.show_accounts_with_zero_transactions
and not VAR_10.has_txns
):
return False
return True
def FUNC_9(VAR_14: str) -> str:
return unicodedata.normalize("NFC", os.path.basename(VAR_14))
def FUNC_10(VAR_15: str) -> Markup:
VAR_21 = re.search(ACCOUNT_RE, VAR_15)
if not VAR_21:
return Markup(VAR_15)
VAR_10 = VAR_21.group()
VAR_22 = url_for("account", name=VAR_10)
return Markup(
VAR_15.replace(VAR_10, f'<a href="{VAR_22}">{VAR_10}</a>')
.replace("for '", "for ")
.replace("': ", ": ")
)
def FUNC_11(VAR_11: str) -> bool:
VAR_23 = g.ledger.fava_options.collapse_pattern
return any(pattern.match(VAR_11) for pattern in VAR_23)
VAR_16 = [
FUNC_5,
FUNC_9,
FUNC_11,
cost,
FUNC_1,
cost_or_value,
FUNC_7,
FUNC_2,
FUNC_3,
FUNC_10,
FUNC_6,
FUNC_4,
FUNC_0,
FUNC_8,
units,
]
| [
2,
6,
14,
23,
30,
32,
33,
47,
48,
56,
57,
72,
73,
86,
87,
91,
92,
96,
97,
105,
106,
108,
109,
113,
114,
141,
142,
146,
147,
160,
161,
166,
167,
185,
1,
2,
3,
4,
37,
52,
64,
75,
89,
94,
101,
111,
116,
144,
149,
163
] | [
2,
6,
14,
24,
31,
33,
34,
48,
49,
57,
58,
73,
74,
87,
88,
92,
93,
97,
98,
106,
107,
109,
110,
114,
115,
142,
143,
147,
148,
161,
162,
167,
168,
186,
1,
2,
3,
4,
38,
53,
65,
76,
90,
95,
102,
112,
117,
145,
150,
164
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import Mock
from synapse.api.constants import Membership
from synapse.rest.admin import register_servlets_for_client_rest_resource
from synapse.rest.client.v1 import login, room
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import event_injection
from tests.utils import TestHomeServer
class RoomMemberStoreTestCase(unittest.HomeserverTestCase):
servlets = [
login.register_servlets,
register_servlets_for_client_rest_resource,
room.register_servlets,
]
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
resource_for_federation=Mock(), http_client=None
)
return hs
def prepare(self, reactor, clock, hs: TestHomeServer):
# We can't test the RoomMemberStore on its own without the other event
# storage logic
self.store = hs.get_datastore()
self.u_alice = self.register_user("alice", "pass")
self.t_alice = self.login("alice", "pass")
self.u_bob = self.register_user("bob", "pass")
# User elsewhere on another host
self.u_charlie = UserID.from_string("@charlie:elsewhere")
def test_one_member(self):
# Alice creates the room, and is automatically joined
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
rooms_for_user = self.get_success(
self.store.get_rooms_for_local_user_where_membership_is(
self.u_alice, [Membership.JOIN]
)
)
self.assertEquals([self.room], [m.room_id for m in rooms_for_user])
def test_count_known_servers(self):
"""
_count_known_servers will calculate how many servers are in a room.
"""
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
servers = self.get_success(self.store._count_known_servers())
self.assertEqual(servers, 2)
def test_count_known_servers_stat_counter_disabled(self):
"""
If enabled, the metrics for how many servers are known will be counted.
"""
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump()
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
@unittest.override_config(
{"enable_metrics": True, "metrics_flags": {"known_servers": True}}
)
def test_count_known_servers_stat_counter_enabled(self):
"""
If enabled, the metrics for how many servers are known will be counted.
"""
# Initialises to 1 -- itself
self.assertEqual(self.store._known_servers_count, 1)
self.pump()
# No rooms have been joined, so technically the SQL returns 0, but it
# will still say it knows about itself.
self.assertEqual(self.store._known_servers_count, 1)
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump(1)
# It now knows about Charlie's server.
self.assertEqual(self.store._known_servers_count, 2)
def test_get_joined_users_from_context(self):
room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
bob_event = self.get_success(
event_injection.inject_member_event(
self.hs, room, self.u_bob, Membership.JOIN
)
)
# first, create a regular event
event, context = self.get_success(
event_injection.create_event(
self.hs,
room_id=room,
sender=self.u_alice,
prev_event_ids=[bob_event.event_id],
type="m.test.1",
content={},
)
)
users = self.get_success(
self.store.get_joined_users_from_context(event, context)
)
self.assertEqual(users.keys(), {self.u_alice, self.u_bob})
# Regression test for #7376: create a state event whose key matches bob's
# user_id, but which is *not* a membership event, and persist that; then check
# that `get_joined_users_from_context` returns the correct users for the next event.
non_member_event = self.get_success(
event_injection.inject_event(
self.hs,
room_id=room,
sender=self.u_bob,
prev_event_ids=[bob_event.event_id],
type="m.test.2",
state_key=self.u_bob,
content={},
)
)
event, context = self.get_success(
event_injection.create_event(
self.hs,
room_id=room,
sender=self.u_alice,
prev_event_ids=[non_member_event.event_id],
type="m.test.3",
content={},
)
)
users = self.get_success(
self.store.get_joined_users_from_context(event, context)
)
self.assertEqual(users.keys(), {self.u_alice, self.u_bob})
class CurrentStateMembershipUpdateTestCase(unittest.HomeserverTestCase):
def prepare(self, reactor, clock, homeserver):
self.store = homeserver.get_datastore()
self.room_creator = homeserver.get_room_creation_handler()
def test_can_rerun_update(self):
# First make sure we have completed all updates.
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
# Now let's create a room, which will insert a membership
user = UserID("alice", "test")
requester = create_requester(user)
self.get_success(self.room_creator.create_room(requester, {}))
# Register the background update to run again.
self.get_success(
self.store.db_pool.simple_insert(
table="background_updates",
values={
"update_name": "current_state_events_membership",
"progress_json": "{}",
"depends_on": None,
},
)
)
# ... and tell the DataStore that it hasn't finished all updates yet
self.store.db_pool.updates._all_done = False
# Now let's actually drive the updates to completion
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import Mock
from synapse.api.constants import Membership
from synapse.rest.admin import register_servlets_for_client_rest_resource
from synapse.rest.client.v1 import login, room
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import event_injection
from tests.utils import TestHomeServer
class RoomMemberStoreTestCase(unittest.HomeserverTestCase):
servlets = [
login.register_servlets,
register_servlets_for_client_rest_resource,
room.register_servlets,
]
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
resource_for_federation=Mock(), federation_http_client=None
)
return hs
def prepare(self, reactor, clock, hs: TestHomeServer):
# We can't test the RoomMemberStore on its own without the other event
# storage logic
self.store = hs.get_datastore()
self.u_alice = self.register_user("alice", "pass")
self.t_alice = self.login("alice", "pass")
self.u_bob = self.register_user("bob", "pass")
# User elsewhere on another host
self.u_charlie = UserID.from_string("@charlie:elsewhere")
def test_one_member(self):
# Alice creates the room, and is automatically joined
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
rooms_for_user = self.get_success(
self.store.get_rooms_for_local_user_where_membership_is(
self.u_alice, [Membership.JOIN]
)
)
self.assertEquals([self.room], [m.room_id for m in rooms_for_user])
def test_count_known_servers(self):
"""
_count_known_servers will calculate how many servers are in a room.
"""
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
servers = self.get_success(self.store._count_known_servers())
self.assertEqual(servers, 2)
def test_count_known_servers_stat_counter_disabled(self):
"""
If enabled, the metrics for how many servers are known will be counted.
"""
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump()
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
@unittest.override_config(
{"enable_metrics": True, "metrics_flags": {"known_servers": True}}
)
def test_count_known_servers_stat_counter_enabled(self):
"""
If enabled, the metrics for how many servers are known will be counted.
"""
# Initialises to 1 -- itself
self.assertEqual(self.store._known_servers_count, 1)
self.pump()
# No rooms have been joined, so technically the SQL returns 0, but it
# will still say it knows about itself.
self.assertEqual(self.store._known_servers_count, 1)
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump(1)
# It now knows about Charlie's server.
self.assertEqual(self.store._known_servers_count, 2)
def test_get_joined_users_from_context(self):
room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
bob_event = self.get_success(
event_injection.inject_member_event(
self.hs, room, self.u_bob, Membership.JOIN
)
)
# first, create a regular event
event, context = self.get_success(
event_injection.create_event(
self.hs,
room_id=room,
sender=self.u_alice,
prev_event_ids=[bob_event.event_id],
type="m.test.1",
content={},
)
)
users = self.get_success(
self.store.get_joined_users_from_context(event, context)
)
self.assertEqual(users.keys(), {self.u_alice, self.u_bob})
# Regression test for #7376: create a state event whose key matches bob's
# user_id, but which is *not* a membership event, and persist that; then check
# that `get_joined_users_from_context` returns the correct users for the next event.
non_member_event = self.get_success(
event_injection.inject_event(
self.hs,
room_id=room,
sender=self.u_bob,
prev_event_ids=[bob_event.event_id],
type="m.test.2",
state_key=self.u_bob,
content={},
)
)
event, context = self.get_success(
event_injection.create_event(
self.hs,
room_id=room,
sender=self.u_alice,
prev_event_ids=[non_member_event.event_id],
type="m.test.3",
content={},
)
)
users = self.get_success(
self.store.get_joined_users_from_context(event, context)
)
self.assertEqual(users.keys(), {self.u_alice, self.u_bob})
class CurrentStateMembershipUpdateTestCase(unittest.HomeserverTestCase):
def prepare(self, reactor, clock, homeserver):
self.store = homeserver.get_datastore()
self.room_creator = homeserver.get_room_creation_handler()
def test_can_rerun_update(self):
# First make sure we have completed all updates.
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
# Now let's create a room, which will insert a membership
user = UserID("alice", "test")
requester = create_requester(user)
self.get_success(self.room_creator.create_room(requester, {}))
# Register the background update to run again.
self.get_success(
self.store.db_pool.simple_insert(
table="background_updates",
values={
"update_name": "current_state_events_membership",
"progress_json": "{}",
"depends_on": None,
},
)
)
# ... and tell the DataStore that it hasn't finished all updates yet
self.store.db_pool.updates._all_done = False
# Now let's actually drive the updates to completion
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
| open_redirect | {
"code": [
" resource_for_federation=Mock(), http_client=None"
],
"line_no": [
39
]
} | {
"code": [
" resource_for_federation=Mock(), federation_http_client=None"
],
"line_no": [
39
]
} |
from unittest.mock import Mock
from synapse.api.constants import Membership
from synapse.rest.admin import register_servlets_for_client_rest_resource
from synapse.rest.client.v1 import login, VAR_7
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import .event_injection
from tests.utils import TestHomeServer
class CLASS_0(unittest.HomeserverTestCase):
VAR_0 = [
login.register_servlets,
register_servlets_for_client_rest_resource,
VAR_7.register_servlets,
]
def FUNC_0(self, VAR_1, VAR_2):
VAR_3 = self.setup_test_homeserver(
resource_for_federation=Mock(), http_client=None
)
return VAR_3
def FUNC_1(self, VAR_1, VAR_2, VAR_3: TestHomeServer):
self.store = VAR_3.get_datastore()
self.u_alice = self.register_user("alice", "pass")
self.t_alice = self.login("alice", "pass")
self.u_bob = self.register_user("bob", "pass")
self.u_charlie = UserID.from_string("@charlie:elsewhere")
def FUNC_2(self):
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
VAR_5 = self.get_success(
self.store.get_rooms_for_local_user_where_membership_is(
self.u_alice, [Membership.JOIN]
)
)
self.assertEquals([self.room], [m.room_id for m in VAR_5])
def FUNC_3(self):
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
VAR_6 = self.get_success(self.store._count_known_servers())
self.assertEqual(VAR_6, 2)
def FUNC_4(self):
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump()
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
@unittest.override_config(
{"enable_metrics": True, "metrics_flags": {"known_servers": True}}
)
def FUNC_5(self):
self.assertEqual(self.store._known_servers_count, 1)
self.pump()
self.assertEqual(self.store._known_servers_count, 1)
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump(1)
self.assertEqual(self.store._known_servers_count, 2)
def FUNC_6(self):
VAR_7 = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
VAR_8 = self.get_success(
event_injection.inject_member_event(
self.hs, VAR_7, self.u_bob, Membership.JOIN
)
)
VAR_9, VAR_10 = self.get_success(
event_injection.create_event(
self.hs,
room_id=VAR_7,
sender=self.u_alice,
prev_event_ids=[VAR_8.event_id],
type="m.test.1",
content={},
)
)
VAR_11 = self.get_success(
self.store.get_joined_users_from_context(VAR_9, VAR_10)
)
self.assertEqual(VAR_11.keys(), {self.u_alice, self.u_bob})
VAR_12 = self.get_success(
event_injection.inject_event(
self.hs,
room_id=VAR_7,
sender=self.u_bob,
prev_event_ids=[VAR_8.event_id],
type="m.test.2",
state_key=self.u_bob,
content={},
)
)
VAR_9, VAR_10 = self.get_success(
event_injection.create_event(
self.hs,
room_id=VAR_7,
sender=self.u_alice,
prev_event_ids=[VAR_12.event_id],
type="m.test.3",
content={},
)
)
VAR_11 = self.get_success(
self.store.get_joined_users_from_context(VAR_9, VAR_10)
)
self.assertEqual(VAR_11.keys(), {self.u_alice, self.u_bob})
class CLASS_1(unittest.HomeserverTestCase):
def FUNC_1(self, VAR_1, VAR_2, VAR_4):
self.store = VAR_4.get_datastore()
self.room_creator = VAR_4.get_room_creation_handler()
def FUNC_7(self):
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
VAR_13 = UserID("alice", "test")
VAR_14 = create_requester(VAR_13)
self.get_success(self.room_creator.create_room(VAR_14, {}))
self.get_success(
self.store.db_pool.simple_insert(
table="background_updates",
values={
"update_name": "current_state_events_membership",
"progress_json": "{}",
"depends_on": None,
},
)
)
self.store.db_pool.updates._all_done = False
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
|
from unittest.mock import Mock
from synapse.api.constants import Membership
from synapse.rest.admin import register_servlets_for_client_rest_resource
from synapse.rest.client.v1 import login, VAR_7
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import .event_injection
from tests.utils import TestHomeServer
class CLASS_0(unittest.HomeserverTestCase):
VAR_0 = [
login.register_servlets,
register_servlets_for_client_rest_resource,
VAR_7.register_servlets,
]
def FUNC_0(self, VAR_1, VAR_2):
VAR_3 = self.setup_test_homeserver(
resource_for_federation=Mock(), federation_http_client=None
)
return VAR_3
def FUNC_1(self, VAR_1, VAR_2, VAR_3: TestHomeServer):
self.store = VAR_3.get_datastore()
self.u_alice = self.register_user("alice", "pass")
self.t_alice = self.login("alice", "pass")
self.u_bob = self.register_user("bob", "pass")
self.u_charlie = UserID.from_string("@charlie:elsewhere")
def FUNC_2(self):
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
VAR_5 = self.get_success(
self.store.get_rooms_for_local_user_where_membership_is(
self.u_alice, [Membership.JOIN]
)
)
self.assertEquals([self.room], [m.room_id for m in VAR_5])
def FUNC_3(self):
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
VAR_6 = self.get_success(self.store._count_known_servers())
self.assertEqual(VAR_6, 2)
def FUNC_4(self):
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump()
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
@unittest.override_config(
{"enable_metrics": True, "metrics_flags": {"known_servers": True}}
)
def FUNC_5(self):
self.assertEqual(self.store._known_servers_count, 1)
self.pump()
self.assertEqual(self.store._known_servers_count, 1)
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump(1)
self.assertEqual(self.store._known_servers_count, 2)
def FUNC_6(self):
VAR_7 = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
VAR_8 = self.get_success(
event_injection.inject_member_event(
self.hs, VAR_7, self.u_bob, Membership.JOIN
)
)
VAR_9, VAR_10 = self.get_success(
event_injection.create_event(
self.hs,
room_id=VAR_7,
sender=self.u_alice,
prev_event_ids=[VAR_8.event_id],
type="m.test.1",
content={},
)
)
VAR_11 = self.get_success(
self.store.get_joined_users_from_context(VAR_9, VAR_10)
)
self.assertEqual(VAR_11.keys(), {self.u_alice, self.u_bob})
VAR_12 = self.get_success(
event_injection.inject_event(
self.hs,
room_id=VAR_7,
sender=self.u_bob,
prev_event_ids=[VAR_8.event_id],
type="m.test.2",
state_key=self.u_bob,
content={},
)
)
VAR_9, VAR_10 = self.get_success(
event_injection.create_event(
self.hs,
room_id=VAR_7,
sender=self.u_alice,
prev_event_ids=[VAR_12.event_id],
type="m.test.3",
content={},
)
)
VAR_11 = self.get_success(
self.store.get_joined_users_from_context(VAR_9, VAR_10)
)
self.assertEqual(VAR_11.keys(), {self.u_alice, self.u_bob})
class CLASS_1(unittest.HomeserverTestCase):
def FUNC_1(self, VAR_1, VAR_2, VAR_4):
self.store = VAR_4.get_datastore()
self.room_creator = VAR_4.get_room_creation_handler()
def FUNC_7(self):
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
VAR_13 = UserID("alice", "test")
VAR_14 = create_requester(VAR_13)
self.get_success(self.room_creator.create_room(VAR_14, {}))
self.get_success(
self.store.db_pool.simple_insert(
table="background_updates",
values={
"update_name": "current_state_events_membership",
"progress_json": "{}",
"depends_on": None,
},
)
)
self.store.db_pool.updates._all_done = False
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
23,
27,
28,
30,
36,
42,
44,
45,
46,
48,
52,
53,
55,
57,
58,
60,
66,
68,
76,
79,
85,
89,
91,
93,
101,
103,
105,
106,
107,
109,
113,
115,
116,
118,
126,
127,
138,
143,
144,
145,
146,
172,
173,
178,
180,
187,
188,
192,
193,
204,
205,
207,
208,
215,
70,
71,
72,
81,
82,
83,
98,
99,
100
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
23,
27,
28,
30,
36,
42,
44,
45,
46,
48,
52,
53,
55,
57,
58,
60,
66,
68,
76,
79,
85,
89,
91,
93,
101,
103,
105,
106,
107,
109,
113,
115,
116,
118,
126,
127,
138,
143,
144,
145,
146,
172,
173,
178,
180,
187,
188,
192,
193,
204,
205,
207,
208,
215,
70,
71,
72,
81,
82,
83,
98,
99,
100
] |
0CWE-22
| ##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
import unittest
from chameleon.exc import ExpressionError
import zope.component.testing
from AccessControl import SecurityManager
from AccessControl.SecurityManagement import noSecurityManager
from Acquisition import Implicit
from Products.PageTemplates.interfaces import IUnicodeEncodingConflictResolver
from Products.PageTemplates.PageTemplate import PageTemplate
from Products.PageTemplates.tests import util
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
PreferredCharsetResolver
from zExceptions import NotFound
from zope.component import provideUtility
from zope.traversing.adapters import DefaultTraversable
from .util import useChameleonEngine
class AqPageTemplate(Implicit, PageTemplate):
pass
class Folder(util.Base):
pass
class UnitTestSecurityPolicy:
"""
Stub out the existing security policy for unit testing purposes.
"""
# Standard SecurityPolicy interface
def validate(self,
accessed=None,
container=None,
name=None,
value=None,
context=None,
roles=None,
*args, **kw):
return 1
def checkPermission(self, permission, object, context):
return 1
class HTMLTests(zope.component.testing.PlacelessSetup, unittest.TestCase):
PREFIX = None
def setUp(self):
super().setUp()
useChameleonEngine()
zope.component.provideAdapter(DefaultTraversable, (None,))
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.folder = f = Folder()
f.laf = AqPageTemplate()
f.t = AqPageTemplate()
self.policy = UnitTestSecurityPolicy()
self.oldPolicy = SecurityManager.setSecurityPolicy(self.policy)
noSecurityManager() # Use the new policy.
def tearDown(self):
super().tearDown()
SecurityManager.setSecurityPolicy(self.oldPolicy)
noSecurityManager() # Reset to old policy.
def assert_expected(self, t, fname, *args, **kwargs):
t.write(util.read_input(fname))
assert not t._v_errors, 'Template errors: %s' % t._v_errors
if self.PREFIX is not None \
and util.exists_output(self.PREFIX + fname):
fname = self.PREFIX + fname
expect = util.read_output(fname)
out = t(*args, **kwargs)
util.check_html(expect, out)
def assert_expected_unicode(self, t, fname, *args, **kwargs):
t.write(util.read_input(fname))
assert not t._v_errors, 'Template errors: %s' % t._v_errors
expect = util.read_output(fname)
if not isinstance(expect, str):
expect = str(expect, 'utf-8')
out = t(*args, **kwargs)
util.check_html(expect, out)
def getProducts(self):
return [
{'description': 'This is the tee for those who LOVE Zope. '
'Show your heart on your tee.',
'price': 12.99, 'image': 'smlatee.jpg'
},
{'description': 'This is the tee for Jim Fulton. '
'He\'s the Zope Pope!',
'price': 11.99, 'image': 'smpztee.jpg'
},
]
def test_1(self):
self.assert_expected(self.folder.laf, 'TeeShopLAF.html')
def test_2(self):
self.folder.laf.write(util.read_input('TeeShopLAF.html'))
self.assert_expected(self.folder.t, 'TeeShop2.html',
getProducts=self.getProducts)
def test_3(self):
self.folder.laf.write(util.read_input('TeeShopLAF.html'))
self.assert_expected(self.folder.t, 'TeeShop1.html',
getProducts=self.getProducts)
def testSimpleLoop(self):
self.assert_expected(self.folder.t, 'Loop1.html')
def testFancyLoop(self):
self.assert_expected(self.folder.t, 'Loop2.html')
def testGlobalsShadowLocals(self):
self.assert_expected(self.folder.t, 'GlobalsShadowLocals.html')
def testStringExpressions(self):
self.assert_expected(self.folder.t, 'StringExpression.html')
def testReplaceWithNothing(self):
self.assert_expected(self.folder.t, 'CheckNothing.html')
def testWithXMLHeader(self):
self.assert_expected(self.folder.t, 'CheckWithXMLHeader.html')
def testNotExpression(self):
self.assert_expected(self.folder.t, 'CheckNotExpression.html')
def testPathNothing(self):
self.assert_expected(self.folder.t, 'CheckPathNothing.html')
def testPathAlt(self):
self.assert_expected(self.folder.t, 'CheckPathAlt.html')
def testPathTraverse(self):
# need to perform this test with a "real" folder
from OFS.Folder import Folder
f = self.folder
self.folder = Folder()
self.folder.t, self.folder.laf = f.t, f.laf
self.folder.laf.write('ok')
self.assert_expected(self.folder.t, 'CheckPathTraverse.html')
def testBatchIteration(self):
self.assert_expected(self.folder.t, 'CheckBatchIteration.html')
def testUnicodeInserts(self):
self.assert_expected_unicode(self.folder.t, 'CheckUnicodeInserts.html')
def testI18nTranslate(self):
self.assert_expected(self.folder.t, 'CheckI18nTranslate.html')
def testImportOldStyleClass(self):
self.assert_expected(self.folder.t, 'CheckImportOldStyleClass.html')
def testRepeatVariable(self):
self.assert_expected(self.folder.t, 'RepeatVariable.html')
def testBooleanAttributes(self):
# Test rendering an attribute that should be empty or left out
# if the value is non-True
self.assert_expected(self.folder.t, 'BooleanAttributes.html')
def testBooleanAttributesAndDefault(self):
# Zope 2.9 and below support the semantics that an HTML
# "boolean" attribute (e.g. 'selected', 'disabled', etc.) can
# be used together with 'default'.
self.assert_expected(self.folder.t, 'BooleanAttributesAndDefault.html')
def testInterpolationInContent(self):
# the chameleon template engine supports ``${path}``
# interpolations not only as part of ``string`` expressions
# but globally
self.assert_expected(self.folder.t, 'InterpolationInContent.html')
def testBadExpression(self):
t = self.folder.t
t.write("<p tal:define='p a//b' />")
with self.assertRaises(ExpressionError):
t()
def testPathAlternativesWithSpaces(self):
self.assert_expected(self.folder.t, 'PathAlternativesWithSpaces.html')
def testDefaultKeywordHandling(self):
self.assert_expected(self.folder.t, 'Default.html')
def testSwitch(self):
self.assert_expected(self.folder.t, 'switch.html')
def test_unicode_conflict_resolution(self):
# override with the more "demanding" resolver
provideUtility(PreferredCharsetResolver)
t = PageTemplate()
self.assert_expected(t, 'UnicodeResolution.html')
def test_underscore_traversal(self):
t = self.folder.t
t.write('<p tal:define="p context/__class__" />')
with self.assertRaises(NotFound):
t()
t.write('<p tal:define="p nocall: random/_itertools/repeat"/>')
with self.assertRaises(NotFound):
t()
t.write('<p tal:content="random/_itertools/repeat/foobar"/>')
with self.assertRaises(NotFound):
t()
| ##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
import unittest
from chameleon.exc import ExpressionError
import zope.component.testing
from AccessControl import SecurityManager
from AccessControl.SecurityManagement import noSecurityManager
from Acquisition import Implicit
from Products.PageTemplates.interfaces import IUnicodeEncodingConflictResolver
from Products.PageTemplates.PageTemplate import PageTemplate
from Products.PageTemplates.tests import util
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
PreferredCharsetResolver
from Products.PageTemplates.ZopePageTemplate import ZopePageTemplate
from zExceptions import NotFound
from zope.component import provideUtility
from zope.location.interfaces import LocationError
from zope.traversing.adapters import DefaultTraversable
from .util import useChameleonEngine
class AqPageTemplate(Implicit, PageTemplate):
pass
class AqZopePageTemplate(Implicit, ZopePageTemplate):
pass
class Folder(util.Base):
pass
class UnitTestSecurityPolicy:
"""
Stub out the existing security policy for unit testing purposes.
"""
# Standard SecurityPolicy interface
def validate(self,
accessed=None,
container=None,
name=None,
value=None,
context=None,
roles=None,
*args, **kw):
return 1
def checkPermission(self, permission, object, context):
return 1
class HTMLTests(zope.component.testing.PlacelessSetup, unittest.TestCase):
PREFIX = None
def setUp(self):
super().setUp()
useChameleonEngine()
zope.component.provideAdapter(DefaultTraversable, (None,))
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.folder = f = Folder()
f.laf = AqPageTemplate()
f.t = AqPageTemplate()
f.z = AqZopePageTemplate('testing')
self.policy = UnitTestSecurityPolicy()
self.oldPolicy = SecurityManager.setSecurityPolicy(self.policy)
noSecurityManager() # Use the new policy.
def tearDown(self):
super().tearDown()
SecurityManager.setSecurityPolicy(self.oldPolicy)
noSecurityManager() # Reset to old policy.
def assert_expected(self, t, fname, *args, **kwargs):
t.write(util.read_input(fname))
assert not t._v_errors, 'Template errors: %s' % t._v_errors
if self.PREFIX is not None \
and util.exists_output(self.PREFIX + fname):
fname = self.PREFIX + fname
expect = util.read_output(fname)
out = t(*args, **kwargs)
util.check_html(expect, out)
def assert_expected_unicode(self, t, fname, *args, **kwargs):
t.write(util.read_input(fname))
assert not t._v_errors, 'Template errors: %s' % t._v_errors
expect = util.read_output(fname)
if not isinstance(expect, str):
expect = str(expect, 'utf-8')
out = t(*args, **kwargs)
util.check_html(expect, out)
def getProducts(self):
return [
{'description': 'This is the tee for those who LOVE Zope. '
'Show your heart on your tee.',
'price': 12.99, 'image': 'smlatee.jpg'
},
{'description': 'This is the tee for Jim Fulton. '
'He\'s the Zope Pope!',
'price': 11.99, 'image': 'smpztee.jpg'
},
]
def test_1(self):
self.assert_expected(self.folder.laf, 'TeeShopLAF.html')
def test_2(self):
self.folder.laf.write(util.read_input('TeeShopLAF.html'))
self.assert_expected(self.folder.t, 'TeeShop2.html',
getProducts=self.getProducts)
def test_3(self):
self.folder.laf.write(util.read_input('TeeShopLAF.html'))
self.assert_expected(self.folder.t, 'TeeShop1.html',
getProducts=self.getProducts)
def testSimpleLoop(self):
self.assert_expected(self.folder.t, 'Loop1.html')
def testFancyLoop(self):
self.assert_expected(self.folder.t, 'Loop2.html')
def testGlobalsShadowLocals(self):
self.assert_expected(self.folder.t, 'GlobalsShadowLocals.html')
def testStringExpressions(self):
self.assert_expected(self.folder.t, 'StringExpression.html')
def testReplaceWithNothing(self):
self.assert_expected(self.folder.t, 'CheckNothing.html')
def testWithXMLHeader(self):
self.assert_expected(self.folder.t, 'CheckWithXMLHeader.html')
def testNotExpression(self):
self.assert_expected(self.folder.t, 'CheckNotExpression.html')
def testPathNothing(self):
self.assert_expected(self.folder.t, 'CheckPathNothing.html')
def testPathAlt(self):
self.assert_expected(self.folder.t, 'CheckPathAlt.html')
def testPathTraverse(self):
# need to perform this test with a "real" folder
from OFS.Folder import Folder
f = self.folder
self.folder = Folder()
self.folder.t, self.folder.laf = f.t, f.laf
self.folder.laf.write('ok')
self.assert_expected(self.folder.t, 'CheckPathTraverse.html')
def testBatchIteration(self):
self.assert_expected(self.folder.t, 'CheckBatchIteration.html')
def testUnicodeInserts(self):
self.assert_expected_unicode(self.folder.t, 'CheckUnicodeInserts.html')
def testI18nTranslate(self):
self.assert_expected(self.folder.t, 'CheckI18nTranslate.html')
def testImportOldStyleClass(self):
self.assert_expected(self.folder.t, 'CheckImportOldStyleClass.html')
def testRepeatVariable(self):
self.assert_expected(self.folder.t, 'RepeatVariable.html')
def testBooleanAttributes(self):
# Test rendering an attribute that should be empty or left out
# if the value is non-True
self.assert_expected(self.folder.t, 'BooleanAttributes.html')
def testBooleanAttributesAndDefault(self):
# Zope 2.9 and below support the semantics that an HTML
# "boolean" attribute (e.g. 'selected', 'disabled', etc.) can
# be used together with 'default'.
self.assert_expected(self.folder.t, 'BooleanAttributesAndDefault.html')
def testInterpolationInContent(self):
# the chameleon template engine supports ``${path}``
# interpolations not only as part of ``string`` expressions
# but globally
self.assert_expected(self.folder.t, 'InterpolationInContent.html')
def testBadExpression(self):
t = self.folder.t
t.write("<p tal:define='p a//b' />")
with self.assertRaises(ExpressionError):
t()
def testPathAlternativesWithSpaces(self):
self.assert_expected(self.folder.t, 'PathAlternativesWithSpaces.html')
def testDefaultKeywordHandling(self):
self.assert_expected(self.folder.t, 'Default.html')
def testSwitch(self):
self.assert_expected(self.folder.t, 'switch.html')
def test_unicode_conflict_resolution(self):
# override with the more "demanding" resolver
provideUtility(PreferredCharsetResolver)
t = PageTemplate()
self.assert_expected(t, 'UnicodeResolution.html')
def test_underscore_traversal(self):
t = self.folder.t
t.write('<p tal:define="p context/__class__" />')
with self.assertRaises(NotFound):
t()
t.write('<p tal:define="p nocall: random/_itertools/repeat"/>')
with self.assertRaises((NotFound, LocationError)):
t()
t.write('<p tal:content="random/_itertools/repeat/foobar"/>')
with self.assertRaises((NotFound, LocationError)):
t()
def test_module_traversal(self):
t = self.folder.z
# Need to reset to the standard security policy so AccessControl
# checks are actually performed. The test setup initializes
# a policy that circumvents those checks.
SecurityManager.setSecurityPolicy(self.oldPolicy)
noSecurityManager()
# The getSecurityManager function is explicitly allowed
content = ('<p tal:define="a nocall:%s"'
' tal:content="python: a().getUser().getUserName()"/>')
t.write(content % 'modules/AccessControl/getSecurityManager')
self.assertEqual(t(), '<p>Anonymous User</p>')
# Anything else should be unreachable and raise NotFound:
# Direct access through AccessControl
t.write('<p tal:define="a nocall:modules/AccessControl/users"/>')
with self.assertRaises(NotFound):
t()
# Indirect access through an intermediary variable
content = ('<p tal:define="mod nocall:modules/AccessControl;'
' must_fail nocall:mod/users"/>')
t.write(content)
with self.assertRaises(NotFound):
t()
# Indirect access through an intermediary variable and a dictionary
content = ('<p tal:define="mod nocall:modules/AccessControl;'
' a_dict python: {\'unsafe\': mod};'
' must_fail nocall: a_dict/unsafe/users"/>')
t.write(content)
with self.assertRaises(NotFound):
t()
| path_disclosure | {
"code": [
" with self.assertRaises(NotFound):"
],
"line_no": [
229
]
} | {
"code": [
"from Products.PageTemplates.ZopePageTemplate import ZopePageTemplate",
"class AqZopePageTemplate(Implicit, ZopePageTemplate):",
" f.z = AqZopePageTemplate('testing')",
" with self.assertRaises((NotFound, LocationError)):",
" with self.assertRaises((NotFound, LocationError)):",
" t()",
" def test_module_traversal(self):",
" t = self.folder.z",
" SecurityManager.setSecurityPolicy(self.oldPolicy)",
" noSecurityManager()",
" content = ('<p tal:define=\"a nocall:%s\"'",
" ' tal:content=\"python: a().getUser().getUserName()\"/>')",
" t.write(content % 'modules/AccessControl/getSecurityManager')",
" self.assertEqual(t(), '<p>Anonymous User</p>')",
" t.write('<p tal:define=\"a nocall:modules/AccessControl/users\"/>')",
" with self.assertRaises(NotFound):",
" t()",
" content = ('<p tal:define=\"mod nocall:modules/AccessControl;'",
" ' must_fail nocall:mod/users\"/>')",
" t.write(content)",
" with self.assertRaises(NotFound):",
" t()",
" content = ('<p tal:define=\"mod nocall:modules/AccessControl;'",
" ' a_dict python: {\\'unsafe\\': mod};'",
" ' must_fail nocall: a_dict/unsafe/users\"/>')",
" t.write(content)"
],
"line_no": [
29,
42,
83,
236,
240,
241,
243,
244,
249,
250,
253,
254,
255,
256,
260,
261,
262,
265,
266,
267,
268,
269,
272,
273,
274,
275
]
} |
import unittest
from chameleon.exc import ExpressionError
import zope.component.testing
from AccessControl import SecurityManager
from AccessControl.SecurityManagement import noSecurityManager
from Acquisition import Implicit
from Products.PageTemplates.interfaces import IUnicodeEncodingConflictResolver
from Products.PageTemplates.PageTemplate import PageTemplate
from Products.PageTemplates.tests import util
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
PreferredCharsetResolver
from zExceptions import NotFound
from zope.component import provideUtility
from zope.traversing.adapters import DefaultTraversable
from .util import useChameleonEngine
class CLASS_0(Implicit, PageTemplate):
pass
class CLASS_1(util.Base):
pass
class CLASS_2:
def FUNC_0(self,
VAR_0=None,
VAR_1=None,
VAR_2=None,
VAR_3=None,
VAR_4=None,
VAR_5=None,
*VAR_6, **VAR_7):
return 1
def FUNC_1(self, VAR_8, VAR_9, VAR_4):
return 1
class CLASS_3(zope.component.testing.PlacelessSetup, unittest.TestCase):
VAR_10 = None
def FUNC_2(self):
super().setUp()
useChameleonEngine()
zope.component.provideAdapter(DefaultTraversable, (None,))
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.folder = VAR_14 = CLASS_1()
VAR_14.laf = CLASS_0()
VAR_14.t = CLASS_0()
self.policy = CLASS_2()
self.oldPolicy = SecurityManager.setSecurityPolicy(self.policy)
noSecurityManager() # Use the new policy.
def FUNC_3(self):
super().tearDown()
SecurityManager.setSecurityPolicy(self.oldPolicy)
noSecurityManager() # Reset to old policy.
def FUNC_4(self, VAR_11, VAR_12, *VAR_6, **VAR_13):
VAR_11.write(util.read_input(VAR_12))
assert not VAR_11._v_errors, 'Template errors: %s' % VAR_11._v_errors
if self.PREFIX is not None \
and util.exists_output(self.PREFIX + VAR_12):
fname = self.PREFIX + VAR_12
VAR_15 = util.read_output(VAR_12)
VAR_16 = VAR_11(*VAR_6, **VAR_13)
util.check_html(VAR_15, VAR_16)
def FUNC_5(self, VAR_11, VAR_12, *VAR_6, **VAR_13):
VAR_11.write(util.read_input(VAR_12))
assert not VAR_11._v_errors, 'Template errors: %s' % VAR_11._v_errors
VAR_15 = util.read_output(VAR_12)
if not isinstance(VAR_15, str):
VAR_15 = str(VAR_15, 'utf-8')
VAR_16 = VAR_11(*VAR_6, **VAR_13)
util.check_html(VAR_15, VAR_16)
def FUNC_6(self):
return [
{'description': 'This is the tee for those who LOVE Zope. '
'Show your heart on your tee.',
'price': 12.99, 'image': 'smlatee.jpg'
},
{'description': 'This is the tee for Jim Fulton. '
'He\'s the Zope Pope!',
'price': 11.99, 'image': 'smpztee.jpg'
},
]
def FUNC_7(self):
self.assert_expected(self.folder.laf, 'TeeShopLAF.html')
def FUNC_8(self):
self.folder.laf.write(util.read_input('TeeShopLAF.html'))
self.assert_expected(self.folder.t, 'TeeShop2.html',
FUNC_6=self.getProducts)
def FUNC_9(self):
self.folder.laf.write(util.read_input('TeeShopLAF.html'))
self.assert_expected(self.folder.t, 'TeeShop1.html',
FUNC_6=self.getProducts)
def FUNC_10(self):
self.assert_expected(self.folder.t, 'Loop1.html')
def FUNC_11(self):
self.assert_expected(self.folder.t, 'Loop2.html')
def FUNC_12(self):
self.assert_expected(self.folder.t, 'GlobalsShadowLocals.html')
def FUNC_13(self):
self.assert_expected(self.folder.t, 'StringExpression.html')
def FUNC_14(self):
self.assert_expected(self.folder.t, 'CheckNothing.html')
def FUNC_15(self):
self.assert_expected(self.folder.t, 'CheckWithXMLHeader.html')
def FUNC_16(self):
self.assert_expected(self.folder.t, 'CheckNotExpression.html')
def FUNC_17(self):
self.assert_expected(self.folder.t, 'CheckPathNothing.html')
def FUNC_18(self):
self.assert_expected(self.folder.t, 'CheckPathAlt.html')
def FUNC_19(self):
from OFS.Folder import .Folder
VAR_14 = self.folder
self.folder = CLASS_1()
self.folder.t, self.folder.laf = VAR_14.t, VAR_14.laf
self.folder.laf.write('ok')
self.assert_expected(self.folder.t, 'CheckPathTraverse.html')
def FUNC_20(self):
self.assert_expected(self.folder.t, 'CheckBatchIteration.html')
def FUNC_21(self):
self.assert_expected_unicode(self.folder.t, 'CheckUnicodeInserts.html')
def FUNC_22(self):
self.assert_expected(self.folder.t, 'CheckI18nTranslate.html')
def FUNC_23(self):
self.assert_expected(self.folder.t, 'CheckImportOldStyleClass.html')
def FUNC_24(self):
self.assert_expected(self.folder.t, 'RepeatVariable.html')
def FUNC_25(self):
self.assert_expected(self.folder.t, 'BooleanAttributes.html')
def FUNC_26(self):
self.assert_expected(self.folder.t, 'BooleanAttributesAndDefault.html')
def FUNC_27(self):
self.assert_expected(self.folder.t, 'InterpolationInContent.html')
def FUNC_28(self):
VAR_11 = self.folder.t
VAR_11.write("<p tal:define='p a//b' />")
with self.assertRaises(ExpressionError):
VAR_11()
def FUNC_29(self):
self.assert_expected(self.folder.t, 'PathAlternativesWithSpaces.html')
def FUNC_30(self):
self.assert_expected(self.folder.t, 'Default.html')
def FUNC_31(self):
self.assert_expected(self.folder.t, 'switch.html')
def FUNC_32(self):
provideUtility(PreferredCharsetResolver)
VAR_11 = PageTemplate()
self.assert_expected(VAR_11, 'UnicodeResolution.html')
def FUNC_33(self):
VAR_11 = self.folder.t
VAR_11.write('<p tal:define="p VAR_4/__class__" />')
with self.assertRaises(NotFound):
VAR_11()
VAR_11.write('<p tal:define="p nocall: random/_itertools/repeat"/>')
with self.assertRaises(NotFound):
VAR_11()
VAR_11.write('<p tal:content="random/_itertools/repeat/foobar"/>')
with self.assertRaises(NotFound):
VAR_11()
|
import unittest
from chameleon.exc import ExpressionError
import zope.component.testing
from AccessControl import SecurityManager
from AccessControl.SecurityManagement import noSecurityManager
from Acquisition import Implicit
from Products.PageTemplates.interfaces import IUnicodeEncodingConflictResolver
from Products.PageTemplates.PageTemplate import PageTemplate
from Products.PageTemplates.tests import util
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
PreferredCharsetResolver
from Products.PageTemplates.ZopePageTemplate import ZopePageTemplate
from zExceptions import NotFound
from zope.component import provideUtility
from zope.location.interfaces import LocationError
from zope.traversing.adapters import DefaultTraversable
from .util import useChameleonEngine
class CLASS_0(Implicit, PageTemplate):
pass
class CLASS_1(Implicit, ZopePageTemplate):
pass
class CLASS_2(util.Base):
pass
class CLASS_3:
def FUNC_0(self,
VAR_0=None,
VAR_1=None,
VAR_2=None,
VAR_3=None,
VAR_4=None,
VAR_5=None,
*VAR_6, **VAR_7):
return 1
def FUNC_1(self, VAR_8, VAR_9, VAR_4):
return 1
class CLASS_4(zope.component.testing.PlacelessSetup, unittest.TestCase):
VAR_10 = None
def FUNC_2(self):
super().setUp()
useChameleonEngine()
zope.component.provideAdapter(DefaultTraversable, (None,))
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.folder = VAR_14 = CLASS_2()
VAR_14.laf = CLASS_0()
VAR_14.t = CLASS_0()
VAR_14.z = CLASS_1('testing')
self.policy = CLASS_3()
self.oldPolicy = SecurityManager.setSecurityPolicy(self.policy)
noSecurityManager() # Use the new policy.
def FUNC_3(self):
super().tearDown()
SecurityManager.setSecurityPolicy(self.oldPolicy)
noSecurityManager() # Reset to old policy.
def FUNC_4(self, VAR_11, VAR_12, *VAR_6, **VAR_13):
VAR_11.write(util.read_input(VAR_12))
assert not VAR_11._v_errors, 'Template errors: %s' % VAR_11._v_errors
if self.PREFIX is not None \
and util.exists_output(self.PREFIX + VAR_12):
fname = self.PREFIX + VAR_12
VAR_15 = util.read_output(VAR_12)
VAR_16 = VAR_11(*VAR_6, **VAR_13)
util.check_html(VAR_15, VAR_16)
def FUNC_5(self, VAR_11, VAR_12, *VAR_6, **VAR_13):
VAR_11.write(util.read_input(VAR_12))
assert not VAR_11._v_errors, 'Template errors: %s' % VAR_11._v_errors
VAR_15 = util.read_output(VAR_12)
if not isinstance(VAR_15, str):
VAR_15 = str(VAR_15, 'utf-8')
VAR_16 = VAR_11(*VAR_6, **VAR_13)
util.check_html(VAR_15, VAR_16)
def FUNC_6(self):
return [
{'description': 'This is the tee for those who LOVE Zope. '
'Show your heart on your tee.',
'price': 12.99, 'image': 'smlatee.jpg'
},
{'description': 'This is the tee for Jim Fulton. '
'He\'s the Zope Pope!',
'price': 11.99, 'image': 'smpztee.jpg'
},
]
def FUNC_7(self):
self.assert_expected(self.folder.laf, 'TeeShopLAF.html')
def FUNC_8(self):
self.folder.laf.write(util.read_input('TeeShopLAF.html'))
self.assert_expected(self.folder.t, 'TeeShop2.html',
FUNC_6=self.getProducts)
def FUNC_9(self):
self.folder.laf.write(util.read_input('TeeShopLAF.html'))
self.assert_expected(self.folder.t, 'TeeShop1.html',
FUNC_6=self.getProducts)
def FUNC_10(self):
self.assert_expected(self.folder.t, 'Loop1.html')
def FUNC_11(self):
self.assert_expected(self.folder.t, 'Loop2.html')
def FUNC_12(self):
self.assert_expected(self.folder.t, 'GlobalsShadowLocals.html')
def FUNC_13(self):
self.assert_expected(self.folder.t, 'StringExpression.html')
def FUNC_14(self):
self.assert_expected(self.folder.t, 'CheckNothing.html')
def FUNC_15(self):
self.assert_expected(self.folder.t, 'CheckWithXMLHeader.html')
def FUNC_16(self):
self.assert_expected(self.folder.t, 'CheckNotExpression.html')
def FUNC_17(self):
self.assert_expected(self.folder.t, 'CheckPathNothing.html')
def FUNC_18(self):
self.assert_expected(self.folder.t, 'CheckPathAlt.html')
def FUNC_19(self):
from OFS.Folder import .Folder
VAR_14 = self.folder
self.folder = CLASS_2()
self.folder.t, self.folder.laf = VAR_14.t, VAR_14.laf
self.folder.laf.write('ok')
self.assert_expected(self.folder.t, 'CheckPathTraverse.html')
def FUNC_20(self):
self.assert_expected(self.folder.t, 'CheckBatchIteration.html')
def FUNC_21(self):
self.assert_expected_unicode(self.folder.t, 'CheckUnicodeInserts.html')
def FUNC_22(self):
self.assert_expected(self.folder.t, 'CheckI18nTranslate.html')
def FUNC_23(self):
self.assert_expected(self.folder.t, 'CheckImportOldStyleClass.html')
def FUNC_24(self):
self.assert_expected(self.folder.t, 'RepeatVariable.html')
def FUNC_25(self):
self.assert_expected(self.folder.t, 'BooleanAttributes.html')
def FUNC_26(self):
self.assert_expected(self.folder.t, 'BooleanAttributesAndDefault.html')
def FUNC_27(self):
self.assert_expected(self.folder.t, 'InterpolationInContent.html')
def FUNC_28(self):
VAR_11 = self.folder.t
VAR_11.write("<p tal:define='p a//b' />")
with self.assertRaises(ExpressionError):
VAR_11()
def FUNC_29(self):
self.assert_expected(self.folder.t, 'PathAlternativesWithSpaces.html')
def FUNC_30(self):
self.assert_expected(self.folder.t, 'Default.html')
def FUNC_31(self):
self.assert_expected(self.folder.t, 'switch.html')
def FUNC_32(self):
provideUtility(PreferredCharsetResolver)
VAR_11 = PageTemplate()
self.assert_expected(VAR_11, 'UnicodeResolution.html')
def FUNC_33(self):
VAR_11 = self.folder.t
VAR_11.write('<p tal:define="p VAR_4/__class__" />')
with self.assertRaises(NotFound):
VAR_11()
VAR_11.write('<p tal:define="p nocall: random/_itertools/repeat"/>')
with self.assertRaises((NotFound, LocationError)):
VAR_11()
VAR_11.write('<p tal:VAR_17="random/_itertools/repeat/foobar"/>')
with self.assertRaises((NotFound, LocationError)):
VAR_11()
def FUNC_34(self):
VAR_11 = self.folder.z
SecurityManager.setSecurityPolicy(self.oldPolicy)
noSecurityManager()
VAR_17 = ('<p tal:define="a nocall:%s"'
' tal:VAR_17="python: a().getUser().getUserName()"/>')
VAR_11.write(VAR_17 % 'modules/AccessControl/getSecurityManager')
self.assertEqual(VAR_11(), '<p>Anonymous User</p>')
VAR_11.write('<p tal:define="a nocall:modules/AccessControl/users"/>')
with self.assertRaises(NotFound):
VAR_11()
VAR_17 = ('<p tal:define="mod nocall:modules/AccessControl;'
' must_fail nocall:mod/users"/>')
VAR_11.write(VAR_17)
with self.assertRaises(NotFound):
VAR_11()
VAR_17 = ('<p tal:define="mod nocall:modules/AccessControl;'
' a_dict python: {\'unsafe\': mod};'
' must_fail nocall: a_dict/unsafe/users"/>')
VAR_11.write(VAR_17)
with self.assertRaises(NotFound):
VAR_11()
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
15,
17,
32,
34,
35,
38,
39,
42,
43,
48,
58,
61,
62,
65,
70,
73,
80,
85,
95,
104,
116,
119,
122,
125,
128,
131,
134,
137,
140,
143,
146,
149,
152,
155,
158,
160,
167,
170,
173,
176,
179,
182,
184,
185,
187,
189,
190,
191,
193,
195,
196,
197,
199,
205,
208,
211,
214,
216,
220,
223,
227,
231,
235,
45,
46,
47
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
15,
17,
34,
36,
37,
40,
41,
44,
45,
48,
49,
54,
64,
67,
68,
71,
76,
79,
87,
92,
102,
111,
123,
126,
129,
132,
135,
138,
141,
144,
147,
150,
153,
156,
159,
162,
165,
167,
174,
177,
180,
183,
186,
189,
191,
192,
194,
196,
197,
198,
200,
202,
203,
204,
206,
212,
215,
218,
221,
223,
227,
230,
234,
238,
242,
245,
246,
247,
248,
251,
252,
257,
258,
259,
263,
264,
270,
271,
278,
51,
52,
53
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from mock import Mock
from twisted.internet import defer
from synapse.rest import admin
from synapse.rest.client.v1 import login, room
from tests.replication._base import BaseMultiWorkerStreamTestCase
logger = logging.getLogger(__name__)
class PusherShardTestCase(BaseMultiWorkerStreamTestCase):
"""Checks pusher sharding works
"""
servlets = [
admin.register_servlets_for_client_rest_resource,
room.register_servlets,
login.register_servlets,
]
def prepare(self, reactor, clock, hs):
# Register a user who sends a message that we'll get notified about
self.other_user_id = self.register_user("otheruser", "pass")
self.other_access_token = self.login("otheruser", "pass")
def default_config(self):
conf = super().default_config()
conf["start_pushers"] = False
return conf
def _create_pusher_and_send_msg(self, localpart):
# Create a user that will get push notifications
user_id = self.register_user(localpart, "pass")
access_token = self.login(localpart, "pass")
# Register a pusher
user_dict = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_dict.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "https://push.example.com/push"},
)
)
self.pump()
# Create a room
room = self.helper.create_room_as(user_id, tok=access_token)
# The other user joins
self.helper.join(
room=room, user=self.other_user_id, tok=self.other_access_token
)
# The other user sends some messages
response = self.helper.send(room, body="Hi!", tok=self.other_access_token)
event_id = response["event_id"]
return event_id
def test_send_push_single_worker(self):
"""Test that registration works when using a pusher worker.
"""
http_client_mock = Mock(spec_set=["post_json_get_json"])
http_client_mock.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{"start_pushers": True},
proxied_http_client=http_client_mock,
)
event_id = self._create_pusher_and_send_msg("user")
# Advance time a bit, so the pusher will register something has happened
self.pump()
http_client_mock.post_json_get_json.assert_called_once()
self.assertEqual(
http_client_mock.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
event_id,
http_client_mock.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
def test_send_push_multiple_workers(self):
"""Test that registration works when using sharded pusher workers.
"""
http_client_mock1 = Mock(spec_set=["post_json_get_json"])
http_client_mock1.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{
"start_pushers": True,
"worker_name": "pusher1",
"pusher_instances": ["pusher1", "pusher2"],
},
proxied_http_client=http_client_mock1,
)
http_client_mock2 = Mock(spec_set=["post_json_get_json"])
http_client_mock2.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{
"start_pushers": True,
"worker_name": "pusher2",
"pusher_instances": ["pusher1", "pusher2"],
},
proxied_http_client=http_client_mock2,
)
# We choose a user name that we know should go to pusher1.
event_id = self._create_pusher_and_send_msg("user2")
# Advance time a bit, so the pusher will register something has happened
self.pump()
http_client_mock1.post_json_get_json.assert_called_once()
http_client_mock2.post_json_get_json.assert_not_called()
self.assertEqual(
http_client_mock1.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
event_id,
http_client_mock1.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
http_client_mock1.post_json_get_json.reset_mock()
http_client_mock2.post_json_get_json.reset_mock()
# Now we choose a user name that we know should go to pusher2.
event_id = self._create_pusher_and_send_msg("user4")
# Advance time a bit, so the pusher will register something has happened
self.pump()
http_client_mock1.post_json_get_json.assert_not_called()
http_client_mock2.post_json_get_json.assert_called_once()
self.assertEqual(
http_client_mock2.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
event_id,
http_client_mock2.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
| # -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from mock import Mock
from twisted.internet import defer
from synapse.rest import admin
from synapse.rest.client.v1 import login, room
from tests.replication._base import BaseMultiWorkerStreamTestCase
logger = logging.getLogger(__name__)
class PusherShardTestCase(BaseMultiWorkerStreamTestCase):
"""Checks pusher sharding works
"""
servlets = [
admin.register_servlets_for_client_rest_resource,
room.register_servlets,
login.register_servlets,
]
def prepare(self, reactor, clock, hs):
# Register a user who sends a message that we'll get notified about
self.other_user_id = self.register_user("otheruser", "pass")
self.other_access_token = self.login("otheruser", "pass")
def default_config(self):
conf = super().default_config()
conf["start_pushers"] = False
return conf
def _create_pusher_and_send_msg(self, localpart):
# Create a user that will get push notifications
user_id = self.register_user(localpart, "pass")
access_token = self.login(localpart, "pass")
# Register a pusher
user_dict = self.get_success(
self.hs.get_datastore().get_user_by_access_token(access_token)
)
token_id = user_dict.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
user_id=user_id,
access_token=token_id,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "https://push.example.com/push"},
)
)
self.pump()
# Create a room
room = self.helper.create_room_as(user_id, tok=access_token)
# The other user joins
self.helper.join(
room=room, user=self.other_user_id, tok=self.other_access_token
)
# The other user sends some messages
response = self.helper.send(room, body="Hi!", tok=self.other_access_token)
event_id = response["event_id"]
return event_id
def test_send_push_single_worker(self):
"""Test that registration works when using a pusher worker.
"""
http_client_mock = Mock(spec_set=["post_json_get_json"])
http_client_mock.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{"start_pushers": True},
proxied_blacklisted_http_client=http_client_mock,
)
event_id = self._create_pusher_and_send_msg("user")
# Advance time a bit, so the pusher will register something has happened
self.pump()
http_client_mock.post_json_get_json.assert_called_once()
self.assertEqual(
http_client_mock.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
event_id,
http_client_mock.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
def test_send_push_multiple_workers(self):
"""Test that registration works when using sharded pusher workers.
"""
http_client_mock1 = Mock(spec_set=["post_json_get_json"])
http_client_mock1.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{
"start_pushers": True,
"worker_name": "pusher1",
"pusher_instances": ["pusher1", "pusher2"],
},
proxied_blacklisted_http_client=http_client_mock1,
)
http_client_mock2 = Mock(spec_set=["post_json_get_json"])
http_client_mock2.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{
"start_pushers": True,
"worker_name": "pusher2",
"pusher_instances": ["pusher1", "pusher2"],
},
proxied_blacklisted_http_client=http_client_mock2,
)
# We choose a user name that we know should go to pusher1.
event_id = self._create_pusher_and_send_msg("user2")
# Advance time a bit, so the pusher will register something has happened
self.pump()
http_client_mock1.post_json_get_json.assert_called_once()
http_client_mock2.post_json_get_json.assert_not_called()
self.assertEqual(
http_client_mock1.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
event_id,
http_client_mock1.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
http_client_mock1.post_json_get_json.reset_mock()
http_client_mock2.post_json_get_json.reset_mock()
# Now we choose a user name that we know should go to pusher2.
event_id = self._create_pusher_and_send_msg("user4")
# Advance time a bit, so the pusher will register something has happened
self.pump()
http_client_mock1.post_json_get_json.assert_not_called()
http_client_mock2.post_json_get_json.assert_called_once()
self.assertEqual(
http_client_mock2.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
event_id,
http_client_mock2.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
| open_redirect | {
"code": [
" proxied_http_client=http_client_mock,",
" proxied_http_client=http_client_mock1,",
" proxied_http_client=http_client_mock2,"
],
"line_no": [
101,
136,
151
]
} | {
"code": [
" proxied_blacklisted_http_client=http_client_mock,",
" proxied_blacklisted_http_client=http_client_mock1,",
" proxied_blacklisted_http_client=http_client_mock2,"
],
"line_no": [
101,
136,
151
]
} |
import logging
from mock import Mock
from twisted.internet import defer
from synapse.rest import admin
from synapse.rest.client.v1 import login, VAR_11
from tests.replication._base import BaseMultiWorkerStreamTestCase
VAR_0 = logging.getLogger(__name__)
class CLASS_0(BaseMultiWorkerStreamTestCase):
VAR_1 = [
admin.register_servlets_for_client_rest_resource,
VAR_11.register_servlets,
login.register_servlets,
]
def FUNC_0(self, VAR_2, VAR_3, VAR_4):
self.other_user_id = self.register_user("otheruser", "pass")
self.other_access_token = self.login("otheruser", "pass")
def FUNC_1(self):
VAR_6 = super().default_config()
VAR_6["start_pushers"] = False
return VAR_6
def FUNC_2(self, VAR_5):
VAR_7 = self.register_user(VAR_5, "pass")
VAR_8 = self.login(VAR_5, "pass")
VAR_9 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_8)
)
VAR_10 = VAR_9.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_7=user_id,
VAR_8=VAR_10,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "https://push.example.com/push"},
)
)
self.pump()
room = self.helper.create_room_as(VAR_7, tok=VAR_8)
self.helper.join(
VAR_11=room, user=self.other_user_id, tok=self.other_access_token
)
VAR_12 = self.helper.send(VAR_11, body="Hi!", tok=self.other_access_token)
VAR_13 = VAR_12["event_id"]
return VAR_13
def FUNC_3(self):
VAR_14 = Mock(spec_set=["post_json_get_json"])
VAR_14.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{"start_pushers": True},
proxied_http_client=VAR_14,
)
VAR_13 = self._create_pusher_and_send_msg("user")
self.pump()
VAR_14.post_json_get_json.assert_called_once()
self.assertEqual(
VAR_14.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
VAR_13,
VAR_14.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
def FUNC_4(self):
VAR_15 = Mock(spec_set=["post_json_get_json"])
VAR_15.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{
"start_pushers": True,
"worker_name": "pusher1",
"pusher_instances": ["pusher1", "pusher2"],
},
proxied_http_client=VAR_15,
)
VAR_16 = Mock(spec_set=["post_json_get_json"])
VAR_16.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{
"start_pushers": True,
"worker_name": "pusher2",
"pusher_instances": ["pusher1", "pusher2"],
},
proxied_http_client=VAR_16,
)
VAR_13 = self._create_pusher_and_send_msg("user2")
self.pump()
VAR_15.post_json_get_json.assert_called_once()
VAR_16.post_json_get_json.assert_not_called()
self.assertEqual(
VAR_15.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
VAR_13,
VAR_15.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
VAR_15.post_json_get_json.reset_mock()
VAR_16.post_json_get_json.reset_mock()
VAR_13 = self._create_pusher_and_send_msg("user4")
self.pump()
VAR_15.post_json_get_json.assert_not_called()
VAR_16.post_json_get_json.assert_called_once()
self.assertEqual(
VAR_16.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
VAR_13,
VAR_16.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
|
import logging
from mock import Mock
from twisted.internet import defer
from synapse.rest import admin
from synapse.rest.client.v1 import login, VAR_11
from tests.replication._base import BaseMultiWorkerStreamTestCase
VAR_0 = logging.getLogger(__name__)
class CLASS_0(BaseMultiWorkerStreamTestCase):
VAR_1 = [
admin.register_servlets_for_client_rest_resource,
VAR_11.register_servlets,
login.register_servlets,
]
def FUNC_0(self, VAR_2, VAR_3, VAR_4):
self.other_user_id = self.register_user("otheruser", "pass")
self.other_access_token = self.login("otheruser", "pass")
def FUNC_1(self):
VAR_6 = super().default_config()
VAR_6["start_pushers"] = False
return VAR_6
def FUNC_2(self, VAR_5):
VAR_7 = self.register_user(VAR_5, "pass")
VAR_8 = self.login(VAR_5, "pass")
VAR_9 = self.get_success(
self.hs.get_datastore().get_user_by_access_token(VAR_8)
)
VAR_10 = VAR_9.token_id
self.get_success(
self.hs.get_pusherpool().add_pusher(
VAR_7=user_id,
VAR_8=VAR_10,
kind="http",
app_id="m.http",
app_display_name="HTTP Push Notifications",
device_display_name="pushy push",
pushkey="a@example.com",
lang=None,
data={"url": "https://push.example.com/push"},
)
)
self.pump()
room = self.helper.create_room_as(VAR_7, tok=VAR_8)
self.helper.join(
VAR_11=room, user=self.other_user_id, tok=self.other_access_token
)
VAR_12 = self.helper.send(VAR_11, body="Hi!", tok=self.other_access_token)
VAR_13 = VAR_12["event_id"]
return VAR_13
def FUNC_3(self):
VAR_14 = Mock(spec_set=["post_json_get_json"])
VAR_14.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{"start_pushers": True},
proxied_blacklisted_http_client=VAR_14,
)
VAR_13 = self._create_pusher_and_send_msg("user")
self.pump()
VAR_14.post_json_get_json.assert_called_once()
self.assertEqual(
VAR_14.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
VAR_13,
VAR_14.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
def FUNC_4(self):
VAR_15 = Mock(spec_set=["post_json_get_json"])
VAR_15.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{
"start_pushers": True,
"worker_name": "pusher1",
"pusher_instances": ["pusher1", "pusher2"],
},
proxied_blacklisted_http_client=VAR_15,
)
VAR_16 = Mock(spec_set=["post_json_get_json"])
VAR_16.post_json_get_json.side_effect = lambda *_, **__: defer.succeed(
{}
)
self.make_worker_hs(
"synapse.app.pusher",
{
"start_pushers": True,
"worker_name": "pusher2",
"pusher_instances": ["pusher1", "pusher2"],
},
proxied_blacklisted_http_client=VAR_16,
)
VAR_13 = self._create_pusher_and_send_msg("user2")
self.pump()
VAR_15.post_json_get_json.assert_called_once()
VAR_16.post_json_get_json.assert_not_called()
self.assertEqual(
VAR_15.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
VAR_13,
VAR_15.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
VAR_15.post_json_get_json.reset_mock()
VAR_16.post_json_get_json.reset_mock()
VAR_13 = self._create_pusher_and_send_msg("user4")
self.pump()
VAR_15.post_json_get_json.assert_not_called()
VAR_16.post_json_get_json.assert_called_once()
self.assertEqual(
VAR_16.post_json_get_json.call_args[0][0],
"https://push.example.com/push",
)
self.assertEqual(
VAR_13,
VAR_16.post_json_get_json.call_args[0][1]["notification"][
"event_id"
],
)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
18,
20,
23,
25,
27,
28,
32,
38,
40,
43,
48,
50,
53,
54,
59,
73,
75,
76,
78,
79,
83,
84,
87,
89,
97,
103,
105,
106,
108,
120,
128,
138,
143,
153,
154,
156,
157,
159,
172,
175,
176,
178,
179,
181,
194,
30,
31,
91,
92,
122,
123
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
18,
20,
23,
25,
27,
28,
32,
38,
40,
43,
48,
50,
53,
54,
59,
73,
75,
76,
78,
79,
83,
84,
87,
89,
97,
103,
105,
106,
108,
120,
128,
138,
143,
153,
154,
156,
157,
159,
172,
175,
176,
178,
179,
181,
194,
30,
31,
91,
92,
122,
123
] |
2CWE-601
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from django.http import Http404
from djconfig import config
from spirit.core.utils.views import is_post, post_data, is_ajax
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.core.utils.decorators import moderator_required
from spirit.core.utils import markdown, paginator, render_form_errors, json_response
from spirit.topic.models import Topic
from .models import Comment
from .forms import CommentForm, CommentMoveForm, CommentImageForm, CommentFileForm
from .utils import comment_posted, post_comment_update, pre_comment_update, post_comment_move
@login_required
@ratelimit(rate='1/10s')
def publish(request, topic_id, pk=None):
initial = None
if pk: # todo: move to form
comment = get_object_or_404(
Comment.objects.for_access(user=request.user), pk=pk)
quote = markdown.quotify(comment.comment, comment.user.st.nickname)
initial = {'comment': quote}
user = request.user
topic = get_object_or_404(
Topic.objects.opened().for_access(user),
pk=topic_id)
form = CommentForm(
user=user,
topic=topic,
data=post_data(request),
initial=initial)
if is_post(request) and not request.is_limited() and form.is_valid():
if not user.st.update_post_hash(form.get_comment_hash()):
# Hashed comment may have not been saved yet
return redirect(
request.POST.get('next', None) or
Comment
.get_last_for_topic(topic_id)
.get_absolute_url())
comment = form.save()
comment_posted(comment=comment, mentions=form.mentions)
return redirect(request.POST.get('next', comment.get_absolute_url()))
return render(
request=request,
template_name='spirit/comment/publish.html',
context={
'form': form,
'topic': topic})
@login_required
def update(request, pk):
comment = Comment.objects.for_update_or_404(pk, request.user)
form = CommentForm(data=post_data(request), instance=comment)
if is_post(request) and form.is_valid():
pre_comment_update(comment=Comment.objects.get(pk=comment.pk))
comment = form.save()
post_comment_update(comment=comment)
return redirect(request.POST.get('next', comment.get_absolute_url()))
return render(
request=request,
template_name='spirit/comment/update.html',
context={'form': form})
@moderator_required
def delete(request, pk, remove=True):
comment = get_object_or_404(Comment, pk=pk)
if is_post(request):
(Comment.objects
.filter(pk=pk)
.update(is_removed=remove))
return redirect(request.GET.get('next', comment.get_absolute_url()))
return render(
request=request,
template_name='spirit/comment/moderate.html',
context={'comment': comment})
@require_POST
@moderator_required
def move(request, topic_id):
topic = get_object_or_404(Topic, pk=topic_id)
form = CommentMoveForm(topic=topic, data=request.POST)
if form.is_valid():
comments = form.save()
for comment in comments:
comment_posted(comment=comment, mentions=None)
topic.decrease_comment_count()
post_comment_move(comment=comment, topic=topic)
else:
messages.error(request, render_form_errors(form))
return redirect(request.POST.get('next', topic.get_absolute_url()))
def find(request, pk):
comment = get_object_or_404(Comment.objects.select_related('topic'), pk=pk)
comment_number = (
Comment.objects
.filter(topic=comment.topic, date__lte=comment.date)
.count())
url = paginator.get_url(
comment.topic.get_absolute_url(),
comment_number,
config.comments_per_page,
'page')
return redirect(url)
@require_POST
@login_required
def image_upload_ajax(request):
if not is_ajax(request):
return Http404()
form = CommentImageForm(
user=request.user, data=request.POST, files=request.FILES)
if form.is_valid():
image = form.save()
return json_response({'url': image.url})
return json_response({'error': dict(form.errors.items())})
@require_POST
@login_required
def file_upload_ajax(request):
if not is_ajax(request):
return Http404()
form = CommentFileForm(
user=request.user, data=request.POST, files=request.FILES)
if form.is_valid():
file = form.save()
return json_response({'url': file.url})
return json_response({'error': dict(form.errors.items())})
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from django.http import Http404
from djconfig import config
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data, is_ajax
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.core.utils.decorators import moderator_required
from spirit.core.utils import markdown, paginator, render_form_errors, json_response
from spirit.topic.models import Topic
from .models import Comment
from .forms import CommentForm, CommentMoveForm, CommentImageForm, CommentFileForm
from .utils import comment_posted, post_comment_update, pre_comment_update, post_comment_move
@login_required
@ratelimit(rate='1/10s')
def publish(request, topic_id, pk=None):
initial = None
if pk: # todo: move to form
comment = get_object_or_404(
Comment.objects.for_access(user=request.user), pk=pk)
quote = markdown.quotify(comment.comment, comment.user.st.nickname)
initial = {'comment': quote}
user = request.user
topic = get_object_or_404(
Topic.objects.opened().for_access(user),
pk=topic_id)
form = CommentForm(
user=user,
topic=topic,
data=post_data(request),
initial=initial)
if is_post(request) and not request.is_limited() and form.is_valid():
if not user.st.update_post_hash(form.get_comment_hash()):
# Hashed comment may have not been saved yet
default_url = lambda: (Comment
.get_last_for_topic(topic_id)
.get_absolute_url())
return safe_redirect(request, 'next', default_url, method='POST')
comment = form.save()
comment_posted(comment=comment, mentions=form.mentions)
return safe_redirect(request, 'next', comment.get_absolute_url(), method='POST')
return render(
request=request,
template_name='spirit/comment/publish.html',
context={
'form': form,
'topic': topic})
@login_required
def update(request, pk):
comment = Comment.objects.for_update_or_404(pk, request.user)
form = CommentForm(data=post_data(request), instance=comment)
if is_post(request) and form.is_valid():
pre_comment_update(comment=Comment.objects.get(pk=comment.pk))
comment = form.save()
post_comment_update(comment=comment)
return safe_redirect(request, 'next', comment.get_absolute_url(), method='POST')
return render(
request=request,
template_name='spirit/comment/update.html',
context={'form': form})
@moderator_required
def delete(request, pk, remove=True):
comment = get_object_or_404(Comment, pk=pk)
if is_post(request):
(Comment.objects
.filter(pk=pk)
.update(is_removed=remove))
return safe_redirect(request, 'next', comment.get_absolute_url())
return render(
request=request,
template_name='spirit/comment/moderate.html',
context={'comment': comment})
@require_POST
@moderator_required
def move(request, topic_id):
topic = get_object_or_404(Topic, pk=topic_id)
form = CommentMoveForm(topic=topic, data=request.POST)
if form.is_valid():
comments = form.save()
for comment in comments:
comment_posted(comment=comment, mentions=None)
topic.decrease_comment_count()
post_comment_move(comment=comment, topic=topic)
else:
messages.error(request, render_form_errors(form))
return safe_redirect(request, 'next', topic.get_absolute_url(), method='POST')
def find(request, pk):
comment = get_object_or_404(Comment.objects.select_related('topic'), pk=pk)
comment_number = (
Comment.objects
.filter(topic=comment.topic, date__lte=comment.date)
.count())
url = paginator.get_url(
comment.topic.get_absolute_url(),
comment_number,
config.comments_per_page,
'page')
return redirect(url)
@require_POST
@login_required
def image_upload_ajax(request):
if not is_ajax(request):
return Http404()
form = CommentImageForm(
user=request.user, data=request.POST, files=request.FILES)
if form.is_valid():
image = form.save()
return json_response({'url': image.url})
return json_response({'error': dict(form.errors.items())})
@require_POST
@login_required
def file_upload_ajax(request):
if not is_ajax(request):
return Http404()
form = CommentFileForm(
user=request.user, data=request.POST, files=request.FILES)
if form.is_valid():
file = form.save()
return json_response({'url': file.url})
return json_response({'error': dict(form.errors.items())})
| open_redirect | {
"code": [
" return redirect(",
" request.POST.get('next', None) or",
" Comment",
" return redirect(request.POST.get('next', comment.get_absolute_url()))",
" return redirect(request.POST.get('next', comment.get_absolute_url()))",
" return redirect(request.GET.get('next', comment.get_absolute_url()))",
" return redirect(request.POST.get('next', topic.get_absolute_url()))"
],
"line_no": [
44,
45,
46,
52,
70,
84,
107
]
} | {
"code": [
"from spirit.core.utils.http import safe_redirect",
" default_url = lambda: (Comment",
" return safe_redirect(request, 'next', default_url, method='POST')",
" return safe_redirect(request, 'next', comment.get_absolute_url(), method='POST')",
" return safe_redirect(request, 'next', comment.get_absolute_url(), method='POST')",
" return safe_redirect(request, 'next', comment.get_absolute_url())",
" return safe_redirect(request, 'next', topic.get_absolute_url(), method='POST')"
],
"line_no": [
11,
45,
48,
52,
70,
84,
107
]
} |
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from django.http import Http404
from djconfig import config
from spirit.core.utils.views import is_post, post_data, is_ajax
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.core.utils.decorators import moderator_required
from spirit.core.utils import markdown, paginator, render_form_errors, json_response
from spirit.topic.models import Topic
from .models import Comment
from .forms import CommentForm, CommentMoveForm, CommentImageForm, CommentFileForm
from .utils import .comment_posted, post_comment_update, pre_comment_update, post_comment_move
@login_required
@ratelimit(rate='1/10s')
def FUNC_0(VAR_0, VAR_1, VAR_2=None):
VAR_4 = None
if VAR_2: # todo: FUNC_3 to VAR_7
VAR_8 = get_object_or_404(
Comment.objects.for_access(VAR_5=VAR_0.user), VAR_2=pk)
VAR_11 = markdown.quotify(VAR_8.comment, VAR_8.user.st.nickname)
VAR_4 = {'comment': VAR_11}
VAR_5 = VAR_0.user
VAR_6 = get_object_or_404(
Topic.objects.opened().for_access(VAR_5),
VAR_2=VAR_1)
VAR_7 = CommentForm(
VAR_5=user,
VAR_6=topic,
data=post_data(VAR_0),
VAR_4=initial)
if is_post(VAR_0) and not VAR_0.is_limited() and VAR_7.is_valid():
if not VAR_5.st.update_post_hash(VAR_7.get_comment_hash()):
return redirect(
VAR_0.POST.get('next', None) or
Comment
.get_last_for_topic(VAR_1)
.get_absolute_url())
VAR_8 = VAR_7.save()
comment_posted(VAR_8=comment, mentions=VAR_7.mentions)
return redirect(VAR_0.POST.get('next', VAR_8.get_absolute_url()))
return render(
VAR_0=request,
template_name='spirit/VAR_8/FUNC_0.html',
context={
'form': VAR_7,
'topic': VAR_6})
@login_required
def FUNC_1(VAR_0, VAR_2):
VAR_8 = Comment.objects.for_update_or_404(VAR_2, VAR_0.user)
VAR_7 = CommentForm(data=post_data(VAR_0), instance=VAR_8)
if is_post(VAR_0) and VAR_7.is_valid():
pre_comment_update(VAR_8=Comment.objects.get(VAR_2=VAR_8.pk))
VAR_8 = VAR_7.save()
post_comment_update(VAR_8=VAR_8)
return redirect(VAR_0.POST.get('next', VAR_8.get_absolute_url()))
return render(
VAR_0=request,
template_name='spirit/VAR_8/FUNC_1.html',
context={'form': VAR_7})
@moderator_required
def FUNC_2(VAR_0, VAR_2, VAR_3=True):
VAR_8 = get_object_or_404(Comment, VAR_2=pk)
if is_post(VAR_0):
(Comment.objects
.filter(VAR_2=pk)
.update(is_removed=VAR_3))
return redirect(VAR_0.GET.get('next', VAR_8.get_absolute_url()))
return render(
VAR_0=request,
template_name='spirit/VAR_8/moderate.html',
context={'comment': VAR_8})
@require_POST
@moderator_required
def FUNC_3(VAR_0, VAR_1):
VAR_6 = get_object_or_404(Topic, VAR_2=VAR_1)
VAR_7 = CommentMoveForm(VAR_6=topic, data=VAR_0.POST)
if VAR_7.is_valid():
VAR_12 = VAR_7.save()
for VAR_8 in VAR_12:
comment_posted(VAR_8=comment, mentions=None)
VAR_6.decrease_comment_count()
post_comment_move(VAR_8=comment, VAR_6=topic)
else:
messages.error(VAR_0, render_form_errors(VAR_7))
return redirect(VAR_0.POST.get('next', VAR_6.get_absolute_url()))
def FUNC_4(VAR_0, VAR_2):
VAR_8 = get_object_or_404(Comment.objects.select_related('topic'), VAR_2=pk)
VAR_9 = (
Comment.objects
.filter(VAR_6=VAR_8.topic, date__lte=VAR_8.date)
.count())
VAR_10 = paginator.get_url(
VAR_8.topic.get_absolute_url(),
VAR_9,
config.comments_per_page,
'page')
return redirect(VAR_10)
@require_POST
@login_required
def FUNC_5(VAR_0):
if not is_ajax(VAR_0):
return Http404()
VAR_7 = CommentImageForm(
VAR_5=VAR_0.user, data=VAR_0.POST, files=VAR_0.FILES)
if VAR_7.is_valid():
VAR_13 = VAR_7.save()
return json_response({'url': VAR_13.url})
return json_response({'error': dict(VAR_7.errors.items())})
@require_POST
@login_required
def FUNC_6(VAR_0):
if not is_ajax(VAR_0):
return Http404()
VAR_7 = CommentFileForm(
VAR_5=VAR_0.user, data=VAR_0.POST, files=VAR_0.FILES)
if VAR_7.is_valid():
VAR_14 = VAR_7.save()
return json_response({'url': VAR_14.url})
return json_response({'error': dict(VAR_7.errors.items())})
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from django.http import Http404
from djconfig import config
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data, is_ajax
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.core.utils.decorators import moderator_required
from spirit.core.utils import markdown, paginator, render_form_errors, json_response
from spirit.topic.models import Topic
from .models import Comment
from .forms import CommentForm, CommentMoveForm, CommentImageForm, CommentFileForm
from .utils import .comment_posted, post_comment_update, pre_comment_update, post_comment_move
@login_required
@ratelimit(rate='1/10s')
def FUNC_0(VAR_0, VAR_1, VAR_2=None):
VAR_4 = None
if VAR_2: # todo: FUNC_3 to VAR_7
VAR_8 = get_object_or_404(
Comment.objects.for_access(VAR_5=VAR_0.user), VAR_2=pk)
VAR_11 = markdown.quotify(VAR_8.comment, VAR_8.user.st.nickname)
VAR_4 = {'comment': VAR_11}
VAR_5 = VAR_0.user
VAR_6 = get_object_or_404(
Topic.objects.opened().for_access(VAR_5),
VAR_2=VAR_1)
VAR_7 = CommentForm(
VAR_5=user,
VAR_6=topic,
data=post_data(VAR_0),
VAR_4=initial)
if is_post(VAR_0) and not VAR_0.is_limited() and VAR_7.is_valid():
if not VAR_5.st.update_post_hash(VAR_7.get_comment_hash()):
VAR_15 = lambda: (Comment
.get_last_for_topic(VAR_1)
.get_absolute_url())
return safe_redirect(VAR_0, 'next', VAR_15, method='POST')
VAR_8 = VAR_7.save()
comment_posted(VAR_8=comment, mentions=VAR_7.mentions)
return safe_redirect(VAR_0, 'next', VAR_8.get_absolute_url(), method='POST')
return render(
VAR_0=request,
template_name='spirit/VAR_8/FUNC_0.html',
context={
'form': VAR_7,
'topic': VAR_6})
@login_required
def FUNC_1(VAR_0, VAR_2):
VAR_8 = Comment.objects.for_update_or_404(VAR_2, VAR_0.user)
VAR_7 = CommentForm(data=post_data(VAR_0), instance=VAR_8)
if is_post(VAR_0) and VAR_7.is_valid():
pre_comment_update(VAR_8=Comment.objects.get(VAR_2=VAR_8.pk))
VAR_8 = VAR_7.save()
post_comment_update(VAR_8=VAR_8)
return safe_redirect(VAR_0, 'next', VAR_8.get_absolute_url(), method='POST')
return render(
VAR_0=request,
template_name='spirit/VAR_8/FUNC_1.html',
context={'form': VAR_7})
@moderator_required
def FUNC_2(VAR_0, VAR_2, VAR_3=True):
VAR_8 = get_object_or_404(Comment, VAR_2=pk)
if is_post(VAR_0):
(Comment.objects
.filter(VAR_2=pk)
.update(is_removed=VAR_3))
return safe_redirect(VAR_0, 'next', VAR_8.get_absolute_url())
return render(
VAR_0=request,
template_name='spirit/VAR_8/moderate.html',
context={'comment': VAR_8})
@require_POST
@moderator_required
def FUNC_3(VAR_0, VAR_1):
VAR_6 = get_object_or_404(Topic, VAR_2=VAR_1)
VAR_7 = CommentMoveForm(VAR_6=topic, data=VAR_0.POST)
if VAR_7.is_valid():
VAR_12 = VAR_7.save()
for VAR_8 in VAR_12:
comment_posted(VAR_8=comment, mentions=None)
VAR_6.decrease_comment_count()
post_comment_move(VAR_8=comment, VAR_6=topic)
else:
messages.error(VAR_0, render_form_errors(VAR_7))
return safe_redirect(VAR_0, 'next', VAR_6.get_absolute_url(), method='POST')
def FUNC_4(VAR_0, VAR_2):
VAR_8 = get_object_or_404(Comment.objects.select_related('topic'), VAR_2=pk)
VAR_9 = (
Comment.objects
.filter(VAR_6=VAR_8.topic, date__lte=VAR_8.date)
.count())
VAR_10 = paginator.get_url(
VAR_8.topic.get_absolute_url(),
VAR_9,
config.comments_per_page,
'page')
return redirect(VAR_10)
@require_POST
@login_required
def FUNC_5(VAR_0):
if not is_ajax(VAR_0):
return Http404()
VAR_7 = CommentImageForm(
VAR_5=VAR_0.user, data=VAR_0.POST, files=VAR_0.FILES)
if VAR_7.is_valid():
VAR_13 = VAR_7.save()
return json_response({'url': VAR_13.url})
return json_response({'error': dict(VAR_7.errors.items())})
@require_POST
@login_required
def FUNC_6(VAR_0):
if not is_ajax(VAR_0):
return Http404()
VAR_7 = CommentFileForm(
VAR_5=VAR_0.user, data=VAR_0.POST, files=VAR_0.FILES)
if VAR_7.is_valid():
VAR_14 = VAR_7.save()
return json_response({'url': VAR_14.url})
return json_response({'error': dict(VAR_7.errors.items())})
| [
1,
2,
8,
10,
19,
20,
30,
40,
43,
49,
53,
60,
61,
75,
76,
89,
90,
96,
99,
106,
108,
109,
122,
123,
129,
132,
136,
138,
139,
145,
148,
152,
154
] | [
1,
2,
8,
10,
20,
21,
31,
41,
44,
49,
53,
60,
61,
75,
76,
89,
90,
96,
99,
106,
108,
109,
122,
123,
129,
132,
136,
138,
139,
145,
148,
152,
154
] |
2CWE-601
| """
.. module: security_monkey.sso.views
:platform: Unix
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Patrick Kelley <patrick@netflix.com>
"""
import jwt
import base64
import requests
from flask import Blueprint, current_app, redirect, request
from flask.ext.restful import reqparse, Resource, Api
from flask.ext.principal import Identity, identity_changed
from flask_login import login_user
try:
from onelogin.saml2.auth import OneLogin_Saml2_Auth
from onelogin.saml2.utils import OneLogin_Saml2_Utils
onelogin_import_success = True
except ImportError:
onelogin_import_success = False
from .service import fetch_token_header_payload, get_rsa_public_key
from security_monkey.datastore import User
from security_monkey import db, rbac
from urlparse import urlparse
mod = Blueprint('sso', __name__)
api = Api(mod)
from flask_security.utils import validate_redirect_url
class Ping(Resource):
"""
This class serves as an example of how one might implement an SSO provider for use with Security Monkey. In
this example we use a OpenIDConnect authentication flow, that is essentially OAuth2 underneath.
"""
decorators = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Ping, self).__init__()
def get(self):
return self.post()
def post(self):
if "ping" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Ping is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
default_state = 'clientId,{client_id},redirectUri,{redirectUri},return_to,{return_to}'.format(
client_id=current_app.config.get('PING_CLIENT_ID'),
redirectUri=current_app.config.get('PING_REDIRECT_URI'),
return_to=current_app.config.get('WEB_PATH')
)
self.reqparse.add_argument('code', type=str, required=True)
self.reqparse.add_argument('state', type=str, required=False, default=default_state)
args = self.reqparse.parse_args()
client_id = args['state'].split(',')[1]
redirect_uri = args['state'].split(',')[3]
return_to = args['state'].split(',')[5]
if not validate_redirect_url(return_to):
return_to = current_app.config.get('WEB_PATH')
# take the information we have received from the provider to create a new request
params = {
'client_id': client_id,
'grant_type': 'authorization_code',
'scope': 'openid email profile address',
'redirect_uri': redirect_uri,
'code': args['code']
}
# you can either discover these dynamically or simply configure them
access_token_url = current_app.config.get('PING_ACCESS_TOKEN_URL')
user_api_url = current_app.config.get('PING_USER_API_URL')
# the secret and cliendId will be given to you when you signup for the provider
basic = base64.b64encode(bytes('{0}:{1}'.format(client_id, current_app.config.get("PING_SECRET"))))
headers = {'Authorization': 'Basic {0}'.format(basic.decode('utf-8'))}
# exchange authorization code for access token.
r = requests.post(access_token_url, headers=headers, params=params)
id_token = r.json()['id_token']
access_token = r.json()['access_token']
# fetch token public key
header_data = fetch_token_header_payload(id_token)[0]
jwks_url = current_app.config.get('PING_JWKS_URL')
# retrieve the key material as specified by the token header
r = requests.get(jwks_url)
for key in r.json()['keys']:
if key['kid'] == header_data['kid']:
secret = get_rsa_public_key(key['n'], key['e'])
algo = header_data['alg']
break
else:
return dict(message='Key not found'), 403
# validate your token based on the key it was signed with
try:
current_app.logger.debug(id_token)
current_app.logger.debug(secret)
current_app.logger.debug(algo)
jwt.decode(id_token, secret.decode('utf-8'), algorithms=[algo], audience=client_id)
except jwt.DecodeError:
return dict(message='Token is invalid'), 403
except jwt.ExpiredSignatureError:
return dict(message='Token has expired'), 403
except jwt.InvalidTokenError:
return dict(message='Token is invalid'), 403
user_params = dict(access_token=access_token, schema='profile')
# retrieve information about the current user.
r = requests.get(user_api_url, params=user_params)
profile = r.json()
user = User.query.filter(User.email==profile['email']).first()
# if we get an sso user create them an account
if not user:
user = User(
email=profile['email'],
active=True,
role='View'
# profile_picture=profile.get('thumbnailPhotoUrl')
)
db.session.add(user)
db.session.commit()
db.session.refresh(user)
# Tell Flask-Principal the identity changed
identity_changed.send(current_app._get_current_object(), identity=Identity(user.id))
login_user(user)
return redirect(return_to, code=302)
class Google(Resource):
decorators = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Google, self).__init__()
def get(self):
return self.post()
def post(self):
if "google" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Google is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
default_state = 'clientId,{client_id},redirectUri,{redirectUri},return_to,{return_to}'.format(
client_id=current_app.config.get("GOOGLE_CLIENT_ID"),
redirectUri=api.url_for(Google),
return_to=current_app.config.get('WEB_PATH')
)
self.reqparse.add_argument('code', type=str, required=True)
self.reqparse.add_argument('state', type=str, required=False, default=default_state)
args = self.reqparse.parse_args()
client_id = args['state'].split(',')[1]
redirect_uri = args['state'].split(',')[3]
return_to = args['state'].split(',')[5]
if not validate_redirect_url(return_to):
return_to = current_app.config.get('WEB_PATH')
access_token_url = 'https://accounts.google.com/o/oauth2/token'
people_api_url = 'https://www.googleapis.com/plus/v1/people/me/openIdConnect'
args = self.reqparse.parse_args()
# Step 1. Exchange authorization code for access token
payload = {
'client_id': client_id,
'grant_type': 'authorization_code',
'redirect_uri': redirect_uri,
'code': args['code'],
'client_secret': current_app.config.get('GOOGLE_SECRET')
}
r = requests.post(access_token_url, data=payload)
token = r.json()
# Step 1bis. Validate (some information of) the id token (if necessary)
google_hosted_domain = current_app.config.get("GOOGLE_HOSTED_DOMAIN")
if google_hosted_domain is not None:
current_app.logger.debug('We need to verify that the token was issued for this hosted domain: %s ' % (google_hosted_domain))
# Get the JSON Web Token
id_token = r.json()['id_token']
current_app.logger.debug('The id_token is: %s' % (id_token))
# Extract the payload
(header_data, payload_data) = fetch_token_header_payload(id_token)
current_app.logger.debug('id_token.header_data: %s' % (header_data))
current_app.logger.debug('id_token.payload_data: %s' % (payload_data))
token_hd = payload_data.get('hd')
if token_hd != google_hosted_domain:
current_app.logger.debug('Verification failed: %s != %s' % (token_hd, google_hosted_domain))
return dict(message='Token is invalid %s' % token), 403
current_app.logger.debug('Verification passed')
# Step 2. Retrieve information about the current user
headers = {'Authorization': 'Bearer {0}'.format(token['access_token'])}
r = requests.get(people_api_url, headers=headers)
profile = r.json()
user = User.query.filter(User.email == profile['email']).first()
# if we get an sso user create them an account
if not user:
user = User(
email=profile['email'],
active=True,
role='View'
# profile_picture=profile.get('thumbnailPhotoUrl')
)
db.session.add(user)
db.session.commit()
db.session.refresh(user)
# Tell Flask-Principal the identity changed
identity_changed.send(current_app._get_current_object(), identity=Identity(user.id))
login_user(user)
return redirect(return_to, code=302)
class OneLogin(Resource):
decorators = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.req = OneLogin.prepare_from_flask_request(request)
super(OneLogin, self).__init__()
@staticmethod
def prepare_from_flask_request(req):
url_data = urlparse(req.url)
return {
'http_host': req.host,
'server_port': url_data.port,
'script_name': req.path,
'get_data': req.args.copy(),
'post_data': req.form.copy(),
'https': ("on" if current_app.config.get("ONELOGIN_HTTPS") else "off")
}
def get(self):
return self.post()
def _consumer(self, auth):
auth.process_response()
errors = auth.get_errors()
if not errors:
if auth.is_authenticated():
return True
else:
return False
else:
current_app.logger.error('Error processing %s' % (', '.join(errors)))
return False
def post(self):
if "onelogin" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Onelogin is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
auth = OneLogin_Saml2_Auth(self.req, current_app.config.get("ONELOGIN_SETTINGS"))
self.reqparse.add_argument('return_to', required=False, default=current_app.config.get('WEB_PATH'))
self.reqparse.add_argument('acs', required=False)
self.reqparse.add_argument('sls', required=False)
args = self.reqparse.parse_args()
return_to = args['return_to']
if args['acs'] != None:
# valids the SAML response and checks if successfully authenticated
if self._consumer(auth):
email = auth.get_attribute(current_app.config.get("ONELOGIN_EMAIL_FIELD"))[0]
user = User.query.filter(User.email == email).first()
# if we get an sso user create them an account
if not user:
user = User(
email=email,
active=True,
role=current_app.config.get('ONELOGIN_DEFAULT_ROLE')
# profile_picture=profile.get('thumbnailPhotoUrl')
)
db.session.add(user)
db.session.commit()
db.session.refresh(user)
# Tell Flask-Principal the identity changed
identity_changed.send(current_app._get_current_object(), identity=Identity(user.id))
login_user(user)
self_url = OneLogin_Saml2_Utils.get_self_url(self.req)
if 'RelayState' in request.form and self_url != request.form['RelayState']:
return redirect(auth.redirect_to(request.form['RelayState']), code=302)
else:
return redirect(current_app.config.get('BASE_URL'), code=302)
else:
return dict(message='OneLogin authentication failed.'), 403
elif args['sls'] != None:
return dict(message='OneLogin SLS not implemented yet.'), 405
else:
return redirect(auth.login(return_to=return_to))
class Providers(Resource):
decorators = [rbac.allow(["anonymous"], ["GET"])]
def __init__(self):
super(Providers, self).__init__()
def get(self):
active_providers = []
for provider in current_app.config.get("ACTIVE_PROVIDERS"):
provider = provider.lower()
if provider == "ping":
active_providers.append({
'name': current_app.config.get("PING_NAME"),
'url': current_app.config.get('PING_REDIRECT_URI'),
'redirectUri': current_app.config.get("PING_REDIRECT_URI"),
'clientId': current_app.config.get("PING_CLIENT_ID"),
'responseType': 'code',
'scope': ['openid', 'profile', 'email'],
'scopeDelimiter': ' ',
'authorizationEndpoint': current_app.config.get("PING_AUTH_ENDPOINT"),
'requiredUrlParams': ['scope'],
'type': '2.0'
})
elif provider == "google":
google_provider = {
'name': 'google',
'clientId': current_app.config.get("GOOGLE_CLIENT_ID"),
'url': api.url_for(Google, _external=True, _scheme='https'),
'redirectUri': api.url_for(Google, _external=True, _scheme='https'),
'authorizationEndpoint': current_app.config.get("GOOGLE_AUTH_ENDPOINT"),
'scope': ['openid email'],
'responseType': 'code'
}
google_hosted_domain = current_app.config.get("GOOGLE_HOSTED_DOMAIN")
if google_hosted_domain is not None:
google_provider['hd'] = google_hosted_domain
active_providers.append(google_provider)
elif provider == "onelogin":
active_providers.append({
'name': 'OneLogin',
'authorizationEndpoint': api.url_for(OneLogin)
})
else:
raise Exception("Unknown authentication provider: {0}".format(provider))
return active_providers
api.add_resource(Ping, '/auth/ping', endpoint='ping')
api.add_resource(Google, '/auth/google', endpoint='google')
api.add_resource(Providers, '/auth/providers', endpoint='providers')
if onelogin_import_success:
api.add_resource(OneLogin, '/auth/onelogin', endpoint='onelogin')
| """
.. module: security_monkey.sso.views
:platform: Unix
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Patrick Kelley <patrick@netflix.com>
"""
import jwt
import base64
import requests
from flask import Blueprint, current_app, redirect, request
from flask.ext.restful import reqparse, Resource, Api
from flask.ext.principal import Identity, identity_changed
from flask_security.utils import login_user
try:
from onelogin.saml2.auth import OneLogin_Saml2_Auth
from onelogin.saml2.utils import OneLogin_Saml2_Utils
onelogin_import_success = True
except ImportError:
onelogin_import_success = False
from .service import fetch_token_header_payload, get_rsa_public_key
from security_monkey.datastore import User
from security_monkey import db, rbac
from urlparse import urlparse
mod = Blueprint('sso', __name__)
api = Api(mod)
from flask_security.utils import validate_redirect_url
class Ping(Resource):
"""
This class serves as an example of how one might implement an SSO provider for use with Security Monkey. In
this example we use a OpenIDConnect authentication flow, that is essentially OAuth2 underneath.
"""
decorators = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Ping, self).__init__()
def get(self):
return self.post()
def post(self):
if "ping" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Ping is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
default_state = 'clientId,{client_id},redirectUri,{redirectUri},return_to,{return_to}'.format(
client_id=current_app.config.get('PING_CLIENT_ID'),
redirectUri=current_app.config.get('PING_REDIRECT_URI'),
return_to=current_app.config.get('WEB_PATH')
)
self.reqparse.add_argument('code', type=str, required=True)
self.reqparse.add_argument('state', type=str, required=False, default=default_state)
args = self.reqparse.parse_args()
client_id = args['state'].split(',')[1]
redirect_uri = args['state'].split(',')[3]
return_to = args['state'].split(',')[5]
if not validate_redirect_url(return_to):
return_to = current_app.config.get('WEB_PATH')
# take the information we have received from the provider to create a new request
params = {
'client_id': client_id,
'grant_type': 'authorization_code',
'scope': 'openid email profile address',
'redirect_uri': redirect_uri,
'code': args['code']
}
# you can either discover these dynamically or simply configure them
access_token_url = current_app.config.get('PING_ACCESS_TOKEN_URL')
user_api_url = current_app.config.get('PING_USER_API_URL')
# the secret and cliendId will be given to you when you signup for the provider
basic = base64.b64encode(bytes('{0}:{1}'.format(client_id, current_app.config.get("PING_SECRET"))))
headers = {'Authorization': 'Basic {0}'.format(basic.decode('utf-8'))}
# exchange authorization code for access token.
r = requests.post(access_token_url, headers=headers, params=params)
id_token = r.json()['id_token']
access_token = r.json()['access_token']
# fetch token public key
header_data = fetch_token_header_payload(id_token)[0]
jwks_url = current_app.config.get('PING_JWKS_URL')
# retrieve the key material as specified by the token header
r = requests.get(jwks_url)
for key in r.json()['keys']:
if key['kid'] == header_data['kid']:
secret = get_rsa_public_key(key['n'], key['e'])
algo = header_data['alg']
break
else:
return dict(message='Key not found'), 403
# validate your token based on the key it was signed with
try:
current_app.logger.debug(id_token)
current_app.logger.debug(secret)
current_app.logger.debug(algo)
jwt.decode(id_token, secret.decode('utf-8'), algorithms=[algo], audience=client_id)
except jwt.DecodeError:
return dict(message='Token is invalid'), 403
except jwt.ExpiredSignatureError:
return dict(message='Token has expired'), 403
except jwt.InvalidTokenError:
return dict(message='Token is invalid'), 403
user_params = dict(access_token=access_token, schema='profile')
# retrieve information about the current user.
r = requests.get(user_api_url, params=user_params)
profile = r.json()
user = User.query.filter(User.email==profile['email']).first()
# if we get an sso user create them an account
if not user:
user = User(
email=profile['email'],
active=True,
role='View'
# profile_picture=profile.get('thumbnailPhotoUrl')
)
db.session.add(user)
db.session.commit()
db.session.refresh(user)
# Tell Flask-Principal the identity changed
identity_changed.send(current_app._get_current_object(), identity=Identity(user.id))
login_user(user)
return redirect(return_to, code=302)
class Google(Resource):
decorators = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Google, self).__init__()
def get(self):
return self.post()
def post(self):
if "google" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Google is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
default_state = 'clientId,{client_id},redirectUri,{redirectUri},return_to,{return_to}'.format(
client_id=current_app.config.get("GOOGLE_CLIENT_ID"),
redirectUri=api.url_for(Google),
return_to=current_app.config.get('WEB_PATH')
)
self.reqparse.add_argument('code', type=str, required=True)
self.reqparse.add_argument('state', type=str, required=False, default=default_state)
args = self.reqparse.parse_args()
client_id = args['state'].split(',')[1]
redirect_uri = args['state'].split(',')[3]
return_to = args['state'].split(',')[5]
if not validate_redirect_url(return_to):
return_to = current_app.config.get('WEB_PATH')
access_token_url = 'https://accounts.google.com/o/oauth2/token'
people_api_url = 'https://www.googleapis.com/plus/v1/people/me/openIdConnect'
args = self.reqparse.parse_args()
# Step 1. Exchange authorization code for access token
payload = {
'client_id': client_id,
'grant_type': 'authorization_code',
'redirect_uri': redirect_uri,
'code': args['code'],
'client_secret': current_app.config.get('GOOGLE_SECRET')
}
r = requests.post(access_token_url, data=payload)
token = r.json()
# Step 1bis. Validate (some information of) the id token (if necessary)
google_hosted_domain = current_app.config.get("GOOGLE_HOSTED_DOMAIN")
if google_hosted_domain is not None:
current_app.logger.debug('We need to verify that the token was issued for this hosted domain: %s ' % (google_hosted_domain))
# Get the JSON Web Token
id_token = r.json()['id_token']
current_app.logger.debug('The id_token is: %s' % (id_token))
# Extract the payload
(header_data, payload_data) = fetch_token_header_payload(id_token)
current_app.logger.debug('id_token.header_data: %s' % (header_data))
current_app.logger.debug('id_token.payload_data: %s' % (payload_data))
token_hd = payload_data.get('hd')
if token_hd != google_hosted_domain:
current_app.logger.debug('Verification failed: %s != %s' % (token_hd, google_hosted_domain))
return dict(message='Token is invalid %s' % token), 403
current_app.logger.debug('Verification passed')
# Step 2. Retrieve information about the current user
headers = {'Authorization': 'Bearer {0}'.format(token['access_token'])}
r = requests.get(people_api_url, headers=headers)
profile = r.json()
user = User.query.filter(User.email == profile['email']).first()
# if we get an sso user create them an account
if not user:
user = User(
email=profile['email'],
active=True,
role='View'
# profile_picture=profile.get('thumbnailPhotoUrl')
)
db.session.add(user)
db.session.commit()
db.session.refresh(user)
# Tell Flask-Principal the identity changed
identity_changed.send(current_app._get_current_object(), identity=Identity(user.id))
login_user(user)
return redirect(return_to, code=302)
class OneLogin(Resource):
decorators = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.req = OneLogin.prepare_from_flask_request(request)
super(OneLogin, self).__init__()
@staticmethod
def prepare_from_flask_request(req):
url_data = urlparse(req.url)
return {
'http_host': req.host,
'server_port': url_data.port,
'script_name': req.path,
'get_data': req.args.copy(),
'post_data': req.form.copy(),
'https': ("on" if current_app.config.get("ONELOGIN_HTTPS") else "off")
}
def get(self):
return self.post()
def _consumer(self, auth):
auth.process_response()
errors = auth.get_errors()
if not errors:
if auth.is_authenticated:
return True
else:
return False
else:
current_app.logger.error('Error processing %s' % (', '.join(errors)))
return False
def post(self):
if "onelogin" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Onelogin is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
auth = OneLogin_Saml2_Auth(self.req, current_app.config.get("ONELOGIN_SETTINGS"))
self.reqparse.add_argument('return_to', required=False, default=current_app.config.get('WEB_PATH'))
self.reqparse.add_argument('acs', required=False)
self.reqparse.add_argument('sls', required=False)
args = self.reqparse.parse_args()
return_to = args['return_to']
if args['acs'] != None:
# valids the SAML response and checks if successfully authenticated
if self._consumer(auth):
email = auth.get_attribute(current_app.config.get("ONELOGIN_EMAIL_FIELD"))[0]
user = User.query.filter(User.email == email).first()
# if we get an sso user create them an account
if not user:
user = User(
email=email,
active=True,
role=current_app.config.get('ONELOGIN_DEFAULT_ROLE')
# profile_picture=profile.get('thumbnailPhotoUrl')
)
db.session.add(user)
db.session.commit()
db.session.refresh(user)
# Tell Flask-Principal the identity changed
identity_changed.send(current_app._get_current_object(), identity=Identity(user.id))
login_user(user)
self_url = OneLogin_Saml2_Utils.get_self_url(self.req)
if 'RelayState' in request.form and self_url != request.form['RelayState']:
return redirect(auth.redirect_to(request.form['RelayState']), code=302)
else:
return redirect(current_app.config.get('BASE_URL'), code=302)
else:
return dict(message='OneLogin authentication failed.'), 403
elif args['sls'] != None:
return dict(message='OneLogin SLS not implemented yet.'), 405
else:
return redirect(auth.login(return_to=return_to))
class Providers(Resource):
decorators = [rbac.allow(["anonymous"], ["GET"])]
def __init__(self):
super(Providers, self).__init__()
def get(self):
active_providers = []
for provider in current_app.config.get("ACTIVE_PROVIDERS"):
provider = provider.lower()
if provider == "ping":
active_providers.append({
'name': current_app.config.get("PING_NAME"),
'url': current_app.config.get('PING_REDIRECT_URI'),
'redirectUri': current_app.config.get("PING_REDIRECT_URI"),
'clientId': current_app.config.get("PING_CLIENT_ID"),
'responseType': 'code',
'scope': ['openid', 'profile', 'email'],
'scopeDelimiter': ' ',
'authorizationEndpoint': current_app.config.get("PING_AUTH_ENDPOINT"),
'requiredUrlParams': ['scope'],
'type': '2.0'
})
elif provider == "google":
google_provider = {
'name': 'google',
'clientId': current_app.config.get("GOOGLE_CLIENT_ID"),
'url': api.url_for(Google, _external=True, _scheme='https'),
'redirectUri': api.url_for(Google, _external=True, _scheme='https'),
'authorizationEndpoint': current_app.config.get("GOOGLE_AUTH_ENDPOINT"),
'scope': ['openid email'],
'responseType': 'code'
}
google_hosted_domain = current_app.config.get("GOOGLE_HOSTED_DOMAIN")
if google_hosted_domain is not None:
google_provider['hd'] = google_hosted_domain
active_providers.append(google_provider)
elif provider == "onelogin":
active_providers.append({
'name': 'OneLogin',
'authorizationEndpoint': api.url_for(OneLogin)
})
else:
raise Exception("Unknown authentication provider: {0}".format(provider))
return active_providers
api.add_resource(Ping, '/auth/ping', endpoint='ping')
api.add_resource(Google, '/auth/google', endpoint='google')
api.add_resource(Providers, '/auth/providers', endpoint='providers')
if onelogin_import_success:
api.add_resource(OneLogin, '/auth/onelogin', endpoint='onelogin')
| open_redirect | {
"code": [
"from flask_login import login_user",
" if auth.is_authenticated():"
],
"line_no": [
16,
267
]
} | {
"code": [
"from flask_security.utils import login_user",
" if auth.is_authenticated:"
],
"line_no": [
16,
267
]
} |
import jwt
import base64
import .requests
from flask import Blueprint, current_app, redirect, request
from flask.ext.restful import .reqparse, Resource, Api
from flask.ext.principal import Identity, identity_changed
from flask_login import login_user
try:
from onelogin.saml2.auth import .OneLogin_Saml2_Auth
from onelogin.saml2.utils import .OneLogin_Saml2_Utils
VAR_2 = True
except ImportError:
VAR_2 = False
from .service import fetch_token_header_payload, get_rsa_public_key
from security_monkey.datastore import User
from security_monkey import db, rbac
from urlparse import urlparse
VAR_0 = Blueprint('sso', __name__)
VAR_1 = Api(VAR_0)
from flask_security.utils import validate_redirect_url
class CLASS_0(Resource):
VAR_3 = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(CLASS_0, self).__init__()
def FUNC_0(self):
return self.post()
def FUNC_1(self):
if "ping" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Ping is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
VAR_6 = 'clientId,{VAR_8},redirectUri,{redirectUri},VAR_10,{return_to}'.format(
VAR_8=current_app.config.get('PING_CLIENT_ID'),
redirectUri=current_app.config.get('PING_REDIRECT_URI'),
VAR_10=current_app.config.get('WEB_PATH')
)
self.reqparse.add_argument('code', type=str, required=True)
self.reqparse.add_argument('state', type=str, required=False, default=VAR_6)
VAR_7 = self.reqparse.parse_args()
VAR_8 = VAR_7['state'].split(',')[1]
VAR_9 = VAR_7['state'].split(',')[3]
VAR_10 = VAR_7['state'].split(',')[5]
if not validate_redirect_url(VAR_10):
return_to = current_app.config.get('WEB_PATH')
VAR_11 = {
'client_id': VAR_8,
'grant_type': 'authorization_code',
'scope': 'openid VAR_36 VAR_22 address',
'redirect_uri': VAR_9,
'code': VAR_7['code']
}
VAR_12 = current_app.config.get('PING_ACCESS_TOKEN_URL')
VAR_13 = current_app.config.get('PING_USER_API_URL')
VAR_14 = base64.b64encode(bytes('{0}:{1}'.format(VAR_8, current_app.config.get("PING_SECRET"))))
VAR_15 = {'Authorization': 'Basic {0}'.format(VAR_14.decode('utf-8'))}
VAR_16 = requests.post(VAR_12, VAR_15=headers, VAR_11=params)
VAR_17 = VAR_16.json()['id_token']
VAR_18 = VAR_16.json()['access_token']
VAR_19 = fetch_token_header_payload(VAR_17)[0]
VAR_20 = current_app.config.get('PING_JWKS_URL')
VAR_16 = requests.get(VAR_20)
for key in VAR_16.json()['keys']:
if key['kid'] == VAR_19['kid']:
VAR_34 = get_rsa_public_key(key['n'], key['e'])
VAR_35 = VAR_19['alg']
break
else:
return dict(message='Key not found'), 403
try:
current_app.logger.debug(VAR_17)
current_app.logger.debug(VAR_34)
current_app.logger.debug(VAR_35)
jwt.decode(VAR_17, VAR_34.decode('utf-8'), algorithms=[VAR_35], audience=VAR_8)
except jwt.DecodeError:
return dict(message='Token is invalid'), 403
except jwt.ExpiredSignatureError:
return dict(message='Token has expired'), 403
except jwt.InvalidTokenError:
return dict(message='Token is invalid'), 403
VAR_21 = dict(VAR_18=access_token, schema='profile')
VAR_16 = requests.get(VAR_13, VAR_11=VAR_21)
VAR_22 = VAR_16.json()
VAR_23 = User.query.filter(User.email==VAR_22['email']).first()
if not VAR_23:
VAR_23 = User(
VAR_36=VAR_22['email'],
active=True,
role='View'
)
db.session.add(VAR_23)
db.session.commit()
db.session.refresh(VAR_23)
identity_changed.send(current_app._get_current_object(), identity=Identity(VAR_23.id))
login_user(VAR_23)
return redirect(VAR_10, code=302)
class CLASS_1(Resource):
VAR_3 = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(CLASS_1, self).__init__()
def FUNC_0(self):
return self.post()
def FUNC_1(self):
if "google" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Google is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
VAR_6 = 'clientId,{VAR_8},redirectUri,{redirectUri},VAR_10,{return_to}'.format(
VAR_8=current_app.config.get("GOOGLE_CLIENT_ID"),
redirectUri=VAR_1.url_for(CLASS_1),
VAR_10=current_app.config.get('WEB_PATH')
)
self.reqparse.add_argument('code', type=str, required=True)
self.reqparse.add_argument('state', type=str, required=False, default=VAR_6)
VAR_7 = self.reqparse.parse_args()
VAR_8 = VAR_7['state'].split(',')[1]
VAR_9 = VAR_7['state'].split(',')[3]
VAR_10 = VAR_7['state'].split(',')[5]
if not validate_redirect_url(VAR_10):
return_to = current_app.config.get('WEB_PATH')
VAR_12 = 'https://accounts.google.com/o/oauth2/token'
VAR_24 = 'https://www.googleapis.com/plus/v1/people/me/openIdConnect'
VAR_7 = self.reqparse.parse_args()
VAR_25 = {
'client_id': VAR_8,
'grant_type': 'authorization_code',
'redirect_uri': VAR_9,
'code': VAR_7['code'],
'client_secret': current_app.config.get('GOOGLE_SECRET')
}
VAR_16 = requests.post(VAR_12, data=VAR_25)
VAR_26 = VAR_16.json()
VAR_27 = current_app.config.get("GOOGLE_HOSTED_DOMAIN")
if VAR_27 is not None:
current_app.logger.debug('We need to verify that the VAR_26 was issued for this hosted domain: %s ' % (VAR_27))
VAR_17 = VAR_16.json()['id_token']
current_app.logger.debug('The VAR_17 is: %s' % (VAR_17))
(VAR_19, VAR_31) = fetch_token_header_payload(VAR_17)
current_app.logger.debug('id_token.header_data: %s' % (VAR_19))
current_app.logger.debug('id_token.payload_data: %s' % (VAR_31))
VAR_32 = VAR_31.get('hd')
if VAR_32 != VAR_27:
current_app.logger.debug('Verification failed: %s != %s' % (VAR_32, VAR_27))
return dict(message='Token is invalid %s' % VAR_26), 403
current_app.logger.debug('Verification passed')
VAR_15 = {'Authorization': 'Bearer {0}'.format(VAR_26['access_token'])}
VAR_16 = requests.get(VAR_24, VAR_15=headers)
VAR_22 = VAR_16.json()
VAR_23 = User.query.filter(User.email == VAR_22['email']).first()
if not VAR_23:
VAR_23 = User(
VAR_36=VAR_22['email'],
active=True,
role='View'
)
db.session.add(VAR_23)
db.session.commit()
db.session.refresh(VAR_23)
identity_changed.send(current_app._get_current_object(), identity=Identity(VAR_23.id))
login_user(VAR_23)
return redirect(VAR_10, code=302)
class CLASS_2(Resource):
VAR_3 = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.req = CLASS_2.prepare_from_flask_request(request)
super(CLASS_2, self).__init__()
@staticmethod
def FUNC_2(VAR_4):
VAR_28 = urlparse(VAR_4.url)
return {
'http_host': VAR_4.host,
'server_port': VAR_28.port,
'script_name': VAR_4.path,
'get_data': VAR_4.args.copy(),
'post_data': VAR_4.form.copy(),
'https': ("on" if current_app.config.get("ONELOGIN_HTTPS") else "off")
}
def FUNC_0(self):
return self.post()
def FUNC_3(self, VAR_5):
VAR_5.process_response()
VAR_29 = VAR_5.get_errors()
if not VAR_29:
if VAR_5.is_authenticated():
return True
else:
return False
else:
current_app.logger.error('Error processing %s' % (', '.join(VAR_29)))
return False
def FUNC_1(self):
if "onelogin" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Onelogin is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
VAR_5 = OneLogin_Saml2_Auth(self.req, current_app.config.get("ONELOGIN_SETTINGS"))
self.reqparse.add_argument('return_to', required=False, default=current_app.config.get('WEB_PATH'))
self.reqparse.add_argument('acs', required=False)
self.reqparse.add_argument('sls', required=False)
VAR_7 = self.reqparse.parse_args()
VAR_10 = VAR_7['return_to']
if VAR_7['acs'] != None:
if self._consumer(VAR_5):
VAR_36 = VAR_5.get_attribute(current_app.config.get("ONELOGIN_EMAIL_FIELD"))[0]
VAR_23 = User.query.filter(User.email == VAR_36).first()
if not VAR_23:
VAR_23 = User(
VAR_36=email,
active=True,
role=current_app.config.get('ONELOGIN_DEFAULT_ROLE')
)
db.session.add(VAR_23)
db.session.commit()
db.session.refresh(VAR_23)
identity_changed.send(current_app._get_current_object(), identity=Identity(VAR_23.id))
login_user(VAR_23)
VAR_37 = OneLogin_Saml2_Utils.get_self_url(self.req)
if 'RelayState' in request.form and VAR_37 != request.form['RelayState']:
return redirect(VAR_5.redirect_to(request.form['RelayState']), code=302)
else:
return redirect(current_app.config.get('BASE_URL'), code=302)
else:
return dict(message='OneLogin authentication failed.'), 403
elif VAR_7['sls'] != None:
return dict(message='OneLogin SLS not implemented yet.'), 405
else:
return redirect(VAR_5.login(VAR_10=return_to))
class CLASS_3(Resource):
VAR_3 = [rbac.allow(["anonymous"], ["GET"])]
def __init__(self):
super(CLASS_3, self).__init__()
def FUNC_0(self):
VAR_30 = []
for VAR_33 in current_app.config.get("ACTIVE_PROVIDERS"):
VAR_33 = provider.lower()
if VAR_33 == "ping":
VAR_30.append({
'name': current_app.config.get("PING_NAME"),
'url': current_app.config.get('PING_REDIRECT_URI'),
'redirectUri': current_app.config.get("PING_REDIRECT_URI"),
'clientId': current_app.config.get("PING_CLIENT_ID"),
'responseType': 'code',
'scope': ['openid', 'profile', 'email'],
'scopeDelimiter': ' ',
'authorizationEndpoint': current_app.config.get("PING_AUTH_ENDPOINT"),
'requiredUrlParams': ['scope'],
'type': '2.0'
})
elif VAR_33 == "google":
VAR_38 = {
'name': 'google',
'clientId': current_app.config.get("GOOGLE_CLIENT_ID"),
'url': VAR_1.url_for(CLASS_1, _external=True, _scheme='https'),
'redirectUri': VAR_1.url_for(CLASS_1, _external=True, _scheme='https'),
'authorizationEndpoint': current_app.config.get("GOOGLE_AUTH_ENDPOINT"),
'scope': ['openid email'],
'responseType': 'code'
}
VAR_27 = current_app.config.get("GOOGLE_HOSTED_DOMAIN")
if VAR_27 is not None:
VAR_38['hd'] = VAR_27
VAR_30.append(VAR_38)
elif VAR_33 == "onelogin":
VAR_30.append({
'name': 'OneLogin',
'authorizationEndpoint': VAR_1.url_for(CLASS_2)
})
else:
raise Exception("Unknown authentication VAR_33: {0}".format(VAR_33))
return VAR_30
VAR_1.add_resource(CLASS_0, '/VAR_5/ping', endpoint='ping')
VAR_1.add_resource(CLASS_1, '/VAR_5/google', endpoint='google')
VAR_1.add_resource(CLASS_3, '/VAR_5/providers', endpoint='providers')
if VAR_2:
VAR_1.add_resource(CLASS_2, '/VAR_5/onelogin', endpoint='onelogin')
|
import jwt
import base64
import .requests
from flask import Blueprint, current_app, redirect, request
from flask.ext.restful import .reqparse, Resource, Api
from flask.ext.principal import Identity, identity_changed
from flask_security.utils import login_user
try:
from onelogin.saml2.auth import .OneLogin_Saml2_Auth
from onelogin.saml2.utils import .OneLogin_Saml2_Utils
VAR_2 = True
except ImportError:
VAR_2 = False
from .service import fetch_token_header_payload, get_rsa_public_key
from security_monkey.datastore import User
from security_monkey import db, rbac
from urlparse import urlparse
VAR_0 = Blueprint('sso', __name__)
VAR_1 = Api(VAR_0)
from flask_security.utils import validate_redirect_url
class CLASS_0(Resource):
VAR_3 = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(CLASS_0, self).__init__()
def FUNC_0(self):
return self.post()
def FUNC_1(self):
if "ping" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Ping is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
VAR_6 = 'clientId,{VAR_8},redirectUri,{redirectUri},VAR_10,{return_to}'.format(
VAR_8=current_app.config.get('PING_CLIENT_ID'),
redirectUri=current_app.config.get('PING_REDIRECT_URI'),
VAR_10=current_app.config.get('WEB_PATH')
)
self.reqparse.add_argument('code', type=str, required=True)
self.reqparse.add_argument('state', type=str, required=False, default=VAR_6)
VAR_7 = self.reqparse.parse_args()
VAR_8 = VAR_7['state'].split(',')[1]
VAR_9 = VAR_7['state'].split(',')[3]
VAR_10 = VAR_7['state'].split(',')[5]
if not validate_redirect_url(VAR_10):
return_to = current_app.config.get('WEB_PATH')
VAR_11 = {
'client_id': VAR_8,
'grant_type': 'authorization_code',
'scope': 'openid VAR_36 VAR_22 address',
'redirect_uri': VAR_9,
'code': VAR_7['code']
}
VAR_12 = current_app.config.get('PING_ACCESS_TOKEN_URL')
VAR_13 = current_app.config.get('PING_USER_API_URL')
VAR_14 = base64.b64encode(bytes('{0}:{1}'.format(VAR_8, current_app.config.get("PING_SECRET"))))
VAR_15 = {'Authorization': 'Basic {0}'.format(VAR_14.decode('utf-8'))}
VAR_16 = requests.post(VAR_12, VAR_15=headers, VAR_11=params)
VAR_17 = VAR_16.json()['id_token']
VAR_18 = VAR_16.json()['access_token']
VAR_19 = fetch_token_header_payload(VAR_17)[0]
VAR_20 = current_app.config.get('PING_JWKS_URL')
VAR_16 = requests.get(VAR_20)
for key in VAR_16.json()['keys']:
if key['kid'] == VAR_19['kid']:
VAR_34 = get_rsa_public_key(key['n'], key['e'])
VAR_35 = VAR_19['alg']
break
else:
return dict(message='Key not found'), 403
try:
current_app.logger.debug(VAR_17)
current_app.logger.debug(VAR_34)
current_app.logger.debug(VAR_35)
jwt.decode(VAR_17, VAR_34.decode('utf-8'), algorithms=[VAR_35], audience=VAR_8)
except jwt.DecodeError:
return dict(message='Token is invalid'), 403
except jwt.ExpiredSignatureError:
return dict(message='Token has expired'), 403
except jwt.InvalidTokenError:
return dict(message='Token is invalid'), 403
VAR_21 = dict(VAR_18=access_token, schema='profile')
VAR_16 = requests.get(VAR_13, VAR_11=VAR_21)
VAR_22 = VAR_16.json()
VAR_23 = User.query.filter(User.email==VAR_22['email']).first()
if not VAR_23:
VAR_23 = User(
VAR_36=VAR_22['email'],
active=True,
role='View'
)
db.session.add(VAR_23)
db.session.commit()
db.session.refresh(VAR_23)
identity_changed.send(current_app._get_current_object(), identity=Identity(VAR_23.id))
login_user(VAR_23)
return redirect(VAR_10, code=302)
class CLASS_1(Resource):
VAR_3 = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(CLASS_1, self).__init__()
def FUNC_0(self):
return self.post()
def FUNC_1(self):
if "google" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Google is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
VAR_6 = 'clientId,{VAR_8},redirectUri,{redirectUri},VAR_10,{return_to}'.format(
VAR_8=current_app.config.get("GOOGLE_CLIENT_ID"),
redirectUri=VAR_1.url_for(CLASS_1),
VAR_10=current_app.config.get('WEB_PATH')
)
self.reqparse.add_argument('code', type=str, required=True)
self.reqparse.add_argument('state', type=str, required=False, default=VAR_6)
VAR_7 = self.reqparse.parse_args()
VAR_8 = VAR_7['state'].split(',')[1]
VAR_9 = VAR_7['state'].split(',')[3]
VAR_10 = VAR_7['state'].split(',')[5]
if not validate_redirect_url(VAR_10):
return_to = current_app.config.get('WEB_PATH')
VAR_12 = 'https://accounts.google.com/o/oauth2/token'
VAR_24 = 'https://www.googleapis.com/plus/v1/people/me/openIdConnect'
VAR_7 = self.reqparse.parse_args()
VAR_25 = {
'client_id': VAR_8,
'grant_type': 'authorization_code',
'redirect_uri': VAR_9,
'code': VAR_7['code'],
'client_secret': current_app.config.get('GOOGLE_SECRET')
}
VAR_16 = requests.post(VAR_12, data=VAR_25)
VAR_26 = VAR_16.json()
VAR_27 = current_app.config.get("GOOGLE_HOSTED_DOMAIN")
if VAR_27 is not None:
current_app.logger.debug('We need to verify that the VAR_26 was issued for this hosted domain: %s ' % (VAR_27))
VAR_17 = VAR_16.json()['id_token']
current_app.logger.debug('The VAR_17 is: %s' % (VAR_17))
(VAR_19, VAR_31) = fetch_token_header_payload(VAR_17)
current_app.logger.debug('id_token.header_data: %s' % (VAR_19))
current_app.logger.debug('id_token.payload_data: %s' % (VAR_31))
VAR_32 = VAR_31.get('hd')
if VAR_32 != VAR_27:
current_app.logger.debug('Verification failed: %s != %s' % (VAR_32, VAR_27))
return dict(message='Token is invalid %s' % VAR_26), 403
current_app.logger.debug('Verification passed')
VAR_15 = {'Authorization': 'Bearer {0}'.format(VAR_26['access_token'])}
VAR_16 = requests.get(VAR_24, VAR_15=headers)
VAR_22 = VAR_16.json()
VAR_23 = User.query.filter(User.email == VAR_22['email']).first()
if not VAR_23:
VAR_23 = User(
VAR_36=VAR_22['email'],
active=True,
role='View'
)
db.session.add(VAR_23)
db.session.commit()
db.session.refresh(VAR_23)
identity_changed.send(current_app._get_current_object(), identity=Identity(VAR_23.id))
login_user(VAR_23)
return redirect(VAR_10, code=302)
class CLASS_2(Resource):
VAR_3 = [rbac.allow(["anonymous"], ["GET", "POST"])]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.req = CLASS_2.prepare_from_flask_request(request)
super(CLASS_2, self).__init__()
@staticmethod
def FUNC_2(VAR_4):
VAR_28 = urlparse(VAR_4.url)
return {
'http_host': VAR_4.host,
'server_port': VAR_28.port,
'script_name': VAR_4.path,
'get_data': VAR_4.args.copy(),
'post_data': VAR_4.form.copy(),
'https': ("on" if current_app.config.get("ONELOGIN_HTTPS") else "off")
}
def FUNC_0(self):
return self.post()
def FUNC_3(self, VAR_5):
VAR_5.process_response()
VAR_29 = VAR_5.get_errors()
if not VAR_29:
if VAR_5.is_authenticated:
return True
else:
return False
else:
current_app.logger.error('Error processing %s' % (', '.join(VAR_29)))
return False
def FUNC_1(self):
if "onelogin" not in current_app.config.get("ACTIVE_PROVIDERS"):
return "Onelogin is not enabled in the config. See the ACTIVE_PROVIDERS section.", 404
VAR_5 = OneLogin_Saml2_Auth(self.req, current_app.config.get("ONELOGIN_SETTINGS"))
self.reqparse.add_argument('return_to', required=False, default=current_app.config.get('WEB_PATH'))
self.reqparse.add_argument('acs', required=False)
self.reqparse.add_argument('sls', required=False)
VAR_7 = self.reqparse.parse_args()
VAR_10 = VAR_7['return_to']
if VAR_7['acs'] != None:
if self._consumer(VAR_5):
VAR_36 = VAR_5.get_attribute(current_app.config.get("ONELOGIN_EMAIL_FIELD"))[0]
VAR_23 = User.query.filter(User.email == VAR_36).first()
if not VAR_23:
VAR_23 = User(
VAR_36=email,
active=True,
role=current_app.config.get('ONELOGIN_DEFAULT_ROLE')
)
db.session.add(VAR_23)
db.session.commit()
db.session.refresh(VAR_23)
identity_changed.send(current_app._get_current_object(), identity=Identity(VAR_23.id))
login_user(VAR_23)
VAR_37 = OneLogin_Saml2_Utils.get_self_url(self.req)
if 'RelayState' in request.form and VAR_37 != request.form['RelayState']:
return redirect(VAR_5.redirect_to(request.form['RelayState']), code=302)
else:
return redirect(current_app.config.get('BASE_URL'), code=302)
else:
return dict(message='OneLogin authentication failed.'), 403
elif VAR_7['sls'] != None:
return dict(message='OneLogin SLS not implemented yet.'), 405
else:
return redirect(VAR_5.login(VAR_10=return_to))
class CLASS_3(Resource):
VAR_3 = [rbac.allow(["anonymous"], ["GET"])]
def __init__(self):
super(CLASS_3, self).__init__()
def FUNC_0(self):
VAR_30 = []
for VAR_33 in current_app.config.get("ACTIVE_PROVIDERS"):
VAR_33 = provider.lower()
if VAR_33 == "ping":
VAR_30.append({
'name': current_app.config.get("PING_NAME"),
'url': current_app.config.get('PING_REDIRECT_URI'),
'redirectUri': current_app.config.get("PING_REDIRECT_URI"),
'clientId': current_app.config.get("PING_CLIENT_ID"),
'responseType': 'code',
'scope': ['openid', 'profile', 'email'],
'scopeDelimiter': ' ',
'authorizationEndpoint': current_app.config.get("PING_AUTH_ENDPOINT"),
'requiredUrlParams': ['scope'],
'type': '2.0'
})
elif VAR_33 == "google":
VAR_38 = {
'name': 'google',
'clientId': current_app.config.get("GOOGLE_CLIENT_ID"),
'url': VAR_1.url_for(CLASS_1, _external=True, _scheme='https'),
'redirectUri': VAR_1.url_for(CLASS_1, _external=True, _scheme='https'),
'authorizationEndpoint': current_app.config.get("GOOGLE_AUTH_ENDPOINT"),
'scope': ['openid email'],
'responseType': 'code'
}
VAR_27 = current_app.config.get("GOOGLE_HOSTED_DOMAIN")
if VAR_27 is not None:
VAR_38['hd'] = VAR_27
VAR_30.append(VAR_38)
elif VAR_33 == "onelogin":
VAR_30.append({
'name': 'OneLogin',
'authorizationEndpoint': VAR_1.url_for(CLASS_2)
})
else:
raise Exception("Unknown authentication VAR_33: {0}".format(VAR_33))
return VAR_30
VAR_1.add_resource(CLASS_0, '/VAR_5/ping', endpoint='ping')
VAR_1.add_resource(CLASS_1, '/VAR_5/google', endpoint='google')
VAR_1.add_resource(CLASS_3, '/VAR_5/providers', endpoint='providers')
if VAR_2:
VAR_1.add_resource(CLASS_2, '/VAR_5/onelogin', endpoint='onelogin')
| [
11,
13,
17,
24,
26,
29,
31,
34,
35,
37,
38,
48,
51,
55,
63,
68,
71,
72,
80,
81,
84,
85,
88,
89,
93,
94,
97,
98,
107,
108,
120,
122,
123,
126,
128,
129,
135,
140,
141,
144,
146,
147,
153,
156,
160,
168,
173,
176,
179,
181,
182,
190,
193,
194,
198,
199,
202,
203,
207,
213,
214,
216,
219,
221,
222,
228,
233,
234,
237,
239,
240,
247,
259,
262,
274,
279,
283,
285,
287,
289,
293,
294,
300,
305,
306,
309,
321,
322,
327,
330,
333,
368,
370,
371,
375,
378,
1,
2,
3,
4,
5,
6,
7,
40,
41,
42,
43
] | [
11,
13,
17,
24,
26,
29,
31,
34,
35,
37,
38,
48,
51,
55,
63,
68,
71,
72,
80,
81,
84,
85,
88,
89,
93,
94,
97,
98,
107,
108,
120,
122,
123,
126,
128,
129,
135,
140,
141,
144,
146,
147,
153,
156,
160,
168,
173,
176,
179,
181,
182,
190,
193,
194,
198,
199,
202,
203,
207,
213,
214,
216,
219,
221,
222,
228,
233,
234,
237,
239,
240,
247,
259,
262,
274,
279,
283,
285,
287,
289,
293,
294,
300,
305,
306,
309,
321,
322,
327,
330,
333,
368,
370,
371,
375,
378,
1,
2,
3,
4,
5,
6,
7,
40,
41,
42,
43
] |
1CWE-79
| # ##############################################################################
# Author: echel0n <echel0n@sickrage.ca>
# URL: https://sickrage.ca/
# Git: https://git.sickrage.ca/SiCKRAGE/sickrage.git
# -
# This file is part of SiCKRAGE.
# -
# SiCKRAGE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# -
# SiCKRAGE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# -
# You should have received a copy of the GNU General Public License
# along with SiCKRAGE. If not, see <http://www.gnu.org/licenses/>.
# ##############################################################################
import functools
import time
import traceback
import types
from concurrent.futures.thread import ThreadPoolExecutor
from typing import Optional, Awaitable
from urllib.parse import urlparse, urljoin
from jose import ExpiredSignatureError
from keycloak.exceptions import KeycloakClientError
from mako.exceptions import RichTraceback
from tornado import locale
from tornado.web import RequestHandler
import sickrage
from sickrage.core.helpers import is_ip_whitelisted, torrent_webui_url
class BaseHandler(RequestHandler):
def __init__(self, application, request, **kwargs):
super(BaseHandler, self).__init__(application, request, **kwargs)
self.executor = ThreadPoolExecutor(thread_name_prefix='TORNADO-Thread')
self.startTime = time.time()
def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
pass
def get_user_locale(self):
return locale.get(sickrage.app.config.gui.gui_lang)
def write_error(self, status_code, **kwargs):
if status_code not in [401, 404] and "exc_info" in kwargs:
exc_info = kwargs["exc_info"]
error = repr(exc_info[1])
sickrage.app.log.error(error)
if self.settings.get("debug"):
trace_info = ''.join([f"{line}<br>" for line in traceback.format_exception(*exc_info)])
request_info = ''.join([f"<strong>{k}</strong>: {v}<br>" for k, v in self.request.__dict__.items()])
self.set_header('Content-Type', 'text/html')
return self.write(f"""<html>
<title>{error}</title>
<body>
<button onclick="window.location='{sickrage.app.config.general.web_root}/logs/';">View Log(Errors)</button>
<button onclick="window.location='{sickrage.app.config.general.web_root}/home/restart?pid={sickrage.app.pid}&force=1';">Restart SiCKRAGE</button>
<button onclick="window.location='{sickrage.app.config.general.web_root}/logout';">Logout</button>
<h2>Error</h2>
<p>{error}</p>
<h2>Traceback</h2>
<p>{trace_info}</p>
<h2>Request Info</h2>
<p>{request_info}</p>
</body>
</html>""")
def get_current_user(self):
if is_ip_whitelisted(self.request.remote_ip):
return True
elif sickrage.app.config.general.sso_auth_enabled and sickrage.app.auth_server.health:
try:
access_token = self.get_secure_cookie('_sr_access_token')
refresh_token = self.get_secure_cookie('_sr_refresh_token')
if not all([access_token, refresh_token]):
return
certs = sickrage.app.auth_server.certs()
if not certs:
return
try:
return sickrage.app.auth_server.decode_token(access_token.decode("utf-8"), certs)
except (KeycloakClientError, ExpiredSignatureError):
token = sickrage.app.auth_server.refresh_token(refresh_token.decode("utf-8"))
if not token:
return
self.set_secure_cookie('_sr_access_token', token['access_token'])
self.set_secure_cookie('_sr_refresh_token', token['refresh_token'])
return sickrage.app.auth_server.decode_token(token['access_token'], certs)
except Exception as e:
return
elif sickrage.app.config.general.local_auth_enabled:
cookie = self.get_secure_cookie('_sr').decode() if self.get_secure_cookie('_sr') else None
if cookie == sickrage.app.config.general.api_v1_key:
return True
def render_string(self, template_name, **kwargs):
template_kwargs = {
'title': "",
'header': "",
'topmenu': "",
'submenu': "",
'controller': "home",
'action': "index",
'srPID': sickrage.app.pid,
'srHttpsEnabled': sickrage.app.config.general.enable_https or bool(self.request.headers.get('X-Forwarded-Proto') == 'https'),
'srHost': self.request.headers.get('X-Forwarded-Host', self.request.host.split(':')[0]),
'srHttpPort': self.request.headers.get('X-Forwarded-Port', sickrage.app.config.general.web_port),
'srHttpsPort': sickrage.app.config.general.web_port,
'srHandleReverseProxy': sickrage.app.config.general.handle_reverse_proxy,
'srDefaultPage': sickrage.app.config.general.default_page.value,
'srWebRoot': sickrage.app.config.general.web_root,
'srLocale': self.get_user_locale().code,
'srLocaleDir': sickrage.LOCALE_DIR,
'srStartTime': self.startTime,
'makoStartTime': time.time(),
'overall_stats': None,
'torrent_webui_url': torrent_webui_url(),
'application': self.application,
'request': self.request,
}
template_kwargs.update(self.get_template_namespace())
template_kwargs.update(kwargs)
try:
return self.application.settings['templates'][template_name].render_unicode(**template_kwargs)
except Exception:
kwargs['title'] = _('HTTP Error 500')
kwargs['header'] = _('HTTP Error 500')
kwargs['backtrace'] = RichTraceback()
template_kwargs.update(kwargs)
sickrage.app.log.error("%s: %s" % (str(kwargs['backtrace'].error.__class__.__name__), kwargs['backtrace'].error))
return self.application.settings['templates']['errors/500.mako'].render_unicode(**template_kwargs)
def render(self, template_name, **kwargs):
self.write(self.render_string(template_name, **kwargs))
def set_default_headers(self):
self.set_header("Access-Control-Allow-Origin", "*")
self.set_header("Access-Control-Allow-Headers", "Content-Type, Access-Control-Allow-Headers, Authorization, X-Requested-With")
self.set_header('Access-Control-Allow-Methods', 'POST, GET, PUT, PATCH, DELETE, OPTIONS')
self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
def redirect(self, url, permanent=True, status=None):
if sickrage.app.config.general.web_root not in url:
url = urljoin(sickrage.app.config.general.web_root + '/', url.lstrip('/'))
super(BaseHandler, self).redirect(url, permanent, status)
def previous_url(self):
url = urlparse(self.request.headers.get("referer", "/{}/".format(sickrage.app.config.general.default_page.value)))
return url._replace(scheme="", netloc="").geturl()
def _genericMessage(self, subject, message):
return self.render('generic_message.mako',
message=message,
subject=subject,
title="",
controller='root',
action='genericmessage')
def get_url(self, url):
if sickrage.app.config.general.web_root not in url:
url = urljoin(sickrage.app.config.general.web_root + '/', url.lstrip('/'))
url = urljoin("{}://{}".format(self.request.protocol, self.request.host), url)
return url
def run_async(self, method):
@functools.wraps(method)
async def wrapper(self, *args, **kwargs):
await sickrage.app.wserver.io_loop.run_in_executor(self.executor, functools.partial(method, *args, **kwargs))
return types.MethodType(wrapper, self)
def prepare(self):
method_name = self.request.method.lower()
method = self.run_async(getattr(self, method_name))
setattr(self, method_name, method)
def options(self, *args, **kwargs):
self.set_status(204)
self.finish()
| # ##############################################################################
# Author: echel0n <echel0n@sickrage.ca>
# URL: https://sickrage.ca/
# Git: https://git.sickrage.ca/SiCKRAGE/sickrage.git
# -
# This file is part of SiCKRAGE.
# -
# SiCKRAGE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# -
# SiCKRAGE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# -
# You should have received a copy of the GNU General Public License
# along with SiCKRAGE. If not, see <http://www.gnu.org/licenses/>.
# ##############################################################################
import functools
import html
import time
import traceback
import types
from concurrent.futures.thread import ThreadPoolExecutor
from typing import Optional, Awaitable
from urllib.parse import urlparse, urljoin
import bleach
from jose import ExpiredSignatureError
from keycloak.exceptions import KeycloakClientError
from mako.exceptions import RichTraceback
from tornado import locale, escape
from tornado.web import RequestHandler
import sickrage
from sickrage.core.helpers import is_ip_whitelisted, torrent_webui_url
class BaseHandler(RequestHandler):
def __init__(self, application, request, **kwargs):
super(BaseHandler, self).__init__(application, request, **kwargs)
self.executor = ThreadPoolExecutor(thread_name_prefix='TORNADO-Thread')
self.startTime = time.time()
def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
pass
def get_user_locale(self):
return locale.get(sickrage.app.config.gui.gui_lang)
def write_error(self, status_code, **kwargs):
if status_code not in [401, 404] and "exc_info" in kwargs:
exc_info = kwargs["exc_info"]
error = repr(exc_info[1])
sickrage.app.log.error(error)
if self.settings.get("debug"):
trace_info = ''.join([f"{line}<br>" for line in traceback.format_exception(*exc_info)])
request_info = ''.join([f"<strong>{k}</strong>: {v}<br>" for k, v in self.request.__dict__.items()])
self.set_header('Content-Type', 'text/html')
return self.write(f"""<html>
<title>{error}</title>
<body>
<button onclick="window.location='{sickrage.app.config.general.web_root}/logs/';">View Log(Errors)</button>
<button onclick="window.location='{sickrage.app.config.general.web_root}/home/restart?pid={sickrage.app.pid}&force=1';">Restart SiCKRAGE</button>
<button onclick="window.location='{sickrage.app.config.general.web_root}/logout';">Logout</button>
<h2>Error</h2>
<p>{error}</p>
<h2>Traceback</h2>
<p>{trace_info}</p>
<h2>Request Info</h2>
<p>{request_info}</p>
</body>
</html>""")
def get_current_user(self):
if is_ip_whitelisted(self.request.remote_ip):
return True
elif sickrage.app.config.general.sso_auth_enabled and sickrage.app.auth_server.health:
try:
access_token = self.get_secure_cookie('_sr_access_token')
refresh_token = self.get_secure_cookie('_sr_refresh_token')
if not all([access_token, refresh_token]):
return
certs = sickrage.app.auth_server.certs()
if not certs:
return
try:
return sickrage.app.auth_server.decode_token(access_token.decode("utf-8"), certs)
except (KeycloakClientError, ExpiredSignatureError):
token = sickrage.app.auth_server.refresh_token(refresh_token.decode("utf-8"))
if not token:
return
self.set_secure_cookie('_sr_access_token', token['access_token'])
self.set_secure_cookie('_sr_refresh_token', token['refresh_token'])
return sickrage.app.auth_server.decode_token(token['access_token'], certs)
except Exception as e:
return
elif sickrage.app.config.general.local_auth_enabled:
cookie = self.get_secure_cookie('_sr').decode() if self.get_secure_cookie('_sr') else None
if cookie == sickrage.app.config.general.api_v1_key:
return True
def render_string(self, template_name, **kwargs):
template_kwargs = {
'title': "",
'header': "",
'topmenu': "",
'submenu': "",
'controller': "home",
'action': "index",
'srPID': sickrage.app.pid,
'srHttpsEnabled': sickrage.app.config.general.enable_https or bool(self.request.headers.get('X-Forwarded-Proto') == 'https'),
'srHost': self.request.headers.get('X-Forwarded-Host', self.request.host.split(':')[0]),
'srHttpPort': self.request.headers.get('X-Forwarded-Port', sickrage.app.config.general.web_port),
'srHttpsPort': sickrage.app.config.general.web_port,
'srHandleReverseProxy': sickrage.app.config.general.handle_reverse_proxy,
'srDefaultPage': sickrage.app.config.general.default_page.value,
'srWebRoot': sickrage.app.config.general.web_root,
'srLocale': self.get_user_locale().code,
'srLocaleDir': sickrage.LOCALE_DIR,
'srStartTime': self.startTime,
'makoStartTime': time.time(),
'overall_stats': None,
'torrent_webui_url': torrent_webui_url(),
'application': self.application,
'request': self.request,
}
template_kwargs.update(self.get_template_namespace())
template_kwargs.update(kwargs)
try:
return self.application.settings['templates'][template_name].render_unicode(**template_kwargs)
except Exception:
kwargs['title'] = _('HTTP Error 500')
kwargs['header'] = _('HTTP Error 500')
kwargs['backtrace'] = RichTraceback()
template_kwargs.update(kwargs)
sickrage.app.log.error("%s: %s" % (str(kwargs['backtrace'].error.__class__.__name__), kwargs['backtrace'].error))
return self.application.settings['templates']['errors/500.mako'].render_unicode(**template_kwargs)
def render(self, template_name, **kwargs):
self.write(self.render_string(template_name, **kwargs))
def set_default_headers(self):
self.set_header("Access-Control-Allow-Origin", "*")
self.set_header("Access-Control-Allow-Headers", "Content-Type, Access-Control-Allow-Headers, Authorization, X-Requested-With")
self.set_header('Access-Control-Allow-Methods', 'POST, GET, PUT, PATCH, DELETE, OPTIONS')
self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
def redirect(self, url, permanent=True, status=None):
if sickrage.app.config.general.web_root not in url:
url = urljoin(sickrage.app.config.general.web_root + '/', url.lstrip('/'))
super(BaseHandler, self).redirect(url, permanent, status)
def previous_url(self):
url = urlparse(self.request.headers.get("referer", "/{}/".format(sickrage.app.config.general.default_page.value)))
return url._replace(scheme="", netloc="").geturl()
def _genericMessage(self, subject, message):
return self.render('generic_message.mako',
message=message,
subject=subject,
title="",
controller='root',
action='genericmessage')
def get_url(self, url):
if sickrage.app.config.general.web_root not in url:
url = urljoin(sickrage.app.config.general.web_root + '/', url.lstrip('/'))
url = urljoin("{}://{}".format(self.request.protocol, self.request.host), url)
return url
def run_async(self, method):
@functools.wraps(method)
async def wrapper(self, *args, **kwargs):
await sickrage.app.wserver.io_loop.run_in_executor(self.executor, functools.partial(method, *args, **kwargs))
return types.MethodType(wrapper, self)
def prepare(self):
method_name = self.request.method.lower()
method = self.run_async(getattr(self, method_name))
setattr(self, method_name, method)
def options(self, *args, **kwargs):
self.set_status(204)
self.finish()
def get_argument(self, *args, **kwargs):
value = super(BaseHandler, self).get_argument(*args, **kwargs)
try:
return bleach.clean(value)
except TypeError:
return value
| xss | {
"code": [
"from tornado import locale"
],
"line_no": [
32
]
} | {
"code": [
"import html",
"import bleach",
" def get_argument(self, *args, **kwargs):",
" value = super(BaseHandler, self).get_argument(*args, **kwargs)",
" try:",
" return bleach.clean(value)",
" except TypeError:",
" return value"
],
"line_no": [
22,
30,
202,
203,
205,
206,
207,
208
]
} |
import functools
import time
import traceback
import types
from concurrent.futures.thread import ThreadPoolExecutor
from typing import Optional, Awaitable
from urllib.parse import .urlparse, urljoin
from jose import ExpiredSignatureError
from keycloak.exceptions import KeycloakClientError
from mako.exceptions import RichTraceback
from tornado import locale
from tornado.web import RequestHandler
import sickrage
from sickrage.core.helpers import is_ip_whitelisted, torrent_webui_url
class CLASS_0(RequestHandler):
def __init__(self, VAR_0, VAR_1, **VAR_2):
super(CLASS_0, self).__init__(VAR_0, VAR_1, **VAR_2)
self.executor = ThreadPoolExecutor(thread_name_prefix='TORNADO-Thread')
self.startTime = time.time()
def FUNC_0(self, VAR_3: bytes) -> Optional[Awaitable[None]]:
pass
def FUNC_1(self):
return locale.get(sickrage.app.config.gui.gui_lang)
def FUNC_2(self, VAR_4, **VAR_2):
if VAR_4 not in [401, 404] and "exc_info" in VAR_2:
VAR_15 = VAR_2["exc_info"]
VAR_16 = repr(VAR_15[1])
sickrage.app.log.error(VAR_16)
if self.settings.get("debug"):
VAR_17 = ''.join([f"{line}<br>" for line in traceback.format_exception(*VAR_15)])
VAR_18 = ''.join([f"<strong>{k}</strong>: {v}<br>" for k, v in self.request.__dict__.items()])
self.set_header('Content-Type', 'text/html')
return self.write(f"""<html>
<title>{VAR_16}</title>
<body>
<button onclick="window.location='{sickrage.app.config.general.web_root}/logs/';">View Log(Errors)</button>
<button onclick="window.location='{sickrage.app.config.general.web_root}/home/restart?pid={sickrage.app.pid}&force=1';">Restart SiCKRAGE</button>
<button onclick="window.location='{sickrage.app.config.general.web_root}/logout';">Logout</button>
<h2>Error</h2>
<p>{VAR_16}</p>
<h2>Traceback</h2>
<p>{VAR_17}</p>
<h2>Request Info</h2>
<p>{VAR_18}</p>
</body>
</html>""")
def FUNC_3(self):
if is_ip_whitelisted(self.request.remote_ip):
return True
elif sickrage.app.config.general.sso_auth_enabled and sickrage.app.auth_server.health:
try:
VAR_19 = self.get_secure_cookie('_sr_access_token')
VAR_20 = self.get_secure_cookie('_sr_refresh_token')
if not all([VAR_19, VAR_20]):
return
VAR_21 = sickrage.app.auth_server.certs()
if not VAR_21:
return
try:
return sickrage.app.auth_server.decode_token(VAR_19.decode("utf-8"), VAR_21)
except (KeycloakClientError, ExpiredSignatureError):
VAR_23 = sickrage.app.auth_server.refresh_token(VAR_20.decode("utf-8"))
if not VAR_23:
return
self.set_secure_cookie('_sr_access_token', VAR_23['access_token'])
self.set_secure_cookie('_sr_refresh_token', VAR_23['refresh_token'])
return sickrage.app.auth_server.decode_token(VAR_23['access_token'], VAR_21)
except Exception as e:
return
elif sickrage.app.config.general.local_auth_enabled:
VAR_22 = self.get_secure_cookie('_sr').decode() if self.get_secure_cookie('_sr') else None
if VAR_22 == sickrage.app.config.general.api_v1_key:
return True
def FUNC_4(self, VAR_5, **VAR_2):
VAR_13 = {
'title': "",
'header': "",
'topmenu': "",
'submenu': "",
'controller': "home",
'action': "index",
'srPID': sickrage.app.pid,
'srHttpsEnabled': sickrage.app.config.general.enable_https or bool(self.request.headers.get('X-Forwarded-Proto') == 'https'),
'srHost': self.request.headers.get('X-Forwarded-Host', self.request.host.split(':')[0]),
'srHttpPort': self.request.headers.get('X-Forwarded-Port', sickrage.app.config.general.web_port),
'srHttpsPort': sickrage.app.config.general.web_port,
'srHandleReverseProxy': sickrage.app.config.general.handle_reverse_proxy,
'srDefaultPage': sickrage.app.config.general.default_page.value,
'srWebRoot': sickrage.app.config.general.web_root,
'srLocale': self.get_user_locale().code,
'srLocaleDir': sickrage.LOCALE_DIR,
'srStartTime': self.startTime,
'makoStartTime': time.time(),
'overall_stats': None,
'torrent_webui_url': torrent_webui_url(),
'application': self.application,
'request': self.request,
}
VAR_13.update(self.get_template_namespace())
VAR_13.update(VAR_2)
try:
return self.application.settings['templates'][VAR_5].render_unicode(**VAR_13)
except Exception:
VAR_2['title'] = _('HTTP Error 500')
VAR_2['header'] = _('HTTP Error 500')
VAR_2['backtrace'] = RichTraceback()
VAR_13.update(VAR_2)
sickrage.app.log.error("%s: %s" % (str(VAR_2['backtrace'].error.__class__.__name__), VAR_2['backtrace'].error))
return self.application.settings['templates']['errors/500.mako'].render_unicode(**VAR_13)
def FUNC_5(self, VAR_5, **VAR_2):
self.write(self.render_string(VAR_5, **VAR_2))
def FUNC_6(self):
self.set_header("Access-Control-Allow-Origin", "*")
self.set_header("Access-Control-Allow-Headers", "Content-Type, Access-Control-Allow-Headers, Authorization, X-Requested-With")
self.set_header('Access-Control-Allow-Methods', 'POST, GET, PUT, PATCH, DELETE, OPTIONS')
self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
def FUNC_7(self, VAR_6, VAR_7=True, VAR_8=None):
if sickrage.app.config.general.web_root not in VAR_6:
url = urljoin(sickrage.app.config.general.web_root + '/', VAR_6.lstrip('/'))
super(CLASS_0, self).redirect(VAR_6, VAR_7, VAR_8)
def FUNC_8(self):
VAR_6 = urlparse(self.request.headers.get("referer", "/{}/".format(sickrage.app.config.general.default_page.value)))
return VAR_6._replace(scheme="", netloc="").geturl()
def FUNC_9(self, VAR_9, VAR_10):
return self.render('generic_message.mako',
VAR_10=message,
VAR_9=subject,
title="",
controller='root',
action='genericmessage')
def FUNC_10(self, VAR_6):
if sickrage.app.config.general.web_root not in VAR_6:
url = urljoin(sickrage.app.config.general.web_root + '/', VAR_6.lstrip('/'))
VAR_6 = urljoin("{}://{}".format(self.request.protocol, self.request.host), VAR_6)
return VAR_6
def FUNC_11(self, VAR_11):
@functools.wraps(VAR_11)
async def FUNC_14(self, *VAR_12, **VAR_2):
await sickrage.app.wserver.io_loop.run_in_executor(self.executor, functools.partial(VAR_11, *VAR_12, **VAR_2))
return types.MethodType(FUNC_14, self)
def FUNC_12(self):
VAR_14 = self.request.method.lower()
VAR_11 = self.run_async(getattr(self, VAR_14))
setattr(self, VAR_14, VAR_11)
def FUNC_13(self, *VAR_12, **VAR_2):
self.set_status(204)
self.finish()
|
import functools
import html
import time
import traceback
import types
from concurrent.futures.thread import ThreadPoolExecutor
from typing import Optional, Awaitable
from urllib.parse import .urlparse, urljoin
import bleach
from jose import ExpiredSignatureError
from keycloak.exceptions import KeycloakClientError
from mako.exceptions import RichTraceback
from tornado import locale, escape
from tornado.web import RequestHandler
import sickrage
from sickrage.core.helpers import is_ip_whitelisted, torrent_webui_url
class CLASS_0(RequestHandler):
def __init__(self, VAR_0, VAR_1, **VAR_2):
super(CLASS_0, self).__init__(VAR_0, VAR_1, **VAR_2)
self.executor = ThreadPoolExecutor(thread_name_prefix='TORNADO-Thread')
self.startTime = time.time()
def FUNC_0(self, VAR_3: bytes) -> Optional[Awaitable[None]]:
pass
def FUNC_1(self):
return locale.get(sickrage.app.config.gui.gui_lang)
def FUNC_2(self, VAR_4, **VAR_2):
if VAR_4 not in [401, 404] and "exc_info" in VAR_2:
VAR_16 = VAR_2["exc_info"]
VAR_17 = repr(VAR_16[1])
sickrage.app.log.error(VAR_17)
if self.settings.get("debug"):
VAR_18 = ''.join([f"{line}<br>" for line in traceback.format_exception(*VAR_16)])
VAR_19 = ''.join([f"<strong>{k}</strong>: {v}<br>" for k, v in self.request.__dict__.items()])
self.set_header('Content-Type', 'text/html')
return self.write(f"""<html>
<title>{VAR_17}</title>
<body>
<button onclick="window.location='{sickrage.app.config.general.web_root}/logs/';">View Log(Errors)</button>
<button onclick="window.location='{sickrage.app.config.general.web_root}/home/restart?pid={sickrage.app.pid}&force=1';">Restart SiCKRAGE</button>
<button onclick="window.location='{sickrage.app.config.general.web_root}/logout';">Logout</button>
<h2>Error</h2>
<p>{VAR_17}</p>
<h2>Traceback</h2>
<p>{VAR_18}</p>
<h2>Request Info</h2>
<p>{VAR_19}</p>
</body>
</html>""")
def FUNC_3(self):
if is_ip_whitelisted(self.request.remote_ip):
return True
elif sickrage.app.config.general.sso_auth_enabled and sickrage.app.auth_server.health:
try:
VAR_20 = self.get_secure_cookie('_sr_access_token')
VAR_21 = self.get_secure_cookie('_sr_refresh_token')
if not all([VAR_20, VAR_21]):
return
VAR_22 = sickrage.app.auth_server.certs()
if not VAR_22:
return
try:
return sickrage.app.auth_server.decode_token(VAR_20.decode("utf-8"), VAR_22)
except (KeycloakClientError, ExpiredSignatureError):
VAR_24 = sickrage.app.auth_server.refresh_token(VAR_21.decode("utf-8"))
if not VAR_24:
return
self.set_secure_cookie('_sr_access_token', VAR_24['access_token'])
self.set_secure_cookie('_sr_refresh_token', VAR_24['refresh_token'])
return sickrage.app.auth_server.decode_token(VAR_24['access_token'], VAR_22)
except Exception as e:
return
elif sickrage.app.config.general.local_auth_enabled:
VAR_23 = self.get_secure_cookie('_sr').decode() if self.get_secure_cookie('_sr') else None
if VAR_23 == sickrage.app.config.general.api_v1_key:
return True
def FUNC_4(self, VAR_5, **VAR_2):
VAR_13 = {
'title': "",
'header': "",
'topmenu': "",
'submenu': "",
'controller': "home",
'action': "index",
'srPID': sickrage.app.pid,
'srHttpsEnabled': sickrage.app.config.general.enable_https or bool(self.request.headers.get('X-Forwarded-Proto') == 'https'),
'srHost': self.request.headers.get('X-Forwarded-Host', self.request.host.split(':')[0]),
'srHttpPort': self.request.headers.get('X-Forwarded-Port', sickrage.app.config.general.web_port),
'srHttpsPort': sickrage.app.config.general.web_port,
'srHandleReverseProxy': sickrage.app.config.general.handle_reverse_proxy,
'srDefaultPage': sickrage.app.config.general.default_page.value,
'srWebRoot': sickrage.app.config.general.web_root,
'srLocale': self.get_user_locale().code,
'srLocaleDir': sickrage.LOCALE_DIR,
'srStartTime': self.startTime,
'makoStartTime': time.time(),
'overall_stats': None,
'torrent_webui_url': torrent_webui_url(),
'application': self.application,
'request': self.request,
}
VAR_13.update(self.get_template_namespace())
VAR_13.update(VAR_2)
try:
return self.application.settings['templates'][VAR_5].render_unicode(**VAR_13)
except Exception:
VAR_2['title'] = _('HTTP Error 500')
VAR_2['header'] = _('HTTP Error 500')
VAR_2['backtrace'] = RichTraceback()
VAR_13.update(VAR_2)
sickrage.app.log.error("%s: %s" % (str(VAR_2['backtrace'].error.__class__.__name__), VAR_2['backtrace'].error))
return self.application.settings['templates']['errors/500.mako'].render_unicode(**VAR_13)
def FUNC_5(self, VAR_5, **VAR_2):
self.write(self.render_string(VAR_5, **VAR_2))
def FUNC_6(self):
self.set_header("Access-Control-Allow-Origin", "*")
self.set_header("Access-Control-Allow-Headers", "Content-Type, Access-Control-Allow-Headers, Authorization, X-Requested-With")
self.set_header('Access-Control-Allow-Methods', 'POST, GET, PUT, PATCH, DELETE, OPTIONS')
self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
def FUNC_7(self, VAR_6, VAR_7=True, VAR_8=None):
if sickrage.app.config.general.web_root not in VAR_6:
url = urljoin(sickrage.app.config.general.web_root + '/', VAR_6.lstrip('/'))
super(CLASS_0, self).redirect(VAR_6, VAR_7, VAR_8)
def FUNC_8(self):
VAR_6 = urlparse(self.request.headers.get("referer", "/{}/".format(sickrage.app.config.general.default_page.value)))
return VAR_6._replace(scheme="", netloc="").geturl()
def FUNC_9(self, VAR_9, VAR_10):
return self.render('generic_message.mako',
VAR_10=message,
VAR_9=subject,
title="",
controller='root',
action='genericmessage')
def FUNC_10(self, VAR_6):
if sickrage.app.config.general.web_root not in VAR_6:
url = urljoin(sickrage.app.config.general.web_root + '/', VAR_6.lstrip('/'))
VAR_6 = urljoin("{}://{}".format(self.request.protocol, self.request.host), VAR_6)
return VAR_6
def FUNC_11(self, VAR_11):
@functools.wraps(VAR_11)
async def FUNC_15(self, *VAR_12, **VAR_2):
await sickrage.app.wserver.io_loop.run_in_executor(self.executor, functools.partial(VAR_11, *VAR_12, **VAR_2))
return types.MethodType(FUNC_15, self)
def FUNC_12(self):
VAR_14 = self.request.method.lower()
VAR_11 = self.run_async(getattr(self, VAR_14))
setattr(self, VAR_14, VAR_11)
def FUNC_13(self, *VAR_12, **VAR_2):
self.set_status(204)
self.finish()
def FUNC_14(self, *VAR_12, **VAR_2):
VAR_15 = super(CLASS_0, self).get_argument(*VAR_12, **VAR_2)
try:
return bleach.clean(VAR_15)
except TypeError:
return VAR_15
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
28,
34,
37,
38,
42,
44,
46,
49,
52,
57,
59,
63,
79,
89,
93,
100,
110,
136,
139,
147,
149,
151,
154,
160,
165,
169,
177,
183,
188,
190,
195,
199
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
29,
36,
39,
40,
44,
46,
48,
51,
54,
59,
61,
65,
81,
91,
95,
102,
112,
138,
141,
149,
151,
153,
156,
162,
167,
171,
179,
185,
190,
192,
197,
201,
204,
209
] |
1CWE-79
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
import frappe
import time
from frappe import _, msgprint
from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, table_fields
from frappe.model.workflow import validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
# once_only validation
# methods
def get_doc(*args, **kwargs):
    """returns a frappe.model.Document object.

    :param arg1: Document dict or DocType name.
    :param arg2: [optional] document name.
    :param for_update: [optional] select document for update.

    There are multiple ways to call `get_doc`

            # fetch the latest user object (with child tables) from the database
            user = get_doc("User", "test@example.com")

            # create a new object from a dict
            user = get_doc({"doctype": "User", "email_id": "test@example.com",
                    "roles": [{"role": "System Manager"}]})

            # create a new object with keyword arguments
            user = get_doc(doctype='User', email_id='test@example.com')

            # select a document for update
            user = get_doc("User", "test@example.com", for_update=True)
    """
    if args:
        first = args[0]
        if isinstance(first, BaseDocument):
            # already instantiated - nothing to build
            return first
        if isinstance(first, string_types):
            doctype = first
        elif isinstance(first, dict):
            # a single dict describes the whole document
            kwargs = first
        else:
            raise ValueError('First non keyword argument must be a string or dict')

    if len(args) < 2 and kwargs:
        if 'doctype' in kwargs:
            doctype = kwargs['doctype']
        else:
            raise ValueError('"doctype" is a required key')

    controller = get_controller(doctype)
    if controller:
        return controller(*args, **kwargs)

    raise ImportError(doctype)
class Document(BaseDocument):
"""All controllers inherit from `Document`."""
def __init__(self, *args, **kwargs):
    """Constructor.

    :param arg1: DocType name as string or document **dict**
    :param arg2: Document name, if `arg1` is DocType name.

    If DocType name and document name are passed, the object will load
    all values (including child documents) from the database.
    """
    self.doctype = self.name = None
    self._default_new_docs = {}
    self.flags = frappe._dict()

    if args and args[0] and isinstance(args[0], string_types):
        # first argument is the doctype name
        if len(args)==1:
            # single doctype: name equals the doctype itself
            self.doctype = self.name = args[0]
        else:
            self.doctype = args[0]
            if isinstance(args[1], dict):
                # second argument is a filter dict - resolve it to a name
                self.name = frappe.db.get_value(args[0], args[1], "name")
                if self.name is None:
                    frappe.throw(_("{0} {1} not found").format(_(args[0]), args[1]),
                        frappe.DoesNotExistError)
            else:
                self.name = args[1]

        if 'for_update' in kwargs:
            # propagate SELECT ... FOR UPDATE intent to load_from_db
            self.flags.for_update = kwargs.get('for_update')

        self.load_from_db()
        return

    if args and args[0] and isinstance(args[0], dict):
        # first argument is a dict describing the whole document
        kwargs = args[0]

    if kwargs:
        # init base document from the dict
        super(Document, self).__init__(kwargs)
        self.init_valid_columns()

    else:
        # incorrect arguments. let's not proceed.
        raise ValueError('Illegal arguments')
@staticmethod
def whitelist(f):
    """Mark *f* as callable over the REST API (decorator).

    Sets the ``whitelisted`` attribute that permission checks look for
    and hands the function back unchanged.
    """
    setattr(f, "whitelisted", True)
    return f
def reload(self):
    """Discard in-memory state and re-read this document from the database."""
    self.load_from_db()
def load_from_db(self):
    """Load document and children from database and create properties
    from fields"""
    if not getattr(self, "_metaclass", False) and self.meta.issingle:
        # Single doctypes live in `tabSingles`, not their own table
        single_doc = frappe.db.get_singles_dict(self.doctype)
        if not single_doc:
            # nothing stored yet - start from the doctype's defaults
            single_doc = frappe.new_doc(self.doctype).as_dict()
            single_doc["name"] = self.doctype
            del single_doc["__islocal"]

        super(Document, self).__init__(single_doc)
        self.init_valid_columns()
        self._fix_numeric_types()

    else:
        d = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
        if not d:
            frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
        super(Document, self).__init__(d)

    if self.name=="DocType" and self.doctype=="DocType":
        # bootstrap case: DocType's own meta cannot be consulted yet
        from frappe.model.meta import DOCTYPE_TABLE_FIELDS
        table_fields = DOCTYPE_TABLE_FIELDS
    else:
        table_fields = self.meta.get_table_fields()

    for df in table_fields:
        children = frappe.db.get_values(df.options,
            {"parent": self.name, "parenttype": self.doctype, "parentfield": df.fieldname},
            "*", as_dict=True, order_by="idx asc")
        if children:
            self.set(df.fieldname, children)
        else:
            self.set(df.fieldname, [])

    # sometimes __setup__ can depend on child values, hence calling again at the end
    if hasattr(self, "__setup__"):
        self.__setup__()
def get_latest(self):
    """Return a freshly loaded copy of this document, cached on ``self.latest``."""
    cached = getattr(self, "latest", None)
    if not cached:
        cached = frappe.get_doc(self.doctype, self.name)
        self.latest = cached
    return cached
def check_permission(self, permtype='read', permlevel=None):
    """Assert the session user holds *permtype* on this document.

    Raises ``frappe.PermissionError`` via :meth:`raise_no_permission_to`
    when the check fails.
    """
    allowed = self.has_permission(permtype)
    if not allowed:
        self.raise_no_permission_to(permlevel or permtype)
def has_permission(self, permtype="read", verbose=False):
    """Return True when *permtype* is granted, or permission checks are
    disabled via ``self.flags.ignore_permissions``.

    :param permtype: one of `read`, `write`, `submit`, `cancel`, `delete`
    """
    if self.flags.ignore_permissions:
        return True
    return frappe.has_permission(self.doctype, permtype, self, verbose=verbose)
def raise_no_permission_to(self, perm_type):
    """Record the standard error message and raise ``frappe.PermissionError``."""
    frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
    raise frappe.PermissionError
def insert(self, ignore_permissions=None, ignore_links=None, ignore_if_duplicate=False,
        ignore_mandatory=None, set_name=None, set_child_names=True):
    """Insert the document in the database (as a new document).

    This will check for user permissions and execute `before_insert`,
    `validate`, `on_update`, `after_insert` methods if they are written.

    :param ignore_permissions: Do not check permissions if True.
    :param ignore_links: Do not validate Link fields if True.
    :param ignore_if_duplicate: Swallow `DuplicateEntryError` if True.
    :param ignore_mandatory: Skip mandatory-field validation if True.
    :param set_name: Use this name instead of the autoname result.
    :param set_child_names: Also name child rows (default True)."""
    if self.flags.in_print:
        return

    self.flags.notifications_executed = []

    # Fixed: compare optional flags with `is not None` (PEP 8 E711),
    # not `!= None`, so falsy-but-set values (False) are still honoured
    # without relying on __eq__.
    if ignore_permissions is not None:
        self.flags.ignore_permissions = ignore_permissions

    if ignore_links is not None:
        self.flags.ignore_links = ignore_links

    if ignore_mandatory is not None:
        self.flags.ignore_mandatory = ignore_mandatory

    self.set("__islocal", True)

    self.check_permission("create")
    self._set_defaults()
    self.set_user_and_timestamp()
    self.set_docstatus()
    self.check_if_latest()
    self.run_method("before_insert")
    self._validate_links()
    self.set_new_name(set_name=set_name, set_child_names=set_child_names)
    self.set_parent_in_children()
    self.validate_higher_perm_levels()

    self.flags.in_insert = True
    self.run_before_save_methods()
    self._validate()
    self.set_docstatus()
    self.flags.in_insert = False

    # parent
    if getattr(self.meta, "issingle", 0):
        self.update_single(self.get_valid_dict())
    else:
        try:
            self.db_insert()
        except frappe.DuplicateEntryError:
            if not ignore_if_duplicate:
                # bare raise preserves the original traceback
                raise

    # children
    for d in self.get_all_children():
        d.db_insert()

    self.run_method("after_insert")
    self.flags.in_insert = True

    if self.get("amended_from"):
        self.copy_attachments_from_amended_from()

    # flag to prevent creation of event update log for create and update both
    # during document creation
    self.flags.update_log_for_doc_creation = True

    self.run_post_save_methods()
    self.flags.in_insert = False

    # delete __islocal
    if hasattr(self, "__islocal"):
        delattr(self, "__islocal")

    # clear unsaved flag
    if hasattr(self, "__unsaved"):
        delattr(self, "__unsaved")

    if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
        follow_document(self.doctype, self.name, frappe.session.user)
    return self
def save(self, *args, **kwargs):
    """Public save entry point; delegates everything to :meth:`_save`."""
    return self._save(*args, **kwargs)
def _save(self, ignore_permissions=None, ignore_version=None):
    """Save the current document in the database in the **DocType**'s table or
    `tabSingles` (for single types).

    This will check for user permissions and execute
    `validate` before updating, `on_update` after updating triggers.

    :param ignore_permissions: Do not check permissions if True.
    :param ignore_version: Do not save version if True."""
    if self.flags.in_print:
        return

    self.flags.notifications_executed = []

    # Fixed: `is not None` instead of `!= None` (PEP 8 E711)
    if ignore_permissions is not None:
        self.flags.ignore_permissions = ignore_permissions

    self.flags.ignore_version = frappe.flags.in_test if ignore_version is None else ignore_version

    if self.get("__islocal") or not self.get("name"):
        # brand-new document: route through insert() instead
        self.insert()
        return

    self.check_permission("write", "save")

    self.set_user_and_timestamp()
    self.set_docstatus()
    self.check_if_latest()
    self.set_parent_in_children()
    self.set_name_in_children()
    self.validate_higher_perm_levels()
    self._validate_links()
    self.run_before_save_methods()

    if self._action != "cancel":
        self._validate()

    if self._action == "update_after_submit":
        self.validate_update_after_submit()

    self.set_docstatus()

    # parent
    if self.meta.issingle:
        self.update_single(self.get_valid_dict())
    else:
        self.db_update()

    self.update_children()
    self.run_post_save_methods()

    # clear unsaved flag
    if hasattr(self, "__unsaved"):
        delattr(self, "__unsaved")

    return self
def copy_attachments_from_amended_from(self):
    """Clone every File attached to ``amended_from`` onto this document."""
    from frappe.desk.form.load import get_attachments

    for attachment in get_attachments(self.doctype, self.amended_from):
        # re-create the File record pointing at the new document
        copied_file = frappe.get_doc({
            "doctype": "File",
            "file_url": attachment.file_url,
            "file_name": attachment.file_name,
            "attached_to_name": self.name,
            "attached_to_doctype": self.doctype,
            "folder": "Home/Attachments"})
        copied_file.save()
def update_children(self):
    """Persist every child-table field of this document."""
    for table_field in self.meta.get_table_fields():
        self.update_child_table(table_field.fieldname, table_field)
def update_child_table(self, fieldname, df=None):
    """sync child table for given fieldname

    Writes every in-memory row, then deletes DB rows that are no longer
    present in the document.
    NOTE(review): table names are interpolated via format(); df.options
    comes from doctype meta, not request input - confirm callers never
    pass untrusted df objects."""
    rows = []
    if not df:
        df = self.meta.get_field(fieldname)

    for d in self.get(df.fieldname):
        d.db_update()
        rows.append(d.name)

    if df.options in (self.flags.ignore_children_type or []):
        # do not delete rows for this because of flags
        # hack for docperm :(
        return

    if rows:
        # select rows that do not match the ones in the document
        deleted_rows = frappe.db.sql("""select name from `tab{0}` where parent=%s
            and parenttype=%s and parentfield=%s
            and name not in ({1})""".format(df.options, ','.join(['%s'] * len(rows))),
                [self.name, self.doctype, fieldname] + rows)

        if len(deleted_rows) > 0:
            # delete rows that do not match the ones in the document
            frappe.db.sql("""delete from `tab{0}` where name in ({1})""".format(df.options,
                ','.join(['%s'] * len(deleted_rows))), tuple(row[0] for row in deleted_rows))

    else:
        # no rows found, delete all rows
        frappe.db.sql("""delete from `tab{0}` where parent=%s
            and parenttype=%s and parentfield=%s""".format(df.options),
            (self.name, self.doctype, fieldname))
def get_doc_before_save(self):
    """Return the pre-save snapshot, or None when no snapshot was taken."""
    return getattr(self, '_doc_before_save', None)
def has_value_changed(self, fieldname):
    """Return True when *fieldname* differs from the pre-save snapshot.

    Documents without a snapshot (i.e. new documents) always report True.
    """
    previous = self.get_doc_before_save()
    if not previous:
        return True
    return previous.get(fieldname) != self.get(fieldname)
def set_new_name(self, force=False, set_name=None, set_child_names=True):
    """Calls `frappe.naming.set_new_name` for parent and child docs.

    :param force: re-run naming even if flags.name_set is already set.
    :param set_name: use this explicit name instead of autonaming.
    :param set_child_names: also autoname child rows (default True)."""
    if self.flags.name_set and not force:
        return

    # If autoname has set as Prompt (name)
    if self.get("__newname"):
        self.name = self.get("__newname")
        self.flags.name_set = True
        return

    if set_name:
        self.name = set_name
    else:
        set_new_name(self)

    if set_child_names:
        # set name for children
        for d in self.get_all_children():
            set_new_name(d)

    self.flags.name_set = True
def get_title(self):
    """Return the display title: the value of the meta's title field."""
    title_field = self.meta.get_title_field()
    return self.get(title_field)
def set_title_field(self):
    """Set title field based on template

    When the meta's title_field is the literal field "title", fill it
    from the field's options template (always) or its default template
    (new docs with an empty title only)."""
    def get_values():
        values = self.as_dict()
        # format values: None would render as the string "None", use "" instead.
        # Fixed: `is None` instead of `== None` (PEP 8 E711).
        for key, value in iteritems(values):
            if value is None:
                values[key] = ""
        return values

    if self.meta.get("title_field")=="title":
        df = self.meta.get_field(self.meta.title_field)

        if df.options:
            self.set(df.fieldname, df.options.format(**get_values()))
        elif self.is_new() and not self.get(df.fieldname) and df.default:
            # set default title for new transactions (if default)
            self.set(df.fieldname, df.default.format(**get_values()))
def update_single(self, d):
    """Updates values for Single type Document in `tabSingles`.

    Deletes all existing field rows for the doctype, re-inserts the
    given dict, and invalidates the value cache entry."""
    frappe.db.sql("""delete from `tabSingles` where doctype=%s""", self.doctype)
    for field, value in iteritems(d):
        if field != "doctype":
            frappe.db.sql("""insert into `tabSingles` (doctype, field, value)
                values (%s, %s, %s)""", (self.doctype, field, value))

    # drop the stale cached copy, if any
    if self.doctype in frappe.db.value_cache:
        del frappe.db.value_cache[self.doctype]
def set_user_and_timestamp(self):
    """Stamp owner/creation/modified metadata on the parent and all children.

    Keeps the previous `modified` in `_original_modified` for the
    optimistic-locking check in `check_if_latest`, and registers
    (doctype, name) in `frappe.flags.currently_saving`."""
    self._original_modified = self.modified
    self.modified = now()
    self.modified_by = frappe.session.user
    if not self.creation:
        self.creation = self.modified
    if not self.owner:
        self.owner = self.modified_by

    for d in self.get_all_children():
        d.modified = self.modified
        d.modified_by = self.modified_by
        if not d.owner:
            d.owner = self.owner
        if not d.creation:
            d.creation = self.creation

    frappe.flags.currently_saving.append((self.doctype, self.name))
def set_docstatus(self):
    """Default docstatus to 0 (Draft) and copy it onto every child row.

    Fixed: `is None` instead of `== None` (PEP 8 E711)."""
    if self.docstatus is None:
        self.docstatus = 0

    for d in self.get_all_children():
        d.docstatus = self.docstatus
def _validate(self):
    """Run all framework field-level validations on the parent and each
    child row.

    The mandatory check is run once on the parent only because
    `_validate_mandatory` itself walks the children."""
    self._validate_mandatory()
    self._validate_data_fields()
    self._validate_selects()
    self._validate_non_negative()
    self._validate_length()
    self._extract_images_from_text_editor()
    self._sanitize_content()
    self._save_passwords()
    self.validate_workflow()

    children = self.get_all_children()
    for d in children:
        d._validate_data_fields()
        d._validate_selects()
        d._validate_non_negative()
        d._validate_length()
        d._extract_images_from_text_editor()
        d._sanitize_content()
        d._save_passwords()

    if self.is_new():
        # don't set fields like _assign, _comments for new doc
        for fieldname in optional_fields:
            self.set(fieldname, None)
    else:
        self.validate_set_only_once()
def _validate_non_negative(self):
    """Throw NonNegativeError when any Int/Float/Currency field flagged
    `non_negative` holds a value below zero."""
    def get_msg(df):
        # child rows include the row number in the message
        if self.parentfield:
            return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
                _("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(df.label)))
        else:
            return _("Value cannot be negative for {0}: {1}").format(_(df.parent), frappe.bold(_(df.label)))

    for df in self.meta.get('fields', {'non_negative': ('=', 1),
        'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
        if flt(self.get(df.fieldname)) < 0:
            msg = get_msg(df)
            frappe.throw(msg, frappe.NonNegativeError, title=_("Negative Value"))
def validate_workflow(self):
    """Check the attempted workflow transition and, for non-save actions,
    apply the resulting workflow state."""
    if frappe.flags.in_install == 'frappe':
        # skip while the framework itself is being installed
        return
    workflow = self.meta.get_workflow()
    if not workflow:
        return
    validate_workflow(self)
    if self._action != 'save':
        set_workflow_state_on_action(self, workflow, self._action)
def validate_set_only_once(self):
    """Validate that fields are not changed if not in insert

    Compares each set-only-once field against the pre-save snapshot and
    throws CannotChangeConstantError on any difference."""
    set_only_once_fields = self.meta.get_set_only_once_fields()

    if set_only_once_fields and self._doc_before_save:
        # document exists before saving
        for field in set_only_once_fields:
            fail = False
            value = self.get(field.fieldname)
            original_value = self._doc_before_save.get(field.fieldname)

            if field.fieldtype in table_fields:
                fail = not self.is_child_table_same(field.fieldname)
            elif field.fieldtype in ('Date', 'Datetime', 'Time'):
                # compare as strings so date objects match their string forms
                fail = str(value) != str(original_value)
            else:
                fail = value != original_value

            if fail:
                frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
                    frappe.CannotChangeConstantError)

    return False
def is_child_table_same(self, fieldname):
    """Validate child table is same as original table before saving

    Returns True when row counts match and every row is field-for-field
    identical, ignoring modified / modified_by / creation."""
    value = self.get(fieldname)
    original_value = self._doc_before_save.get(fieldname)
    same = True

    if len(original_value) != len(value):
        same = False
    else:
        # check all child entries
        for i, d in enumerate(original_value):
            new_child = value[i].as_dict(convert_dates_to_str = True)
            original_child = d.as_dict(convert_dates_to_str = True)

            # all fields must be same other than modified and modified_by
            for key in ('modified', 'modified_by', 'creation'):
                del new_child[key]
                del original_child[key]

            if original_child != new_child:
                same = False
                break

    return same
def apply_fieldlevel_read_permissions(self):
    """Remove values the user is not allowed to read (called when loading in desk)"""
    if frappe.session.user == "Administrator":
        # Administrator sees everything
        return

    has_higher_permlevel = False

    # scan parent and child-table fields for any permlevel above 0
    all_fields = self.meta.fields.copy()
    for table_field in self.meta.get_table_fields():
        all_fields += frappe.get_meta(table_field.options).fields or []

    for df in all_fields:
        if df.permlevel > 0:
            has_higher_permlevel = True
            break

    if not has_higher_permlevel:
        # nothing restricted, nothing to blank out
        return

    has_access_to = self.get_permlevel_access('read')

    # blank out restricted parent fields
    for df in self.meta.fields:
        if df.permlevel and not df.permlevel in has_access_to:
            self.set(df.fieldname, None)

    # blank out restricted fields in each child row
    for table_field in self.meta.get_table_fields():
        for df in frappe.get_meta(table_field.options).fields or []:
            if df.permlevel and not df.permlevel in has_access_to:
                for child in self.get(table_field.fieldname) or []:
                    child.set(df.fieldname, None)
def validate_higher_perm_levels(self):
    """If the user does not have permissions at permlevel > 0, then reset the values to original / default"""
    if self.flags.ignore_permissions or frappe.flags.in_install:
        return

    if frappe.session.user == "Administrator":
        return

    has_access_to = self.get_permlevel_access()
    high_permlevel_fields = self.meta.get_high_permlevel_fields()

    if high_permlevel_fields:
        self.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)

    # If new record then don't reset the values for child table
    if self.is_new(): return

    # check for child tables
    for df in self.meta.get_table_fields():
        high_permlevel_fields = frappe.get_meta(df.options).get_high_permlevel_fields()
        if high_permlevel_fields:
            for d in self.get(df.fieldname):
                d.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
def get_permlevel_access(self, permission_type='write'):
    """Return the list of permlevels the session user holds *permission_type* at.

    Results are cached per permission_type on ``self._has_access_to``.

    Fixed: the cache was only populated for the permission_type of the
    FIRST call (guarded by ``hasattr``); a later call with a different
    permission_type hit an empty dict and raised KeyError. The cache is
    now keyed and filled per permission_type.
    """
    if not hasattr(self, "_has_access_to"):
        self._has_access_to = {}

    if permission_type not in self._has_access_to:
        levels = []
        roles = frappe.get_roles()
        for perm in self.get_permissions():
            if perm.role in roles and perm.get(permission_type):
                if perm.permlevel not in levels:
                    levels.append(perm.permlevel)
        self._has_access_to[permission_type] = levels

    return self._has_access_to[permission_type]
def has_permlevel_access_to(self, fieldname, df=None, permission_type='read'):
    """Return True if the user's permlevels cover *fieldname*'s permlevel."""
    field = df or self.meta.get_field(fieldname)
    return field.permlevel in self.get_permlevel_access(permission_type)
def get_permissions(self):
    """Return the applicable DocPerm rows; child tables borrow the parent's."""
    if self.meta.istable:
        # use parent's permissions for child tables
        return frappe.get_meta(self.parenttype).permissions
    return self.meta.permissions
def _set_defaults(self):
    """Fill unset fields on the parent and child rows from the doctype's
    default new-doc values. Skipped entirely during data import."""
    if frappe.flags.in_import:
        return

    new_doc = frappe.new_doc(self.doctype, as_dict=True)
    self.update_if_missing(new_doc)

    # children
    for df in self.meta.get_table_fields():
        new_doc = frappe.new_doc(df.options, as_dict=True)
        value = self.get(df.fieldname)
        if isinstance(value, list):
            for d in value:
                d.update_if_missing(new_doc)
def check_if_latest(self):
    """Checks if `modified` timestamp provided by document being updated is same as the
    `modified` timestamp in the database. If there is a different, the document has been
    updated in the database after the current copy was read. Will throw an error if
    timestamps don't match.

    Will also validate document transitions (Save > Submit > Cancel) calling
    `self.check_docstatus_transition`."""
    conflict = False
    self._action = "save"
    if not self.get('__islocal'):
        if self.meta.issingle:
            # Singles: compare the stored 'modified' field (row locked FOR UPDATE)
            modified = frappe.db.sql("""select value from tabSingles
                where doctype=%s and field='modified' for update""", self.doctype)
            modified = modified and modified[0][0]
            if modified and modified != cstr(self._original_modified):
                conflict = True
        else:
            tmp = frappe.db.sql("""select modified, docstatus from `tab{0}`
                where name = %s for update""".format(self.doctype), self.name, as_dict=True)

            if not tmp:
                frappe.throw(_("Record does not exist"))
            else:
                tmp = tmp[0]

            modified = cstr(tmp.modified)

            if modified and modified != cstr(self._original_modified):
                conflict = True

            self.check_docstatus_transition(tmp.docstatus)

        if conflict:
            frappe.msgprint(_("Error: Document has been modified after you have opened it") \
            + (" (%s, %s). " % (modified, self.modified)) \
            + _("Please refresh to get the latest document."),
                raise_exception=frappe.TimestampMismatchError)
    else:
        # new document: only validate the docstatus transition from Draft
        self.check_docstatus_transition(0)
def check_docstatus_transition(self, docstatus):
    """Ensures valid `docstatus` transition.
    Valid transitions are (number in brackets is `docstatus`):

    - Save (0) > Save (0)
    - Save (0) > Submit (1)
    - Submit (1) > Submit (1)
    - Submit (1) > Cancel (2)

    Also sets `self._action` to save / submit / update_after_submit /
    cancel and enforces submit/cancel permissions.

    :param docstatus: the docstatus currently stored in the database."""
    if not self.docstatus:
        self.docstatus = 0
    if docstatus==0:
        if self.docstatus==0:
            self._action = "save"
        elif self.docstatus==1:
            self._action = "submit"
            self.check_permission("submit")
        else:
            raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 0 to 2"))

    elif docstatus==1:
        if self.docstatus==1:
            self._action = "update_after_submit"
            self.check_permission("submit")
        elif self.docstatus==2:
            self._action = "cancel"
            self.check_permission("cancel")
        else:
            raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 1 to 0"))

    elif docstatus==2:
        raise frappe.ValidationError(_("Cannot edit cancelled document"))
def set_parent_in_children(self):
    """Stamp every child row with this document's name and doctype."""
    for child in self.get_all_children():
        child.parent = self.name
        child.parenttype = self.doctype
def set_name_in_children(self):
    """Autoname any child rows that do not have a name yet."""
    for child in self.get_all_children():
        if not child.name:
            set_new_name(child)
def validate_update_after_submit(self):
    """Ensure only allow-on-submit fields changed on a submitted document
    (parent and existing child rows)."""
    if self.flags.ignore_validate_update_after_submit:
        return

    self._validate_update_after_submit()
    for d in self.get_all_children():
        if d.is_new() and self.meta.get_field(d.parentfield).allow_on_submit:
            # in case of a new row, don't validate allow on submit, if table is allow on submit
            continue
        d._validate_update_after_submit()

    # TODO check only allowed values are updated
def _validate_mandatory(self):
    """Collect missing mandatory fields from the parent and all children,
    print each message, and raise MandatoryError listing the fieldnames."""
    if self.flags.ignore_mandatory:
        return

    missing = self._get_missing_mandatory_fields()
    for d in self.get_all_children():
        missing.extend(d._get_missing_mandatory_fields())

    if not missing:
        return

    for fieldname, msg in missing:
        msgprint(msg)

    if frappe.flags.print_messages:
        print(self.as_json().encode("utf-8"))

    raise frappe.MandatoryError('[{doctype}, {name}]: {fields}'.format(
        fields=", ".join((each[0] for each in missing)),
        doctype=self.doctype,
        name=self.name))
def _validate_links(self):
    """Throw if any Link field (parent or child) points to a missing or
    cancelled document. Skipped when cancelling or when links are ignored."""
    if self.flags.ignore_links or self._action == "cancel":
        return

    invalid_links, cancelled_links = self.get_invalid_links()

    for d in self.get_all_children():
        result = d.get_invalid_links(is_submittable=self.meta.is_submittable)
        invalid_links.extend(result[0])
        cancelled_links.extend(result[1])

    if invalid_links:
        msg = ", ".join((each[2] for each in invalid_links))
        frappe.throw(_("Could not find {0}").format(msg),
            frappe.LinkValidationError)

    if cancelled_links:
        msg = ", ".join((each[2] for each in cancelled_links))
        frappe.throw(_("Cannot link cancelled document: {0}").format(msg),
            frappe.CancelledLinkError)
def get_all_children(self, parenttype=None):
    """Collect the rows of every **Table** field into one flat list.

    When *parenttype* is given, return just the rows of the first table
    field whose child doctype matches it.
    """
    children = []
    for df in self.meta.get("fields", {"fieldtype": ['in', table_fields]}):
        if parenttype and df.options == parenttype:
            return self.get(df.fieldname)
        rows = self.get(df.fieldname)
        if isinstance(rows, list):
            children.extend(rows)
    return children
def run_method(self, method, *args, **kwargs):
    """run standard triggers, plus those in hooks

    Wraps the controller method (or a no-op when it does not exist) in
    `Document.hook` so app-level doc_events run too, then fires
    notifications, webhooks and server scripts for the event."""
    if "flags" in kwargs:
        del kwargs["flags"]

    if hasattr(self, method) and hasattr(getattr(self, method), "__call__"):
        # controller defines the method: call it through the hook composer
        fn = lambda self, *args, **kwargs: getattr(self, method)(*args, **kwargs)
    else:
        # hack! to run hooks even if method does not exist
        fn = lambda self, *args, **kwargs: None

    fn.__name__ = str(method)
    out = Document.hook(fn)(self, *args, **kwargs)

    self.run_notifications(method)
    run_webhooks(self, method)
    run_server_script_for_doc_event(self, method)

    return out
def run_trigger(self, method, *args, **kwargs):
    """Alias for :meth:`run_method`."""
    return self.run_method(method, *args, **kwargs)
def run_notifications(self, method):
    """Run notifications for this method

    Loads (and caches) the enabled Notification records for this doctype,
    maps the document event to a notification event, and evaluates each
    matching alert at most once per save cycle.

    Fixed: `is None` / `not in` instead of `== None` / `not x in`
    (PEP 8 E711/E713)."""
    if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
        return

    if self.flags.notifications_executed is None:
        self.flags.notifications_executed = []

    from frappe.email.doctype.notification.notification import evaluate_alert

    if self.flags.notifications is None:
        # load and cache enabled notifications for this doctype
        alerts = frappe.cache().hget('notifications', self.doctype)
        if alerts is None:
            alerts = frappe.get_all('Notification', fields=['name', 'event', 'method'],
                filters={'enabled': 1, 'document_type': self.doctype})
            frappe.cache().hset('notifications', self.doctype, alerts)
        self.flags.notifications = alerts

    if not self.flags.notifications:
        return

    def _evaluate_alert(alert):
        if alert.name not in self.flags.notifications_executed:
            evaluate_alert(self, alert.name, alert.event)
            self.flags.notifications_executed.append(alert.name)

    event_map = {
        "on_update": "Save",
        "after_insert": "New",
        "on_submit": "Submit",
        "on_cancel": "Cancel"
    }

    if not self.flags.in_insert:
        # value change is not applicable in insert
        event_map['on_change'] = 'Value Change'

    for alert in self.flags.notifications:
        event = event_map.get(method, None)
        if event and alert.event == event:
            _evaluate_alert(alert)
        elif alert.event == 'Method' and method == alert.method:
            _evaluate_alert(alert)
@whitelist.__func__
def _submit(self):
    """Submit the document. Sets `docstatus` = 1, then saves."""
    # save() sees docstatus 0 -> 1 and routes through the submit action
    self.docstatus = 1
    self.save()
@whitelist.__func__
def _cancel(self):
    """Cancel the document. Sets `docstatus` = 2, then saves."""
    # save() sees docstatus 1 -> 2 and routes through the cancel action
    self.docstatus = 2
    self.save()
@whitelist.__func__
def submit(self):
    """Submit the document. Sets `docstatus` = 1, then saves."""
    # public, whitelisted wrapper around _submit
    self._submit()
@whitelist.__func__
def cancel(self):
    """Cancel the document. Sets `docstatus` = 2, then saves."""
    # public, whitelisted wrapper around _cancel
    self._cancel()
def delete(self, ignore_permissions=False):
    """Delete this document from the database via `frappe.delete_doc`.

    :param ignore_permissions: skip permission checks if True."""
    frappe.delete_doc(self.doctype, self.name, ignore_permissions = ignore_permissions, flags=self.flags)
def run_before_save_methods(self):
    """Run standard methods before `INSERT` or `UPDATE`. Standard Methods are:

    - `validate`, `before_save` for **Save**.
    - `validate`, `before_submit` for **Submit**.
    - `before_cancel` for **Cancel**
    - `before_update_after_submit` for **Update after Submit**

    Will also update title_field if set"""
    self.load_doc_before_save()
    self.reset_seen()

    # before_validate method should be executed before ignoring validations
    if self._action in ("save", "submit"):
        self.run_method("before_validate")

    if self.flags.ignore_validate:
        return

    if self._action=="save":
        self.run_method("validate")
        self.run_method("before_save")
    elif self._action=="submit":
        self.run_method("validate")
        self.run_method("before_submit")
    elif self._action=="cancel":
        self.run_method("before_cancel")
    elif self._action=="update_after_submit":
        self.run_method("before_update_after_submit")

    self.set_title_field()
def load_doc_before_save(self):
    """Snapshot the persisted version of this doc into ``_doc_before_save``.

    New documents, or ones whose database row has vanished meanwhile,
    end up with ``None``.
    """
    self._doc_before_save = None
    if self.is_new():
        return
    try:
        self._doc_before_save = frappe.get_doc(self.doctype, self.name)
    except frappe.DoesNotExistError:
        self._doc_before_save = None
        frappe.clear_last_message()
def run_post_save_methods(self):
    """Run standard methods after `INSERT` or `UPDATE`. Standard Methods are:

    - `on_update` for **Save**.
    - `on_update`, `on_submit` for **Submit**.
    - `on_cancel` for **Cancel**
    - `update_after_submit` for **Update after Submit**"""
    # NOTE(review): doc_before_save is fetched but never used below
    doc_before_save = self.get_doc_before_save()

    if self._action=="save":
        self.run_method("on_update")
    elif self._action=="submit":
        self.run_method("on_update")
        self.run_method("on_submit")
    elif self._action=="cancel":
        self.run_method("on_cancel")
        self.check_no_back_links_exist()
    elif self._action=="update_after_submit":
        self.run_method("on_update_after_submit")

    self.clear_cache()
    self.notify_update()
    update_global_search(self)
    self.save_version()
    self.run_method('on_change')

    if (self.doctype, self.name) in frappe.flags.currently_saving:
        frappe.flags.currently_saving.remove((self.doctype, self.name))

    # force a reload on the next get_latest()
    self.latest = None
def clear_cache(self):
    """Drop this document from frappe's document cache."""
    frappe.clear_document_cache(self.doctype, self.name)
def reset_seen(self):
    """If the doctype tracks views, reset `_seen` to just the current user."""
    tracks_seen = getattr(self.meta, 'track_seen', False)
    if tracks_seen:
        frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), update_modified=False)
def notify_update(self):
    """Publish realtime that the current document is modified"""
    if frappe.flags.in_patch: return
    frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
        doctype=self.doctype, docname=self.name, after_commit=True)

    # also refresh list views, but only for regular doctypes
    if not self.meta.get("read_only") and not self.meta.get("issingle") and \
        not self.meta.get("istable"):
        data = {
            "doctype": self.doctype,
            "name": self.name,
            "user": frappe.session.user
        }
        frappe.publish_realtime("list_update", data, after_commit=True)
def db_set(self, fieldname, value=None, update_modified=True, notify=False, commit=False):
    """Set a value in the document object, update the timestamp and update the database.

    WARNING: This method does not trigger controller validations and should
    be used very carefully.

    :param fieldname: fieldname of the property to be updated, or a {"field":"value"} dictionary
    :param value: value of the property to be updated
    :param update_modified: default True. updates the `modified` and `modified_by` properties
    :param notify: default False. run doc.notify_updated() to send updates via socketio
    :param commit: default False. run frappe.db.commit()
    """
    if isinstance(fieldname, dict):
        self.update(fieldname)
    else:
        self.set(fieldname, value)

    if update_modified and (self.doctype, self.name) not in frappe.flags.currently_saving:
        # don't update modified timestamp if called from post save methods
        # like on_update or on_submit
        self.set("modified", now())
        self.set("modified_by", frappe.session.user)

    self.load_doc_before_save()
    # to trigger notification on value change
    self.run_method('before_change')

    frappe.db.set_value(self.doctype, self.name, fieldname, value,
        self.modified, self.modified_by, update_modified=update_modified)

    self.run_method('on_change')

    if notify:
        self.notify_update()

    self.clear_cache()
    if commit:
        frappe.db.commit()
def db_get(self, fieldname):
    """Read *fieldname* for this document straight from the database."""
    return frappe.db.get_value(self.doctype, self.name, fieldname)
def check_no_back_links_exist(self):
    """Before Cancel: raise if any active document still links to this one."""
    from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
    if self.flags.ignore_links:
        return
    check_if_doc_is_linked(self, method="Cancel")
    check_if_doc_is_dynamically_linked(self, method="Cancel")
def save_version(self):
    """Save version info

    Creates a Version record: a "for_insert" version for new documents,
    or a diff against the pre-save snapshot for updates."""

    # don't track version under following conditions
    if (not getattr(self.meta, 'track_changes', False)
        or self.doctype == 'Version'
        or self.flags.ignore_version
        or frappe.flags.in_install
        or (not self._doc_before_save and frappe.flags.in_patch)):
        return

    version = frappe.new_doc('Version')
    if not self._doc_before_save:
        version.for_insert(self)
        version.insert(ignore_permissions=True)
    elif version.set_diff(self._doc_before_save, self):
        version.insert(ignore_permissions=True)
        if not frappe.flags.in_migrate:
            # follow since you made a change?
            follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def hook(f):
    """Decorator: Make method `hookable` (i.e. extensible by another app).

    Note: If each hooked method returns a value (dict), then all returns are
    collated in one dict and returned. Ideally, don't return values in hookable
    methods, set properties in the document."""
    def add_to_return_value(self, new_return_value):
        # dict returns are merged; otherwise the last truthy return wins
        if isinstance(new_return_value, dict):
            if not self.get("_return_value"):
                self._return_value = {}
            self._return_value.update(new_return_value)
        else:
            self._return_value = new_return_value or self.get("_return_value")

    def compose(fn, *hooks):
        # build a runner that calls fn first, then each hook in order
        def runner(self, method, *args, **kwargs):
            add_to_return_value(self, fn(self, *args, **kwargs))
            for f in hooks:
                add_to_return_value(self, f(self, method, *args, **kwargs))

            return self._return_value

        return runner

    def composer(self, *args, **kwargs):
        # collect doc_events handlers registered for this doctype (and "*")
        hooks = []
        method = f.__name__
        doc_events = frappe.get_doc_hooks()
        for handler in doc_events.get(self.doctype, {}).get(method, []) \
            + doc_events.get("*", {}).get(method, []):
            hooks.append(frappe.get_attr(handler))

        composed = compose(f, *hooks)
        return composed(self, method, *args, **kwargs)

    return composer
def is_whitelisted(self, method):
    """Ensure `method` exists on this document and is marked whitelisted.

    Raises werkzeug's NotFound when the attribute is missing and Forbidden
    when it exists but has not been whitelisted for remote invocation."""
    handler = getattr(self, method, None)
    if not handler:
        raise NotFound("Method {0} not found".format(method))
    if not getattr(handler, "whitelisted", False):
        raise Forbidden("Method {0} not whitelisted".format(method))
def validate_value(self, fieldname, condition, val2, doc=None, raise_exception=None):
    """Check that value of fieldname should be 'condition' val2
    else throw Exception.

    :param fieldname: field to check
    :param condition: comparison operator understood by `frappe.compare` (e.g. "in", "not in", "^")
    :param val2: value to compare against (cast to the field's type before comparing)
    :param doc: document to read the value from; defaults to `self`
    :param raise_exception: exception class to raise on failure (True raises the default)"""
    error_condition_map = {
        "in": _("one of"),
        "not in": _("none of"),
        "^": _("beginning with"),
    }

    if not doc:
        doc = self

    val1 = doc.get_value(fieldname)

    df = doc.meta.get_field(fieldname)
    # cast val2 to the field's type so the comparison is like-for-like
    val2 = doc.cast(val2, df)

    if not frappe.compare(val1, condition, val2):
        label = doc.meta.get_label(fieldname)
        condition_str = error_condition_map.get(condition, condition)
        # child-table rows include the row index in the message
        if doc.parentfield:
            msg = _("Incorrect value in row {0}: {1} must be {2} {3}").format(doc.idx, label, condition_str, val2)
        else:
            msg = _("Incorrect value: {0} must be {1} {2}").format(label, condition_str, val2)

        # raise passed exception or True
        msgprint(msg, raise_exception=raise_exception or True)
def validate_table_has_rows(self, parentfield, raise_exception=None):
    """Throw (default: `frappe.EmptyTableError`) when the child table
    `parentfield` is missing or contains no rows."""
    rows = self.get(parentfield)
    if isinstance(rows, list) and len(rows) > 0:
        return

    label = self.meta.get_label(parentfield)
    frappe.throw(_("Table {0} cannot be empty").format(label), raise_exception or frappe.EmptyTableError)
def round_floats_in(self, doc, fieldnames=None):
    """Round floats for all `Currency`, `Float`, `Percent` fields for the given doc.

    :param doc: Document whose numeric properties are to be rounded.
    :param fieldnames: [Optional] List of fields to be rounded."""
    if not fieldnames:
        # default to every numeric field declared on the doc's meta
        fieldnames = (df.fieldname for df in
            doc.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))

    for fieldname in fieldnames:
        # precision is resolved per-field (and per parentfield for child rows)
        doc.set(fieldname, flt(doc.get(fieldname), self.precision(fieldname, doc.parentfield)))
def get_url(self):
    """Build and return the Desk form URL for this document."""
    template = "/app/Form/{doctype}/{name}"
    return template.format(doctype=self.doctype, name=self.name)
def add_comment(self, comment_type='Comment', text=None, comment_email=None, link_doctype=None, link_name=None, comment_by=None):
    """Add a comment to this document.

    :param comment_type: e.g. `Comment`. See Communication for more info.
    :param text: comment body; falls back to the comment_type string
    :param comment_email: author email; defaults to the session user
    :param link_doctype/link_name: optional document to link the comment to
    :param comment_by: display name of the author"""
    out = frappe.get_doc({
        "doctype":"Comment",
        'comment_type': comment_type,
        "comment_email": comment_email or frappe.session.user,
        "comment_by": comment_by,
        "reference_doctype": self.doctype,
        "reference_name": self.name,
        "content": text or comment_type,
        "link_doctype": link_doctype,
        "link_name": link_name
    }).insert(ignore_permissions=True)
    return out
def add_seen(self, user=None):
    """add the given/current user to list of users who have seen this document (_seen)"""
    if not user:
        user = frappe.session.user

    if self.meta.track_seen:
        # _seen is stored as a JSON list of user names
        _seen = self.get('_seen') or []
        _seen = frappe.parse_json(_seen)

        if user not in _seen:
            _seen.append(user)
            # update_modified=False: seeing a doc must not bump its timestamp
            frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(_seen), update_modified=False)
            frappe.local.flags.commit = True
def add_viewed(self, user=None):
    """Log a `View Log` entry recording that a user viewed this document.

    :param user: the viewing user; defaults to the current session user.

    Only applies when the DocType's meta has `track_views` enabled."""
    if not user:
        user = frappe.session.user

    if hasattr(self.meta, 'track_views') and self.meta.track_views:
        frappe.get_doc({
            "doctype": "View Log",
            # bug fix: was hard-coded to frappe.session.user, which silently
            # ignored an explicitly-passed `user` argument
            "viewed_by": user,
            "reference_doctype": self.doctype,
            "reference_name": self.name,
        }).insert(ignore_permissions=True)
        frappe.local.flags.commit = True
def get_signature(self):
    """Return the document's private-URL signature: a SHA-224 hex digest of
    its creation timestamp string."""
    creation_str = get_datetime_str(self.creation)
    return hashlib.sha224(creation_str.encode()).hexdigest()
def get_liked_by(self):
    """Return the list of users who liked this document.

    `_liked_by` is stored as a JSON string; an absent/empty value yields []."""
    raw = getattr(self, "_liked_by", None)
    return json.loads(raw) if raw else []
def set_onload(self, key, value):
    """Stash `value` under `key` in the transient `__onload` dict (data
    shipped to the client on load, never persisted)."""
    if not self.get("__onload"):
        # lazily create the container on first use
        self.set("__onload", frappe._dict())
    self.get("__onload")[key] = value
def get_onload(self, key=None):
    """Return the whole `__onload` dict (empty `_dict` when unset), or the
    value stored under `key` when one is given."""
    if key:
        return self.get('__onload')[key]
    return self.get("__onload", frappe._dict())
def queue_action(self, action, **kwargs):
    """Run an action in background. If the action has an inner function,
    like _submit for submit, it will call that instead

    :param action: method name on this document to run asynchronously
    :raises frappe.ValidationError: if the document is already queued (lock file exists)"""
    # call _submit instead of submit, so you can override submit to call
    # run_delayed based on some action
    # See: Stock Reconciliation
    from frappe.utils.background_jobs import enqueue

    if hasattr(self, '_' + action):
        action = '_' + action

    # the lock file doubles as a "queued" marker; refuse to double-queue
    if file_lock.lock_exists(self.get_signature()):
        frappe.throw(_('This document is currently queued for execution. Please try again'),
            title=_('Document Queued'))

    self.lock()
    enqueue('frappe.model.document.execute_action', doctype=self.doctype, name=self.name,
        action=action, **kwargs)
def lock(self, timeout=None):
    """Creates a lock file for the given document. If timeout is set,
    it will retry every 1 second for acquiring the lock again

    :param timeout: Timeout in seconds, default 0
    :raises frappe.DocumentLockedError: if the lock is still held after the retries"""
    signature = self.get_signature()
    if file_lock.lock_exists(signature):
        lock_exists = True
        if timeout:
            # poll once per second until the lock disappears or timeout expires
            for i in range(timeout):
                time.sleep(1)
                if not file_lock.lock_exists(signature):
                    lock_exists = False
                    break
        if lock_exists:
            raise frappe.DocumentLockedError
    file_lock.create_lock(signature)
def unlock(self):
    """Delete the lock file for this document (created by `lock`)."""
    file_lock.delete_lock(self.get_signature())
# validation helpers
def validate_from_to_dates(self, from_date_field, to_date_field):
    """
    Generic validation to verify date sequence

    :param from_date_field: fieldname holding the start date
    :param to_date_field: fieldname holding the end date
    :raises frappe.exceptions.InvalidDates: when the end date is before the start date
    """
    if date_diff(self.get(to_date_field), self.get(from_date_field)) < 0:
        frappe.throw(_('{0} must be after {1}').format(
            frappe.bold(self.meta.get_label(to_date_field)),
            frappe.bold(self.meta.get_label(from_date_field)),
        ), frappe.exceptions.InvalidDates)
def get_assigned_users(self):
    """Return the set of users who have a non-cancelled ToDo assignment
    referencing this document."""
    assignments = frappe.get_all('ToDo',
        fields=['owner'],
        filters={
            'reference_type': self.doctype,
            'reference_name': self.name,
            'status': ('!=', 'Cancelled'),
        })

    # set comprehension instead of set([...]) — no throwaway list, same result
    return {assignment.owner for assignment in assignments}
def add_tag(self, tag):
    """Add a Tag to this document

    :param tag: tag text to attach (stored via the DocTags helper)"""
    from frappe.desk.doctype.tag.tag import DocTags
    DocTags(self.doctype).add(self.name, tag)
def get_tags(self):
    """Return a list of Tags attached to this document

    Tags are stored comma-separated with a leading comma, hence the [1:] slice."""
    from frappe.desk.doctype.tag.tag import DocTags
    return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def execute_action(doctype, name, action, **kwargs):
    """Execute an action on a document (called by background worker)

    :param doctype: DocType of the target document
    :param name: name of the target document
    :param action: method name to invoke on the document

    On failure the transaction is rolled back and the error is recorded
    as a comment on the document instead of being re-raised."""
    doc = frappe.get_doc(doctype, name)
    doc.unlock()
    try:
        getattr(doc, action)(**kwargs)
    except Exception:
        frappe.db.rollback()

        # add a comment (?)
        if frappe.local.message_log:
            msg = json.loads(frappe.local.message_log[-1]).get('message')
        else:
            # fix: closing tags were wrongly nested as '</pre></code>'
            msg = '<pre><code>' + frappe.get_traceback() + '</code></pre>'

        doc.add_comment('Comment', _('Action Failed') + '<br><br>' + msg)
    doc.notify_update()
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
import frappe
import time
from frappe import _, msgprint, is_whitelisted
from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, table_fields
from frappe.model.workflow import validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
# once_only validation
# methods
def get_doc(*args, **kwargs):
    """returns a frappe.model.Document object.

    :param arg1: Document dict or DocType name.
    :param arg2: [optional] document name.
    :param for_update: [optional] select document for update.

    There are multiple ways to call `get_doc`

            # will fetch the latest user object (with child table) from the database
            user = get_doc("User", "test@example.com")

            # create a new object
            user = get_doc({
                    "doctype":"User"
                    "email_id": "test@example.com",
                    "roles": [
                            {"role": "System Manager"}
                    ]
            })

            # create new object with keyword arguments
            user = get_doc(doctype='User', email_id='test@example.com')

            # select a document for update
            user = get_doc("User", "test@example.com", for_update=True)
    """
    if args:
        if isinstance(args[0], BaseDocument):
            # already a document
            return args[0]
        elif isinstance(args[0], string_types):
            doctype = args[0]

        elif isinstance(args[0], dict):
            # passed a dict
            kwargs = args[0]

        else:
            raise ValueError('First non keyword argument must be a string or dict')

    if len(args) < 2 and kwargs:
        # keyword-only call: the doctype must come from kwargs
        if 'doctype' in kwargs:
            doctype = kwargs['doctype']
        else:
            raise ValueError('"doctype" is a required key')

    # get_controller resolves the DocType's controller class (or Document)
    controller = get_controller(doctype)
    if controller:
        return controller(*args, **kwargs)

    raise ImportError(doctype)
class Document(BaseDocument):
"""All controllers inherit from `Document`."""
def __init__(self, *args, **kwargs):
    """Constructor.

    :param arg1: DocType name as string or document **dict**
    :param arg2: Document name, if `arg1` is DocType name.

    If DocType name and document name are passed, the object will load
    all values (including child documents) from the database.
    """
    self.doctype = self.name = None
    self._default_new_docs = {}
    self.flags = frappe._dict()

    if args and args[0] and isinstance(args[0], string_types):
        # first argument is doctype
        if len(args)==1:
            # single
            self.doctype = self.name = args[0]
        else:
            self.doctype = args[0]
            if isinstance(args[1], dict):
                # filter
                self.name = frappe.db.get_value(args[0], args[1], "name")
                if self.name is None:
                    frappe.throw(_("{0} {1} not found").format(_(args[0]), args[1]),
                        frappe.DoesNotExistError)
            else:
                self.name = args[1]

            if 'for_update' in kwargs:
                self.flags.for_update = kwargs.get('for_update')

        self.load_from_db()
        return

    if args and args[0] and isinstance(args[0], dict):
        # first argument is a dict
        kwargs = args[0]

    if kwargs:
        # init base document
        super(Document, self).__init__(kwargs)
        self.init_valid_columns()

    else:
        # incorrect arguments. let's not proceed.
        raise ValueError('Illegal arguments')
@staticmethod
def whitelist(fn):
    """Decorator: Whitelist method to be called remotely via REST API."""
    frappe.whitelist()(fn)
    return fn
def reload(self):
    """Discard in-memory state and re-read this document (with children)
    from the database."""
    self.load_from_db()
def load_from_db(self):
    """Load document and children from database and create properties
    from fields"""
    if not getattr(self, "_metaclass", False) and self.meta.issingle:
        # Single doctypes live in tabSingles as key/value rows
        single_doc = frappe.db.get_singles_dict(self.doctype)
        if not single_doc:
            # no saved values yet: start from the doctype defaults
            single_doc = frappe.new_doc(self.doctype).as_dict()
            single_doc["name"] = self.doctype
            del single_doc["__islocal"]

        super(Document, self).__init__(single_doc)
        self.init_valid_columns()
        self._fix_numeric_types()

    else:
        d = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
        if not d:
            frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)

        super(Document, self).__init__(d)

    if self.name=="DocType" and self.doctype=="DocType":
        # bootstrapping case: DocType's own meta may not be loadable yet
        from frappe.model.meta import DOCTYPE_TABLE_FIELDS
        table_fields = DOCTYPE_TABLE_FIELDS
    else:
        table_fields = self.meta.get_table_fields()

    for df in table_fields:
        children = frappe.db.get_values(df.options,
            {"parent": self.name, "parenttype": self.doctype, "parentfield": df.fieldname},
            "*", as_dict=True, order_by="idx asc")
        if children:
            self.set(df.fieldname, children)
        else:
            self.set(df.fieldname, [])

    # sometimes __setup__ can depend on child values, hence calling again at the end
    if hasattr(self, "__setup__"):
        self.__setup__()
def get_latest(self):
    """Return (and cache on `self.latest`) a fresh copy of this document
    loaded from the database."""
    if getattr(self, "latest", None) is None:
        self.latest = frappe.get_doc(self.doctype, self.name)
    return self.latest
def check_permission(self, permtype='read', permlevel=None):
    """Raise `frappe.PermissionError` if not permitted"""
    allowed = self.has_permission(permtype)
    if not allowed:
        self.raise_no_permission_to(permlevel or permtype)
def has_permission(self, permtype="read", verbose=False):
    """Call `frappe.has_permission` if `self.flags.ignore_permissions`
    is not set.

    :param permtype: one of `read`, `write`, `submit`, `cancel`, `delete`
    :returns: bool — always True when ignore_permissions flag is set"""
    if self.flags.ignore_permissions:
        return True
    return frappe.has_permission(self.doctype, permtype, self, verbose=verbose)
def raise_no_permission_to(self, perm_type):
    """Raise `frappe.PermissionError`.

    NOTE(review): `perm_type` is accepted but not used in the message —
    only the doctype is reported."""
    frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
    raise frappe.PermissionError
def insert(self, ignore_permissions=None, ignore_links=None, ignore_if_duplicate=False,
        ignore_mandatory=None, set_name=None, set_child_names=True):
    """Insert the document in the database (as a new document).
    This will check for user permissions and execute `before_insert`,
    `validate`, `on_update`, `after_insert` methods if they are written.

    :param ignore_permissions: Do not check permissions if True.
    :param ignore_links: skip link validation if True.
    :param ignore_if_duplicate: swallow DuplicateEntryError if True.
    :param ignore_mandatory: skip mandatory-field validation if True.
    :param set_name: use this name instead of autonaming.
    :param set_child_names: also autoname child rows (default True)."""
    if self.flags.in_print:
        return

    self.flags.notifications_executed = []

    # tri-state: None means "leave the existing flag alone"
    if ignore_permissions!=None:
        self.flags.ignore_permissions = ignore_permissions

    if ignore_links!=None:
        self.flags.ignore_links = ignore_links

    if ignore_mandatory!=None:
        self.flags.ignore_mandatory = ignore_mandatory

    self.set("__islocal", True)

    self.check_permission("create")
    self._set_defaults()
    self.set_user_and_timestamp()
    self.set_docstatus()
    self.check_if_latest()
    self.run_method("before_insert")
    self._validate_links()
    self.set_new_name(set_name=set_name, set_child_names=set_child_names)
    self.set_parent_in_children()
    self.validate_higher_perm_levels()

    self.flags.in_insert = True
    self.run_before_save_methods()
    self._validate()
    self.set_docstatus()
    self.flags.in_insert = False

    # run validate, on update etc.

    # parent
    if getattr(self.meta, "issingle", 0):
        self.update_single(self.get_valid_dict())
    else:
        try:
            self.db_insert()
        except frappe.DuplicateEntryError as e:
            if not ignore_if_duplicate:
                raise e

    # children
    for d in self.get_all_children():
        d.db_insert()

    self.run_method("after_insert")

    self.flags.in_insert = True

    if self.get("amended_from"):
        self.copy_attachments_from_amended_from()

    # flag to prevent creation of event update log for create and update both
    # during document creation
    self.flags.update_log_for_doc_creation = True

    self.run_post_save_methods()
    self.flags.in_insert = False

    # delete __islocal
    if hasattr(self, "__islocal"):
        delattr(self, "__islocal")

    # clear unsaved flag
    if hasattr(self, "__unsaved"):
        delattr(self, "__unsaved")

    if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
        follow_document(self.doctype, self.name, frappe.session.user)
    return self
def save(self, *args, **kwargs):
    """Public entry point for saving; delegates to `_save` so subclasses
    can override save() while reusing the core logic."""
    return self._save(*args, **kwargs)
def _save(self, ignore_permissions=None, ignore_version=None):
    """Save the current document in the database in the **DocType**'s table or
    `tabSingles` (for single types).

    This will check for user permissions and execute
    `validate` before updating, `on_update` after updating triggers.

    :param ignore_permissions: Do not check permissions if True.
    :param ignore_version: Do not save version if True."""
    if self.flags.in_print:
        return

    self.flags.notifications_executed = []

    if ignore_permissions!=None:
        self.flags.ignore_permissions = ignore_permissions

    # versions are skipped by default inside tests
    self.flags.ignore_version = frappe.flags.in_test if ignore_version is None else ignore_version

    if self.get("__islocal") or not self.get("name"):
        # not yet in the database: this is an insert, not an update
        self.insert()
        return

    self.check_permission("write", "save")

    self.set_user_and_timestamp()
    self.set_docstatus()
    self.check_if_latest()
    self.set_parent_in_children()
    self.set_name_in_children()

    self.validate_higher_perm_levels()
    self._validate_links()
    self.run_before_save_methods()

    if self._action != "cancel":
        self._validate()

    if self._action == "update_after_submit":
        self.validate_update_after_submit()

    self.set_docstatus()

    # parent
    if self.meta.issingle:
        self.update_single(self.get_valid_dict())
    else:
        self.db_update()

    self.update_children()
    self.run_post_save_methods()

    # clear unsaved flag
    if hasattr(self, "__unsaved"):
        delattr(self, "__unsaved")

    return self
def copy_attachments_from_amended_from(self):
    """Copy attachments from `amended_from`"""
    from frappe.desk.form.load import get_attachments

    #loop through attachments
    for attach_item in get_attachments(self.doctype, self.amended_from):

        #save attachments to new doc
        _file = frappe.get_doc({
            "doctype": "File",
            "file_url": attach_item.file_url,
            "file_name": attach_item.file_name,
            "attached_to_name": self.name,
            "attached_to_doctype": self.doctype,
            "folder": "Home/Attachments"})
        _file.save()
def update_children(self):
    """Sync every child table of this document to the database."""
    for table_df in self.meta.get_table_fields():
        self.update_child_table(table_df.fieldname, table_df)
def update_child_table(self, fieldname, df=None):
    """sync child table for given fieldname

    Rows present in memory are db_update()d; database rows that are no
    longer in the document are deleted (unless suppressed via
    `flags.ignore_children_type`)."""
    rows = []
    if not df:
        df = self.meta.get_field(fieldname)

    for d in self.get(df.fieldname):
        d.db_update()
        rows.append(d.name)

    if df.options in (self.flags.ignore_children_type or []):
        # do not delete rows for this because of flags
        # hack for docperm :(
        return

    if rows:
        # select rows that do not match the ones in the document
        deleted_rows = frappe.db.sql("""select name from `tab{0}` where parent=%s
            and parenttype=%s and parentfield=%s
            and name not in ({1})""".format(df.options, ','.join(['%s'] * len(rows))),
                [self.name, self.doctype, fieldname] + rows)

        if len(deleted_rows) > 0:
            # delete rows that do not match the ones in the document
            frappe.db.sql("""delete from `tab{0}` where name in ({1})""".format(df.options,
                ','.join(['%s'] * len(deleted_rows))), tuple(row[0] for row in deleted_rows))

    else:
        # no rows found, delete all rows
        frappe.db.sql("""delete from `tab{0}` where parent=%s
            and parenttype=%s and parentfield=%s""".format(df.options),
            (self.name, self.doctype, fieldname))
def get_doc_before_save(self):
    """Return the pre-save snapshot of this document, or None when no
    snapshot was captured (e.g. a brand new document)."""
    snapshot = getattr(self, '_doc_before_save', None)
    return snapshot
def has_value_changed(self, fieldname):
    '''Returns true if value is changed before and after saving'''
    before = self.get_doc_before_save()
    if not before:
        # no snapshot means we cannot compare; treat as changed
        return True
    return before.get(fieldname) != self.get(fieldname)
def set_new_name(self, force=False, set_name=None, set_child_names=True):
    """Calls `frappe.naming.set_new_name` for parent and child docs.

    :param force: re-run naming even if `flags.name_set` is already set
    :param set_name: use this value as the name instead of autonaming
    :param set_child_names: also name all child rows"""
    if self.flags.name_set and not force:
        return

    # If autoname has set as Prompt (name)
    if self.get("__newname"):
        self.name = self.get("__newname")
        self.flags.name_set = True
        return

    if set_name:
        self.name = set_name
    else:
        set_new_name(self)

    if set_child_names:
        # set name for children
        for d in self.get_all_children():
            set_new_name(d)

    self.flags.name_set = True
def get_title(self):
    """Get the document title based on title_field or `title` or `name`"""
    return self.get(self.meta.get_title_field())
def set_title_field(self):
    """Set title field based on template"""
    def get_values():
        values = self.as_dict()
        # format values
        for key, value in iteritems(values):
            if value==None:
                # replace None so str.format does not render "None"
                values[key] = ""
        return values

    if self.meta.get("title_field")=="title":
        df = self.meta.get_field(self.meta.title_field)

        if df.options:
            # options holds a format template, e.g. "{first_name} {last_name}"
            self.set(df.fieldname, df.options.format(**get_values()))
        elif self.is_new() and not self.get(df.fieldname) and df.default:
            # set default title for new transactions (if default)
            self.set(df.fieldname, df.default.format(**get_values()))
def update_single(self, d):
    """Updates values for Single type Document in `tabSingles`.

    :param d: dict of field -> value to persist (delete-then-insert)."""
    frappe.db.sql("""delete from `tabSingles` where doctype=%s""", self.doctype)
    for field, value in iteritems(d):
        if field != "doctype":
            frappe.db.sql("""insert into `tabSingles` (doctype, field, value)
                values (%s, %s, %s)""", (self.doctype, field, value))

    # invalidate the cached values for this single doctype
    if self.doctype in frappe.db.value_cache:
        del frappe.db.value_cache[self.doctype]
def set_user_and_timestamp(self):
    """Stamp `modified`/`modified_by` (and `creation`/`owner` if unset) on
    this document and all child rows, and register the doc as
    currently-saving (used by the modified-conflict check)."""
    self._original_modified = self.modified
    self.modified = now()
    self.modified_by = frappe.session.user
    if not self.creation:
        self.creation = self.modified
    if not self.owner:
        self.owner = self.modified_by

    for d in self.get_all_children():
        d.modified = self.modified
        d.modified_by = self.modified_by
        if not d.owner:
            d.owner = self.owner
        if not d.creation:
            d.creation = self.creation

    frappe.flags.currently_saving.append((self.doctype, self.name))
def set_docstatus(self):
    """Default `docstatus` to 0 (Draft) when unset, and propagate the
    parent's docstatus to every child row."""
    if self.docstatus is None:  # idiom fix: identity check instead of `== None`
        self.docstatus = 0

    for d in self.get_all_children():
        d.docstatus = self.docstatus
def _validate(self):
    """Run the framework's field-level validations on this document and
    all child rows (mandatory, selects, lengths, sanitization, passwords,
    workflow), then enforce set-only-once for existing documents."""
    self._validate_mandatory()
    self._validate_data_fields()
    self._validate_selects()
    self._validate_non_negative()
    self._validate_length()
    self._extract_images_from_text_editor()
    self._sanitize_content()
    self._save_passwords()
    self.validate_workflow()

    children = self.get_all_children()
    for d in children:
        # note: children skip _validate_mandatory and workflow checks
        d._validate_data_fields()
        d._validate_selects()
        d._validate_non_negative()
        d._validate_length()
        d._extract_images_from_text_editor()
        d._sanitize_content()
        d._save_passwords()

    if self.is_new():
        # don't set fields like _assign, _comments for new doc
        for fieldname in optional_fields:
            self.set(fieldname, None)
    else:
        self.validate_set_only_once()
def _validate_non_negative(self):
    """Throw `frappe.NonNegativeError` when any Int/Float/Currency field
    marked `non_negative` holds a value below zero."""
    def get_msg(df):
        # child rows include the row index in the message
        if self.parentfield:
            return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
                _("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(df.label)))
        else:
            return _("Value cannot be negative for {0}: {1}").format(_(df.parent), frappe.bold(_(df.label)))

    for df in self.meta.get('fields', {'non_negative': ('=', 1),
        'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
        if flt(self.get(df.fieldname)) < 0:
            msg = get_msg(df)
            frappe.throw(msg, frappe.NonNegativeError, title=_("Negative Value"))
def validate_workflow(self):
    """Validate if the workflow transition is valid"""
    # skipped entirely while the frappe app itself is being installed
    if frappe.flags.in_install == 'frappe': return
    workflow = self.meta.get_workflow()
    if workflow:
        validate_workflow(self)
        if not self._action == 'save':
            # submit/cancel etc. may also move the workflow state
            set_workflow_state_on_action(self, workflow, self._action)
def validate_set_only_once(self):
    """Validate that fields are not changed if not in insert

    :raises frappe.CannotChangeConstantError: when a set-only-once field differs
        from its value in the pre-save snapshot."""
    set_only_once_fields = self.meta.get_set_only_once_fields()

    if set_only_once_fields and self._doc_before_save:
        # document exists before saving
        for field in set_only_once_fields:
            fail = False
            value = self.get(field.fieldname)
            original_value = self._doc_before_save.get(field.fieldname)

            if field.fieldtype in table_fields:
                # child tables need a row-by-row comparison
                fail = not self.is_child_table_same(field.fieldname)
            elif field.fieldtype in ('Date', 'Datetime', 'Time'):
                # compare as strings to normalize date/datetime representations
                fail = str(value) != str(original_value)
            else:
                fail = value != original_value

            if fail:
                frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
                    frappe.CannotChangeConstantError)

    return False
def is_child_table_same(self, fieldname):
    """Validate child table is same as original table before saving

    :param fieldname: child-table fieldname to compare against `_doc_before_save`
    :returns: True when row count and all row values (except timestamps) match"""
    value = self.get(fieldname)
    original_value = self._doc_before_save.get(fieldname)
    same = True

    if len(original_value) != len(value):
        same = False
    else:
        # check all child entries
        for i, d in enumerate(original_value):
            new_child = value[i].as_dict(convert_dates_to_str = True)
            original_child = d.as_dict(convert_dates_to_str = True)

            # all fields must be same other than modified and modified_by
            for key in ('modified', 'modified_by', 'creation'):
                del new_child[key]
                del original_child[key]

            if original_child != new_child:
                same = False
                break

    return same
def apply_fieldlevel_read_permissions(self):
    """Remove values the user is not allowed to read (called when loading in desk)"""
    if frappe.session.user == "Administrator":
        return

    # cheap pre-check: do any parent/child fields even use permlevels?
    has_higher_permlevel = False

    all_fields = self.meta.fields.copy()
    for table_field in self.meta.get_table_fields():
        all_fields += frappe.get_meta(table_field.options).fields or []

    for df in all_fields:
        if df.permlevel > 0:
            has_higher_permlevel = True
            break

    if not has_higher_permlevel:
        return

    has_access_to = self.get_permlevel_access('read')

    # blank out parent fields at inaccessible permlevels
    for df in self.meta.fields:
        if df.permlevel and not df.permlevel in has_access_to:
            self.set(df.fieldname, None)

    # and the same for every row of every child table
    for table_field in self.meta.get_table_fields():
        for df in frappe.get_meta(table_field.options).fields or []:
            if df.permlevel and not df.permlevel in has_access_to:
                for child in self.get(table_field.fieldname) or []:
                    child.set(df.fieldname, None)
def validate_higher_perm_levels(self):
    """If the user does not have permissions at permlevel > 0, then reset the values to original / default"""
    if self.flags.ignore_permissions or frappe.flags.in_install:
        return

    if frappe.session.user == "Administrator":
        return

    has_access_to = self.get_permlevel_access()
    high_permlevel_fields = self.meta.get_high_permlevel_fields()

    if high_permlevel_fields:
        self.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)

    # If new record then don't reset the values for child table
    if self.is_new(): return

    # check for child tables
    for df in self.meta.get_table_fields():
        high_permlevel_fields = frappe.get_meta(df.options).get_high_permlevel_fields()
        if high_permlevel_fields:
            for d in self.get(df.fieldname):
                d.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
def get_permlevel_access(self, permission_type='write'):
    """Return the list of permlevels at which the current user's roles
    grant `permission_type` on this doctype (cached on the instance)."""
    if not hasattr(self, "_has_access_to"):
        self._has_access_to = {}

    self._has_access_to[permission_type] = []
    roles = frappe.get_roles()
    for perm in self.get_permissions():
        if perm.role in roles and perm.get(permission_type):
            if perm.permlevel not in self._has_access_to[permission_type]:
                self._has_access_to[permission_type].append(perm.permlevel)

    return self._has_access_to[permission_type]
def has_permlevel_access_to(self, fieldname, df=None, permission_type='read'):
    """Return True when the current user can access `fieldname` at its
    permlevel for the given permission type."""
    field_df = df or self.meta.get_field(fieldname)
    accessible_levels = self.get_permlevel_access(permission_type)
    return field_df.permlevel in accessible_levels
def get_permissions(self):
    """Return the permission rules that govern this document — the parent
    doctype's rules for child tables, otherwise this doctype's own."""
    if self.meta.istable:
        # use parent permissions
        return frappe.get_meta(self.parenttype).permissions
    return self.meta.permissions
def _set_defaults(self):
    """Fill missing field values with doctype defaults (skipped during
    data import, where blanks must stay blank)."""
    if frappe.flags.in_import:
        return

    new_doc = frappe.new_doc(self.doctype, as_dict=True)
    self.update_if_missing(new_doc)

    # children
    for df in self.meta.get_table_fields():
        new_doc = frappe.new_doc(df.options, as_dict=True)
        value = self.get(df.fieldname)
        if isinstance(value, list):
            for d in value:
                d.update_if_missing(new_doc)
def check_if_latest(self):
    """Checks if `modified` timestamp provided by document being updated is same as the
    `modified` timestamp in the database. If there is a different, the document has been
    updated in the database after the current copy was read. Will throw an error if
    timestamps don't match.

    Will also validate document transitions (Save > Submit > Cancel) calling
    `self.check_docstatus_transition`."""
    conflict = False
    self._action = "save"
    if not self.get('__islocal'):
        if self.meta.issingle:
            # singles store 'modified' as a row in tabSingles; lock it for update
            modified = frappe.db.sql("""select value from tabSingles
                where doctype=%s and field='modified' for update""", self.doctype)
            modified = modified and modified[0][0]
            if modified and modified != cstr(self._original_modified):
                conflict = True
        else:
            tmp = frappe.db.sql("""select modified, docstatus from `tab{0}`
                where name = %s for update""".format(self.doctype), self.name, as_dict=True)

            if not tmp:
                frappe.throw(_("Record does not exist"))
            else:
                tmp = tmp[0]

            modified = cstr(tmp.modified)

            if modified and modified != cstr(self._original_modified):
                conflict = True

            self.check_docstatus_transition(tmp.docstatus)

        if conflict:
            frappe.msgprint(_("Error: Document has been modified after you have opened it") \
            + (" (%s, %s). " % (modified, self.modified)) \
            + _("Please refresh to get the latest document."),
                raise_exception=frappe.TimestampMismatchError)
    else:
        # brand new documents start the transition check from Draft (0)
        self.check_docstatus_transition(0)
def check_docstatus_transition(self, docstatus):
    """Ensures valid `docstatus` transition.
    Valid transitions are (number in brackets is `docstatus`):

    - Save (0) > Save (0)
    - Save (0) > Submit (1)
    - Submit (1) > Submit (1)
    - Submit (1) > Cancel (2)

    :param docstatus: the docstatus currently stored in the database
    :raises frappe.DocstatusTransitionError: on a disallowed transition
    :raises frappe.ValidationError: when editing a cancelled document"""
    if not self.docstatus:
        self.docstatus = 0
    if docstatus==0:
        if self.docstatus==0:
            self._action = "save"
        elif self.docstatus==1:
            self._action = "submit"
            self.check_permission("submit")
        else:
            raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 0 to 2"))

    elif docstatus==1:
        if self.docstatus==1:
            self._action = "update_after_submit"
            self.check_permission("submit")
        elif self.docstatus==2:
            self._action = "cancel"
            self.check_permission("cancel")
        else:
            raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 1 to 0"))

    elif docstatus==2:
        raise frappe.ValidationError(_("Cannot edit cancelled document"))
def set_parent_in_children(self):
    """Point every child row's `parent`/`parenttype` back at this document."""
    for child in self.get_all_children():
        child.parent = self.name
        child.parenttype = self.doctype
def set_name_in_children(self):
    """Autoname any child rows that do not yet have a name."""
    for child in self.get_all_children():
        if child.name:
            continue
        set_new_name(child)
def validate_update_after_submit(self):
    """Ensure only allow-on-submit fields changed when editing a submitted
    document, for the parent and every child row."""
    if self.flags.ignore_validate_update_after_submit:
        return

    self._validate_update_after_submit()
    for d in self.get_all_children():
        if d.is_new() and self.meta.get_field(d.parentfield).allow_on_submit:
            # in case of a new row, don't validate allow on submit, if table is allow on submit
            continue
        d._validate_update_after_submit()

    # TODO check only allowed values are updated
def _validate_mandatory(self):
    """Collect missing mandatory fields from this document and its
    children; print each message and raise `frappe.MandatoryError`."""
    if self.flags.ignore_mandatory:
        return

    missing = self._get_missing_mandatory_fields()
    for d in self.get_all_children():
        missing.extend(d._get_missing_mandatory_fields())

    if not missing:
        return

    for fieldname, msg in missing:
        msgprint(msg)

    if frappe.flags.print_messages:
        # dump the offending document to stdout for CLI debugging
        print(self.as_json().encode("utf-8"))

    raise frappe.MandatoryError('[{doctype}, {name}]: {fields}'.format(
        fields=", ".join((each[0] for each in missing)),
        doctype=self.doctype,
        name=self.name))
def _validate_links(self):
    """Verify all Link fields (parent and children) point at existing,
    non-cancelled documents; skipped on cancel or when ignore_links is set."""
    if self.flags.ignore_links or self._action == "cancel":
        return

    invalid_links, cancelled_links = self.get_invalid_links()

    for d in self.get_all_children():
        result = d.get_invalid_links(is_submittable=self.meta.is_submittable)
        invalid_links.extend(result[0])
        cancelled_links.extend(result[1])

    if invalid_links:
        msg = ", ".join((each[2] for each in invalid_links))
        frappe.throw(_("Could not find {0}").format(msg),
            frappe.LinkValidationError)

    if cancelled_links:
        msg = ", ".join((each[2] for each in cancelled_links))
        frappe.throw(_("Cannot link cancelled document: {0}").format(msg),
            frappe.CancelledLinkError)
def get_all_children(self, parenttype=None):
    """Returns all children documents from **Table** type field in a list.

    :param parenttype: when given, return only the rows of the first table
        field whose options match this doctype (early return)."""
    ret = []
    for df in self.meta.get("fields", {"fieldtype": ['in', table_fields]}):
        if parenttype:
            if df.options==parenttype:
                return self.get(df.fieldname)

        value = self.get(df.fieldname)
        if isinstance(value, list):
            ret.extend(value)

    return ret
def run_method(self, method, *args, **kwargs):
    """run standard triggers, plus those in hooks"""
    if "flags" in kwargs:
        del kwargs["flags"]

    if hasattr(self, method) and hasattr(getattr(self, method), "__call__"):
        fn = lambda self, *args, **kwargs: getattr(self, method)(*args, **kwargs)
    else:
        # hack! to run hooks even if method does not exist
        fn = lambda self, *args, **kwargs: None

    fn.__name__ = str(method)
    # Document.hook wraps fn so doc_events handlers from other apps also run
    out = Document.hook(fn)(self, *args, **kwargs)

    self.run_notifications(method)
    run_webhooks(self, method)
    run_server_script_for_doc_event(self, method)

    return out
def run_trigger(self, method, *args, **kwargs):
    """Alias for `run_method` kept for backward compatibility."""
    result = self.run_method(method, *args, **kwargs)
    return result
def run_notifications(self, method):
    """Run notifications for this method"""
    # notifications are muted during import (when emails are muted), patches and install
    if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
        return

    if self.flags.notifications_executed==None:
        self.flags.notifications_executed = []

    from frappe.email.doctype.notification.notification import evaluate_alert

    if self.flags.notifications == None:
        # lazily load (and cache) the enabled Notifications for this doctype
        alerts = frappe.cache().hget('notifications', self.doctype)
        if alerts==None:
            alerts = frappe.get_all('Notification', fields=['name', 'event', 'method'],
                filters={'enabled': 1, 'document_type': self.doctype})
            frappe.cache().hset('notifications', self.doctype, alerts)
        self.flags.notifications = alerts

    if not self.flags.notifications:
        return

    def _evaluate_alert(alert):
        # each alert fires at most once per save cycle
        if not alert.name in self.flags.notifications_executed:
            evaluate_alert(self, alert.name, alert.event)
            self.flags.notifications_executed.append(alert.name)

    event_map = {
        "on_update": "Save",
        "after_insert": "New",
        "on_submit": "Submit",
        "on_cancel": "Cancel"
    }

    if not self.flags.in_insert:
        # value change is not applicable in insert
        event_map['on_change'] = 'Value Change'

    for alert in self.flags.notifications:
        event = event_map.get(method, None)
        if event and alert.event == event:
            _evaluate_alert(alert)
        elif alert.event=='Method' and method == alert.method:
            _evaluate_alert(alert)
@whitelist.__func__
def _submit(self):
"""Submit the document. Sets `docstatus` = 1, then saves."""
self.docstatus = 1
self.save()
@whitelist.__func__
def _cancel(self):
"""Cancel the document. Sets `docstatus` = 2, then saves."""
self.docstatus = 2
self.save()
@whitelist.__func__
def submit(self):
"""Submit the document. Sets `docstatus` = 1, then saves."""
self._submit()
@whitelist.__func__
def cancel(self):
"""Cancel the document. Sets `docstatus` = 2, then saves."""
self._cancel()
def delete(self, ignore_permissions=False):
"""Delete document."""
frappe.delete_doc(self.doctype, self.name, ignore_permissions = ignore_permissions, flags=self.flags)
def run_before_save_methods(self):
"""Run standard methods before `INSERT` or `UPDATE`. Standard Methods are:
- `validate`, `before_save` for **Save**.
- `validate`, `before_submit` for **Submit**.
- `before_cancel` for **Cancel**
- `before_update_after_submit` for **Update after Submit**
Will also update title_field if set"""
self.load_doc_before_save()
self.reset_seen()
# before_validate method should be executed before ignoring validations
if self._action in ("save", "submit"):
self.run_method("before_validate")
if self.flags.ignore_validate:
return
if self._action=="save":
self.run_method("validate")
self.run_method("before_save")
elif self._action=="submit":
self.run_method("validate")
self.run_method("before_submit")
elif self._action=="cancel":
self.run_method("before_cancel")
elif self._action=="update_after_submit":
self.run_method("before_update_after_submit")
self.set_title_field()
def load_doc_before_save(self):
"""Save load document from db before saving"""
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
def run_post_save_methods(self):
"""Run standard methods after `INSERT` or `UPDATE`. Standard Methods are:
- `on_update` for **Save**.
- `on_update`, `on_submit` for **Submit**.
- `on_cancel` for **Cancel**
- `update_after_submit` for **Update after Submit**"""
doc_before_save = self.get_doc_before_save()
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
update_global_search(self)
self.save_version()
self.run_method('on_change')
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
def clear_cache(self):
frappe.clear_document_cache(self.doctype, self.name)
def reset_seen(self):
"""Clear _seen property and set current user as seen"""
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), update_modified=False)
def notify_update(self):
"""Publish realtime that the current document is modified"""
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
doctype=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
data = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", data, after_commit=True)
def db_set(self, fieldname, value=None, update_modified=True, notify=False, commit=False):
"""Set a value in the document object, update the timestamp and update the database.
WARNING: This method does not trigger controller validations and should
be used very carefully.
:param fieldname: fieldname of the property to be updated, or a {"field":"value"} dictionary
:param value: value of the property to be updated
:param update_modified: default True. updates the `modified` and `modified_by` properties
:param notify: default False. run doc.notify_updated() to send updates via socketio
:param commit: default False. run frappe.db.commit()
"""
if isinstance(fieldname, dict):
self.update(fieldname)
else:
self.set(fieldname, value)
if update_modified and (self.doctype, self.name) not in frappe.flags.currently_saving:
# don't update modified timestamp if called from post save methods
# like on_update or on_submit
self.set("modified", now())
self.set("modified_by", frappe.session.user)
self.load_doc_before_save()
# to trigger notification on value change
self.run_method('before_change')
frappe.db.set_value(self.doctype, self.name, fieldname, value,
self.modified, self.modified_by, update_modified=update_modified)
self.run_method('on_change')
if notify:
self.notify_update()
self.clear_cache()
if commit:
frappe.db.commit()
def db_get(self, fieldname):
"""get database value for this fieldname"""
return frappe.db.get_value(self.doctype, self.name, fieldname)
def check_no_back_links_exist(self):
"""Check if document links to any active document before Cancel."""
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, method="Cancel")
check_if_doc_is_dynamically_linked(self, method="Cancel")
def save_version(self):
"""Save version info"""
# don't track version under following conditions
if (not getattr(self.meta, 'track_changes', False)
or self.doctype == 'Version'
or self.flags.ignore_version
or frappe.flags.in_install
or (not self._doc_before_save and frappe.flags.in_patch)):
return
version = frappe.new_doc('Version')
if not self._doc_before_save:
version.for_insert(self)
version.insert(ignore_permissions=True)
elif version.set_diff(self._doc_before_save, self):
version.insert(ignore_permissions=True)
if not frappe.flags.in_migrate:
# follow since you made a change?
follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def hook(f):
"""Decorator: Make method `hookable` (i.e. extensible by another app).
Note: If each hooked method returns a value (dict), then all returns are
collated in one dict and returned. Ideally, don't return values in hookable
methods, set properties in the document."""
def add_to_return_value(self, new_return_value):
if isinstance(new_return_value, dict):
if not self.get("_return_value"):
self._return_value = {}
self._return_value.update(new_return_value)
else:
self._return_value = new_return_value or self.get("_return_value")
def compose(fn, *hooks):
def runner(self, method, *args, **kwargs):
add_to_return_value(self, fn(self, *args, **kwargs))
for f in hooks:
add_to_return_value(self, f(self, method, *args, **kwargs))
return self._return_value
return runner
def composer(self, *args, **kwargs):
hooks = []
method = f.__name__
doc_events = frappe.get_doc_hooks()
for handler in doc_events.get(self.doctype, {}).get(method, []) \
+ doc_events.get("*", {}).get(method, []):
hooks.append(frappe.get_attr(handler))
composed = compose(f, *hooks)
return composed(self, method, *args, **kwargs)
return composer
def is_whitelisted(self, method_name):
method = getattr(self, method_name, None)
if not fn:
raise NotFound("Method {0} not found".format(method_name))
is_whitelisted(getattr(method, '__func__', method))
def validate_value(self, fieldname, condition, val2, doc=None, raise_exception=None):
"""Check that value of fieldname should be 'condition' val2
else throw Exception."""
error_condition_map = {
"in": _("one of"),
"not in": _("none of"),
"^": _("beginning with"),
}
if not doc:
doc = self
val1 = doc.get_value(fieldname)
df = doc.meta.get_field(fieldname)
val2 = doc.cast(val2, df)
if not frappe.compare(val1, condition, val2):
label = doc.meta.get_label(fieldname)
condition_str = error_condition_map.get(condition, condition)
if doc.parentfield:
msg = _("Incorrect value in row {0}: {1} must be {2} {3}").format(doc.idx, label, condition_str, val2)
else:
msg = _("Incorrect value: {0} must be {1} {2}").format(label, condition_str, val2)
# raise passed exception or True
msgprint(msg, raise_exception=raise_exception or True)
def validate_table_has_rows(self, parentfield, raise_exception=None):
"""Raise exception if Table field is empty."""
if not (isinstance(self.get(parentfield), list) and len(self.get(parentfield)) > 0):
label = self.meta.get_label(parentfield)
frappe.throw(_("Table {0} cannot be empty").format(label), raise_exception or frappe.EmptyTableError)
def round_floats_in(self, doc, fieldnames=None):
"""Round floats for all `Currency`, `Float`, `Percent` fields for the given doc.
:param doc: Document whose numeric properties are to be rounded.
:param fieldnames: [Optional] List of fields to be rounded."""
if not fieldnames:
fieldnames = (df.fieldname for df in
doc.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for fieldname in fieldnames:
doc.set(fieldname, flt(doc.get(fieldname), self.precision(fieldname, doc.parentfield)))
def get_url(self):
"""Returns Desk URL for this document. `/app/Form/{doctype}/{name}`"""
return "/app/Form/{doctype}/{name}".format(doctype=self.doctype, name=self.name)
def add_comment(self, comment_type='Comment', text=None, comment_email=None, link_doctype=None, link_name=None, comment_by=None):
"""Add a comment to this document.
:param comment_type: e.g. `Comment`. See Communication for more info."""
out = frappe.get_doc({
"doctype":"Comment",
'comment_type': comment_type,
"comment_email": comment_email or frappe.session.user,
"comment_by": comment_by,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": text or comment_type,
"link_doctype": link_doctype,
"link_name": link_name
}).insert(ignore_permissions=True)
return out
def add_seen(self, user=None):
"""add the given/current user to list of users who have seen this document (_seen)"""
if not user:
user = frappe.session.user
if self.meta.track_seen:
_seen = self.get('_seen') or []
_seen = frappe.parse_json(_seen)
if user not in _seen:
_seen.append(user)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(_seen), update_modified=False)
frappe.local.flags.commit = True
def add_viewed(self, user=None):
"""add log to communication when a user views a document"""
if not user:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(ignore_permissions=True)
frappe.local.flags.commit = True
def get_signature(self):
"""Returns signature (hash) for private URL."""
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def get_liked_by(self):
liked_by = getattr(self, "_liked_by", None)
if liked_by:
return json.loads(liked_by)
else:
return []
def set_onload(self, key, value):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[key] = value
def get_onload(self, key=None):
if not key:
return self.get("__onload", frappe._dict())
return self.get('__onload')[key]
def queue_action(self, action, **kwargs):
"""Run an action in background. If the action has an inner function,
like _submit for submit, it will call that instead"""
# call _submit instead of submit, so you can override submit to call
# run_delayed based on some action
# See: Stock Reconciliation
from frappe.utils.background_jobs import enqueue
if hasattr(self, '_' + action):
action = '_' + action
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'))
self.lock()
enqueue('frappe.model.document.execute_action', doctype=self.doctype, name=self.name,
action=action, **kwargs)
def lock(self, timeout=None):
"""Creates a lock file for the given document. If timeout is set,
it will retry every 1 second for acquiring the lock again
:param timeout: Timeout in seconds, default 0"""
signature = self.get_signature()
if file_lock.lock_exists(signature):
lock_exists = True
if timeout:
for i in range(timeout):
time.sleep(1)
if not file_lock.lock_exists(signature):
lock_exists = False
break
if lock_exists:
raise frappe.DocumentLockedError
file_lock.create_lock(signature)
def unlock(self):
"""Delete the lock file for this document"""
file_lock.delete_lock(self.get_signature())
# validation helpers
def validate_from_to_dates(self, from_date_field, to_date_field):
"""
Generic validation to verify date sequence
"""
if date_diff(self.get(to_date_field), self.get(from_date_field)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(to_date_field)),
frappe.bold(self.meta.get_label(from_date_field)),
), frappe.exceptions.InvalidDates)
def get_assigned_users(self):
assignments = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
users = set([assignment.owner for assignment in assignments])
return users
def add_tag(self, tag):
"""Add a Tag to this document"""
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, tag)
def get_tags(self):
"""Return a list of Tags attached to this document"""
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def execute_action(doctype, name, action, **kwargs):
"""Execute an action on a document (called by background worker)"""
doc = frappe.get_doc(doctype, name)
doc.unlock()
try:
getattr(doc, action)(**kwargs)
except Exception:
frappe.db.rollback()
# add a comment (?)
if frappe.local.message_log:
msg = json.loads(frappe.local.message_log[-1]).get('message')
else:
msg = '<pre><code>' + frappe.get_traceback() + '</pre></code>'
doc.add_comment('Comment', _('Action Failed') + '<br><br>' + msg)
doc.notify_update()
| xss | {
"code": [
"from frappe import _, msgprint",
"\tdef whitelist(f):",
"\t\tf.whitelisted = True",
"\t\treturn f",
"\tdef is_whitelisted(self, method):",
"\t\tfn = getattr(self, method, None)",
"\t\t\traise NotFound(\"Method {0} not found\".format(method))",
"\t\telif not getattr(fn, \"whitelisted\", False):",
"\t\t\traise Forbidden(\"Method {0} not whitelisted\".format(method))"
],
"line_no": [
7,
129,
131,
132,
1151,
1152,
1154,
1155,
1156
]
} | {
"code": [
"from frappe import _, msgprint, is_whitelisted",
"\tdef whitelist(fn):",
"\t\tfrappe.whitelist()(fn)",
"\t\treturn fn",
"\tdef is_whitelisted(self, method_name):",
"\t\tmethod = getattr(self, method_name, None)",
"\t\t\traise NotFound(\"Method {0} not found\".format(method_name))",
"\t\tis_whitelisted(getattr(method, '__func__', method))"
],
"line_no": [
7,
129,
131,
132,
1151,
1152,
1154,
1156
]
} |
from __future__ import unicode_literals, print_function
import .frappe
import time
from frappe import _, msgprint
from frappe.utils import .flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import .set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, VAR_81
from frappe.model.workflow import .validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import .follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
def FUNC_0(*VAR_0, **VAR_1):
if VAR_0:
if isinstance(VAR_0[0], BaseDocument):
return VAR_0[0]
elif isinstance(VAR_0[0], string_types):
VAR_2 = VAR_0[0]
elif isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
else:
raise ValueError('First non keyword argument must be a string or dict')
if len(VAR_0) < 2 and VAR_1:
if 'doctype' in VAR_1:
VAR_2 = VAR_1['doctype']
else:
raise ValueError('"doctype" is a required key')
VAR_5 = get_controller(VAR_2)
if VAR_5:
return VAR_5(*VAR_0, **VAR_1)
raise ImportError(VAR_2)
class CLASS_0(BaseDocument):
def __init__(self, *VAR_0, **VAR_1):
self.doctype = self.name = None
self._default_new_docs = {}
self.flags = frappe._dict()
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], string_types):
if len(VAR_0)==1:
self.doctype = self.name = VAR_0[0]
else:
self.doctype = VAR_0[0]
if isinstance(VAR_0[1], dict):
self.name = frappe.db.get_value(VAR_0[0], VAR_0[1], "name")
if self.name is None:
frappe.throw(_("{0} {1} not found").format(_(VAR_0[0]), VAR_0[1]),
frappe.DoesNotExistError)
else:
self.name = VAR_0[1]
if 'for_update' in VAR_1:
self.flags.for_update = VAR_1.get('for_update')
self.load_from_db()
return
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
if VAR_1:
super(CLASS_0, self).__init__(VAR_1)
self.init_valid_columns()
else:
raise ValueError('Illegal arguments')
@staticmethod
def FUNC_2(VAR_6):
VAR_6.whitelisted = True
return VAR_6
def FUNC_3(self):
self.load_from_db()
def FUNC_4(self):
if not getattr(self, "_metaclass", False) and self.meta.issingle:
VAR_80 = frappe.db.get_singles_dict(self.doctype)
if not VAR_80:
single_doc = frappe.new_doc(self.doctype).as_dict()
VAR_80["name"] = self.doctype
del VAR_80["__islocal"]
super(CLASS_0, self).__init__(VAR_80)
self.init_valid_columns()
self._fix_numeric_types()
else:
VAR_21 = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
if not VAR_21:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(CLASS_0, self).__init__(VAR_21)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import DOCTYPE_TABLE_FIELDS
VAR_81 = DOCTYPE_TABLE_FIELDS
else:
VAR_81 = self.meta.get_table_fields()
for VAR_19 in VAR_81:
VAR_50 = frappe.db.get_values(VAR_19.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": VAR_19.fieldname},
"*", as_dict=True, order_by="idx asc")
if VAR_50:
self.set(VAR_19.fieldname, VAR_50)
else:
self.set(VAR_19.fieldname, [])
if hasattr(self, "__setup__"):
self.__setup__()
def FUNC_5(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def FUNC_6(self, VAR_7='read', VAR_8=None):
if not self.has_permission(VAR_7):
self.raise_no_permission_to(VAR_8 or VAR_7)
def FUNC_7(self, VAR_7="read", VAR_9=False):
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, VAR_7, self, VAR_9=verbose)
def FUNC_8(self, VAR_10):
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
def FUNC_9(self, VAR_11=None, VAR_12=None, VAR_13=False,
VAR_14=None, VAR_15=None, VAR_16=True):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
if VAR_12!=None:
self.flags.ignore_links = VAR_12
if VAR_14!=None:
self.flags.ignore_mandatory = VAR_14
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self._validate_links()
self.set_new_name(VAR_15=set_name, VAR_16=set_child_names)
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
try:
self.db_insert()
except frappe.DuplicateEntryError as e:
if not VAR_13:
raise e
for VAR_21 in self.get_all_children():
VAR_21.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
self.flags.update_log_for_doc_creation = True
self.run_post_save_methods()
self.flags.in_insert = False
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
follow_document(self.doctype, self.name, frappe.session.user)
return self
def FUNC_10(self, *VAR_0, **VAR_1):
return self._save(*VAR_0, **VAR_1)
def FUNC_11(self, VAR_11=None, VAR_17=None):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
self.flags.ignore_version = frappe.flags.in_test if VAR_17 is None else VAR_17
if self.get("__islocal") or not self.get("name"):
self.insert()
return
self.check_permission("write", "save")
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.set_parent_in_children()
self.set_name_in_children()
self.validate_higher_perm_levels()
self._validate_links()
self.run_before_save_methods()
if self._action != "cancel":
self._validate()
if self._action == "update_after_submit":
self.validate_update_after_submit()
self.set_docstatus()
if self.meta.issingle:
self.update_single(self.get_valid_dict())
else:
self.db_update()
self.update_children()
self.run_post_save_methods()
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
return self
def FUNC_12(self):
from frappe.desk.form.load import get_attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
VAR_82 = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
VAR_82.save()
def FUNC_13(self):
for VAR_19 in self.meta.get_table_fields():
self.update_child_table(VAR_19.fieldname, VAR_19)
def FUNC_14(self, VAR_18, VAR_19=None):
VAR_48 = []
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.db_update()
VAR_48.append(VAR_21.name)
if VAR_19.options in (self.flags.ignore_children_type or []):
return
if VAR_48:
VAR_83 = frappe.db.sql("""select VAR_3 from `tab{0}` where parent=%s
and VAR_24=%s and VAR_34=%s
and VAR_3 not in ({1})""".format(VAR_19.options, ','.join(['%s'] * len(VAR_48))),
[self.name, self.doctype, VAR_18] + VAR_48)
if len(VAR_83) > 0:
frappe.db.sql("""delete from `tab{0}` where VAR_3 in ({1})""".format(VAR_19.options,
','.join(['%s'] * len(VAR_83))), tuple(row[0] for row in VAR_83))
else:
frappe.db.sql("""delete from `tab{0}` where parent=%s
and VAR_24=%s and VAR_34=%s""".format(VAR_19.options),
(self.name, self.doctype, VAR_18))
def FUNC_15(self):
return getattr(self, '_doc_before_save', None)
def FUNC_16(self, VAR_18):
VAR_49 = self.get_doc_before_save()
return VAR_49.get(VAR_18)!=self.get(VAR_18) if VAR_49 else True
def FUNC_17(self, VAR_20=False, VAR_15=None, VAR_16=True):
if self.flags.name_set and not VAR_20:
return
if self.get("__newname"):
self.name = self.get("__newname")
self.flags.name_set = True
return
if VAR_15:
self.name = VAR_15
else:
FUNC_17(self)
if VAR_16:
for VAR_21 in self.get_all_children():
FUNC_17(VAR_21)
self.flags.name_set = True
def FUNC_18(self):
return self.get(self.meta.get_title_field())
def FUNC_19(self):
def FUNC_80():
VAR_84 = self.as_dict()
for VAR_43, VAR_26 in iteritems(VAR_84):
if VAR_26==None:
VAR_84[VAR_43] = ""
return VAR_84
if self.meta.get("title_field")=="title":
VAR_19 = self.meta.get_field(self.meta.title_field)
if VAR_19.options:
self.set(VAR_19.fieldname, VAR_19.options.format(**FUNC_80()))
elif self.is_new() and not self.get(VAR_19.fieldname) and VAR_19.default:
self.set(VAR_19.fieldname, VAR_19.default.format(**FUNC_80()))
def FUNC_20(self, VAR_21):
frappe.db.sql("""delete from `tabSingles` where VAR_2=%s""", self.doctype)
for field, VAR_26 in iteritems(VAR_21):
if field != "doctype":
frappe.db.sql("""insert into `tabSingles` (VAR_2, field, VAR_26)
VAR_84 (%s, %s, %s)""", (self.doctype, field, VAR_26))
if self.doctype in frappe.db.value_cache:
del frappe.db.value_cache[self.doctype]
def FUNC_21(self):
self._original_modified = self.modified
self.modified = now()
self.modified_by = frappe.session.user
if not self.creation:
self.creation = self.modified
if not self.owner:
self.owner = self.modified_by
for VAR_21 in self.get_all_children():
VAR_21.modified = self.modified
VAR_21.modified_by = self.modified_by
if not VAR_21.owner:
VAR_21.owner = self.owner
if not VAR_21.creation:
VAR_21.creation = self.creation
frappe.flags.currently_saving.append((self.doctype, self.name))
def FUNC_22(self):
if self.docstatus==None:
self.docstatus=0
for VAR_21 in self.get_all_children():
VAR_21.docstatus = self.docstatus
def FUNC_23(self):
self._validate_mandatory()
self._validate_data_fields()
self._validate_selects()
self._validate_non_negative()
self._validate_length()
self._extract_images_from_text_editor()
self._sanitize_content()
self._save_passwords()
self.validate_workflow()
VAR_50 = self.get_all_children()
for VAR_21 in VAR_50:
VAR_21._validate_data_fields()
VAR_21._validate_selects()
VAR_21._validate_non_negative()
VAR_21._validate_length()
VAR_21._extract_images_from_text_editor()
VAR_21._sanitize_content()
VAR_21._save_passwords()
if self.is_new():
for VAR_18 in optional_fields:
self.set(VAR_18, None)
else:
self.validate_set_only_once()
def FUNC_24(self):
def FUNC_81(VAR_19):
if self.parentfield:
return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(VAR_19.label)))
else:
return _("Value cannot be negative for {0}: {1}").format(_(VAR_19.parent), frappe.bold(_(VAR_19.label)))
for VAR_19 in self.meta.get('fields', {'non_negative': ('=', 1),
'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
if flt(self.get(VAR_19.fieldname)) < 0:
VAR_87 = FUNC_81(VAR_19)
frappe.throw(VAR_87, frappe.NonNegativeError, title=_("Negative Value"))
def FUNC_25(self):
if frappe.flags.in_install == 'frappe': return
VAR_51 = self.meta.get_workflow()
if VAR_51:
FUNC_25(self)
if not self._action == 'save':
set_workflow_state_on_action(self, VAR_51, self._action)
def FUNC_26(self):
VAR_52 = self.meta.get_set_only_once_fields()
if VAR_52 and self._doc_before_save:
for field in VAR_52:
VAR_97 = False
VAR_26 = self.get(field.fieldname)
VAR_53 = self._doc_before_save.get(field.fieldname)
if field.fieldtype in VAR_81:
VAR_97 = not self.is_child_table_same(field.fieldname)
elif field.fieldtype in ('Date', 'Datetime', 'Time'):
VAR_97 = str(VAR_26) != str(VAR_53)
else:
VAR_97 = VAR_26 != VAR_53
if VAR_97:
frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
frappe.CannotChangeConstantError)
return False
def FUNC_27(self, VAR_18):
VAR_26 = self.get(VAR_18)
VAR_53 = self._doc_before_save.get(VAR_18)
VAR_54 = True
if len(VAR_53) != len(VAR_26):
VAR_54 = False
else:
for i, VAR_21 in enumerate(VAR_53):
VAR_98 = VAR_26[i].as_dict(convert_dates_to_str = True)
VAR_99 = VAR_21.as_dict(convert_dates_to_str = True)
for VAR_43 in ('modified', 'modified_by', 'creation'):
del VAR_98[VAR_43]
del VAR_99[VAR_43]
if VAR_99 != VAR_98:
VAR_54 = False
break
return VAR_54
def FUNC_28(self):
if frappe.session.user == "Administrator":
return
VAR_55 = False
VAR_56 = self.meta.fields.copy()
for table_field in self.meta.get_table_fields():
VAR_56 += frappe.get_meta(table_field.options).fields or []
for VAR_19 in VAR_56:
if VAR_19.permlevel > 0:
VAR_55 = True
break
if not VAR_55:
return
VAR_57 = self.get_permlevel_access('read')
for VAR_19 in self.meta.fields:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_57:
self.set(VAR_19.fieldname, None)
for table_field in self.meta.get_table_fields():
for VAR_19 in frappe.get_meta(table_field.options).fields or []:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_57:
for child in self.get(table_field.fieldname) or []:
child.set(VAR_19.fieldname, None)
def FUNC_29(self):
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
VAR_57 = self.get_permlevel_access()
VAR_58 = self.meta.get_high_permlevel_fields()
if VAR_58:
self.reset_values_if_no_permlevel_access(VAR_57, VAR_58)
if self.is_new(): return
for VAR_19 in self.meta.get_table_fields():
VAR_58 = frappe.get_meta(VAR_19.options).get_high_permlevel_fields()
if VAR_58:
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.reset_values_if_no_permlevel_access(VAR_57, VAR_58)
def FUNC_30(self, VAR_22='write'):
if not hasattr(self, "_has_access_to"):
self._has_access_to = {}
self._has_access_to[VAR_22] = []
VAR_59 = frappe.get_roles()
for perm in self.get_permissions():
if perm.role in VAR_59 and perm.get(VAR_22):
if perm.permlevel not in self._has_access_to[VAR_22]:
self._has_access_to[VAR_22].append(perm.permlevel)
return self._has_access_to[VAR_22]
def FUNC_31(self, VAR_18, VAR_19=None, VAR_22='read'):
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
return VAR_19.permlevel in self.get_permlevel_access(VAR_22)
def FUNC_32(self):
if self.meta.istable:
permissions = frappe.get_meta(self.parenttype).permissions
else:
VAR_85 = self.meta.permissions
return VAR_85
def FUNC_33(self):
if frappe.flags.in_import:
return
VAR_60 = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(VAR_60)
for VAR_19 in self.meta.get_table_fields():
VAR_60 = frappe.new_doc(VAR_19.options, as_dict=True)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
for VAR_21 in VAR_26:
VAR_21.update_if_missing(VAR_60)
def FUNC_34(self):
VAR_61 = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
VAR_100 = frappe.db.sql("""select VAR_26 from tabSingles
where VAR_2=%s and field='modified' for update""", self.doctype)
VAR_100 = VAR_100 and VAR_100[0][0]
if VAR_100 and VAR_100 != cstr(self._original_modified):
VAR_61 = True
else:
VAR_101 = frappe.db.sql("""select VAR_100, VAR_23 from `tab{0}`
where VAR_3 = %s for update""".format(self.doctype), self.name, as_dict=True)
if not VAR_101:
frappe.throw(_("Record does not exist"))
else:
VAR_101 = tmp[0]
VAR_100 = cstr(VAR_101.modified)
if VAR_100 and VAR_100 != cstr(self._original_modified):
VAR_61 = True
self.check_docstatus_transition(VAR_101.docstatus)
if VAR_61:
frappe.msgprint(_("Error: CLASS_0 has been VAR_100 after you have opened it") \
+ (" (%s, %s). " % (VAR_100, self.modified)) \
+ _("Please refresh to get the latest document."),
VAR_33=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
def FUNC_35(self, VAR_23):
if not self.docstatus:
self.docstatus = 0
if VAR_23==0:
if self.docstatus==0:
self._action = "save"
elif self.docstatus==1:
self._action = "submit"
self.check_permission("submit")
else:
raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 0 to 2"))
elif VAR_23==1:
if self.docstatus==1:
self._action = "update_after_submit"
self.check_permission("submit")
elif self.docstatus==2:
self._action = "cancel"
self.check_permission("cancel")
else:
raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 1 to 0"))
elif VAR_23==2:
raise frappe.ValidationError(_("Cannot edit cancelled document"))
def FUNC_36(self):
for VAR_21 in self.get_all_children():
VAR_21.parent = self.name
VAR_21.parenttype = self.doctype
def FUNC_37(self):
for VAR_21 in self.get_all_children():
if not VAR_21.name:
FUNC_17(VAR_21)
def FUNC_38(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for VAR_21 in self.get_all_children():
if VAR_21.is_new() and self.meta.get_field(VAR_21.parentfield).allow_on_submit:
continue
VAR_21._validate_update_after_submit()
def FUNC_39(self):
if self.flags.ignore_mandatory:
return
VAR_62 = self._get_missing_mandatory_fields()
for VAR_21 in self.get_all_children():
VAR_62.extend(VAR_21._get_missing_mandatory_fields())
if not VAR_62:
return
for VAR_18, VAR_87 in VAR_62:
msgprint(VAR_87)
if frappe.flags.print_messages:
print(self.as_json().encode("utf-8"))
raise frappe.MandatoryError('[{VAR_2}, {VAR_3}]: {fields}'.format(
fields=", ".join((each[0] for each in VAR_62)),
VAR_2=self.doctype,
VAR_3=self.name))
def FUNC_40(self):
if self.flags.ignore_links or self._action == "cancel":
return
VAR_63, VAR_64 = self.get_invalid_links()
for VAR_21 in self.get_all_children():
VAR_86 = VAR_21.get_invalid_links(is_submittable=self.meta.is_submittable)
VAR_63.extend(VAR_86[0])
VAR_64.extend(VAR_86[1])
if VAR_63:
VAR_87 = ", ".join((each[2] for each in VAR_63))
frappe.throw(_("Could not find {0}").format(VAR_87),
frappe.LinkValidationError)
if VAR_64:
VAR_87 = ", ".join((each[2] for each in VAR_64))
frappe.throw(_("Cannot link cancelled document: {0}").format(VAR_87),
frappe.CancelledLinkError)
def FUNC_41(self, VAR_24=None):
VAR_65 = []
for VAR_19 in self.meta.get("fields", {"fieldtype": ['in', VAR_81]}):
if VAR_24:
if VAR_19.options==VAR_24:
return self.get(VAR_19.fieldname)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
VAR_65.extend(VAR_26)
return VAR_65
def FUNC_42(self, VAR_25, *VAR_0, **VAR_1):
if "flags" in VAR_1:
del VAR_1["flags"]
if hasattr(self, VAR_25) and hasattr(getattr(self, VAR_25), "__call__"):
VAR_72 = lambda self, *VAR_0, **VAR_1: getattr(self, VAR_25)(*VAR_0, **VAR_1)
else:
VAR_72 = lambda self, *VAR_0, **VAR_1: None
VAR_72.__name__ = str(VAR_25)
VAR_66 = CLASS_0.hook(VAR_72)(self, *VAR_0, **VAR_1)
self.run_notifications(VAR_25)
run_webhooks(self, VAR_25)
run_server_script_for_doc_event(self, VAR_25)
return VAR_66
def FUNC_43(self, VAR_25, *VAR_0, **VAR_1):
return self.run_method(VAR_25, *VAR_0, **VAR_1)
def FUNC_44(self, VAR_25):
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
VAR_88 = frappe.cache().hget('notifications', self.doctype)
if VAR_88==None:
VAR_88 = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, VAR_88)
self.flags.notifications = VAR_88
if not self.flags.notifications:
return
def FUNC_82(VAR_67):
if not VAR_67.name in self.flags.notifications_executed:
evaluate_alert(self, VAR_67.name, VAR_67.event)
self.flags.notifications_executed.append(VAR_67.name)
VAR_68 = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
VAR_68['on_change'] = 'Value Change'
for VAR_67 in self.flags.notifications:
VAR_89 = VAR_68.get(VAR_25, None)
if VAR_89 and VAR_67.event == VAR_89:
FUNC_82(VAR_67)
elif VAR_67.event=='Method' and VAR_25 == VAR_67.method:
FUNC_82(VAR_67)
@FUNC_2.__func__
def FUNC_45(self):
self.docstatus = 1
self.save()
@FUNC_2.__func__
def FUNC_46(self):
self.docstatus = 2
self.save()
@FUNC_2.__func__
def FUNC_47(self):
self._submit()
@FUNC_2.__func__
def FUNC_48(self):
self._cancel()
def FUNC_49(self, VAR_11=False):
frappe.delete_doc(self.doctype, self.name, VAR_11 = ignore_permissions, flags=self.flags)
def FUNC_50(self):
self.load_doc_before_save()
self.reset_seen()
if self._action in ("save", "submit"):
self.run_method("before_validate")
if self.flags.ignore_validate:
return
if self._action=="save":
self.run_method("validate")
self.run_method("before_save")
elif self._action=="submit":
self.run_method("validate")
self.run_method("before_submit")
elif self._action=="cancel":
self.run_method("before_cancel")
elif self._action=="update_after_submit":
self.run_method("before_update_after_submit")
self.set_title_field()
def FUNC_51(self):
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
def FUNC_52(self):
VAR_69 = self.get_doc_before_save()
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
update_global_search(self)
self.save_version()
self.run_method('on_change')
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
def FUNC_53(self):
frappe.clear_document_cache(self.doctype, self.name)
def FUNC_54(self):
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), VAR_27=False)
def FUNC_55(self):
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
VAR_2=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
VAR_90 = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", VAR_90, after_commit=True)
def FUNC_56(self, VAR_18, VAR_26=None, VAR_27=True, VAR_28=False, VAR_29=False):
if isinstance(VAR_18, dict):
self.update(VAR_18)
else:
self.set(VAR_18, VAR_26)
if VAR_27 and (self.doctype, self.name) not in frappe.flags.currently_saving:
self.set("modified", now())
self.set("modified_by", frappe.session.user)
self.load_doc_before_save()
self.run_method('before_change')
frappe.db.set_value(self.doctype, self.name, VAR_18, VAR_26,
self.modified, self.modified_by, VAR_27=update_modified)
self.run_method('on_change')
if VAR_28:
self.notify_update()
self.clear_cache()
if VAR_29:
frappe.db.commit()
def FUNC_57(self, VAR_18):
return frappe.db.get_value(self.doctype, self.name, VAR_18)
def FUNC_58(self):
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, VAR_25="Cancel")
check_if_doc_is_dynamically_linked(self, VAR_25="Cancel")
def FUNC_59(self):
if (not getattr(self.meta, 'track_changes', False)
or self.doctype == 'Version'
or self.flags.ignore_version
or frappe.flags.in_install
or (not self._doc_before_save and frappe.flags.in_patch)):
return
VAR_70 = frappe.new_doc('Version')
if not self._doc_before_save:
VAR_70.for_insert(self)
VAR_70.insert(VAR_11=True)
elif VAR_70.set_diff(self._doc_before_save, self):
VAR_70.insert(VAR_11=True)
if not frappe.flags.in_migrate:
follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def FUNC_60(VAR_6):
def FUNC_83(self, VAR_71):
if isinstance(VAR_71, dict):
if not self.get("_return_value"):
self._return_value = {}
self._return_value.update(VAR_71)
else:
self._return_value = VAR_71 or self.get("_return_value")
def FUNC_84(VAR_72, *VAR_73):
def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):
FUNC_83(self, VAR_72(self, *VAR_0, **VAR_1))
for VAR_6 in VAR_73:
FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **VAR_1))
return self._return_value
return FUNC_86
def FUNC_85(self, *VAR_0, **VAR_1):
VAR_73 = []
VAR_25 = VAR_6.__name__
VAR_91 = frappe.get_doc_hooks()
for handler in VAR_91.get(self.doctype, {}).get(VAR_25, []) \
+ VAR_91.get("*", {}).get(VAR_25, []):
VAR_73.append(frappe.get_attr(handler))
VAR_92 = FUNC_84(VAR_6, *VAR_73)
return VAR_92(self, VAR_25, *VAR_0, **VAR_1)
return FUNC_85
def FUNC_61(self, VAR_25):
VAR_72 = getattr(self, VAR_25, None)
if not VAR_72:
raise NotFound("Method {0} not found".format(VAR_25))
elif not getattr(VAR_72, "whitelisted", False):
raise Forbidden("Method {0} not whitelisted".format(VAR_25))
def FUNC_62(self, VAR_18, VAR_30, VAR_31, VAR_32=None, VAR_33=None):
VAR_74 = {
"in": _("one of"),
"not in": _("none of"),
"^": _("beginning with"),
}
if not VAR_32:
doc = self
VAR_75 = VAR_32.get_value(VAR_18)
VAR_19 = VAR_32.meta.get_field(VAR_18)
VAR_31 = VAR_32.cast(VAR_31, VAR_19)
if not frappe.compare(VAR_75, VAR_30, VAR_31):
VAR_93 = VAR_32.meta.get_label(VAR_18)
VAR_94 = VAR_74.get(VAR_30, condition)
if VAR_32.parentfield:
VAR_87 = _("Incorrect VAR_26 in row {0}: {1} must be {2} {3}").format(VAR_32.idx, VAR_93, VAR_94, VAR_31)
else:
VAR_87 = _("Incorrect VAR_26: {0} must be {1} {2}").format(VAR_93, VAR_94, VAR_31)
msgprint(VAR_87, VAR_33=raise_exception or True)
def FUNC_63(self, VAR_34, VAR_33=None):
if not (isinstance(self.get(VAR_34), list) and len(self.get(VAR_34)) > 0):
VAR_93 = self.meta.get_label(VAR_34)
frappe.throw(_("Table {0} cannot be empty").format(VAR_93), VAR_33 or frappe.EmptyTableError)
def FUNC_64(self, VAR_32, VAR_35=None):
if not VAR_35:
fieldnames = (VAR_19.fieldname for VAR_19 in
VAR_32.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for VAR_18 in VAR_35:
VAR_32.set(VAR_18, flt(VAR_32.get(VAR_18), self.precision(VAR_18, VAR_32.parentfield)))
def FUNC_65(self):
return "/app/Form/{VAR_2}/{VAR_3}".format(VAR_2=self.doctype, VAR_3=self.name)
def FUNC_66(self, VAR_36='Comment', VAR_37=None, VAR_38=None, VAR_39=None, VAR_40=None, VAR_41=None):
VAR_66 = frappe.get_doc({
"doctype":"Comment",
'comment_type': VAR_36,
"comment_email": VAR_38 or frappe.session.user,
"comment_by": VAR_41,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": VAR_37 or VAR_36,
"link_doctype": VAR_39,
"link_name": VAR_40
}).insert(VAR_11=True)
return VAR_66
def FUNC_67(self, VAR_42=None):
if not VAR_42:
user = frappe.session.user
if self.meta.track_seen:
VAR_95 = self.get('_seen') or []
VAR_95 = frappe.parse_json(VAR_95)
if VAR_42 not in VAR_95:
_seen.append(VAR_42)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(VAR_95), VAR_27=False)
frappe.local.flags.commit = True
def FUNC_68(self, VAR_42=None):
if not VAR_42:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(VAR_11=True)
frappe.local.flags.commit = True
def FUNC_69(self):
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def FUNC_70(self):
VAR_76 = getattr(self, "_liked_by", None)
if VAR_76:
return json.loads(VAR_76)
else:
return []
def FUNC_71(self, VAR_43, VAR_26):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[VAR_43] = VAR_26
def FUNC_72(self, VAR_43=None):
if not VAR_43:
return self.get("__onload", frappe._dict())
return self.get('__onload')[VAR_43]
def FUNC_73(self, VAR_4, **VAR_1):
from frappe.utils.background_jobs import enqueue
if hasattr(self, '_' + VAR_4):
action = '_' + VAR_4
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'))
self.lock()
enqueue('frappe.model.document.execute_action', VAR_2=self.doctype, VAR_3=self.name,
VAR_4=action, **VAR_1)
def FUNC_74(self, VAR_44=None):
VAR_77 = self.get_signature()
if file_lock.lock_exists(VAR_77):
VAR_96 = True
if VAR_44:
for i in range(VAR_44):
time.sleep(1)
if not file_lock.lock_exists(VAR_77):
VAR_96 = False
break
if VAR_96:
raise frappe.DocumentLockedError
file_lock.create_lock(VAR_77)
def FUNC_75(self):
file_lock.delete_lock(self.get_signature())
def FUNC_76(self, VAR_45, VAR_46):
if date_diff(self.get(VAR_46), self.get(VAR_45)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(VAR_46)),
frappe.bold(self.meta.get_label(VAR_45)),
), frappe.exceptions.InvalidDates)
def FUNC_77(self):
VAR_78 = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
VAR_79 = set([assignment.owner for assignment in VAR_78])
return VAR_79
def FUNC_78(self, VAR_47):
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, VAR_47)
def FUNC_79(self):
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def FUNC_1(VAR_2, VAR_3, VAR_4, **VAR_1):
VAR_32 = frappe.get_doc(VAR_2, VAR_3)
VAR_32.unlock()
try:
getattr(VAR_32, VAR_4)(**VAR_1)
except Exception:
frappe.db.rollback()
if frappe.local.message_log:
VAR_87 = json.loads(frappe.local.message_log[-1]).get('message')
else:
VAR_87 = '<pre><code>' + frappe.get_traceback() + '</pre></code>'
VAR_32.add_comment('Comment', _('Action Failed') + '<br><br>' + VAR_87)
VAR_32.notify_update()
|
from __future__ import unicode_literals, print_function
import .frappe
import time
from frappe import _, msgprint, FUNC_61
from frappe.utils import .flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import .set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, VAR_82
from frappe.model.workflow import .validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import .follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
def FUNC_0(*VAR_0, **VAR_1):
if VAR_0:
if isinstance(VAR_0[0], BaseDocument):
return VAR_0[0]
elif isinstance(VAR_0[0], string_types):
VAR_2 = VAR_0[0]
elif isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
else:
raise ValueError('First non keyword argument must be a string or dict')
if len(VAR_0) < 2 and VAR_1:
if 'doctype' in VAR_1:
VAR_2 = VAR_1['doctype']
else:
raise ValueError('"doctype" is a required key')
VAR_5 = get_controller(VAR_2)
if VAR_5:
return VAR_5(*VAR_0, **VAR_1)
raise ImportError(VAR_2)
class CLASS_0(BaseDocument):
def __init__(self, *VAR_0, **VAR_1):
self.doctype = self.name = None
self._default_new_docs = {}
self.flags = frappe._dict()
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], string_types):
if len(VAR_0)==1:
self.doctype = self.name = VAR_0[0]
else:
self.doctype = VAR_0[0]
if isinstance(VAR_0[1], dict):
self.name = frappe.db.get_value(VAR_0[0], VAR_0[1], "name")
if self.name is None:
frappe.throw(_("{0} {1} not found").format(_(VAR_0[0]), VAR_0[1]),
frappe.DoesNotExistError)
else:
self.name = VAR_0[1]
if 'for_update' in VAR_1:
self.flags.for_update = VAR_1.get('for_update')
self.load_from_db()
return
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
if VAR_1:
super(CLASS_0, self).__init__(VAR_1)
self.init_valid_columns()
else:
raise ValueError('Illegal arguments')
@staticmethod
def FUNC_2(VAR_6):
frappe.whitelist()(VAR_6)
return VAR_6
def FUNC_3(self):
self.load_from_db()
def FUNC_4(self):
if not getattr(self, "_metaclass", False) and self.meta.issingle:
VAR_81 = frappe.db.get_singles_dict(self.doctype)
if not VAR_81:
single_doc = frappe.new_doc(self.doctype).as_dict()
VAR_81["name"] = self.doctype
del VAR_81["__islocal"]
super(CLASS_0, self).__init__(VAR_81)
self.init_valid_columns()
self._fix_numeric_types()
else:
VAR_21 = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
if not VAR_21:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(CLASS_0, self).__init__(VAR_21)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import DOCTYPE_TABLE_FIELDS
VAR_82 = DOCTYPE_TABLE_FIELDS
else:
VAR_82 = self.meta.get_table_fields()
for VAR_19 in VAR_82:
VAR_52 = frappe.db.get_values(VAR_19.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": VAR_19.fieldname},
"*", as_dict=True, order_by="idx asc")
if VAR_52:
self.set(VAR_19.fieldname, VAR_52)
else:
self.set(VAR_19.fieldname, [])
if hasattr(self, "__setup__"):
self.__setup__()
def FUNC_5(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def FUNC_6(self, VAR_7='read', VAR_8=None):
if not self.has_permission(VAR_7):
self.raise_no_permission_to(VAR_8 or VAR_7)
def FUNC_7(self, VAR_7="read", VAR_9=False):
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, VAR_7, self, VAR_9=verbose)
def FUNC_8(self, VAR_10):
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
def FUNC_9(self, VAR_11=None, VAR_12=None, VAR_13=False,
VAR_14=None, VAR_15=None, VAR_16=True):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
if VAR_12!=None:
self.flags.ignore_links = VAR_12
if VAR_14!=None:
self.flags.ignore_mandatory = VAR_14
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self._validate_links()
self.set_new_name(VAR_15=set_name, VAR_16=set_child_names)
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
try:
self.db_insert()
except frappe.DuplicateEntryError as e:
if not VAR_13:
raise e
for VAR_21 in self.get_all_children():
VAR_21.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
self.flags.update_log_for_doc_creation = True
self.run_post_save_methods()
self.flags.in_insert = False
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
follow_document(self.doctype, self.name, frappe.session.user)
return self
def FUNC_10(self, *VAR_0, **VAR_1):
return self._save(*VAR_0, **VAR_1)
def FUNC_11(self, VAR_11=None, VAR_17=None):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
self.flags.ignore_version = frappe.flags.in_test if VAR_17 is None else VAR_17
if self.get("__islocal") or not self.get("name"):
self.insert()
return
self.check_permission("write", "save")
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.set_parent_in_children()
self.set_name_in_children()
self.validate_higher_perm_levels()
self._validate_links()
self.run_before_save_methods()
if self._action != "cancel":
self._validate()
if self._action == "update_after_submit":
self.validate_update_after_submit()
self.set_docstatus()
if self.meta.issingle:
self.update_single(self.get_valid_dict())
else:
self.db_update()
self.update_children()
self.run_post_save_methods()
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
return self
def FUNC_12(self):
from frappe.desk.form.load import get_attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
VAR_83 = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
VAR_83.save()
def FUNC_13(self):
for VAR_19 in self.meta.get_table_fields():
self.update_child_table(VAR_19.fieldname, VAR_19)
def FUNC_14(self, VAR_18, VAR_19=None):
VAR_50 = []
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.db_update()
VAR_50.append(VAR_21.name)
if VAR_19.options in (self.flags.ignore_children_type or []):
return
if VAR_50:
VAR_84 = frappe.db.sql("""select VAR_3 from `tab{0}` where parent=%s
and VAR_24=%s and VAR_36=%s
and VAR_3 not in ({1})""".format(VAR_19.options, ','.join(['%s'] * len(VAR_50))),
[self.name, self.doctype, VAR_18] + VAR_50)
if len(VAR_84) > 0:
frappe.db.sql("""delete from `tab{0}` where VAR_3 in ({1})""".format(VAR_19.options,
','.join(['%s'] * len(VAR_84))), tuple(row[0] for row in VAR_84))
else:
frappe.db.sql("""delete from `tab{0}` where parent=%s
and VAR_24=%s and VAR_36=%s""".format(VAR_19.options),
(self.name, self.doctype, VAR_18))
def FUNC_15(self):
return getattr(self, '_doc_before_save', None)
def FUNC_16(self, VAR_18):
VAR_51 = self.get_doc_before_save()
return VAR_51.get(VAR_18)!=self.get(VAR_18) if VAR_51 else True
def FUNC_17(self, VAR_20=False, VAR_15=None, VAR_16=True):
if self.flags.name_set and not VAR_20:
return
if self.get("__newname"):
self.name = self.get("__newname")
self.flags.name_set = True
return
if VAR_15:
self.name = VAR_15
else:
FUNC_17(self)
if VAR_16:
for VAR_21 in self.get_all_children():
FUNC_17(VAR_21)
self.flags.name_set = True
def FUNC_18(self):
return self.get(self.meta.get_title_field())
def FUNC_19(self):
def FUNC_80():
VAR_85 = self.as_dict()
for VAR_45, VAR_26 in iteritems(VAR_85):
if VAR_26==None:
VAR_85[VAR_45] = ""
return VAR_85
if self.meta.get("title_field")=="title":
VAR_19 = self.meta.get_field(self.meta.title_field)
if VAR_19.options:
self.set(VAR_19.fieldname, VAR_19.options.format(**FUNC_80()))
elif self.is_new() and not self.get(VAR_19.fieldname) and VAR_19.default:
self.set(VAR_19.fieldname, VAR_19.default.format(**FUNC_80()))
def FUNC_20(self, VAR_21):
frappe.db.sql("""delete from `tabSingles` where VAR_2=%s""", self.doctype)
for field, VAR_26 in iteritems(VAR_21):
if field != "doctype":
frappe.db.sql("""insert into `tabSingles` (VAR_2, field, VAR_26)
VAR_85 (%s, %s, %s)""", (self.doctype, field, VAR_26))
if self.doctype in frappe.db.value_cache:
del frappe.db.value_cache[self.doctype]
def FUNC_21(self):
self._original_modified = self.modified
self.modified = now()
self.modified_by = frappe.session.user
if not self.creation:
self.creation = self.modified
if not self.owner:
self.owner = self.modified_by
for VAR_21 in self.get_all_children():
VAR_21.modified = self.modified
VAR_21.modified_by = self.modified_by
if not VAR_21.owner:
VAR_21.owner = self.owner
if not VAR_21.creation:
VAR_21.creation = self.creation
frappe.flags.currently_saving.append((self.doctype, self.name))
def FUNC_22(self):
if self.docstatus==None:
self.docstatus=0
for VAR_21 in self.get_all_children():
VAR_21.docstatus = self.docstatus
def FUNC_23(self):
self._validate_mandatory()
self._validate_data_fields()
self._validate_selects()
self._validate_non_negative()
self._validate_length()
self._extract_images_from_text_editor()
self._sanitize_content()
self._save_passwords()
self.validate_workflow()
VAR_52 = self.get_all_children()
for VAR_21 in VAR_52:
VAR_21._validate_data_fields()
VAR_21._validate_selects()
VAR_21._validate_non_negative()
VAR_21._validate_length()
VAR_21._extract_images_from_text_editor()
VAR_21._sanitize_content()
VAR_21._save_passwords()
if self.is_new():
for VAR_18 in optional_fields:
self.set(VAR_18, None)
else:
self.validate_set_only_once()
def FUNC_24(self):
def FUNC_81(VAR_19):
if self.parentfield:
return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(VAR_19.label)))
else:
return _("Value cannot be negative for {0}: {1}").format(_(VAR_19.parent), frappe.bold(_(VAR_19.label)))
for VAR_19 in self.meta.get('fields', {'non_negative': ('=', 1),
'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
if flt(self.get(VAR_19.fieldname)) < 0:
VAR_88 = FUNC_81(VAR_19)
frappe.throw(VAR_88, frappe.NonNegativeError, title=_("Negative Value"))
def FUNC_25(self):
if frappe.flags.in_install == 'frappe': return
VAR_53 = self.meta.get_workflow()
if VAR_53:
FUNC_25(self)
if not self._action == 'save':
set_workflow_state_on_action(self, VAR_53, self._action)
def FUNC_26(self):
VAR_54 = self.meta.get_set_only_once_fields()
if VAR_54 and self._doc_before_save:
for field in VAR_54:
VAR_98 = False
VAR_26 = self.get(field.fieldname)
VAR_55 = self._doc_before_save.get(field.fieldname)
if field.fieldtype in VAR_82:
VAR_98 = not self.is_child_table_same(field.fieldname)
elif field.fieldtype in ('Date', 'Datetime', 'Time'):
VAR_98 = str(VAR_26) != str(VAR_55)
else:
VAR_98 = VAR_26 != VAR_55
if VAR_98:
frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
frappe.CannotChangeConstantError)
return False
def FUNC_27(self, VAR_18):
VAR_26 = self.get(VAR_18)
VAR_55 = self._doc_before_save.get(VAR_18)
VAR_56 = True
if len(VAR_55) != len(VAR_26):
VAR_56 = False
else:
for i, VAR_21 in enumerate(VAR_55):
VAR_99 = VAR_26[i].as_dict(convert_dates_to_str = True)
VAR_100 = VAR_21.as_dict(convert_dates_to_str = True)
for VAR_45 in ('modified', 'modified_by', 'creation'):
del VAR_99[VAR_45]
del VAR_100[VAR_45]
if VAR_100 != VAR_99:
VAR_56 = False
break
return VAR_56
def FUNC_28(self):
if frappe.session.user == "Administrator":
return
VAR_57 = False
VAR_58 = self.meta.fields.copy()
for table_field in self.meta.get_table_fields():
VAR_58 += frappe.get_meta(table_field.options).fields or []
for VAR_19 in VAR_58:
if VAR_19.permlevel > 0:
VAR_57 = True
break
if not VAR_57:
return
VAR_59 = self.get_permlevel_access('read')
for VAR_19 in self.meta.fields:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_59:
self.set(VAR_19.fieldname, None)
for table_field in self.meta.get_table_fields():
for VAR_19 in frappe.get_meta(table_field.options).fields or []:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_59:
for child in self.get(table_field.fieldname) or []:
child.set(VAR_19.fieldname, None)
def FUNC_29(self):
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
VAR_59 = self.get_permlevel_access()
VAR_60 = self.meta.get_high_permlevel_fields()
if VAR_60:
self.reset_values_if_no_permlevel_access(VAR_59, VAR_60)
if self.is_new(): return
for VAR_19 in self.meta.get_table_fields():
VAR_60 = frappe.get_meta(VAR_19.options).get_high_permlevel_fields()
if VAR_60:
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.reset_values_if_no_permlevel_access(VAR_59, VAR_60)
def FUNC_30(self, VAR_22='write'):
if not hasattr(self, "_has_access_to"):
self._has_access_to = {}
self._has_access_to[VAR_22] = []
VAR_61 = frappe.get_roles()
for perm in self.get_permissions():
if perm.role in VAR_61 and perm.get(VAR_22):
if perm.permlevel not in self._has_access_to[VAR_22]:
self._has_access_to[VAR_22].append(perm.permlevel)
return self._has_access_to[VAR_22]
def FUNC_31(self, VAR_18, VAR_19=None, VAR_22='read'):
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
return VAR_19.permlevel in self.get_permlevel_access(VAR_22)
def FUNC_32(self):
if self.meta.istable:
permissions = frappe.get_meta(self.parenttype).permissions
else:
VAR_86 = self.meta.permissions
return VAR_86
def FUNC_33(self):
if frappe.flags.in_import:
return
VAR_62 = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(VAR_62)
for VAR_19 in self.meta.get_table_fields():
VAR_62 = frappe.new_doc(VAR_19.options, as_dict=True)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
for VAR_21 in VAR_26:
VAR_21.update_if_missing(VAR_62)
def FUNC_34(self):
VAR_63 = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
VAR_101 = frappe.db.sql("""select VAR_26 from tabSingles
where VAR_2=%s and field='modified' for update""", self.doctype)
VAR_101 = VAR_101 and VAR_101[0][0]
if VAR_101 and VAR_101 != cstr(self._original_modified):
VAR_63 = True
else:
VAR_102 = frappe.db.sql("""select VAR_101, VAR_23 from `tab{0}`
where VAR_3 = %s for update""".format(self.doctype), self.name, as_dict=True)
if not VAR_102:
frappe.throw(_("Record does not exist"))
else:
VAR_102 = tmp[0]
VAR_101 = cstr(VAR_102.modified)
if VAR_101 and VAR_101 != cstr(self._original_modified):
VAR_63 = True
self.check_docstatus_transition(VAR_102.docstatus)
if VAR_63:
frappe.msgprint(_("Error: CLASS_0 has been VAR_101 after you have opened it") \
+ (" (%s, %s). " % (VAR_101, self.modified)) \
+ _("Please refresh to get the latest document."),
VAR_35=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
def FUNC_35(self, VAR_23):
if not self.docstatus:
self.docstatus = 0
if VAR_23==0:
if self.docstatus==0:
self._action = "save"
elif self.docstatus==1:
self._action = "submit"
self.check_permission("submit")
else:
raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 0 to 2"))
elif VAR_23==1:
if self.docstatus==1:
self._action = "update_after_submit"
self.check_permission("submit")
elif self.docstatus==2:
self._action = "cancel"
self.check_permission("cancel")
else:
raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 1 to 0"))
elif VAR_23==2:
raise frappe.ValidationError(_("Cannot edit cancelled document"))
def FUNC_36(self):
for VAR_21 in self.get_all_children():
VAR_21.parent = self.name
VAR_21.parenttype = self.doctype
def FUNC_37(self):
for VAR_21 in self.get_all_children():
if not VAR_21.name:
FUNC_17(VAR_21)
def FUNC_38(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for VAR_21 in self.get_all_children():
if VAR_21.is_new() and self.meta.get_field(VAR_21.parentfield).allow_on_submit:
continue
VAR_21._validate_update_after_submit()
def FUNC_39(self):
if self.flags.ignore_mandatory:
return
VAR_64 = self._get_missing_mandatory_fields()
for VAR_21 in self.get_all_children():
VAR_64.extend(VAR_21._get_missing_mandatory_fields())
if not VAR_64:
return
for VAR_18, VAR_88 in VAR_64:
msgprint(VAR_88)
if frappe.flags.print_messages:
print(self.as_json().encode("utf-8"))
raise frappe.MandatoryError('[{VAR_2}, {VAR_3}]: {fields}'.format(
fields=", ".join((each[0] for each in VAR_64)),
VAR_2=self.doctype,
VAR_3=self.name))
def FUNC_40(self):
if self.flags.ignore_links or self._action == "cancel":
return
VAR_65, VAR_66 = self.get_invalid_links()
for VAR_21 in self.get_all_children():
VAR_87 = VAR_21.get_invalid_links(is_submittable=self.meta.is_submittable)
VAR_65.extend(VAR_87[0])
VAR_66.extend(VAR_87[1])
if VAR_65:
VAR_88 = ", ".join((each[2] for each in VAR_65))
frappe.throw(_("Could not find {0}").format(VAR_88),
frappe.LinkValidationError)
if VAR_66:
VAR_88 = ", ".join((each[2] for each in VAR_66))
frappe.throw(_("Cannot link cancelled document: {0}").format(VAR_88),
frappe.CancelledLinkError)
def FUNC_41(self, VAR_24=None):
VAR_67 = []
for VAR_19 in self.meta.get("fields", {"fieldtype": ['in', VAR_82]}):
if VAR_24:
if VAR_19.options==VAR_24:
return self.get(VAR_19.fieldname)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
VAR_67.extend(VAR_26)
return VAR_67
def FUNC_42(self, VAR_25, *VAR_0, **VAR_1):
if "flags" in VAR_1:
del VAR_1["flags"]
if hasattr(self, VAR_25) and hasattr(getattr(self, VAR_25), "__call__"):
VAR_6 = lambda self, *VAR_0, **VAR_1: getattr(self, VAR_25)(*VAR_0, **VAR_1)
else:
VAR_6 = lambda self, *VAR_0, **VAR_1: None
VAR_6.__name__ = str(VAR_25)
VAR_68 = CLASS_0.hook(VAR_6)(self, *VAR_0, **VAR_1)
self.run_notifications(VAR_25)
run_webhooks(self, VAR_25)
run_server_script_for_doc_event(self, VAR_25)
return VAR_68
def FUNC_43(self, VAR_25, *VAR_0, **VAR_1):
return self.run_method(VAR_25, *VAR_0, **VAR_1)
def FUNC_44(self, VAR_25):
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
VAR_89 = frappe.cache().hget('notifications', self.doctype)
if VAR_89==None:
VAR_89 = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, VAR_89)
self.flags.notifications = VAR_89
if not self.flags.notifications:
return
def FUNC_82(VAR_69):
if not VAR_69.name in self.flags.notifications_executed:
evaluate_alert(self, VAR_69.name, VAR_69.event)
self.flags.notifications_executed.append(VAR_69.name)
VAR_70 = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
VAR_70['on_change'] = 'Value Change'
for VAR_69 in self.flags.notifications:
VAR_90 = VAR_70.get(VAR_25, None)
if VAR_90 and VAR_69.event == VAR_90:
FUNC_82(VAR_69)
elif VAR_69.event=='Method' and VAR_25 == VAR_69.method:
FUNC_82(VAR_69)
@FUNC_2.__func__
def FUNC_45(self):
self.docstatus = 1
self.save()
@FUNC_2.__func__
def FUNC_46(self):
self.docstatus = 2
self.save()
@FUNC_2.__func__
def FUNC_47(self):
self._submit()
@FUNC_2.__func__
def FUNC_48(self):
self._cancel()
def FUNC_49(self, VAR_11=False):
frappe.delete_doc(self.doctype, self.name, VAR_11 = ignore_permissions, flags=self.flags)
def FUNC_50(self):
self.load_doc_before_save()
self.reset_seen()
if self._action in ("save", "submit"):
self.run_method("before_validate")
if self.flags.ignore_validate:
return
if self._action=="save":
self.run_method("validate")
self.run_method("before_save")
elif self._action=="submit":
self.run_method("validate")
self.run_method("before_submit")
elif self._action=="cancel":
self.run_method("before_cancel")
elif self._action=="update_after_submit":
self.run_method("before_update_after_submit")
self.set_title_field()
def FUNC_51(self):
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
def FUNC_52(self):
VAR_71 = self.get_doc_before_save()
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
update_global_search(self)
self.save_version()
self.run_method('on_change')
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
def FUNC_53(self):
frappe.clear_document_cache(self.doctype, self.name)
def FUNC_54(self):
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), VAR_27=False)
def FUNC_55(self):
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
VAR_2=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
VAR_91 = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", VAR_91, after_commit=True)
def FUNC_56(self, VAR_18, VAR_26=None, VAR_27=True, VAR_28=False, VAR_29=False):
if isinstance(VAR_18, dict):
self.update(VAR_18)
else:
self.set(VAR_18, VAR_26)
if VAR_27 and (self.doctype, self.name) not in frappe.flags.currently_saving:
self.set("modified", now())
self.set("modified_by", frappe.session.user)
self.load_doc_before_save()
self.run_method('before_change')
frappe.db.set_value(self.doctype, self.name, VAR_18, VAR_26,
self.modified, self.modified_by, VAR_27=update_modified)
self.run_method('on_change')
if VAR_28:
self.notify_update()
self.clear_cache()
if VAR_29:
frappe.db.commit()
def FUNC_57(self, VAR_18):
return frappe.db.get_value(self.doctype, self.name, VAR_18)
def FUNC_58(self):
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, VAR_25="Cancel")
check_if_doc_is_dynamically_linked(self, VAR_25="Cancel")
def FUNC_59(self):
if (not getattr(self.meta, 'track_changes', False)
or self.doctype == 'Version'
or self.flags.ignore_version
or frappe.flags.in_install
or (not self._doc_before_save and frappe.flags.in_patch)):
return
VAR_72 = frappe.new_doc('Version')
if not self._doc_before_save:
VAR_72.for_insert(self)
VAR_72.insert(VAR_11=True)
elif VAR_72.set_diff(self._doc_before_save, self):
VAR_72.insert(VAR_11=True)
if not frappe.flags.in_migrate:
follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def FUNC_60(VAR_30):
def FUNC_83(self, VAR_73):
if isinstance(VAR_73, dict):
if not self.get("_return_value"):
self._return_value = {}
self._return_value.update(VAR_73)
else:
self._return_value = VAR_73 or self.get("_return_value")
def FUNC_84(VAR_6, *VAR_74):
def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):
FUNC_83(self, VAR_6(self, *VAR_0, **VAR_1))
for VAR_30 in VAR_74:
FUNC_83(self, VAR_30(self, VAR_25, *VAR_0, **VAR_1))
return self._return_value
return FUNC_86
def FUNC_85(self, *VAR_0, **VAR_1):
VAR_74 = []
VAR_25 = VAR_30.__name__
VAR_92 = frappe.get_doc_hooks()
for handler in VAR_92.get(self.doctype, {}).get(VAR_25, []) \
+ VAR_92.get("*", {}).get(VAR_25, []):
VAR_74.append(frappe.get_attr(handler))
VAR_93 = FUNC_84(VAR_30, *VAR_74)
return VAR_93(self, VAR_25, *VAR_0, **VAR_1)
return FUNC_85
def FUNC_61(self, VAR_31):
VAR_25 = getattr(self, VAR_31, None)
if not VAR_6:
raise NotFound("Method {0} not found".format(VAR_31))
FUNC_61(getattr(VAR_25, '__func__', VAR_25))
def FUNC_62(self, VAR_18, VAR_32, VAR_33, VAR_34=None, VAR_35=None):
VAR_75 = {
"in": _("one of"),
"not in": _("none of"),
"^": _("beginning with"),
}
if not VAR_34:
doc = self
VAR_76 = VAR_34.get_value(VAR_18)
VAR_19 = VAR_34.meta.get_field(VAR_18)
VAR_33 = VAR_34.cast(VAR_33, VAR_19)
if not frappe.compare(VAR_76, VAR_32, VAR_33):
VAR_94 = VAR_34.meta.get_label(VAR_18)
VAR_95 = VAR_75.get(VAR_32, condition)
if VAR_34.parentfield:
VAR_88 = _("Incorrect VAR_26 in row {0}: {1} must be {2} {3}").format(VAR_34.idx, VAR_94, VAR_95, VAR_33)
else:
VAR_88 = _("Incorrect VAR_26: {0} must be {1} {2}").format(VAR_94, VAR_95, VAR_33)
msgprint(VAR_88, VAR_35=raise_exception or True)
def FUNC_63(self, VAR_36, VAR_35=None):
if not (isinstance(self.get(VAR_36), list) and len(self.get(VAR_36)) > 0):
VAR_94 = self.meta.get_label(VAR_36)
frappe.throw(_("Table {0} cannot be empty").format(VAR_94), VAR_35 or frappe.EmptyTableError)
def FUNC_64(self, VAR_34, VAR_37=None):
if not VAR_37:
fieldnames = (VAR_19.fieldname for VAR_19 in
VAR_34.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for VAR_18 in VAR_37:
VAR_34.set(VAR_18, flt(VAR_34.get(VAR_18), self.precision(VAR_18, VAR_34.parentfield)))
def FUNC_65(self):
return "/app/Form/{VAR_2}/{VAR_3}".format(VAR_2=self.doctype, VAR_3=self.name)
def FUNC_66(self, VAR_38='Comment', VAR_39=None, VAR_40=None, VAR_41=None, VAR_42=None, VAR_43=None):
VAR_68 = frappe.get_doc({
"doctype":"Comment",
'comment_type': VAR_38,
"comment_email": VAR_40 or frappe.session.user,
"comment_by": VAR_43,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": VAR_39 or VAR_38,
"link_doctype": VAR_41,
"link_name": VAR_42
}).insert(VAR_11=True)
return VAR_68
def FUNC_67(self, VAR_44=None):
if not VAR_44:
user = frappe.session.user
if self.meta.track_seen:
VAR_96 = self.get('_seen') or []
VAR_96 = frappe.parse_json(VAR_96)
if VAR_44 not in VAR_96:
_seen.append(VAR_44)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(VAR_96), VAR_27=False)
frappe.local.flags.commit = True
def FUNC_68(self, VAR_44=None):
if not VAR_44:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(VAR_11=True)
frappe.local.flags.commit = True
def FUNC_69(self):
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def FUNC_70(self):
VAR_77 = getattr(self, "_liked_by", None)
if VAR_77:
return json.loads(VAR_77)
else:
return []
def FUNC_71(self, VAR_45, VAR_26):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[VAR_45] = VAR_26
def FUNC_72(self, VAR_45=None):
if not VAR_45:
return self.get("__onload", frappe._dict())
return self.get('__onload')[VAR_45]
def FUNC_73(self, VAR_4, **VAR_1):
from frappe.utils.background_jobs import enqueue
if hasattr(self, '_' + VAR_4):
action = '_' + VAR_4
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'))
self.lock()
enqueue('frappe.model.document.execute_action', VAR_2=self.doctype, VAR_3=self.name,
VAR_4=action, **VAR_1)
def FUNC_74(self, VAR_46=None):
VAR_78 = self.get_signature()
if file_lock.lock_exists(VAR_78):
VAR_97 = True
if VAR_46:
for i in range(VAR_46):
time.sleep(1)
if not file_lock.lock_exists(VAR_78):
VAR_97 = False
break
if VAR_97:
raise frappe.DocumentLockedError
file_lock.create_lock(VAR_78)
def FUNC_75(self):
file_lock.delete_lock(self.get_signature())
def FUNC_76(self, VAR_47, VAR_48):
if date_diff(self.get(VAR_48), self.get(VAR_47)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(VAR_48)),
frappe.bold(self.meta.get_label(VAR_47)),
), frappe.exceptions.InvalidDates)
def FUNC_77(self):
VAR_79 = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
VAR_80 = set([assignment.owner for assignment in VAR_79])
return VAR_80
def FUNC_78(self, VAR_49):
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, VAR_49)
def FUNC_79(self):
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def FUNC_1(VAR_2, VAR_3, VAR_4, **VAR_1):
VAR_34 = frappe.get_doc(VAR_2, VAR_3)
VAR_34.unlock()
try:
getattr(VAR_34, VAR_4)(**VAR_1)
except Exception:
frappe.db.rollback()
if frappe.local.message_log:
VAR_88 = json.loads(frappe.local.message_log[-1]).get('message')
else:
VAR_88 = '<pre><code>' + frappe.get_traceback() + '</pre></code>'
VAR_34.add_comment('Comment', _('Action Failed') + '<br><br>' + VAR_88)
VAR_34.notify_update()
| [
1,
2,
3,
21,
22,
23,
24,
27,
31,
33,
34,
36,
37,
45,
46,
48,
49,
54,
58,
60,
62,
65,
71,
75,
77,
82,
85,
92,
94,
96,
101,
108,
111,
114,
116,
118,
120,
123,
125,
127,
133,
137,
147,
151,
156,
158,
164,
173,
174,
177,
182,
187,
191,
196,
201,
207,
211,
213,
216,
219,
222,
224,
235,
241,
242,
243,
244,
253,
254,
257,
260,
263,
264,
265,
269,
270,
273,
274,
277,
281,
285,
289,
292,
297,
299,
302,
304,
308,
310,
316,
320,
323,
326,
328,
329,
334,
337,
338,
341,
343,
347,
348,
350,
351,
360,
361,
366,
372,
376,
378,
379,
381,
383,
389,
392,
394,
398,
401,
406,
409,
412,
413,
418,
423,
425,
428,
430,
434,
439,
444,
447,
451,
453,
461,
464,
473,
481,
483,
487,
490,
501,
512,
517,
525,
528,
532,
541,
545,
547,
552,
559,
563,
565,
571,
575,
579,
580,
584,
588,
590,
593,
596,
598,
602,
607,
610,
612,
616,
622,
627,
630,
633,
636,
637,
639,
640,
646,
650,
657,
659,
663,
665,
668,
672,
674,
678,
681,
682,
689,
695,
710,
715,
717,
720,
722,
730,
734,
739,
751,
761,
764,
770,
772,
776,
780,
784,
786,
788,
789,
790,
794,
798,
801,
804,
807,
812,
816,
818,
823,
828,
833,
845,
850,
854,
856,
859,
863,
865,
868,
873,
876,
878,
886,
889,
894,
901,
903,
905,
912,
918,
924,
929,
934,
938,
941,
946,
948,
951,
952,
955,
958,
969,
971,
981,
984,
989,
991,
1002,
1003,
1006,
1008,
1010,
1012,
1015,
1017,
1020,
1025,
1029,
1032,
1041,
1044,
1047,
1058,
1060,
1061,
1064,
1066,
1068,
1071,
1073,
1076,
1080,
1084,
1091,
1094,
1095,
1102,
1110,
1112,
1116,
1127,
1133,
1135,
1137,
1145,
1148,
1150,
1157,
1166,
1169,
1171,
1174,
1182,
1183,
1185,
1191,
1194,
1200,
1203,
1207,
1210,
1212,
1225,
1230,
1234,
1239,
1244,
1253,
1257,
1264,
1269,
1273,
1275,
1279,
1280,
1281,
1283,
1286,
1290,
1294,
1298,
1312,
1316,
1317,
1327,
1336,
1339,
1344,
1349,
1358,
1359,
1364,
1367,
1368,
1369,
1370,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
79,
1351,
81,
82,
83,
84,
85,
86,
87,
88,
130,
135,
139,
140,
184,
189,
190,
191,
192,
198,
204,
205,
206,
207,
208,
283,
287,
288,
289,
290,
291,
292,
293,
294,
345,
363,
368,
403,
408,
432,
436,
455,
534,
543,
567,
592,
624,
691,
692,
693,
694,
695,
696,
697,
732,
733,
734,
735,
736,
737,
738,
739,
740,
766,
835,
847,
870,
915,
921,
927,
932,
936,
940,
941,
942,
943,
944,
945,
946,
947,
973,
983,
984,
985,
986,
987,
988,
1022,
1027,
1043,
1044,
1045,
1046,
1047,
1048,
1049,
1050,
1051,
1052,
1053,
1082,
1086,
1093,
1115,
1116,
1117,
1118,
1119,
1159,
1160,
1187,
1193,
1194,
1195,
1196,
1205,
1209,
1210,
1211,
1227,
1241,
1255,
1277,
1278,
1296,
1297,
1298,
1299,
1314,
1319,
1320,
1321,
1341,
1346
] | [
1,
2,
3,
21,
22,
23,
24,
27,
31,
33,
34,
36,
37,
45,
46,
48,
49,
54,
58,
60,
62,
65,
71,
75,
77,
82,
85,
92,
94,
96,
101,
108,
111,
114,
116,
118,
120,
123,
125,
127,
133,
137,
147,
151,
156,
158,
164,
173,
174,
177,
182,
187,
191,
196,
201,
207,
211,
213,
216,
219,
222,
224,
235,
241,
242,
243,
244,
253,
254,
257,
260,
263,
264,
265,
269,
270,
273,
274,
277,
281,
285,
289,
292,
297,
299,
302,
304,
308,
310,
316,
320,
323,
326,
328,
329,
334,
337,
338,
341,
343,
347,
348,
350,
351,
360,
361,
366,
372,
376,
378,
379,
381,
383,
389,
392,
394,
398,
401,
406,
409,
412,
413,
418,
423,
425,
428,
430,
434,
439,
444,
447,
451,
453,
461,
464,
473,
481,
483,
487,
490,
501,
512,
517,
525,
528,
532,
541,
545,
547,
552,
559,
563,
565,
571,
575,
579,
580,
584,
588,
590,
593,
596,
598,
602,
607,
610,
612,
616,
622,
627,
630,
633,
636,
637,
639,
640,
646,
650,
657,
659,
663,
665,
668,
672,
674,
678,
681,
682,
689,
695,
710,
715,
717,
720,
722,
730,
734,
739,
751,
761,
764,
770,
772,
776,
780,
784,
786,
788,
789,
790,
794,
798,
801,
804,
807,
812,
816,
818,
823,
828,
833,
845,
850,
854,
856,
859,
863,
865,
868,
873,
876,
878,
886,
889,
894,
901,
903,
905,
912,
918,
924,
929,
934,
938,
941,
946,
948,
951,
952,
955,
958,
969,
971,
981,
984,
989,
991,
1002,
1003,
1006,
1008,
1010,
1012,
1015,
1017,
1020,
1025,
1029,
1032,
1041,
1044,
1047,
1058,
1060,
1061,
1064,
1066,
1068,
1071,
1073,
1076,
1080,
1084,
1091,
1094,
1095,
1102,
1110,
1112,
1116,
1127,
1133,
1135,
1137,
1145,
1148,
1150,
1155,
1157,
1166,
1169,
1171,
1174,
1182,
1183,
1185,
1191,
1194,
1200,
1203,
1207,
1210,
1212,
1225,
1230,
1234,
1239,
1244,
1253,
1257,
1264,
1269,
1273,
1275,
1279,
1280,
1281,
1283,
1286,
1290,
1294,
1298,
1312,
1316,
1317,
1327,
1336,
1339,
1344,
1349,
1358,
1359,
1364,
1367,
1368,
1369,
1370,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
79,
1351,
81,
82,
83,
84,
85,
86,
87,
88,
130,
135,
139,
140,
184,
189,
190,
191,
192,
198,
204,
205,
206,
207,
208,
283,
287,
288,
289,
290,
291,
292,
293,
294,
345,
363,
368,
403,
408,
432,
436,
455,
534,
543,
567,
592,
624,
691,
692,
693,
694,
695,
696,
697,
732,
733,
734,
735,
736,
737,
738,
739,
740,
766,
835,
847,
870,
915,
921,
927,
932,
936,
940,
941,
942,
943,
944,
945,
946,
947,
973,
983,
984,
985,
986,
987,
988,
1022,
1027,
1043,
1044,
1045,
1046,
1047,
1048,
1049,
1050,
1051,
1052,
1053,
1082,
1086,
1093,
1115,
1116,
1117,
1118,
1119,
1159,
1160,
1187,
1193,
1194,
1195,
1196,
1205,
1209,
1210,
1211,
1227,
1241,
1255,
1277,
1278,
1296,
1297,
1298,
1299,
1314,
1319,
1320,
1321,
1341,
1346
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from mock import Mock
import canonicaljson
import signedjson.key
import signedjson.sign
from nacl.signing import SigningKey
from signedjson.key import encode_verify_key_base64, get_verify_key
from twisted.internet import defer
from twisted.internet.defer import Deferred, ensureDeferred
from synapse.api.errors import SynapseError
from synapse.crypto import keyring
from synapse.crypto.keyring import (
PerspectivesKeyFetcher,
ServerKeyFetcher,
StoreKeyFetcher,
)
from synapse.logging.context import (
LoggingContext,
current_context,
make_deferred_yieldable,
)
from synapse.storage.keys import FetchKeyResult
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import logcontext_clean
class MockPerspectiveServer:
def __init__(self):
self.server_name = "mock_server"
self.key = signedjson.key.generate_signing_key(0)
def get_verify_keys(self):
vk = signedjson.key.get_verify_key(self.key)
return {"%s:%s" % (vk.alg, vk.version): encode_verify_key_base64(vk)}
def get_signed_key(self, server_name, verify_key):
key_id = "%s:%s" % (verify_key.alg, verify_key.version)
res = {
"server_name": server_name,
"old_verify_keys": {},
"valid_until_ts": time.time() * 1000 + 3600,
"verify_keys": {key_id: {"key": encode_verify_key_base64(verify_key)}},
}
self.sign_response(res)
return res
def sign_response(self, res):
signedjson.sign.sign_json(res, self.server_name, self.key)
@logcontext_clean
class KeyringTestCase(unittest.HomeserverTestCase):
def check_context(self, val, expected):
self.assertEquals(getattr(current_context(), "request", None), expected)
return val
def test_verify_json_objects_for_server_awaits_previous_requests(self):
mock_fetcher = keyring.KeyFetcher()
mock_fetcher.get_keys = Mock()
kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))
# a signed object that we are going to try to validate
key1 = signedjson.key.generate_signing_key(1)
json1 = {}
signedjson.sign.sign_json(json1, "server10", key1)
# start off a first set of lookups. We make the mock fetcher block until this
# deferred completes.
first_lookup_deferred = Deferred()
async def first_lookup_fetch(keys_to_fetch):
self.assertEquals(current_context().request, "context_11")
self.assertEqual(keys_to_fetch, {"server10": {get_key_id(key1): 0}})
await make_deferred_yieldable(first_lookup_deferred)
return {
"server10": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)
}
}
mock_fetcher.get_keys.side_effect = first_lookup_fetch
async def first_lookup():
with LoggingContext("context_11") as context_11:
context_11.request = "context_11"
res_deferreds = kr.verify_json_objects_for_server(
[("server10", json1, 0, "test10"), ("server11", {}, 0, "test11")]
)
# the unsigned json should be rejected pretty quickly
self.assertTrue(res_deferreds[1].called)
try:
await res_deferreds[1]
self.assertFalse("unsigned json didn't cause a failure")
except SynapseError:
pass
self.assertFalse(res_deferreds[0].called)
res_deferreds[0].addBoth(self.check_context, None)
await make_deferred_yieldable(res_deferreds[0])
d0 = ensureDeferred(first_lookup())
mock_fetcher.get_keys.assert_called_once()
# a second request for a server with outstanding requests
# should block rather than start a second call
async def second_lookup_fetch(keys_to_fetch):
self.assertEquals(current_context().request, "context_12")
return {
"server10": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)
}
}
mock_fetcher.get_keys.reset_mock()
mock_fetcher.get_keys.side_effect = second_lookup_fetch
second_lookup_state = [0]
async def second_lookup():
with LoggingContext("context_12") as context_12:
context_12.request = "context_12"
res_deferreds_2 = kr.verify_json_objects_for_server(
[("server10", json1, 0, "test")]
)
res_deferreds_2[0].addBoth(self.check_context, None)
second_lookup_state[0] = 1
await make_deferred_yieldable(res_deferreds_2[0])
second_lookup_state[0] = 2
d2 = ensureDeferred(second_lookup())
self.pump()
# the second request should be pending, but the fetcher should not yet have been
# called
self.assertEqual(second_lookup_state[0], 1)
mock_fetcher.get_keys.assert_not_called()
# complete the first request
first_lookup_deferred.callback(None)
# and now both verifications should succeed.
self.get_success(d0)
self.get_success(d2)
def test_verify_json_for_server(self):
kr = keyring.Keyring(self.hs)
key1 = signedjson.key.generate_signing_key(1)
r = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), 1000))],
)
self.get_success(r)
json1 = {}
signedjson.sign.sign_json(json1, "server9", key1)
# should fail immediately on an unsigned object
d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned")
self.get_failure(d, SynapseError)
# should succeed on a signed object
d = _verify_json_for_server(kr, "server9", json1, 500, "test signed")
# self.assertFalse(d.called)
self.get_success(d)
def test_verify_json_for_server_with_null_valid_until_ms(self):
"""Tests that we correctly handle key requests for keys we've stored
with a null `ts_valid_until_ms`
"""
mock_fetcher = keyring.KeyFetcher()
mock_fetcher.get_keys = Mock(return_value=make_awaitable({}))
kr = keyring.Keyring(
self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher)
)
key1 = signedjson.key.generate_signing_key(1)
r = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), None))],
)
self.get_success(r)
json1 = {}
signedjson.sign.sign_json(json1, "server9", key1)
# should fail immediately on an unsigned object
d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned")
self.get_failure(d, SynapseError)
# should fail on a signed object with a non-zero minimum_valid_until_ms,
# as it tries to refetch the keys and fails.
d = _verify_json_for_server(
kr, "server9", json1, 500, "test signed non-zero min"
)
self.get_failure(d, SynapseError)
# We expect the keyring tried to refetch the key once.
mock_fetcher.get_keys.assert_called_once_with(
{"server9": {get_key_id(key1): 500}}
)
# should succeed on a signed object with a 0 minimum_valid_until_ms
d = _verify_json_for_server(
kr, "server9", json1, 0, "test signed with zero min"
)
self.get_success(d)
def test_verify_json_dedupes_key_requests(self):
"""Two requests for the same key should be deduped."""
key1 = signedjson.key.generate_signing_key(1)
async def get_keys(keys_to_fetch):
# there should only be one request object (with the max validity)
self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
return {
"server1": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 1200)
}
}
mock_fetcher = keyring.KeyFetcher()
mock_fetcher.get_keys = Mock(side_effect=get_keys)
kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))
json1 = {}
signedjson.sign.sign_json(json1, "server1", key1)
# the first request should succeed; the second should fail because the key
# has expired
results = kr.verify_json_objects_for_server(
[("server1", json1, 500, "test1"), ("server1", json1, 1500, "test2")]
)
self.assertEqual(len(results), 2)
self.get_success(results[0])
e = self.get_failure(results[1], SynapseError).value
self.assertEqual(e.errcode, "M_UNAUTHORIZED")
self.assertEqual(e.code, 401)
# there should have been a single call to the fetcher
mock_fetcher.get_keys.assert_called_once()
def test_verify_json_falls_back_to_other_fetchers(self):
"""If the first fetcher cannot provide a recent enough key, we fall back"""
key1 = signedjson.key.generate_signing_key(1)
async def get_keys1(keys_to_fetch):
self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
return {
"server1": {get_key_id(key1): FetchKeyResult(get_verify_key(key1), 800)}
}
async def get_keys2(keys_to_fetch):
self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
return {
"server1": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 1200)
}
}
mock_fetcher1 = keyring.KeyFetcher()
mock_fetcher1.get_keys = Mock(side_effect=get_keys1)
mock_fetcher2 = keyring.KeyFetcher()
mock_fetcher2.get_keys = Mock(side_effect=get_keys2)
kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher1, mock_fetcher2))
json1 = {}
signedjson.sign.sign_json(json1, "server1", key1)
results = kr.verify_json_objects_for_server(
[("server1", json1, 1200, "test1"), ("server1", json1, 1500, "test2")]
)
self.assertEqual(len(results), 2)
self.get_success(results[0])
e = self.get_failure(results[1], SynapseError).value
self.assertEqual(e.errcode, "M_UNAUTHORIZED")
self.assertEqual(e.code, 401)
# there should have been a single call to each fetcher
mock_fetcher1.get_keys.assert_called_once()
mock_fetcher2.get_keys.assert_called_once()
@logcontext_clean
class ServerKeyFetcherTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
self.http_client = Mock()
hs = self.setup_test_homeserver(http_client=self.http_client)
return hs
def test_get_keys_from_server(self):
# arbitrarily advance the clock a bit
self.reactor.advance(100)
SERVER_NAME = "server2"
fetcher = ServerKeyFetcher(self.hs)
testkey = signedjson.key.generate_signing_key("ver1")
testverifykey = signedjson.key.get_verify_key(testkey)
testverifykey_id = "ed25519:ver1"
VALID_UNTIL_TS = 200 * 1000
# valid response
response = {
"server_name": SERVER_NAME,
"old_verify_keys": {},
"valid_until_ts": VALID_UNTIL_TS,
"verify_keys": {
testverifykey_id: {
"key": signedjson.key.encode_verify_key_base64(testverifykey)
}
},
}
signedjson.sign.sign_json(response, SERVER_NAME, testkey)
async def get_json(destination, path, **kwargs):
self.assertEqual(destination, SERVER_NAME)
self.assertEqual(path, "/_matrix/key/v2/server/key1")
return response
self.http_client.get_json.side_effect = get_json
keys_to_fetch = {SERVER_NAME: {"key1": 0}}
keys = self.get_success(fetcher.get_keys(keys_to_fetch))
k = keys[SERVER_NAME][testverifykey_id]
self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
self.assertEqual(k.verify_key, testverifykey)
self.assertEqual(k.verify_key.alg, "ed25519")
self.assertEqual(k.verify_key.version, "ver1")
# check that the perspectives store is correctly updated
lookup_triplet = (SERVER_NAME, testverifykey_id, None)
key_json = self.get_success(
self.hs.get_datastore().get_server_keys_json([lookup_triplet])
)
res = key_json[lookup_triplet]
self.assertEqual(len(res), 1)
res = res[0]
self.assertEqual(res["key_id"], testverifykey_id)
self.assertEqual(res["from_server"], SERVER_NAME)
self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)
# we expect it to be encoded as canonical json *before* it hits the db
self.assertEqual(
bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
)
# change the server name: the result should be ignored
response["server_name"] = "OTHER_SERVER"
keys = self.get_success(fetcher.get_keys(keys_to_fetch))
self.assertEqual(keys, {})
class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
self.mock_perspective_server = MockPerspectiveServer()
self.http_client = Mock()
config = self.default_config()
config["trusted_key_servers"] = [
{
"server_name": self.mock_perspective_server.server_name,
"verify_keys": self.mock_perspective_server.get_verify_keys(),
}
]
return self.setup_test_homeserver(http_client=self.http_client, config=config)
def build_perspectives_response(
self, server_name: str, signing_key: SigningKey, valid_until_ts: int,
) -> dict:
"""
Build a valid perspectives server response to a request for the given key
"""
verify_key = signedjson.key.get_verify_key(signing_key)
verifykey_id = "%s:%s" % (verify_key.alg, verify_key.version)
response = {
"server_name": server_name,
"old_verify_keys": {},
"valid_until_ts": valid_until_ts,
"verify_keys": {
verifykey_id: {
"key": signedjson.key.encode_verify_key_base64(verify_key)
}
},
}
# the response must be signed by both the origin server and the perspectives
# server.
signedjson.sign.sign_json(response, server_name, signing_key)
self.mock_perspective_server.sign_response(response)
return response
def expect_outgoing_key_query(
self, expected_server_name: str, expected_key_id: str, response: dict
) -> None:
"""
Tell the mock http client to expect a perspectives-server key query
"""
async def post_json(destination, path, data, **kwargs):
self.assertEqual(destination, self.mock_perspective_server.server_name)
self.assertEqual(path, "/_matrix/key/v2/query")
# check that the request is for the expected key
q = data["server_keys"]
self.assertEqual(list(q[expected_server_name].keys()), [expected_key_id])
return {"server_keys": [response]}
self.http_client.post_json.side_effect = post_json
def test_get_keys_from_perspectives(self):
# arbitrarily advance the clock a bit
self.reactor.advance(100)
fetcher = PerspectivesKeyFetcher(self.hs)
SERVER_NAME = "server2"
testkey = signedjson.key.generate_signing_key("ver1")
testverifykey = signedjson.key.get_verify_key(testkey)
testverifykey_id = "ed25519:ver1"
VALID_UNTIL_TS = 200 * 1000
response = self.build_perspectives_response(
SERVER_NAME, testkey, VALID_UNTIL_TS,
)
self.expect_outgoing_key_query(SERVER_NAME, "key1", response)
keys_to_fetch = {SERVER_NAME: {"key1": 0}}
keys = self.get_success(fetcher.get_keys(keys_to_fetch))
self.assertIn(SERVER_NAME, keys)
k = keys[SERVER_NAME][testverifykey_id]
self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
self.assertEqual(k.verify_key, testverifykey)
self.assertEqual(k.verify_key.alg, "ed25519")
self.assertEqual(k.verify_key.version, "ver1")
# check that the perspectives store is correctly updated
lookup_triplet = (SERVER_NAME, testverifykey_id, None)
key_json = self.get_success(
self.hs.get_datastore().get_server_keys_json([lookup_triplet])
)
res = key_json[lookup_triplet]
self.assertEqual(len(res), 1)
res = res[0]
self.assertEqual(res["key_id"], testverifykey_id)
self.assertEqual(res["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)
self.assertEqual(
bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
)
def test_get_perspectives_own_key(self):
"""Check that we can get the perspectives server's own keys
This is slightly complicated by the fact that the perspectives server may
use different keys for signing notary responses.
"""
# arbitrarily advance the clock a bit
self.reactor.advance(100)
fetcher = PerspectivesKeyFetcher(self.hs)
SERVER_NAME = self.mock_perspective_server.server_name
testkey = signedjson.key.generate_signing_key("ver1")
testverifykey = signedjson.key.get_verify_key(testkey)
testverifykey_id = "ed25519:ver1"
VALID_UNTIL_TS = 200 * 1000
response = self.build_perspectives_response(
SERVER_NAME, testkey, VALID_UNTIL_TS
)
self.expect_outgoing_key_query(SERVER_NAME, "key1", response)
keys_to_fetch = {SERVER_NAME: {"key1": 0}}
keys = self.get_success(fetcher.get_keys(keys_to_fetch))
self.assertIn(SERVER_NAME, keys)
k = keys[SERVER_NAME][testverifykey_id]
self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
self.assertEqual(k.verify_key, testverifykey)
self.assertEqual(k.verify_key.alg, "ed25519")
self.assertEqual(k.verify_key.version, "ver1")
# check that the perspectives store is correctly updated
lookup_triplet = (SERVER_NAME, testverifykey_id, None)
key_json = self.get_success(
self.hs.get_datastore().get_server_keys_json([lookup_triplet])
)
res = key_json[lookup_triplet]
self.assertEqual(len(res), 1)
res = res[0]
self.assertEqual(res["key_id"], testverifykey_id)
self.assertEqual(res["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)
self.assertEqual(
bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
)
    def test_invalid_perspectives_responses(self):
        """Check that invalid responses from the perspectives server are rejected"""
        # arbitrarily advance the clock a bit
        self.reactor.advance(100)
        SERVER_NAME = "server2"
        testkey = signedjson.key.generate_signing_key("ver1")
        testverifykey = signedjson.key.get_verify_key(testkey)
        testverifykey_id = "ed25519:ver1"
        VALID_UNTIL_TS = 200 * 1000
        def build_response():
            # fresh copy each time: the mutations below must not accumulate
            return self.build_perspectives_response(
                SERVER_NAME, testkey, VALID_UNTIL_TS
            )
        def get_key_from_perspectives(response):
            # run a full fetch against the mocked notary with the given response
            fetcher = PerspectivesKeyFetcher(self.hs)
            keys_to_fetch = {SERVER_NAME: {"key1": 0}}
            self.expect_outgoing_key_query(SERVER_NAME, "key1", response)
            return self.get_success(fetcher.get_keys(keys_to_fetch))
        # start with a valid response so we can check we are testing the right thing
        response = build_response()
        keys = get_key_from_perspectives(response)
        k = keys[SERVER_NAME][testverifykey_id]
        self.assertEqual(k.verify_key, testverifykey)
        # remove the perspectives server's signature
        response = build_response()
        del response["signatures"][self.mock_perspective_server.server_name]
        keys = get_key_from_perspectives(response)
        self.assertEqual(keys, {}, "Expected empty dict with missing persp server sig")
        # remove the origin server's signature
        response = build_response()
        del response["signatures"][SERVER_NAME]
        keys = get_key_from_perspectives(response)
        self.assertEqual(keys, {}, "Expected empty dict with missing origin server sig")
def get_key_id(key):
    """Get the matrix ID tag for a given SigningKey or VerifyKey"""
    # key ids look like "ed25519:ver1"
    return "%s:%s" % (key.alg, key.version)
@defer.inlineCallbacks
def run_in_context(f, *args, **kwargs):
    """Run `f(*args, **kwargs)` inside a fresh "testctx" LoggingContext.

    Returns a Deferred firing with f's result once it completes.
    """
    with LoggingContext("testctx") as ctx:
        # we set the "request" prop to make it easier to follow what's going on in the
        # logs.
        ctx.request = "testctx"
        rv = yield f(*args, **kwargs)
    return rv
def _verify_json_for_server(kr, *args):
    """thin wrapper around verify_json_for_server which makes sure it is wrapped
    with the patched defer.inlineCallbacks.
    """
    @defer.inlineCallbacks
    def v():
        # forward all positional args straight through to the keyring
        rv1 = yield kr.verify_json_for_server(*args)
        return rv1
    return run_in_context(v)
| # -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from mock import Mock
import canonicaljson
import signedjson.key
import signedjson.sign
from nacl.signing import SigningKey
from signedjson.key import encode_verify_key_base64, get_verify_key
from twisted.internet import defer
from twisted.internet.defer import Deferred, ensureDeferred
from synapse.api.errors import SynapseError
from synapse.crypto import keyring
from synapse.crypto.keyring import (
PerspectivesKeyFetcher,
ServerKeyFetcher,
StoreKeyFetcher,
)
from synapse.logging.context import (
LoggingContext,
current_context,
make_deferred_yieldable,
)
from synapse.storage.keys import FetchKeyResult
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import logcontext_clean
class MockPerspectiveServer:
    """A fake notary ("perspectives") server.

    Holds a single signing key and can produce signed key-query responses in
    the same shape as a real notary server.
    """
    def __init__(self):
        self.server_name = "mock_server"
        # the key this notary uses to sign its responses
        self.key = signedjson.key.generate_signing_key(0)
    def get_verify_keys(self):
        """Return {key_id: base64-encoded verify key} for our signing key."""
        vk = signedjson.key.get_verify_key(self.key)
        return {"%s:%s" % (vk.alg, vk.version): encode_verify_key_base64(vk)}
    def get_signed_key(self, server_name, verify_key):
        """Build a signed notary response attesting to `verify_key` for `server_name`."""
        key_id = "%s:%s" % (verify_key.alg, verify_key.version)
        res = {
            "server_name": server_name,
            "old_verify_keys": {},
            # valid_until_ts is in milliseconds (cf. VALID_UNTIL_TS = 200 * 1000
            # elsewhere in this file). The previous "+ 3600" only added 3.6
            # seconds; one hour of validity was clearly intended.
            "valid_until_ts": time.time() * 1000 + 3600 * 1000,
            "verify_keys": {key_id: {"key": encode_verify_key_base64(verify_key)}},
        }
        self.sign_response(res)
        return res
    def sign_response(self, res):
        """Sign `res` in place with this notary's own key."""
        signedjson.sign.sign_json(res, self.server_name, self.key)
@logcontext_clean
class KeyringTestCase(unittest.HomeserverTestCase):
    """Tests for Keyring.verify_json_* : signature checking, request
    deduplication, fetcher fallback, and logcontext hygiene."""
    def check_context(self, val, expected):
        # assert we are running in the expected logcontext, passing `val` through
        self.assertEquals(getattr(current_context(), "request", None), expected)
        return val
    def test_verify_json_objects_for_server_awaits_previous_requests(self):
        """A second lookup for a key already being fetched must wait for the
        in-flight fetch rather than starting another one."""
        mock_fetcher = keyring.KeyFetcher()
        mock_fetcher.get_keys = Mock()
        kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))
        # a signed object that we are going to try to validate
        key1 = signedjson.key.generate_signing_key(1)
        json1 = {}
        signedjson.sign.sign_json(json1, "server10", key1)
        # start off a first set of lookups. We make the mock fetcher block until this
        # deferred completes.
        first_lookup_deferred = Deferred()
        async def first_lookup_fetch(keys_to_fetch):
            self.assertEquals(current_context().request, "context_11")
            self.assertEqual(keys_to_fetch, {"server10": {get_key_id(key1): 0}})
            await make_deferred_yieldable(first_lookup_deferred)
            return {
                "server10": {
                    get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)
                }
            }
        mock_fetcher.get_keys.side_effect = first_lookup_fetch
        async def first_lookup():
            with LoggingContext("context_11") as context_11:
                context_11.request = "context_11"
                res_deferreds = kr.verify_json_objects_for_server(
                    [("server10", json1, 0, "test10"), ("server11", {}, 0, "test11")]
                )
                # the unsigned json should be rejected pretty quickly
                self.assertTrue(res_deferreds[1].called)
                try:
                    await res_deferreds[1]
                    self.assertFalse("unsigned json didn't cause a failure")
                except SynapseError:
                    pass
                self.assertFalse(res_deferreds[0].called)
                res_deferreds[0].addBoth(self.check_context, None)
                await make_deferred_yieldable(res_deferreds[0])
        d0 = ensureDeferred(first_lookup())
        mock_fetcher.get_keys.assert_called_once()
        # a second request for a server with outstanding requests
        # should block rather than start a second call
        async def second_lookup_fetch(keys_to_fetch):
            self.assertEquals(current_context().request, "context_12")
            return {
                "server10": {
                    get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)
                }
            }
        mock_fetcher.get_keys.reset_mock()
        mock_fetcher.get_keys.side_effect = second_lookup_fetch
        second_lookup_state = [0]
        async def second_lookup():
            with LoggingContext("context_12") as context_12:
                context_12.request = "context_12"
                res_deferreds_2 = kr.verify_json_objects_for_server(
                    [("server10", json1, 0, "test")]
                )
                res_deferreds_2[0].addBoth(self.check_context, None)
                second_lookup_state[0] = 1
                await make_deferred_yieldable(res_deferreds_2[0])
                second_lookup_state[0] = 2
        d2 = ensureDeferred(second_lookup())
        self.pump()
        # the second request should be pending, but the fetcher should not yet have been
        # called
        self.assertEqual(second_lookup_state[0], 1)
        mock_fetcher.get_keys.assert_not_called()
        # complete the first request
        first_lookup_deferred.callback(None)
        # and now both verifications should succeed.
        self.get_success(d0)
        self.get_success(d2)
    def test_verify_json_for_server(self):
        """A key already in the store verifies a signed object and rejects an
        unsigned one."""
        kr = keyring.Keyring(self.hs)
        key1 = signedjson.key.generate_signing_key(1)
        r = self.hs.get_datastore().store_server_verify_keys(
            "server9",
            time.time() * 1000,
            [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), 1000))],
        )
        self.get_success(r)
        json1 = {}
        signedjson.sign.sign_json(json1, "server9", key1)
        # should fail immediately on an unsigned object
        d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned")
        self.get_failure(d, SynapseError)
        # should succeed on a signed object
        d = _verify_json_for_server(kr, "server9", json1, 500, "test signed")
        # self.assertFalse(d.called)
        self.get_success(d)
    def test_verify_json_for_server_with_null_valid_until_ms(self):
        """Tests that we correctly handle key requests for keys we've stored
        with a null `ts_valid_until_ms`
        """
        mock_fetcher = keyring.KeyFetcher()
        mock_fetcher.get_keys = Mock(return_value=make_awaitable({}))
        kr = keyring.Keyring(
            self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher)
        )
        key1 = signedjson.key.generate_signing_key(1)
        r = self.hs.get_datastore().store_server_verify_keys(
            "server9",
            time.time() * 1000,
            [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), None))],
        )
        self.get_success(r)
        json1 = {}
        signedjson.sign.sign_json(json1, "server9", key1)
        # should fail immediately on an unsigned object
        d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned")
        self.get_failure(d, SynapseError)
        # should fail on a signed object with a non-zero minimum_valid_until_ms,
        # as it tries to refetch the keys and fails.
        d = _verify_json_for_server(
            kr, "server9", json1, 500, "test signed non-zero min"
        )
        self.get_failure(d, SynapseError)
        # We expect the keyring tried to refetch the key once.
        mock_fetcher.get_keys.assert_called_once_with(
            {"server9": {get_key_id(key1): 500}}
        )
        # should succeed on a signed object with a 0 minimum_valid_until_ms
        d = _verify_json_for_server(
            kr, "server9", json1, 0, "test signed with zero min"
        )
        self.get_success(d)
    def test_verify_json_dedupes_key_requests(self):
        """Two requests for the same key should be deduped."""
        key1 = signedjson.key.generate_signing_key(1)
        async def get_keys(keys_to_fetch):
            # there should only be one request object (with the max validity)
            self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
            return {
                "server1": {
                    get_key_id(key1): FetchKeyResult(get_verify_key(key1), 1200)
                }
            }
        mock_fetcher = keyring.KeyFetcher()
        mock_fetcher.get_keys = Mock(side_effect=get_keys)
        kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))
        json1 = {}
        signedjson.sign.sign_json(json1, "server1", key1)
        # the first request should succeed; the second should fail because the key
        # has expired
        results = kr.verify_json_objects_for_server(
            [("server1", json1, 500, "test1"), ("server1", json1, 1500, "test2")]
        )
        self.assertEqual(len(results), 2)
        self.get_success(results[0])
        e = self.get_failure(results[1], SynapseError).value
        self.assertEqual(e.errcode, "M_UNAUTHORIZED")
        self.assertEqual(e.code, 401)
        # there should have been a single call to the fetcher
        mock_fetcher.get_keys.assert_called_once()
    def test_verify_json_falls_back_to_other_fetchers(self):
        """If the first fetcher cannot provide a recent enough key, we fall back"""
        key1 = signedjson.key.generate_signing_key(1)
        async def get_keys1(keys_to_fetch):
            self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
            return {
                "server1": {get_key_id(key1): FetchKeyResult(get_verify_key(key1), 800)}
            }
        async def get_keys2(keys_to_fetch):
            self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
            return {
                "server1": {
                    get_key_id(key1): FetchKeyResult(get_verify_key(key1), 1200)
                }
            }
        mock_fetcher1 = keyring.KeyFetcher()
        mock_fetcher1.get_keys = Mock(side_effect=get_keys1)
        mock_fetcher2 = keyring.KeyFetcher()
        mock_fetcher2.get_keys = Mock(side_effect=get_keys2)
        kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher1, mock_fetcher2))
        json1 = {}
        signedjson.sign.sign_json(json1, "server1", key1)
        results = kr.verify_json_objects_for_server(
            [("server1", json1, 1200, "test1"), ("server1", json1, 1500, "test2")]
        )
        self.assertEqual(len(results), 2)
        self.get_success(results[0])
        e = self.get_failure(results[1], SynapseError).value
        self.assertEqual(e.errcode, "M_UNAUTHORIZED")
        self.assertEqual(e.code, 401)
        # there should have been a single call to each fetcher
        mock_fetcher1.get_keys.assert_called_once()
        mock_fetcher2.get_keys.assert_called_once()
@logcontext_clean
class ServerKeyFetcherTestCase(unittest.HomeserverTestCase):
    """Tests for ServerKeyFetcher: fetching a server's keys directly over a
    mocked federation HTTP client."""
    def make_homeserver(self, reactor, clock):
        # mock out the federation client so no real HTTP requests happen
        self.http_client = Mock()
        hs = self.setup_test_homeserver(federation_http_client=self.http_client)
        return hs
    def test_get_keys_from_server(self):
        """A valid /key/v2/server response is accepted, stored, and a response
        with a mismatched server_name is ignored."""
        # arbitrarily advance the clock a bit
        self.reactor.advance(100)
        SERVER_NAME = "server2"
        fetcher = ServerKeyFetcher(self.hs)
        testkey = signedjson.key.generate_signing_key("ver1")
        testverifykey = signedjson.key.get_verify_key(testkey)
        testverifykey_id = "ed25519:ver1"
        VALID_UNTIL_TS = 200 * 1000
        # valid response
        response = {
            "server_name": SERVER_NAME,
            "old_verify_keys": {},
            "valid_until_ts": VALID_UNTIL_TS,
            "verify_keys": {
                testverifykey_id: {
                    "key": signedjson.key.encode_verify_key_base64(testverifykey)
                }
            },
        }
        signedjson.sign.sign_json(response, SERVER_NAME, testkey)
        async def get_json(destination, path, **kwargs):
            self.assertEqual(destination, SERVER_NAME)
            self.assertEqual(path, "/_matrix/key/v2/server/key1")
            return response
        self.http_client.get_json.side_effect = get_json
        keys_to_fetch = {SERVER_NAME: {"key1": 0}}
        keys = self.get_success(fetcher.get_keys(keys_to_fetch))
        k = keys[SERVER_NAME][testverifykey_id]
        self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
        self.assertEqual(k.verify_key, testverifykey)
        self.assertEqual(k.verify_key.alg, "ed25519")
        self.assertEqual(k.verify_key.version, "ver1")
        # check that the perspectives store is correctly updated
        lookup_triplet = (SERVER_NAME, testverifykey_id, None)
        key_json = self.get_success(
            self.hs.get_datastore().get_server_keys_json([lookup_triplet])
        )
        res = key_json[lookup_triplet]
        self.assertEqual(len(res), 1)
        res = res[0]
        self.assertEqual(res["key_id"], testverifykey_id)
        self.assertEqual(res["from_server"], SERVER_NAME)
        self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
        self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)
        # we expect it to be encoded as canonical json *before* it hits the db
        self.assertEqual(
            bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
        )
        # change the server name: the result should be ignored
        response["server_name"] = "OTHER_SERVER"
        keys = self.get_success(fetcher.get_keys(keys_to_fetch))
        self.assertEqual(keys, {})
class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
    """Tests for PerspectivesKeyFetcher: fetching keys via a mocked trusted
    notary ("perspectives") server."""
    def make_homeserver(self, reactor, clock):
        # mock notary + federation client; configure the hs to trust the notary
        self.mock_perspective_server = MockPerspectiveServer()
        self.http_client = Mock()
        config = self.default_config()
        config["trusted_key_servers"] = [
            {
                "server_name": self.mock_perspective_server.server_name,
                "verify_keys": self.mock_perspective_server.get_verify_keys(),
            }
        ]
        return self.setup_test_homeserver(
            federation_http_client=self.http_client, config=config
        )
    def build_perspectives_response(
        self, server_name: str, signing_key: SigningKey, valid_until_ts: int,
    ) -> dict:
        """
        Build a valid perspectives server response to a request for the given key
        """
        verify_key = signedjson.key.get_verify_key(signing_key)
        verifykey_id = "%s:%s" % (verify_key.alg, verify_key.version)
        response = {
            "server_name": server_name,
            "old_verify_keys": {},
            "valid_until_ts": valid_until_ts,
            "verify_keys": {
                verifykey_id: {
                    "key": signedjson.key.encode_verify_key_base64(verify_key)
                }
            },
        }
        # the response must be signed by both the origin server and the perspectives
        # server.
        signedjson.sign.sign_json(response, server_name, signing_key)
        self.mock_perspective_server.sign_response(response)
        return response
    def expect_outgoing_key_query(
        self, expected_server_name: str, expected_key_id: str, response: dict
    ) -> None:
        """
        Tell the mock http client to expect a perspectives-server key query
        """
        async def post_json(destination, path, data, **kwargs):
            self.assertEqual(destination, self.mock_perspective_server.server_name)
            self.assertEqual(path, "/_matrix/key/v2/query")
            # check that the request is for the expected key
            q = data["server_keys"]
            self.assertEqual(list(q[expected_server_name].keys()), [expected_key_id])
            return {"server_keys": [response]}
        self.http_client.post_json.side_effect = post_json
    def test_get_keys_from_perspectives(self):
        """A well-signed notary response yields the key and persists it."""
        # arbitrarily advance the clock a bit
        self.reactor.advance(100)
        fetcher = PerspectivesKeyFetcher(self.hs)
        SERVER_NAME = "server2"
        testkey = signedjson.key.generate_signing_key("ver1")
        testverifykey = signedjson.key.get_verify_key(testkey)
        testverifykey_id = "ed25519:ver1"
        VALID_UNTIL_TS = 200 * 1000
        response = self.build_perspectives_response(
            SERVER_NAME, testkey, VALID_UNTIL_TS,
        )
        self.expect_outgoing_key_query(SERVER_NAME, "key1", response)
        keys_to_fetch = {SERVER_NAME: {"key1": 0}}
        keys = self.get_success(fetcher.get_keys(keys_to_fetch))
        self.assertIn(SERVER_NAME, keys)
        k = keys[SERVER_NAME][testverifykey_id]
        self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
        self.assertEqual(k.verify_key, testverifykey)
        self.assertEqual(k.verify_key.alg, "ed25519")
        self.assertEqual(k.verify_key.version, "ver1")
        # check that the perspectives store is correctly updated
        lookup_triplet = (SERVER_NAME, testverifykey_id, None)
        key_json = self.get_success(
            self.hs.get_datastore().get_server_keys_json([lookup_triplet])
        )
        res = key_json[lookup_triplet]
        self.assertEqual(len(res), 1)
        res = res[0]
        self.assertEqual(res["key_id"], testverifykey_id)
        self.assertEqual(res["from_server"], self.mock_perspective_server.server_name)
        self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
        self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)
        self.assertEqual(
            bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
        )
    def test_get_perspectives_own_key(self):
        """Check that we can get the perspectives server's own keys
        This is slightly complicated by the fact that the perspectives server may
        use different keys for signing notary responses.
        """
        # arbitrarily advance the clock a bit
        self.reactor.advance(100)
        fetcher = PerspectivesKeyFetcher(self.hs)
        SERVER_NAME = self.mock_perspective_server.server_name
        testkey = signedjson.key.generate_signing_key("ver1")
        testverifykey = signedjson.key.get_verify_key(testkey)
        testverifykey_id = "ed25519:ver1"
        VALID_UNTIL_TS = 200 * 1000
        response = self.build_perspectives_response(
            SERVER_NAME, testkey, VALID_UNTIL_TS
        )
        self.expect_outgoing_key_query(SERVER_NAME, "key1", response)
        keys_to_fetch = {SERVER_NAME: {"key1": 0}}
        keys = self.get_success(fetcher.get_keys(keys_to_fetch))
        self.assertIn(SERVER_NAME, keys)
        k = keys[SERVER_NAME][testverifykey_id]
        self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
        self.assertEqual(k.verify_key, testverifykey)
        self.assertEqual(k.verify_key.alg, "ed25519")
        self.assertEqual(k.verify_key.version, "ver1")
        # check that the perspectives store is correctly updated
        lookup_triplet = (SERVER_NAME, testverifykey_id, None)
        key_json = self.get_success(
            self.hs.get_datastore().get_server_keys_json([lookup_triplet])
        )
        res = key_json[lookup_triplet]
        self.assertEqual(len(res), 1)
        res = res[0]
        self.assertEqual(res["key_id"], testverifykey_id)
        self.assertEqual(res["from_server"], self.mock_perspective_server.server_name)
        self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
        self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)
        self.assertEqual(
            bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
        )
    def test_invalid_perspectives_responses(self):
        """Check that invalid responses from the perspectives server are rejected"""
        # arbitrarily advance the clock a bit
        self.reactor.advance(100)
        SERVER_NAME = "server2"
        testkey = signedjson.key.generate_signing_key("ver1")
        testverifykey = signedjson.key.get_verify_key(testkey)
        testverifykey_id = "ed25519:ver1"
        VALID_UNTIL_TS = 200 * 1000
        def build_response():
            # fresh copy each time: the mutations below must not accumulate
            return self.build_perspectives_response(
                SERVER_NAME, testkey, VALID_UNTIL_TS
            )
        def get_key_from_perspectives(response):
            fetcher = PerspectivesKeyFetcher(self.hs)
            keys_to_fetch = {SERVER_NAME: {"key1": 0}}
            self.expect_outgoing_key_query(SERVER_NAME, "key1", response)
            return self.get_success(fetcher.get_keys(keys_to_fetch))
        # start with a valid response so we can check we are testing the right thing
        response = build_response()
        keys = get_key_from_perspectives(response)
        k = keys[SERVER_NAME][testverifykey_id]
        self.assertEqual(k.verify_key, testverifykey)
        # remove the perspectives server's signature
        response = build_response()
        del response["signatures"][self.mock_perspective_server.server_name]
        keys = get_key_from_perspectives(response)
        self.assertEqual(keys, {}, "Expected empty dict with missing persp server sig")
        # remove the origin server's signature
        response = build_response()
        del response["signatures"][SERVER_NAME]
        keys = get_key_from_perspectives(response)
        self.assertEqual(keys, {}, "Expected empty dict with missing origin server sig")
def get_key_id(key):
    """Return the matrix key ID ("<alg>:<version>") for a SigningKey or VerifyKey."""
    return f"{key.alg}:{key.version}"
@defer.inlineCallbacks
def run_in_context(f, *args, **kwargs):
    """Run `f(*args, **kwargs)` inside a fresh "testctx" LoggingContext.

    Returns a Deferred firing with f's result once it completes.
    """
    with LoggingContext("testctx") as ctx:
        # we set the "request" prop to make it easier to follow what's going on in the
        # logs.
        ctx.request = "testctx"
        rv = yield f(*args, **kwargs)
    return rv
def _verify_json_for_server(kr, *args):
    """thin wrapper around verify_json_for_server which makes sure it is wrapped
    with the patched defer.inlineCallbacks.
    """
    @defer.inlineCallbacks
    def v():
        # forward all positional args straight through to the keyring
        rv1 = yield kr.verify_json_for_server(*args)
        return rv1
    return run_in_context(v)
| open_redirect | {
"code": [
" hs = self.setup_test_homeserver(http_client=self.http_client)",
" return self.setup_test_homeserver(http_client=self.http_client, config=config)"
],
"line_no": [
318,
398
]
} | {
"code": [
" hs = self.setup_test_homeserver(federation_http_client=self.http_client)",
" return self.setup_test_homeserver(",
" )"
],
"line_no": [
318,
398,
400
]
} |
import time
from mock import Mock
import canonicaljson
import signedjson.key
import signedjson.sign
from nacl.signing import SigningKey
from signedjson.key import .encode_verify_key_base64, get_verify_key
from twisted.internet import .defer
from twisted.internet.defer import Deferred, ensureDeferred
from synapse.api.errors import SynapseError
from synapse.crypto import .keyring
from synapse.crypto.keyring import (
PerspectivesKeyFetcher,
ServerKeyFetcher,
StoreKeyFetcher,
)
from synapse.logging.context import (
LoggingContext,
current_context,
make_deferred_yieldable,
)
from synapse.storage.keys import FetchKeyResult
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import logcontext_clean
class CLASS_0:
def __init__(self):
self.server_name = "mock_server"
self.key = signedjson.key.generate_signing_key(0)
def FUNC_3(self):
VAR_17 = signedjson.key.get_verify_key(self.key)
return {"%s:%s" % (VAR_17.alg, VAR_17.version): encode_verify_key_base64(VAR_17)}
def FUNC_4(self, VAR_5, VAR_6):
VAR_18 = "%s:%s" % (VAR_6.alg, VAR_6.version)
VAR_7 = {
"server_name": VAR_5,
"old_verify_keys": {},
"valid_until_ts": time.time() * 1000 + 3600,
"verify_keys": {VAR_18: {"key": encode_verify_key_base64(VAR_6)}},
}
self.sign_response(VAR_7)
return VAR_7
def FUNC_5(self, VAR_7):
signedjson.sign.sign_json(VAR_7, self.server_name, self.key)
@logcontext_clean
class CLASS_1(unittest.HomeserverTestCase):
def FUNC_6(self, VAR_8, VAR_9):
self.assertEquals(getattr(current_context(), "request", None), VAR_9)
return VAR_8
def FUNC_7(self):
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock()
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_19,))
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server10", VAR_20)
VAR_22 = Deferred()
async def FUNC_20(VAR_23):
self.assertEquals(current_context().request, "context_11")
self.assertEqual(VAR_23, {"server10": {FUNC_0(VAR_20): 0}})
await make_deferred_yieldable(VAR_22)
return {
"server10": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 100)
}
}
VAR_19.get_keys.side_effect = FUNC_20
async def FUNC_21():
with LoggingContext("context_11") as context_11:
context_11.request = "context_11"
VAR_52 = VAR_4.verify_json_objects_for_server(
[("server10", VAR_21, 0, "test10"), ("server11", {}, 0, "test11")]
)
self.assertTrue(VAR_52[1].called)
try:
await VAR_52[1]
self.assertFalse("unsigned json didn't cause a failure")
except SynapseError:
pass
self.assertFalse(VAR_52[0].called)
VAR_52[0].addBoth(self.check_context, None)
await make_deferred_yieldable(VAR_52[0])
VAR_24 = ensureDeferred(FUNC_21())
VAR_19.get_keys.assert_called_once()
async def FUNC_22(VAR_23):
self.assertEquals(current_context().request, "context_12")
return {
"server10": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 100)
}
}
VAR_19.get_keys.reset_mock()
VAR_19.get_keys.side_effect = FUNC_22
VAR_25 = [0]
async def FUNC_23():
with LoggingContext("context_12") as context_12:
context_12.request = "context_12"
VAR_53 = VAR_4.verify_json_objects_for_server(
[("server10", VAR_21, 0, "test")]
)
VAR_53[0].addBoth(self.check_context, None)
VAR_25[0] = 1
await make_deferred_yieldable(VAR_53[0])
VAR_25[0] = 2
VAR_26 = ensureDeferred(FUNC_23())
self.pump()
self.assertEqual(VAR_25[0], 1)
VAR_19.get_keys.assert_not_called()
VAR_22.callback(None)
self.get_success(VAR_24)
self.get_success(VAR_26)
def FUNC_8(self):
VAR_4 = keyring.Keyring(self.hs)
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_27 = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", FUNC_0(VAR_20), FetchKeyResult(get_verify_key(VAR_20), 1000))],
)
self.get_success(VAR_27)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server9", VAR_20)
VAR_28 = FUNC_2(VAR_4, "server9", {}, 0, "test unsigned")
self.get_failure(VAR_28, SynapseError)
VAR_28 = FUNC_2(VAR_4, "server9", VAR_21, 500, "test signed")
self.get_success(VAR_28)
def FUNC_9(self):
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock(return_value=make_awaitable({}))
VAR_4 = keyring.Keyring(
self.hs, key_fetchers=(StoreKeyFetcher(self.hs), VAR_19)
)
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_27 = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", FUNC_0(VAR_20), FetchKeyResult(get_verify_key(VAR_20), None))],
)
self.get_success(VAR_27)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server9", VAR_20)
VAR_28 = FUNC_2(VAR_4, "server9", {}, 0, "test unsigned")
self.get_failure(VAR_28, SynapseError)
VAR_28 = FUNC_2(
VAR_4, "server9", VAR_21, 500, "test signed non-zero min"
)
self.get_failure(VAR_28, SynapseError)
VAR_19.get_keys.assert_called_once_with(
{"server9": {FUNC_0(VAR_20): 500}}
)
VAR_28 = FUNC_2(
VAR_4, "server9", VAR_21, 0, "test signed with zero min"
)
self.get_success(VAR_28)
def FUNC_10(self):
VAR_20 = signedjson.key.generate_signing_key(1)
async def FUNC_24(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 1200)
}
}
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock(side_effect=FUNC_24)
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_19,))
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server1", VAR_20)
VAR_29 = VAR_4.verify_json_objects_for_server(
[("server1", VAR_21, 500, "test1"), ("server1", VAR_21, 1500, "test2")]
)
self.assertEqual(len(VAR_29), 2)
self.get_success(VAR_29[0])
VAR_30 = self.get_failure(VAR_29[1], SynapseError).value
self.assertEqual(VAR_30.errcode, "M_UNAUTHORIZED")
self.assertEqual(VAR_30.code, 401)
VAR_19.get_keys.assert_called_once()
def FUNC_11(self):
VAR_20 = signedjson.key.generate_signing_key(1)
async def FUNC_25(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 800)}
}
async def FUNC_26(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 1200)
}
}
VAR_31 = keyring.KeyFetcher()
VAR_31.get_keys = Mock(side_effect=FUNC_25)
VAR_32 = keyring.KeyFetcher()
VAR_32.get_keys = Mock(side_effect=FUNC_26)
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_31, VAR_32))
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server1", VAR_20)
VAR_29 = VAR_4.verify_json_objects_for_server(
[("server1", VAR_21, 1200, "test1"), ("server1", VAR_21, 1500, "test2")]
)
self.assertEqual(len(VAR_29), 2)
self.get_success(VAR_29[0])
VAR_30 = self.get_failure(VAR_29[1], SynapseError).value
self.assertEqual(VAR_30.errcode, "M_UNAUTHORIZED")
self.assertEqual(VAR_30.code, 401)
VAR_31.get_keys.assert_called_once()
VAR_32.get_keys.assert_called_once()
@logcontext_clean
class CLASS_2(unittest.HomeserverTestCase):
def FUNC_12(self, VAR_10, VAR_11):
self.http_client = Mock()
VAR_33 = self.setup_test_homeserver(http_client=self.http_client)
return VAR_33
def FUNC_13(self):
self.reactor.advance(100)
VAR_34 = "server2"
VAR_35 = ServerKeyFetcher(self.hs)
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = {
"server_name": VAR_34,
"old_verify_keys": {},
"valid_until_ts": VAR_39,
"verify_keys": {
VAR_38: {
"key": signedjson.key.encode_verify_key_base64(VAR_37)
}
},
}
signedjson.sign.sign_json(VAR_16, VAR_34, VAR_36)
async def FUNC_27(VAR_40, VAR_41, **VAR_3):
self.assertEqual(VAR_40, VAR_34)
self.assertEqual(VAR_41, "/_matrix/VAR_0/v2/server/key1")
return VAR_16
self.http_client.get_json.side_effect = FUNC_27
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], VAR_34)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
VAR_16["server_name"] = "OTHER_SERVER"
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertEqual(VAR_42, {})
class CLASS_3(unittest.HomeserverTestCase):
def FUNC_12(self, VAR_10, VAR_11):
self.mock_perspective_server = CLASS_0()
self.http_client = Mock()
VAR_46 = self.default_config()
VAR_46["trusted_key_servers"] = [
{
"server_name": self.mock_perspective_server.server_name,
"verify_keys": self.mock_perspective_server.get_verify_keys(),
}
]
return self.setup_test_homeserver(http_client=self.http_client, VAR_46=config)
def FUNC_14(
self, VAR_5: str, VAR_12: SigningKey, VAR_13: int,
) -> dict:
VAR_6 = signedjson.key.get_verify_key(VAR_12)
VAR_47 = "%s:%s" % (VAR_6.alg, VAR_6.version)
VAR_16 = {
"server_name": VAR_5,
"old_verify_keys": {},
"valid_until_ts": VAR_13,
"verify_keys": {
VAR_47: {
"key": signedjson.key.encode_verify_key_base64(VAR_6)
}
},
}
signedjson.sign.sign_json(VAR_16, VAR_5, VAR_12)
self.mock_perspective_server.sign_response(VAR_16)
return VAR_16
def FUNC_15(
self, VAR_14: str, VAR_15: str, VAR_16: dict
) -> None:
async def FUNC_28(VAR_40, VAR_41, VAR_48, **VAR_3):
self.assertEqual(VAR_40, self.mock_perspective_server.server_name)
self.assertEqual(VAR_41, "/_matrix/VAR_0/v2/query")
VAR_51 = VAR_48["server_keys"]
self.assertEqual(list(VAR_51[VAR_14].keys()), [VAR_15])
return {"server_keys": [VAR_16]}
self.http_client.post_json.side_effect = FUNC_28
def FUNC_16(self):
self.reactor.advance(100)
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_34 = "server2"
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = self.build_perspectives_response(
VAR_34, VAR_36, VAR_39,
)
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertIn(VAR_34, VAR_42)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
def FUNC_17(self):
self.reactor.advance(100)
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_34 = self.mock_perspective_server.server_name
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = self.build_perspectives_response(
VAR_34, VAR_36, VAR_39
)
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertIn(VAR_34, VAR_42)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
def FUNC_18(self):
self.reactor.advance(100)
VAR_34 = "server2"
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
def FUNC_29():
return self.build_perspectives_response(
VAR_34, VAR_36, VAR_39
)
def FUNC_30(VAR_16):
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_23 = {VAR_34: {"key1": 0}}
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
return self.get_success(VAR_35.get_keys(VAR_23))
VAR_16 = FUNC_29()
VAR_42 = FUNC_30(VAR_16)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.verify_key, VAR_37)
VAR_16 = FUNC_29()
del VAR_16["signatures"][self.mock_perspective_server.server_name]
VAR_42 = FUNC_30(VAR_16)
self.assertEqual(VAR_42, {}, "Expected empty dict with missing persp server sig")
VAR_16 = FUNC_29()
del VAR_16["signatures"][VAR_34]
VAR_42 = FUNC_30(VAR_16)
self.assertEqual(VAR_42, {}, "Expected empty dict with missing origin server sig")
def FUNC_0(VAR_0):
return "%s:%s" % (VAR_0.alg, VAR_0.version)
@defer.inlineCallbacks
def FUNC_1(VAR_1, *VAR_2, **VAR_3):
with LoggingContext("testctx") as ctx:
ctx.request = "testctx"
VAR_49 = yield VAR_1(*VAR_2, **VAR_3)
return VAR_49
def FUNC_2(VAR_4, *VAR_2):
@defer.inlineCallbacks
def FUNC_19():
VAR_50 = yield VAR_4.verify_json_for_server(*VAR_2)
return VAR_50
return FUNC_1(FUNC_19)
|
import time
from mock import Mock
import canonicaljson
import signedjson.key
import signedjson.sign
from nacl.signing import SigningKey
from signedjson.key import .encode_verify_key_base64, get_verify_key
from twisted.internet import .defer
from twisted.internet.defer import Deferred, ensureDeferred
from synapse.api.errors import SynapseError
from synapse.crypto import .keyring
from synapse.crypto.keyring import (
PerspectivesKeyFetcher,
ServerKeyFetcher,
StoreKeyFetcher,
)
from synapse.logging.context import (
LoggingContext,
current_context,
make_deferred_yieldable,
)
from synapse.storage.keys import FetchKeyResult
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import logcontext_clean
class CLASS_0:
def __init__(self):
self.server_name = "mock_server"
self.key = signedjson.key.generate_signing_key(0)
def FUNC_3(self):
VAR_17 = signedjson.key.get_verify_key(self.key)
return {"%s:%s" % (VAR_17.alg, VAR_17.version): encode_verify_key_base64(VAR_17)}
def FUNC_4(self, VAR_5, VAR_6):
VAR_18 = "%s:%s" % (VAR_6.alg, VAR_6.version)
VAR_7 = {
"server_name": VAR_5,
"old_verify_keys": {},
"valid_until_ts": time.time() * 1000 + 3600,
"verify_keys": {VAR_18: {"key": encode_verify_key_base64(VAR_6)}},
}
self.sign_response(VAR_7)
return VAR_7
def FUNC_5(self, VAR_7):
signedjson.sign.sign_json(VAR_7, self.server_name, self.key)
@logcontext_clean
class CLASS_1(unittest.HomeserverTestCase):
def FUNC_6(self, VAR_8, VAR_9):
self.assertEquals(getattr(current_context(), "request", None), VAR_9)
return VAR_8
def FUNC_7(self):
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock()
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_19,))
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server10", VAR_20)
VAR_22 = Deferred()
async def FUNC_20(VAR_23):
self.assertEquals(current_context().request, "context_11")
self.assertEqual(VAR_23, {"server10": {FUNC_0(VAR_20): 0}})
await make_deferred_yieldable(VAR_22)
return {
"server10": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 100)
}
}
VAR_19.get_keys.side_effect = FUNC_20
async def FUNC_21():
with LoggingContext("context_11") as context_11:
context_11.request = "context_11"
VAR_52 = VAR_4.verify_json_objects_for_server(
[("server10", VAR_21, 0, "test10"), ("server11", {}, 0, "test11")]
)
self.assertTrue(VAR_52[1].called)
try:
await VAR_52[1]
self.assertFalse("unsigned json didn't cause a failure")
except SynapseError:
pass
self.assertFalse(VAR_52[0].called)
VAR_52[0].addBoth(self.check_context, None)
await make_deferred_yieldable(VAR_52[0])
VAR_24 = ensureDeferred(FUNC_21())
VAR_19.get_keys.assert_called_once()
async def FUNC_22(VAR_23):
self.assertEquals(current_context().request, "context_12")
return {
"server10": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 100)
}
}
VAR_19.get_keys.reset_mock()
VAR_19.get_keys.side_effect = FUNC_22
VAR_25 = [0]
async def FUNC_23():
with LoggingContext("context_12") as context_12:
context_12.request = "context_12"
VAR_53 = VAR_4.verify_json_objects_for_server(
[("server10", VAR_21, 0, "test")]
)
VAR_53[0].addBoth(self.check_context, None)
VAR_25[0] = 1
await make_deferred_yieldable(VAR_53[0])
VAR_25[0] = 2
VAR_26 = ensureDeferred(FUNC_23())
self.pump()
self.assertEqual(VAR_25[0], 1)
VAR_19.get_keys.assert_not_called()
VAR_22.callback(None)
self.get_success(VAR_24)
self.get_success(VAR_26)
def FUNC_8(self):
VAR_4 = keyring.Keyring(self.hs)
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_27 = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", FUNC_0(VAR_20), FetchKeyResult(get_verify_key(VAR_20), 1000))],
)
self.get_success(VAR_27)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server9", VAR_20)
VAR_28 = FUNC_2(VAR_4, "server9", {}, 0, "test unsigned")
self.get_failure(VAR_28, SynapseError)
VAR_28 = FUNC_2(VAR_4, "server9", VAR_21, 500, "test signed")
self.get_success(VAR_28)
def FUNC_9(self):
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock(return_value=make_awaitable({}))
VAR_4 = keyring.Keyring(
self.hs, key_fetchers=(StoreKeyFetcher(self.hs), VAR_19)
)
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_27 = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", FUNC_0(VAR_20), FetchKeyResult(get_verify_key(VAR_20), None))],
)
self.get_success(VAR_27)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server9", VAR_20)
VAR_28 = FUNC_2(VAR_4, "server9", {}, 0, "test unsigned")
self.get_failure(VAR_28, SynapseError)
VAR_28 = FUNC_2(
VAR_4, "server9", VAR_21, 500, "test signed non-zero min"
)
self.get_failure(VAR_28, SynapseError)
VAR_19.get_keys.assert_called_once_with(
{"server9": {FUNC_0(VAR_20): 500}}
)
VAR_28 = FUNC_2(
VAR_4, "server9", VAR_21, 0, "test signed with zero min"
)
self.get_success(VAR_28)
def FUNC_10(self):
VAR_20 = signedjson.key.generate_signing_key(1)
async def FUNC_24(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 1200)
}
}
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock(side_effect=FUNC_24)
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_19,))
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server1", VAR_20)
VAR_29 = VAR_4.verify_json_objects_for_server(
[("server1", VAR_21, 500, "test1"), ("server1", VAR_21, 1500, "test2")]
)
self.assertEqual(len(VAR_29), 2)
self.get_success(VAR_29[0])
VAR_30 = self.get_failure(VAR_29[1], SynapseError).value
self.assertEqual(VAR_30.errcode, "M_UNAUTHORIZED")
self.assertEqual(VAR_30.code, 401)
VAR_19.get_keys.assert_called_once()
def FUNC_11(self):
VAR_20 = signedjson.key.generate_signing_key(1)
async def FUNC_25(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 800)}
}
async def FUNC_26(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 1200)
}
}
VAR_31 = keyring.KeyFetcher()
VAR_31.get_keys = Mock(side_effect=FUNC_25)
VAR_32 = keyring.KeyFetcher()
VAR_32.get_keys = Mock(side_effect=FUNC_26)
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_31, VAR_32))
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server1", VAR_20)
VAR_29 = VAR_4.verify_json_objects_for_server(
[("server1", VAR_21, 1200, "test1"), ("server1", VAR_21, 1500, "test2")]
)
self.assertEqual(len(VAR_29), 2)
self.get_success(VAR_29[0])
VAR_30 = self.get_failure(VAR_29[1], SynapseError).value
self.assertEqual(VAR_30.errcode, "M_UNAUTHORIZED")
self.assertEqual(VAR_30.code, 401)
VAR_31.get_keys.assert_called_once()
VAR_32.get_keys.assert_called_once()
@logcontext_clean
class CLASS_2(unittest.HomeserverTestCase):
def FUNC_12(self, VAR_10, VAR_11):
self.http_client = Mock()
VAR_33 = self.setup_test_homeserver(federation_http_client=self.http_client)
return VAR_33
def FUNC_13(self):
self.reactor.advance(100)
VAR_34 = "server2"
VAR_35 = ServerKeyFetcher(self.hs)
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = {
"server_name": VAR_34,
"old_verify_keys": {},
"valid_until_ts": VAR_39,
"verify_keys": {
VAR_38: {
"key": signedjson.key.encode_verify_key_base64(VAR_37)
}
},
}
signedjson.sign.sign_json(VAR_16, VAR_34, VAR_36)
async def FUNC_27(VAR_40, VAR_41, **VAR_3):
self.assertEqual(VAR_40, VAR_34)
self.assertEqual(VAR_41, "/_matrix/VAR_0/v2/server/key1")
return VAR_16
self.http_client.get_json.side_effect = FUNC_27
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], VAR_34)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
VAR_16["server_name"] = "OTHER_SERVER"
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertEqual(VAR_42, {})
class CLASS_3(unittest.HomeserverTestCase):
def FUNC_12(self, VAR_10, VAR_11):
self.mock_perspective_server = CLASS_0()
self.http_client = Mock()
VAR_46 = self.default_config()
VAR_46["trusted_key_servers"] = [
{
"server_name": self.mock_perspective_server.server_name,
"verify_keys": self.mock_perspective_server.get_verify_keys(),
}
]
return self.setup_test_homeserver(
federation_http_client=self.http_client, VAR_46=config
)
def FUNC_14(
self, VAR_5: str, VAR_12: SigningKey, VAR_13: int,
) -> dict:
VAR_6 = signedjson.key.get_verify_key(VAR_12)
VAR_47 = "%s:%s" % (VAR_6.alg, VAR_6.version)
VAR_16 = {
"server_name": VAR_5,
"old_verify_keys": {},
"valid_until_ts": VAR_13,
"verify_keys": {
VAR_47: {
"key": signedjson.key.encode_verify_key_base64(VAR_6)
}
},
}
signedjson.sign.sign_json(VAR_16, VAR_5, VAR_12)
self.mock_perspective_server.sign_response(VAR_16)
return VAR_16
def FUNC_15(
self, VAR_14: str, VAR_15: str, VAR_16: dict
) -> None:
async def FUNC_28(VAR_40, VAR_41, VAR_48, **VAR_3):
self.assertEqual(VAR_40, self.mock_perspective_server.server_name)
self.assertEqual(VAR_41, "/_matrix/VAR_0/v2/query")
VAR_51 = VAR_48["server_keys"]
self.assertEqual(list(VAR_51[VAR_14].keys()), [VAR_15])
return {"server_keys": [VAR_16]}
self.http_client.post_json.side_effect = FUNC_28
def FUNC_16(self):
self.reactor.advance(100)
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_34 = "server2"
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = self.build_perspectives_response(
VAR_34, VAR_36, VAR_39,
)
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertIn(VAR_34, VAR_42)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
def FUNC_17(self):
self.reactor.advance(100)
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_34 = self.mock_perspective_server.server_name
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = self.build_perspectives_response(
VAR_34, VAR_36, VAR_39
)
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertIn(VAR_34, VAR_42)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
def FUNC_18(self):
self.reactor.advance(100)
VAR_34 = "server2"
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
def FUNC_29():
return self.build_perspectives_response(
VAR_34, VAR_36, VAR_39
)
def FUNC_30(VAR_16):
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_23 = {VAR_34: {"key1": 0}}
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
return self.get_success(VAR_35.get_keys(VAR_23))
VAR_16 = FUNC_29()
VAR_42 = FUNC_30(VAR_16)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.verify_key, VAR_37)
VAR_16 = FUNC_29()
del VAR_16["signatures"][self.mock_perspective_server.server_name]
VAR_42 = FUNC_30(VAR_16)
self.assertEqual(VAR_42, {}, "Expected empty dict with missing persp server sig")
VAR_16 = FUNC_29()
del VAR_16["signatures"][VAR_34]
VAR_42 = FUNC_30(VAR_16)
self.assertEqual(VAR_42, {}, "Expected empty dict with missing origin server sig")
def FUNC_0(VAR_0):
return "%s:%s" % (VAR_0.alg, VAR_0.version)
@defer.inlineCallbacks
def FUNC_1(VAR_1, *VAR_2, **VAR_3):
with LoggingContext("testctx") as ctx:
ctx.request = "testctx"
VAR_49 = yield VAR_1(*VAR_2, **VAR_3)
return VAR_49
def FUNC_2(VAR_4, *VAR_2):
@defer.inlineCallbacks
def FUNC_19():
VAR_50 = yield VAR_4.verify_json_for_server(*VAR_2)
return VAR_50
return FUNC_1(FUNC_19)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
18,
24,
27,
41,
45,
46,
51,
55,
66,
69,
70,
76,
81,
82,
86,
87,
88,
90,
94,
101,
103,
107,
111,
112,
119,
122,
124,
126,
128,
129,
130,
131,
139,
143,
147,
155,
157,
159,
160,
163,
164,
166,
167,
170,
173,
181,
184,
185,
188,
189,
191,
193,
200,
204,
212,
215,
216,
219,
220,
221,
226,
227,
231,
232,
237,
241,
243,
245,
251,
255,
258,
259,
260,
269,
270,
272,
276,
282,
290,
296,
299,
308,
309,
312,
313,
320,
322,
324,
331,
332,
344,
349,
351,
359,
360,
372,
373,
377,
378,
380,
383,
384,
389,
397,
399,
408,
419,
420,
424,
431,
435,
436,
440,
442,
444,
446,
448,
454,
458,
460,
469,
470,
482,
486,
489,
493,
494,
496,
498,
504,
508,
510,
519,
520,
532,
536,
539,
541,
547,
552,
558,
559,
564,
565,
570,
571,
576,
577,
581,
582,
586,
587,
591,
592,
597,
602,
604,
579,
594,
595,
596,
195,
196,
197,
239,
274,
403,
404,
405,
428,
429,
430,
488,
489,
490,
491,
492,
538
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
18,
24,
27,
41,
45,
46,
51,
55,
66,
69,
70,
76,
81,
82,
86,
87,
88,
90,
94,
101,
103,
107,
111,
112,
119,
122,
124,
126,
128,
129,
130,
131,
139,
143,
147,
155,
157,
159,
160,
163,
164,
166,
167,
170,
173,
181,
184,
185,
188,
189,
191,
193,
200,
204,
212,
215,
216,
219,
220,
221,
226,
227,
231,
232,
237,
241,
243,
245,
251,
255,
258,
259,
260,
269,
270,
272,
276,
282,
290,
296,
299,
308,
309,
312,
313,
320,
322,
324,
331,
332,
344,
349,
351,
359,
360,
372,
373,
377,
378,
380,
383,
384,
389,
397,
401,
410,
421,
422,
426,
433,
437,
438,
442,
444,
446,
448,
450,
456,
460,
462,
471,
472,
484,
488,
491,
495,
496,
498,
500,
506,
510,
512,
521,
522,
534,
538,
541,
543,
549,
554,
560,
561,
566,
567,
572,
573,
578,
579,
583,
584,
588,
589,
593,
594,
599,
604,
606,
581,
596,
597,
598,
195,
196,
197,
239,
274,
405,
406,
407,
430,
431,
432,
490,
491,
492,
493,
494,
540
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cgi
import logging
import random
import sys
import urllib.parse
from io import BytesIO
from typing import Callable, Dict, List, Optional, Tuple, Union
import attr
import treq
from canonicaljson import encode_canonical_json
from prometheus_client import Counter
from signedjson.sign import sign_json
from zope.interface import implementer
from twisted.internet import defer
from twisted.internet.error import DNSLookupError
from twisted.internet.interfaces import IReactorPluggableNameResolver, IReactorTime
from twisted.internet.task import _EPSILON, Cooperator
from twisted.web.http_headers import Headers
from twisted.web.iweb import IBodyProducer, IResponse
import synapse.metrics
import synapse.util.retryutils
from synapse.api.errors import (
FederationDeniedError,
HttpResponseException,
RequestSendFailed,
)
from synapse.http import QuieterFileBodyProducer
from synapse.http.client import (
BlacklistingAgentWrapper,
IPBlacklistingResolver,
encode_query_args,
readBodyToFile,
)
from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import (
inject_active_span_byte_dict,
set_tag,
start_active_span,
tags,
)
from synapse.types import JsonDict
from synapse.util import json_decoder
from synapse.util.async_helpers import timeout_deferred
from synapse.util.metrics import Measure
logger = logging.getLogger(__name__)
outgoing_requests_counter = Counter(
"synapse_http_matrixfederationclient_requests", "", ["method"]
)
incoming_responses_counter = Counter(
"synapse_http_matrixfederationclient_responses", "", ["method", "code"]
)
MAX_LONG_RETRIES = 10
MAX_SHORT_RETRIES = 3
MAXINT = sys.maxsize
_next_id = 1
QueryArgs = Dict[str, Union[str, List[str]]]
@attr.s(slots=True, frozen=True)
class MatrixFederationRequest:
method = attr.ib(type=str)
"""HTTP method
"""
path = attr.ib(type=str)
"""HTTP path
"""
destination = attr.ib(type=str)
"""The remote server to send the HTTP request to.
"""
json = attr.ib(default=None, type=Optional[JsonDict])
"""JSON to send in the body.
"""
json_callback = attr.ib(default=None, type=Optional[Callable[[], JsonDict]])
"""A callback to generate the JSON.
"""
query = attr.ib(default=None, type=Optional[dict])
"""Query arguments.
"""
txn_id = attr.ib(default=None, type=Optional[str])
"""Unique ID for this request (for logging)
"""
uri = attr.ib(init=False, type=bytes)
"""The URI of this request
"""
def __attrs_post_init__(self) -> None:
global _next_id
txn_id = "%s-O-%s" % (self.method, _next_id)
_next_id = (_next_id + 1) % (MAXINT - 1)
object.__setattr__(self, "txn_id", txn_id)
destination_bytes = self.destination.encode("ascii")
path_bytes = self.path.encode("ascii")
if self.query:
query_bytes = encode_query_args(self.query)
else:
query_bytes = b""
# The object is frozen so we can pre-compute this.
uri = urllib.parse.urlunparse(
(b"matrix", destination_bytes, path_bytes, None, query_bytes, b"")
)
object.__setattr__(self, "uri", uri)
def get_json(self) -> Optional[JsonDict]:
if self.json_callback:
return self.json_callback()
return self.json
async def _handle_json_response(
reactor: IReactorTime,
timeout_sec: float,
request: MatrixFederationRequest,
response: IResponse,
start_ms: int,
) -> JsonDict:
"""
Reads the JSON body of a response, with a timeout
Args:
reactor: twisted reactor, for the timeout
timeout_sec: number of seconds to wait for response to complete
request: the request that triggered the response
response: response to the request
start_ms: Timestamp when request was made
Returns:
The parsed JSON response
"""
try:
check_content_type_is_json(response.headers)
# Use the custom JSON decoder (partially re-implements treq.json_content).
d = treq.text_content(response, encoding="utf-8")
d.addCallback(json_decoder.decode)
d = timeout_deferred(d, timeout=timeout_sec, reactor=reactor)
body = await make_deferred_yieldable(d)
except defer.TimeoutError as e:
logger.warning(
"{%s} [%s] Timed out reading response - %s %s",
request.txn_id,
request.destination,
request.method,
request.uri.decode("ascii"),
)
raise RequestSendFailed(e, can_retry=True) from e
except Exception as e:
logger.warning(
"{%s} [%s] Error reading response %s %s: %s",
request.txn_id,
request.destination,
request.method,
request.uri.decode("ascii"),
e,
)
raise
time_taken_secs = reactor.seconds() - start_ms / 1000
logger.info(
"{%s} [%s] Completed request: %d %s in %.2f secs - %s %s",
request.txn_id,
request.destination,
response.code,
response.phrase.decode("ascii", errors="replace"),
time_taken_secs,
request.method,
request.uri.decode("ascii"),
)
return body
class MatrixFederationHttpClient:
"""HTTP client used to talk to other homeservers over the federation
protocol. Send client certificates and signs requests.
Attributes:
agent (twisted.web.client.Agent): The twisted Agent used to send the
requests.
"""
def __init__(self, hs, tls_client_options_factory):
self.hs = hs
self.signing_key = hs.signing_key
self.server_name = hs.hostname
real_reactor = hs.get_reactor()
# We need to use a DNS resolver which filters out blacklisted IP
# addresses, to prevent DNS rebinding.
nameResolver = IPBlacklistingResolver(
real_reactor, None, hs.config.federation_ip_range_blacklist
)
@implementer(IReactorPluggableNameResolver)
class Reactor:
def __getattr__(_self, attr):
if attr == "nameResolver":
return nameResolver
else:
return getattr(real_reactor, attr)
self.reactor = Reactor()
user_agent = hs.version_string
if hs.config.user_agent_suffix:
user_agent = "%s %s" % (user_agent, hs.config.user_agent_suffix)
user_agent = user_agent.encode("ascii")
self.agent = MatrixFederationAgent(
self.reactor, tls_client_options_factory, user_agent
)
# Use a BlacklistingAgentWrapper to prevent circumventing the IP
# blacklist via IP literals in server names
self.agent = BlacklistingAgentWrapper(
self.agent, ip_blacklist=hs.config.federation_ip_range_blacklist,
)
self.clock = hs.get_clock()
self._store = hs.get_datastore()
self.version_string_bytes = hs.version_string.encode("ascii")
self.default_timeout = 60
def schedule(x):
self.reactor.callLater(_EPSILON, x)
self._cooperator = Cooperator(scheduler=schedule)
async def _send_request_with_optional_trailing_slash(
self,
request: MatrixFederationRequest,
try_trailing_slash_on_400: bool = False,
**send_request_args
) -> IResponse:
"""Wrapper for _send_request which can optionally retry the request
upon receiving a combination of a 400 HTTP response code and a
'M_UNRECOGNIZED' errcode. This is a workaround for Synapse <= v0.99.3
due to #3622.
Args:
request: details of request to be sent
try_trailing_slash_on_400: Whether on receiving a 400
'M_UNRECOGNIZED' from the server to retry the request with a
trailing slash appended to the request path.
send_request_args: A dictionary of arguments to pass to `_send_request()`.
Raises:
HttpResponseException: If we get an HTTP response code >= 300
(except 429).
Returns:
Parsed JSON response body.
"""
try:
response = await self._send_request(request, **send_request_args)
except HttpResponseException as e:
# Received an HTTP error > 300. Check if it meets the requirements
# to retry with a trailing slash
if not try_trailing_slash_on_400:
raise
if e.code != 400 or e.to_synapse_error().errcode != "M_UNRECOGNIZED":
raise
# Retry with a trailing slash if we received a 400 with
# 'M_UNRECOGNIZED' which some endpoints can return when omitting a
# trailing slash on Synapse <= v0.99.3.
logger.info("Retrying request with trailing slash")
# Request is frozen so we create a new instance
request = attr.evolve(request, path=request.path + "/")
response = await self._send_request(request, **send_request_args)
return response
async def _send_request(
self,
request: MatrixFederationRequest,
retry_on_dns_fail: bool = True,
timeout: Optional[int] = None,
long_retries: bool = False,
ignore_backoff: bool = False,
backoff_on_404: bool = False,
) -> IResponse:
"""
Sends a request to the given server.
Args:
request: details of request to be sent
retry_on_dns_fail: true if the request should be retied on DNS failures
timeout: number of milliseconds to wait for the response headers
(including connecting to the server), *for each attempt*.
60s by default.
long_retries: whether to use the long retry algorithm.
The regular retry algorithm makes 4 attempts, with intervals
[0.5s, 1s, 2s].
The long retry algorithm makes 11 attempts, with intervals
[4s, 16s, 60s, 60s, ...]
Both algorithms add -20%/+40% jitter to the retry intervals.
Note that the above intervals are *in addition* to the time spent
waiting for the request to complete (up to `timeout` ms).
NB: the long retry algorithm takes over 20 minutes to complete, with
a default timeout of 60s!
ignore_backoff: true to ignore the historical backoff data
and try the request anyway.
backoff_on_404: Back off if we get a 404
Returns:
Resolves with the HTTP response object on success.
Raises:
HttpResponseException: If we get an HTTP response code >= 300
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
"""
if timeout:
_sec_timeout = timeout / 1000
else:
_sec_timeout = self.default_timeout
if (
self.hs.config.federation_domain_whitelist is not None
and request.destination not in self.hs.config.federation_domain_whitelist
):
raise FederationDeniedError(request.destination)
limiter = await synapse.util.retryutils.get_retry_limiter(
request.destination,
self.clock,
self._store,
backoff_on_404=backoff_on_404,
ignore_backoff=ignore_backoff,
)
method_bytes = request.method.encode("ascii")
destination_bytes = request.destination.encode("ascii")
path_bytes = request.path.encode("ascii")
if request.query:
query_bytes = encode_query_args(request.query)
else:
query_bytes = b""
scope = start_active_span(
"outgoing-federation-request",
tags={
tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
tags.PEER_ADDRESS: request.destination,
tags.HTTP_METHOD: request.method,
tags.HTTP_URL: request.path,
},
finish_on_close=True,
)
# Inject the span into the headers
headers_dict = {} # type: Dict[bytes, List[bytes]]
inject_active_span_byte_dict(headers_dict, request.destination)
headers_dict[b"User-Agent"] = [self.version_string_bytes]
with limiter, scope:
# XXX: Would be much nicer to retry only at the transaction-layer
# (once we have reliable transactions in place)
if long_retries:
retries_left = MAX_LONG_RETRIES
else:
retries_left = MAX_SHORT_RETRIES
url_bytes = request.uri
url_str = url_bytes.decode("ascii")
url_to_sign_bytes = urllib.parse.urlunparse(
(b"", b"", path_bytes, None, query_bytes, b"")
)
while True:
try:
json = request.get_json()
if json:
headers_dict[b"Content-Type"] = [b"application/json"]
auth_headers = self.build_auth_headers(
destination_bytes, method_bytes, url_to_sign_bytes, json
)
data = encode_canonical_json(json)
producer = QuieterFileBodyProducer(
BytesIO(data), cooperator=self._cooperator
) # type: Optional[IBodyProducer]
else:
producer = None
auth_headers = self.build_auth_headers(
destination_bytes, method_bytes, url_to_sign_bytes
)
headers_dict[b"Authorization"] = auth_headers
logger.debug(
"{%s} [%s] Sending request: %s %s; timeout %fs",
request.txn_id,
request.destination,
request.method,
url_str,
_sec_timeout,
)
outgoing_requests_counter.labels(request.method).inc()
try:
with Measure(self.clock, "outbound_request"):
# we don't want all the fancy cookie and redirect handling
# that treq.request gives: just use the raw Agent.
request_deferred = self.agent.request(
method_bytes,
url_bytes,
headers=Headers(headers_dict),
bodyProducer=producer,
)
request_deferred = timeout_deferred(
request_deferred,
timeout=_sec_timeout,
reactor=self.reactor,
)
response = await request_deferred
except DNSLookupError as e:
raise RequestSendFailed(e, can_retry=retry_on_dns_fail) from e
except Exception as e:
raise RequestSendFailed(e, can_retry=True) from e
incoming_responses_counter.labels(
request.method, response.code
).inc()
set_tag(tags.HTTP_STATUS_CODE, response.code)
response_phrase = response.phrase.decode("ascii", errors="replace")
if 200 <= response.code < 300:
logger.debug(
"{%s} [%s] Got response headers: %d %s",
request.txn_id,
request.destination,
response.code,
response_phrase,
)
pass
else:
logger.info(
"{%s} [%s] Got response headers: %d %s",
request.txn_id,
request.destination,
response.code,
response_phrase,
)
# :'(
# Update transactions table?
d = treq.content(response)
d = timeout_deferred(
d, timeout=_sec_timeout, reactor=self.reactor
)
try:
body = await make_deferred_yieldable(d)
except Exception as e:
# Eh, we're already going to raise an exception so lets
# ignore if this fails.
logger.warning(
"{%s} [%s] Failed to get error response: %s %s: %s",
request.txn_id,
request.destination,
request.method,
url_str,
_flatten_response_never_received(e),
)
body = None
exc = HttpResponseException(
response.code, response_phrase, body
)
# Retry if the error is a 429 (Too Many Requests),
# otherwise just raise a standard HttpResponseException
if response.code == 429:
raise RequestSendFailed(exc, can_retry=True) from exc
else:
raise exc
break
except RequestSendFailed as e:
logger.info(
"{%s} [%s] Request failed: %s %s: %s",
request.txn_id,
request.destination,
request.method,
url_str,
_flatten_response_never_received(e.inner_exception),
)
if not e.can_retry:
raise
if retries_left and not timeout:
if long_retries:
delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
delay = min(delay, 60)
delay *= random.uniform(0.8, 1.4)
else:
delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left)
delay = min(delay, 2)
delay *= random.uniform(0.8, 1.4)
logger.debug(
"{%s} [%s] Waiting %ss before re-sending...",
request.txn_id,
request.destination,
delay,
)
await self.clock.sleep(delay)
retries_left -= 1
else:
raise
except Exception as e:
logger.warning(
"{%s} [%s] Request failed: %s %s: %s",
request.txn_id,
request.destination,
request.method,
url_str,
_flatten_response_never_received(e),
)
raise
return response
def build_auth_headers(
    self,
    destination: Optional[bytes],
    method: bytes,
    url_bytes: bytes,
    content: Optional[JsonDict] = None,
    destination_is: Optional[bytes] = None,
) -> List[bytes]:
    """
    Build the Authorization headers for a federation request.

    Args:
        destination: The destination homeserver of the request.
            May be None if the destination is an identity server, in which
            case destination_is must be non-None.
        method: The HTTP method of the request
        url_bytes: The URI path of the request
        content: The body of the request
        destination_is: As 'destination', but if the destination is an
            identity server

    Returns:
        A list of headers to be added as "Authorization:" headers
    """
    to_sign = {
        "method": method.decode("ascii"),
        "uri": url_bytes.decode("ascii"),
        "origin": self.server_name,
    }

    # Only include the optional fields that were actually supplied.
    if destination is not None:
        to_sign["destination"] = destination.decode("ascii")

    if destination_is is not None:
        to_sign["destination_is"] = destination_is.decode("ascii")

    if content is not None:
        to_sign["content"] = content

    signed = sign_json(to_sign, self.server_name, self.signing_key)

    # One Authorization header per signature our server produced.
    return [
        (
            'X-Matrix origin=%s,key="%s",sig="%s"'
            % (self.server_name, key_id, signature)
        ).encode("ascii")
        for key_id, signature in signed["signatures"][self.server_name].items()
    ]
async def put_json(
    self,
    destination: str,
    path: str,
    args: Optional[QueryArgs] = None,
    data: Optional[JsonDict] = None,
    json_data_callback: Optional[Callable[[], JsonDict]] = None,
    long_retries: bool = False,
    timeout: Optional[int] = None,
    ignore_backoff: bool = False,
    backoff_on_404: bool = False,
    try_trailing_slash_on_400: bool = False,
) -> Union[JsonDict, list]:
    """Send the given JSON to the remote server via PUT and parse the reply.

    Args:
        destination: The remote server to send the HTTP request to.
        path: The HTTP path.
        args: query params
        data: A dict containing the data that will be used as
            the request body. This will be encoded as JSON.
        json_data_callback: A callable returning the dict to
            use as the request body.
        long_retries: whether to use the long retry algorithm. See
            docs on _send_request for details.
        timeout: number of milliseconds to wait for the response.
            self._default_timeout (60s) by default.
            Note that we may make several attempts to send the request; this
            timeout applies to the time spent waiting for response headers for
            *each* attempt (including connection time) as well as the time spent
            reading the response body after a 200 response.
        ignore_backoff: true to ignore the historical backoff data
            and try the request anyway.
        backoff_on_404: True if we should count a 404 response as
            a failure of the server (and should therefore back off future
            requests).
        try_trailing_slash_on_400: True if on a 400 M_UNRECOGNIZED
            response we should try appending a trailing slash to the end
            of the request. Workaround for #3622 in Synapse <= v0.99.3.

    Returns:
        Succeeds when we get a 2xx HTTP response. The result will be the
        decoded JSON body.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """
    request = MatrixFederationRequest(
        method="PUT",
        destination=destination,
        path=path,
        query=args,
        json_callback=json_data_callback,
        json=data,
    )

    start_ms = self.clock.time_msec()

    response = await self._send_request_with_optional_trailing_slash(
        request,
        try_trailing_slash_on_400,
        backoff_on_404=backoff_on_404,
        ignore_backoff=ignore_backoff,
        long_retries=long_retries,
        timeout=timeout,
    )

    # The body-reading timeout mirrors the per-attempt request timeout.
    _sec_timeout = timeout / 1000 if timeout is not None else self.default_timeout

    return await _handle_json_response(
        self.reactor, _sec_timeout, request, response, start_ms
    )
async def post_json(
    self,
    destination: str,
    path: str,
    data: Optional[JsonDict] = None,
    long_retries: bool = False,
    timeout: Optional[int] = None,
    ignore_backoff: bool = False,
    args: Optional[QueryArgs] = None,
) -> Union[JsonDict, list]:
    """Sends the specified json data using POST

    Args:
        destination: The remote server to send the HTTP request to.
        path: The HTTP path.
        data: A dict containing the data that will be used as
            the request body. This will be encoded as JSON.
        long_retries: whether to use the long retry algorithm. See
            docs on _send_request for details.
        timeout: number of milliseconds to wait for the response.
            self._default_timeout (60s) by default.
            Note that we may make several attempts to send the request; this
            timeout applies to the time spent waiting for response headers for
            *each* attempt (including connection time) as well as the time spent
            reading the response body after a 200 response.
        ignore_backoff: true to ignore the historical backoff data and
            try the request anyway.
        args: query params

    Returns:
        Succeeds when we get a 2xx HTTP response. The result will be the
        decoded JSON body.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """
    request = MatrixFederationRequest(
        method="POST", destination=destination, path=path, query=args, json=data
    )

    start_ms = self.clock.time_msec()

    response = await self._send_request(
        request,
        long_retries=long_retries,
        timeout=timeout,
        ignore_backoff=ignore_backoff,
    )

    # Check `is not None` (not truthiness) so an explicit timeout of 0 is
    # honoured, consistent with put_json/get_json/delete_json.
    if timeout is not None:
        _sec_timeout = timeout / 1000
    else:
        _sec_timeout = self.default_timeout

    body = await _handle_json_response(
        self.reactor, _sec_timeout, request, response, start_ms
    )
    return body
async def get_json(
    self,
    destination: str,
    path: str,
    args: Optional[QueryArgs] = None,
    retry_on_dns_fail: bool = True,
    timeout: Optional[int] = None,
    ignore_backoff: bool = False,
    try_trailing_slash_on_400: bool = False,
) -> Union[JsonDict, list]:
    """GET some JSON from the given homeserver and path.

    Args:
        destination: The remote server to send the HTTP request to.
        path: The HTTP path.
        args: A dictionary used to create query strings, defaults to None.
        retry_on_dns_fail: true if the request should be retried on DNS
            failures
        timeout: number of milliseconds to wait for the response.
            self._default_timeout (60s) by default.
            Note that we may make several attempts to send the request; this
            timeout applies to the time spent waiting for response headers for
            *each* attempt (including connection time) as well as the time spent
            reading the response body after a 200 response.
        ignore_backoff: true to ignore the historical backoff data
            and try the request anyway.
        try_trailing_slash_on_400: True if on a 400 M_UNRECOGNIZED
            response we should try appending a trailing slash to the end of
            the request. Workaround for #3622 in Synapse <= v0.99.3.

    Returns:
        Succeeds when we get a 2xx HTTP response. The result will be the
        decoded JSON body.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """
    request = MatrixFederationRequest(
        method="GET", destination=destination, path=path, query=args
    )

    start_ms = self.clock.time_msec()

    response = await self._send_request_with_optional_trailing_slash(
        request,
        try_trailing_slash_on_400,
        backoff_on_404=False,
        ignore_backoff=ignore_backoff,
        retry_on_dns_fail=retry_on_dns_fail,
        timeout=timeout,
    )

    # The body-reading timeout mirrors the per-attempt request timeout.
    _sec_timeout = timeout / 1000 if timeout is not None else self.default_timeout

    return await _handle_json_response(
        self.reactor, _sec_timeout, request, response, start_ms
    )
async def delete_json(
    self,
    destination: str,
    path: str,
    long_retries: bool = False,
    timeout: Optional[int] = None,
    ignore_backoff: bool = False,
    args: Optional[QueryArgs] = None,
) -> Union[JsonDict, list]:
    """Send a DELETE request to the remote, expecting a JSON response.

    Args:
        destination: The remote server to send the HTTP request to.
        path: The HTTP path.
        long_retries: whether to use the long retry algorithm. See
            docs on _send_request for details.
        timeout: number of milliseconds to wait for the response.
            self._default_timeout (60s) by default.
            Note that we may make several attempts to send the request; this
            timeout applies to the time spent waiting for response headers for
            *each* attempt (including connection time) as well as the time spent
            reading the response body after a 200 response.
        ignore_backoff: true to ignore the historical backoff data and
            try the request anyway.
        args: query params

    Returns:
        Succeeds when we get a 2xx HTTP response. The result will be the
        decoded JSON body.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """
    request = MatrixFederationRequest(
        method="DELETE", destination=destination, path=path, query=args
    )

    start_ms = self.clock.time_msec()

    response = await self._send_request(
        request,
        long_retries=long_retries,
        timeout=timeout,
        ignore_backoff=ignore_backoff,
    )

    # The body-reading timeout mirrors the per-attempt request timeout.
    _sec_timeout = timeout / 1000 if timeout is not None else self.default_timeout

    return await _handle_json_response(
        self.reactor, _sec_timeout, request, response, start_ms
    )
async def get_file(
    self,
    destination: str,
    path: str,
    output_stream,
    args: Optional[QueryArgs] = None,
    retry_on_dns_fail: bool = True,
    max_size: Optional[int] = None,
    ignore_backoff: bool = False,
) -> Tuple[int, Dict[bytes, List[bytes]]]:
    """GETs a file from a given homeserver

    Args:
        destination: The remote server to send the HTTP request to.
        path: The HTTP path to GET.
        output_stream: File to write the response body to.
        args: Optional dictionary used to create the query string.
        retry_on_dns_fail: true if the request should be retried on DNS
            failures
        max_size: maximum number of bytes to download; passed through to
            readBodyToFile (presumably aborts the download when exceeded --
            TODO confirm against readBodyToFile's docs)
        ignore_backoff: true to ignore the historical backoff data
            and try the request anyway.

    Returns:
        Resolves with an (int, dict) tuple of
        the file length and a dict of the response headers.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this
            server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """
    request = MatrixFederationRequest(
        method="GET", destination=destination, path=path, query=args
    )

    response = await self._send_request(
        request, retry_on_dns_fail=retry_on_dns_fail, ignore_backoff=ignore_backoff
    )

    headers = dict(response.headers.getAllRawHeaders())

    try:
        # Stream the body straight into output_stream, bounded by max_size
        # and the client's default timeout.
        d = readBodyToFile(response, output_stream, max_size)
        d.addTimeout(self.default_timeout, self.reactor)
        length = await make_deferred_yieldable(d)
    except Exception as e:
        logger.warning(
            "{%s} [%s] Error reading response: %s",
            request.txn_id,
            request.destination,
            e,
        )
        raise

    logger.info(
        "{%s} [%s] Completed: %d %s [%d bytes] %s %s",
        request.txn_id,
        request.destination,
        response.code,
        response.phrase.decode("ascii", errors="replace"),
        length,
        request.method,
        request.uri.decode("ascii"),
    )
    return (length, headers)
def _flatten_response_never_received(e):
if hasattr(e, "reasons"):
reasons = ", ".join(
_flatten_response_never_received(f.value) for f in e.reasons
)
return "%s:[%s]" % (type(e).__name__, reasons)
else:
return repr(e)
def check_content_type_is_json(headers: Headers) -> None:
    """
    Check that a set of HTTP headers have a Content-Type header, and that it
    is application/json.

    Args:
        headers: headers to check

    Raises:
        RequestSendFailed: if the Content-Type header is missing or isn't JSON
    """
    raw_headers = headers.getRawHeaders(b"Content-Type")
    if raw_headers is None:
        raise RequestSendFailed(
            RuntimeError("No Content-Type header received from remote server"),
            can_retry=False,
        )

    # Only the first Content-Type header is considered.
    c_type = raw_headers[0].decode("ascii")

    media_type, _options = cgi.parse_header(c_type)
    if media_type != "application/json":
        raise RequestSendFailed(
            RuntimeError(
                "Remote server sent Content-Type header of '%s', not 'application/json'"
                % c_type,
            ),
            can_retry=False,
        )
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cgi
import logging
import random
import sys
import urllib.parse
from io import BytesIO
from typing import Callable, Dict, List, Optional, Tuple, Union
import attr
import treq
from canonicaljson import encode_canonical_json
from prometheus_client import Counter
from signedjson.sign import sign_json
from twisted.internet import defer
from twisted.internet.error import DNSLookupError
from twisted.internet.interfaces import IReactorTime
from twisted.internet.task import _EPSILON, Cooperator
from twisted.web.http_headers import Headers
from twisted.web.iweb import IBodyProducer, IResponse
import synapse.metrics
import synapse.util.retryutils
from synapse.api.errors import (
FederationDeniedError,
HttpResponseException,
RequestSendFailed,
)
from synapse.http import QuieterFileBodyProducer
from synapse.http.client import (
BlacklistingAgentWrapper,
BlacklistingReactorWrapper,
encode_query_args,
readBodyToFile,
)
from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import (
inject_active_span_byte_dict,
set_tag,
start_active_span,
tags,
)
from synapse.types import JsonDict
from synapse.util import json_decoder
from synapse.util.async_helpers import timeout_deferred
from synapse.util.metrics import Measure
logger = logging.getLogger(__name__)

# Prometheus counters for outbound federation traffic.
outgoing_requests_counter = Counter(
    "synapse_http_matrixfederationclient_requests", "", ["method"]
)
incoming_responses_counter = Counter(
    "synapse_http_matrixfederationclient_responses", "", ["method", "code"]
)

# Maximum attempts for the long/short retry algorithms; see the docstring of
# MatrixFederationHttpClient._send_request for the corresponding intervals.
MAX_LONG_RETRIES = 10
MAX_SHORT_RETRIES = 3
MAXINT = sys.maxsize  # wrap point for the per-process transaction-id counter

# Monotonically-increasing id used to label each outgoing request in the logs.
_next_id = 1

# Shape of the `args`/query parameters accepted by the request methods.
QueryArgs = Dict[str, Union[str, List[str]]]
@attr.s(slots=True, frozen=True)
class MatrixFederationRequest:
    # Immutable value object describing one outgoing federation request.
    # txn_id and uri are derived once in __attrs_post_init__.

    method = attr.ib(type=str)
    """HTTP method
    """

    path = attr.ib(type=str)
    """HTTP path
    """

    destination = attr.ib(type=str)
    """The remote server to send the HTTP request to.
    """

    json = attr.ib(default=None, type=Optional[JsonDict])
    """JSON to send in the body.
    """

    json_callback = attr.ib(default=None, type=Optional[Callable[[], JsonDict]])
    """A callback to generate the JSON.
    """

    query = attr.ib(default=None, type=Optional[dict])
    """Query arguments.
    """

    txn_id = attr.ib(default=None, type=Optional[str])
    """Unique ID for this request (for logging)
    """

    uri = attr.ib(init=False, type=bytes)
    """The URI of this request
    """

    def __attrs_post_init__(self) -> None:
        # Assign a per-process transaction id for log correlation, wrapping
        # before MAXINT.
        global _next_id
        txn_id = "%s-O-%s" % (self.method, _next_id)
        _next_id = (_next_id + 1) % (MAXINT - 1)

        # The class is frozen, so attrs blocks normal assignment;
        # object.__setattr__ bypasses that guard.
        object.__setattr__(self, "txn_id", txn_id)

        destination_bytes = self.destination.encode("ascii")
        path_bytes = self.path.encode("ascii")
        if self.query:
            query_bytes = encode_query_args(self.query)
        else:
            query_bytes = b""

        # The object is frozen so we can pre-compute this.
        uri = urllib.parse.urlunparse(
            (b"matrix", destination_bytes, path_bytes, None, query_bytes, b"")
        )
        object.__setattr__(self, "uri", uri)

    def get_json(self) -> Optional[JsonDict]:
        # Prefer the lazily-generated body (callback) over the static one.
        if self.json_callback:
            return self.json_callback()
        return self.json
async def _handle_json_response(
    reactor: IReactorTime,
    timeout_sec: float,
    request: MatrixFederationRequest,
    response: IResponse,
    start_ms: int,
) -> JsonDict:
    """
    Reads the JSON body of a response, with a timeout

    Args:
        reactor: twisted reactor, for the timeout
        timeout_sec: number of seconds to wait for response to complete
        request: the request that triggered the response
        response: response to the request
        start_ms: Timestamp when request was made

    Returns:
        The parsed JSON response

    Raises:
        RequestSendFailed: if reading the body timed out (marked retryable)
    """
    try:
        # Reject responses that are not declared as application/json before
        # attempting to parse.
        check_content_type_is_json(response.headers)

        # Use the custom JSON decoder (partially re-implements treq.json_content).
        d = treq.text_content(response, encoding="utf-8")
        d.addCallback(json_decoder.decode)
        d = timeout_deferred(d, timeout=timeout_sec, reactor=reactor)

        body = await make_deferred_yieldable(d)
    except defer.TimeoutError as e:
        logger.warning(
            "{%s} [%s] Timed out reading response - %s %s",
            request.txn_id,
            request.destination,
            request.method,
            request.uri.decode("ascii"),
        )
        # A timeout is transient, so tell the caller it may retry.
        raise RequestSendFailed(e, can_retry=True) from e
    except Exception as e:
        logger.warning(
            "{%s} [%s] Error reading response %s %s: %s",
            request.txn_id,
            request.destination,
            request.method,
            request.uri.decode("ascii"),
            e,
        )
        raise

    time_taken_secs = reactor.seconds() - start_ms / 1000

    logger.info(
        "{%s} [%s] Completed request: %d %s in %.2f secs - %s %s",
        request.txn_id,
        request.destination,
        response.code,
        response.phrase.decode("ascii", errors="replace"),
        time_taken_secs,
        request.method,
        request.uri.decode("ascii"),
    )
    return body
class MatrixFederationHttpClient:
"""HTTP client used to talk to other homeservers over the federation
protocol. Send client certificates and signs requests.
Attributes:
agent (twisted.web.client.Agent): The twisted Agent used to send the
requests.
"""
def __init__(self, hs, tls_client_options_factory):
    """
    Args:
        hs: the HomeServer instance this client belongs to
        tls_client_options_factory: factory producing the TLS options used
            for outbound federation connections
    """
    self.hs = hs
    self.signing_key = hs.signing_key
    self.server_name = hs.hostname

    # We need to use a DNS resolver which filters out blacklisted IP
    # addresses, to prevent DNS rebinding.
    self.reactor = BlacklistingReactorWrapper(
        hs.get_reactor(), None, hs.config.federation_ip_range_blacklist
    )

    user_agent = hs.version_string
    if hs.config.user_agent_suffix:
        user_agent = "%s %s" % (user_agent, hs.config.user_agent_suffix)
    user_agent = user_agent.encode("ascii")

    self.agent = MatrixFederationAgent(
        self.reactor,
        tls_client_options_factory,
        user_agent,
        hs.config.federation_ip_range_blacklist,
    )

    # Use a BlacklistingAgentWrapper to prevent circumventing the IP
    # blacklist via IP literals in server names
    self.agent = BlacklistingAgentWrapper(
        self.agent, ip_blacklist=hs.config.federation_ip_range_blacklist,
    )

    self.clock = hs.get_clock()
    self._store = hs.get_datastore()
    self.version_string_bytes = hs.version_string.encode("ascii")
    self.default_timeout = 60  # seconds, applied per attempt

    # Cooperator that schedules body production on the (blacklist-wrapped)
    # reactor rather than the global one.
    def schedule(x):
        self.reactor.callLater(_EPSILON, x)

    self._cooperator = Cooperator(scheduler=schedule)
async def _send_request_with_optional_trailing_slash(
    self,
    request: MatrixFederationRequest,
    try_trailing_slash_on_400: bool = False,
    **send_request_args
) -> IResponse:
    """Wrapper for _send_request which can optionally retry the request
    upon receiving a combination of a 400 HTTP response code and a
    'M_UNRECOGNIZED' errcode. This is a workaround for Synapse <= v0.99.3
    due to #3622.

    Args:
        request: details of request to be sent
        try_trailing_slash_on_400: Whether on receiving a 400
            'M_UNRECOGNIZED' from the server to retry the request with a
            trailing slash appended to the request path.
        send_request_args: A dictionary of arguments to pass to
            `_send_request()`.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).

    Returns:
        Parsed JSON response body.
    """
    try:
        response = await self._send_request(request, **send_request_args)
    except HttpResponseException as e:
        # Received an HTTP error > 300. Check if it meets the requirements
        # to retry with a trailing slash
        if not try_trailing_slash_on_400:
            raise

        if e.code != 400 or e.to_synapse_error().errcode != "M_UNRECOGNIZED":
            raise

        # Retry with a trailing slash if we received a 400 with
        # 'M_UNRECOGNIZED' which some endpoints can return when omitting a
        # trailing slash on Synapse <= v0.99.3.
        logger.info("Retrying request with trailing slash")

        # Request is frozen so we create a new instance
        request = attr.evolve(request, path=request.path + "/")

        response = await self._send_request(request, **send_request_args)

    return response
async def _send_request(
    self,
    request: MatrixFederationRequest,
    retry_on_dns_fail: bool = True,
    timeout: Optional[int] = None,
    long_retries: bool = False,
    ignore_backoff: bool = False,
    backoff_on_404: bool = False,
) -> IResponse:
    """
    Sends a request to the given server.

    Args:
        request: details of request to be sent
        retry_on_dns_fail: true if the request should be retied on DNS failures
        timeout: number of milliseconds to wait for the response headers
            (including connecting to the server), *for each attempt*.
            60s by default.
        long_retries: whether to use the long retry algorithm.
            The regular retry algorithm makes 4 attempts, with intervals
            [0.5s, 1s, 2s].
            The long retry algorithm makes 11 attempts, with intervals
            [4s, 16s, 60s, 60s, ...]
            Both algorithms add -20%/+40% jitter to the retry intervals.
            Note that the above intervals are *in addition* to the time spent
            waiting for the request to complete (up to `timeout` ms).
            NB: the long retry algorithm takes over 20 minutes to complete, with
            a default timeout of 60s!
        ignore_backoff: true to ignore the historical backoff data
            and try the request anyway.
        backoff_on_404: Back off if we get a 404

    Returns:
        Resolves with the HTTP response object on success.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this
            server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """
    if timeout:
        _sec_timeout = timeout / 1000
    else:
        _sec_timeout = self.default_timeout

    # Refuse to talk to servers outside the configured whitelist (if any).
    if (
        self.hs.config.federation_domain_whitelist is not None
        and request.destination not in self.hs.config.federation_domain_whitelist
    ):
        raise FederationDeniedError(request.destination)

    # Raises NotRetryingDestination if the destination is still in backoff.
    limiter = await synapse.util.retryutils.get_retry_limiter(
        request.destination,
        self.clock,
        self._store,
        backoff_on_404=backoff_on_404,
        ignore_backoff=ignore_backoff,
    )

    method_bytes = request.method.encode("ascii")
    destination_bytes = request.destination.encode("ascii")
    path_bytes = request.path.encode("ascii")
    if request.query:
        query_bytes = encode_query_args(request.query)
    else:
        query_bytes = b""

    scope = start_active_span(
        "outgoing-federation-request",
        tags={
            tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
            tags.PEER_ADDRESS: request.destination,
            tags.HTTP_METHOD: request.method,
            tags.HTTP_URL: request.path,
        },
        finish_on_close=True,
    )

    # Inject the span into the headers
    headers_dict = {}  # type: Dict[bytes, List[bytes]]
    inject_active_span_byte_dict(headers_dict, request.destination)

    headers_dict[b"User-Agent"] = [self.version_string_bytes]

    with limiter, scope:
        # XXX: Would be much nicer to retry only at the transaction-layer
        # (once we have reliable transactions in place)
        if long_retries:
            retries_left = MAX_LONG_RETRIES
        else:
            retries_left = MAX_SHORT_RETRIES

        url_bytes = request.uri
        url_str = url_bytes.decode("ascii")

        # The signature covers only the path and query, not the scheme/host.
        url_to_sign_bytes = urllib.parse.urlunparse(
            (b"", b"", path_bytes, None, query_bytes, b"")
        )

        while True:
            try:
                json = request.get_json()
                if json:
                    headers_dict[b"Content-Type"] = [b"application/json"]
                    auth_headers = self.build_auth_headers(
                        destination_bytes, method_bytes, url_to_sign_bytes, json
                    )
                    data = encode_canonical_json(json)
                    producer = QuieterFileBodyProducer(
                        BytesIO(data), cooperator=self._cooperator
                    )  # type: Optional[IBodyProducer]
                else:
                    producer = None
                    auth_headers = self.build_auth_headers(
                        destination_bytes, method_bytes, url_to_sign_bytes
                    )

                headers_dict[b"Authorization"] = auth_headers

                logger.debug(
                    "{%s} [%s] Sending request: %s %s; timeout %fs",
                    request.txn_id,
                    request.destination,
                    request.method,
                    url_str,
                    _sec_timeout,
                )

                outgoing_requests_counter.labels(request.method).inc()

                try:
                    with Measure(self.clock, "outbound_request"):
                        # we don't want all the fancy cookie and redirect handling
                        # that treq.request gives: just use the raw Agent.
                        request_deferred = self.agent.request(
                            method_bytes,
                            url_bytes,
                            headers=Headers(headers_dict),
                            bodyProducer=producer,
                        )
                        request_deferred = timeout_deferred(
                            request_deferred,
                            timeout=_sec_timeout,
                            reactor=self.reactor,
                        )

                        response = await request_deferred
                except DNSLookupError as e:
                    raise RequestSendFailed(e, can_retry=retry_on_dns_fail) from e
                except Exception as e:
                    raise RequestSendFailed(e, can_retry=True) from e

                incoming_responses_counter.labels(
                    request.method, response.code
                ).inc()

                set_tag(tags.HTTP_STATUS_CODE, response.code)
                response_phrase = response.phrase.decode("ascii", errors="replace")

                if 200 <= response.code < 300:
                    logger.debug(
                        "{%s} [%s] Got response headers: %d %s",
                        request.txn_id,
                        request.destination,
                        response.code,
                        response_phrase,
                    )
                    pass
                else:
                    logger.info(
                        "{%s} [%s] Got response headers: %d %s",
                        request.txn_id,
                        request.destination,
                        response.code,
                        response_phrase,
                    )
                    # :'(
                    # Update transactions table?
                    # Read the error body so it can be included in the
                    # raised exception (bounded by the same timeout).
                    d = treq.content(response)
                    d = timeout_deferred(
                        d, timeout=_sec_timeout, reactor=self.reactor
                    )

                    try:
                        body = await make_deferred_yieldable(d)
                    except Exception as e:
                        # Eh, we're already going to raise an exception so lets
                        # ignore if this fails.
                        logger.warning(
                            "{%s} [%s] Failed to get error response: %s %s: %s",
                            request.txn_id,
                            request.destination,
                            request.method,
                            url_str,
                            _flatten_response_never_received(e),
                        )
                        body = None

                    exc = HttpResponseException(
                        response.code, response_phrase, body
                    )

                    # Retry if the error is a 429 (Too Many Requests),
                    # otherwise just raise a standard HttpResponseException
                    if response.code == 429:
                        raise RequestSendFailed(exc, can_retry=True) from exc
                    else:
                        raise exc

                break
            except RequestSendFailed as e:
                logger.info(
                    "{%s} [%s] Request failed: %s %s: %s",
                    request.txn_id,
                    request.destination,
                    request.method,
                    url_str,
                    _flatten_response_never_received(e.inner_exception),
                )

                if not e.can_retry:
                    raise

                # Only retry while we have attempts left and no explicit
                # timeout was requested by the caller.
                if retries_left and not timeout:
                    if long_retries:
                        delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
                        delay = min(delay, 60)
                        delay *= random.uniform(0.8, 1.4)
                    else:
                        delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left)
                        delay = min(delay, 2)
                        delay *= random.uniform(0.8, 1.4)

                    logger.debug(
                        "{%s} [%s] Waiting %ss before re-sending...",
                        request.txn_id,
                        request.destination,
                        delay,
                    )

                    await self.clock.sleep(delay)
                    retries_left -= 1
                else:
                    raise

            except Exception as e:
                logger.warning(
                    "{%s} [%s] Request failed: %s %s: %s",
                    request.txn_id,
                    request.destination,
                    request.method,
                    url_str,
                    _flatten_response_never_received(e),
                )
                raise

    return response
def build_auth_headers(
    self,
    destination: Optional[bytes],
    method: bytes,
    url_bytes: bytes,
    content: Optional[JsonDict] = None,
    destination_is: Optional[bytes] = None,
) -> List[bytes]:
    """
    Builds the Authorization headers for a federation request

    Args:
        destination: The destination homeserver of the request.
            May be None if the destination is an identity server, in which case
            destination_is must be non-None.
        method: The HTTP method of the request
        url_bytes: The URI path of the request
        content: The body of the request
        destination_is: As 'destination', but if the destination is an
            identity server

    Returns:
        A list of headers to be added as "Authorization:" headers
    """
    # Canonical request description that will be signed with our server key.
    request = {
        "method": method.decode("ascii"),
        "uri": url_bytes.decode("ascii"),
        "origin": self.server_name,
    }

    if destination is not None:
        request["destination"] = destination.decode("ascii")

    if destination_is is not None:
        request["destination_is"] = destination_is.decode("ascii")

    if content is not None:
        request["content"] = content

    request = sign_json(request, self.server_name, self.signing_key)

    # One X-Matrix Authorization header per signature key.
    auth_headers = []

    for key, sig in request["signatures"][self.server_name].items():
        auth_headers.append(
            (
                'X-Matrix origin=%s,key="%s",sig="%s"'
                % (self.server_name, key, sig)
            ).encode("ascii")
        )
    return auth_headers
async def put_json(
    self,
    destination: str,
    path: str,
    args: Optional[QueryArgs] = None,
    data: Optional[JsonDict] = None,
    json_data_callback: Optional[Callable[[], JsonDict]] = None,
    long_retries: bool = False,
    timeout: Optional[int] = None,
    ignore_backoff: bool = False,
    backoff_on_404: bool = False,
    try_trailing_slash_on_400: bool = False,
) -> Union[JsonDict, list]:
    """ Sends the specified json data using PUT

    Args:
        destination: The remote server to send the HTTP request to.
        path: The HTTP path.
        args: query params
        data: A dict containing the data that will be used as
            the request body. This will be encoded as JSON.
        json_data_callback: A callable returning the dict to
            use as the request body.
        long_retries: whether to use the long retry algorithm. See
            docs on _send_request for details.
        timeout: number of milliseconds to wait for the response.
            self._default_timeout (60s) by default.
            Note that we may make several attempts to send the request; this
            timeout applies to the time spent waiting for response headers for
            *each* attempt (including connection time) as well as the time spent
            reading the response body after a 200 response.
        ignore_backoff: true to ignore the historical backoff data
            and try the request anyway.
        backoff_on_404: True if we should count a 404 response as
            a failure of the server (and should therefore back off future
            requests).
        try_trailing_slash_on_400: True if on a 400 M_UNRECOGNIZED
            response we should try appending a trailing slash to the end
            of the request. Workaround for #3622 in Synapse <= v0.99.3. This
            will be attempted before backing off if backing off has been
            enabled.

    Returns:
        Succeeds when we get a 2xx HTTP response. The
        result will be the decoded JSON body.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this
            server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """
    request = MatrixFederationRequest(
        method="PUT",
        destination=destination,
        path=path,
        query=args,
        json_callback=json_data_callback,
        json=data,
    )

    start_ms = self.clock.time_msec()

    response = await self._send_request_with_optional_trailing_slash(
        request,
        try_trailing_slash_on_400,
        backoff_on_404=backoff_on_404,
        ignore_backoff=ignore_backoff,
        long_retries=long_retries,
        timeout=timeout,
    )

    # Reading the body uses the same timeout as each request attempt.
    if timeout is not None:
        _sec_timeout = timeout / 1000
    else:
        _sec_timeout = self.default_timeout

    body = await _handle_json_response(
        self.reactor, _sec_timeout, request, response, start_ms
    )
    return body
async def post_json(
self,
destination: str,
path: str,
data: Optional[JsonDict] = None,
long_retries: bool = False,
timeout: Optional[int] = None,
ignore_backoff: bool = False,
args: Optional[QueryArgs] = None,
) -> Union[JsonDict, list]:
""" Sends the specified json data using POST
Args:
destination: The remote server to send the HTTP request to.
path: The HTTP path.
data: A dict containing the data that will be used as
the request body. This will be encoded as JSON.
long_retries: whether to use the long retry algorithm. See
docs on _send_request for details.
timeout: number of milliseconds to wait for the response.
self._default_timeout (60s) by default.
Note that we may make several attempts to send the request; this
timeout applies to the time spent waiting for response headers for
*each* attempt (including connection time) as well as the time spent
reading the response body after a 200 response.
ignore_backoff: true to ignore the historical backoff data and
try the request anyway.
args: query params
Returns:
dict|list: Succeeds when we get a 2xx HTTP response. The
result will be the decoded JSON body.
Raises:
HttpResponseException: If we get an HTTP response code >= 300
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
"""
request = MatrixFederationRequest(
method="POST", destination=destination, path=path, query=args, json=data
)
start_ms = self.clock.time_msec()
response = await self._send_request(
request,
long_retries=long_retries,
timeout=timeout,
ignore_backoff=ignore_backoff,
)
if timeout:
_sec_timeout = timeout / 1000
else:
_sec_timeout = self.default_timeout
body = await _handle_json_response(
self.reactor, _sec_timeout, request, response, start_ms,
)
return body
async def get_json(
self,
destination: str,
path: str,
args: Optional[QueryArgs] = None,
retry_on_dns_fail: bool = True,
timeout: Optional[int] = None,
ignore_backoff: bool = False,
try_trailing_slash_on_400: bool = False,
) -> Union[JsonDict, list]:
""" GETs some json from the given host homeserver and path
Args:
destination: The remote server to send the HTTP request to.
path: The HTTP path.
args: A dictionary used to create query strings, defaults to
None.
timeout: number of milliseconds to wait for the response.
self._default_timeout (60s) by default.
Note that we may make several attempts to send the request; this
timeout applies to the time spent waiting for response headers for
*each* attempt (including connection time) as well as the time spent
reading the response body after a 200 response.
ignore_backoff: true to ignore the historical backoff data
and try the request anyway.
try_trailing_slash_on_400: True if on a 400 M_UNRECOGNIZED
response we should try appending a trailing slash to the end of
the request. Workaround for #3622 in Synapse <= v0.99.3.
Returns:
Succeeds when we get a 2xx HTTP response. The
result will be the decoded JSON body.
Raises:
HttpResponseException: If we get an HTTP response code >= 300
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
"""
request = MatrixFederationRequest(
method="GET", destination=destination, path=path, query=args
)
start_ms = self.clock.time_msec()
response = await self._send_request_with_optional_trailing_slash(
request,
try_trailing_slash_on_400,
backoff_on_404=False,
ignore_backoff=ignore_backoff,
retry_on_dns_fail=retry_on_dns_fail,
timeout=timeout,
)
if timeout is not None:
_sec_timeout = timeout / 1000
else:
_sec_timeout = self.default_timeout
body = await _handle_json_response(
self.reactor, _sec_timeout, request, response, start_ms
)
return body
async def delete_json(
self,
destination: str,
path: str,
long_retries: bool = False,
timeout: Optional[int] = None,
ignore_backoff: bool = False,
args: Optional[QueryArgs] = None,
) -> Union[JsonDict, list]:
"""Send a DELETE request to the remote expecting some json response
Args:
destination: The remote server to send the HTTP request to.
path: The HTTP path.
long_retries: whether to use the long retry algorithm. See
docs on _send_request for details.
timeout: number of milliseconds to wait for the response.
self._default_timeout (60s) by default.
Note that we may make several attempts to send the request; this
timeout applies to the time spent waiting for response headers for
*each* attempt (including connection time) as well as the time spent
reading the response body after a 200 response.
ignore_backoff: true to ignore the historical backoff data and
try the request anyway.
args: query params
Returns:
Succeeds when we get a 2xx HTTP response. The
result will be the decoded JSON body.
Raises:
HttpResponseException: If we get an HTTP response code >= 300
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
"""
request = MatrixFederationRequest(
method="DELETE", destination=destination, path=path, query=args
)
start_ms = self.clock.time_msec()
response = await self._send_request(
request,
long_retries=long_retries,
timeout=timeout,
ignore_backoff=ignore_backoff,
)
if timeout is not None:
_sec_timeout = timeout / 1000
else:
_sec_timeout = self.default_timeout
body = await _handle_json_response(
self.reactor, _sec_timeout, request, response, start_ms
)
return body
async def get_file(
self,
destination: str,
path: str,
output_stream,
args: Optional[QueryArgs] = None,
retry_on_dns_fail: bool = True,
max_size: Optional[int] = None,
ignore_backoff: bool = False,
) -> Tuple[int, Dict[bytes, List[bytes]]]:
"""GETs a file from a given homeserver
Args:
destination: The remote server to send the HTTP request to.
path: The HTTP path to GET.
output_stream: File to write the response body to.
args: Optional dictionary used to create the query string.
ignore_backoff: true to ignore the historical backoff data
and try the request anyway.
Returns:
Resolves with an (int,dict) tuple of
the file length and a dict of the response headers.
Raises:
HttpResponseException: If we get an HTTP response code >= 300
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
"""
request = MatrixFederationRequest(
method="GET", destination=destination, path=path, query=args
)
response = await self._send_request(
request, retry_on_dns_fail=retry_on_dns_fail, ignore_backoff=ignore_backoff
)
headers = dict(response.headers.getAllRawHeaders())
try:
d = readBodyToFile(response, output_stream, max_size)
d.addTimeout(self.default_timeout, self.reactor)
length = await make_deferred_yieldable(d)
except Exception as e:
logger.warning(
"{%s} [%s] Error reading response: %s",
request.txn_id,
request.destination,
e,
)
raise
logger.info(
"{%s} [%s] Completed: %d %s [%d bytes] %s %s",
request.txn_id,
request.destination,
response.code,
response.phrase.decode("ascii", errors="replace"),
length,
request.method,
request.uri.decode("ascii"),
)
return (length, headers)
def _flatten_response_never_received(e):
if hasattr(e, "reasons"):
reasons = ", ".join(
_flatten_response_never_received(f.value) for f in e.reasons
)
return "%s:[%s]" % (type(e).__name__, reasons)
else:
return repr(e)
def check_content_type_is_json(headers: Headers) -> None:
    """
    Check that a set of HTTP headers have a Content-Type header, and that it
    is application/json.

    Args:
        headers: headers to check

    Raises:
        RequestSendFailed: if the Content-Type header is missing or isn't JSON
    """
    c_type = headers.getRawHeaders(b"Content-Type")
    if c_type is None:
        raise RequestSendFailed(
            RuntimeError("No Content-Type header received from remote server"),
            can_retry=False,
        )

    c_type = c_type[0].decode("ascii")  # only the first header
    # Extract the bare media type, discarding any parameters such as
    # "; charset=utf-8". This replaces the deprecated cgi.parse_header
    # (removed in Python 3.13) and, like it, strips surrounding whitespace
    # without lower-casing the value.
    val = c_type.split(";", 1)[0].strip()
    if val != "application/json":
        raise RequestSendFailed(
            RuntimeError(
                "Remote server sent Content-Type header of '%s', not 'application/json'"
                % c_type,
            ),
            can_retry=False,
        )
| open_redirect | {
"code": [
"from zope.interface import implementer",
"from twisted.internet.interfaces import IReactorPluggableNameResolver, IReactorTime",
" IPBlacklistingResolver,",
" real_reactor = hs.get_reactor()",
" nameResolver = IPBlacklistingResolver(",
" real_reactor, None, hs.config.federation_ip_range_blacklist",
" @implementer(IReactorPluggableNameResolver)",
" class Reactor:",
" def __getattr__(_self, attr):",
" if attr == \"nameResolver\":",
" return nameResolver",
" else:",
" return getattr(real_reactor, attr)",
" self.reactor = Reactor()",
" self.reactor, tls_client_options_factory, user_agent"
],
"line_no": [
29,
33,
48,
224,
228,
229,
232,
233,
234,
235,
236,
237,
238,
240,
248
]
} | {
"code": [
"from twisted.internet.interfaces import IReactorTime",
" BlacklistingReactorWrapper,",
" hs.get_reactor(), None, hs.config.federation_ip_range_blacklist",
" self.reactor,",
" tls_client_options_factory,",
" user_agent,",
" hs.config.federation_ip_range_blacklist,"
],
"line_no": [
32,
47,
226,
235,
236,
237,
238
]
} |
import cgi
import logging
import random
import sys
import urllib.parse
from io import BytesIO
from typing import Callable, Dict, List, Optional, Tuple, Union
import .attr
import treq
from canonicaljson import .encode_canonical_json
from prometheus_client import Counter
from signedjson.sign import sign_json
from zope.interface import implementer
from twisted.internet import .defer
from twisted.internet.error import DNSLookupError
from twisted.internet.interfaces import IReactorPluggableNameResolver, IReactorTime
from twisted.internet.task import _EPSILON, Cooperator
from twisted.web.http_headers import Headers
from twisted.web.iweb import IBodyProducer, IResponse
import synapse.metrics
import synapse.util.retryutils
from synapse.api.errors import (
FederationDeniedError,
HttpResponseException,
RequestSendFailed,
)
from synapse.http import QuieterFileBodyProducer
from synapse.http.client import (
BlacklistingAgentWrapper,
IPBlacklistingResolver,
encode_query_args,
readBodyToFile,
)
from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import (
inject_active_span_byte_dict,
set_tag,
start_active_span,
tags,
)
from synapse.types import JsonDict
from synapse.util import .json_decoder
from synapse.util.async_helpers import .timeout_deferred
from synapse.util.metrics import Measure
VAR_0 = logging.getLogger(__name__)
VAR_1 = Counter(
"synapse_http_matrixfederationclient_requests", "", ["method"]
)
VAR_2 = Counter(
"synapse_http_matrixfederationclient_responses", "", ["method", "code"]
)
VAR_3 = 10
VAR_4 = 3
VAR_5 = sys.maxsize
VAR_6 = 1
VAR_7 = Dict[str, Union[str, List[str]]]
@VAR_60.s(slots=True, frozen=True)
class CLASS_0:
VAR_15 = VAR_60.ib(type=str)
"""HTTP VAR_15
"""
VAR_16 = VAR_60.ib(type=str)
"""HTTP VAR_16
"""
VAR_17 = VAR_60.ib(type=str)
"""The remote server to send the HTTP VAR_10 to.
"""
VAR_18 = VAR_60.ib(default=None, type=Optional[JsonDict])
"""JSON to send in the VAR_47.
"""
VAR_19 = VAR_60.ib(default=None, type=Optional[Callable[[], JsonDict]])
"""A callback to generate the JSON.
"""
VAR_20 = VAR_60.ib(default=None, type=Optional[dict])
"""Query arguments.
"""
VAR_21 = VAR_60.ib(default=None, type=Optional[str])
"""Unique ID for this VAR_10 (for logging)
"""
VAR_22 = VAR_60.ib(init=False, type=bytes)
"""The URI of this VAR_10
"""
def __attrs_post_init__(self) -> None:
global VAR_6
VAR_21 = "%s-O-%s" % (self.method, VAR_6)
_next_id = (VAR_6 + 1) % (VAR_5 - 1)
object.__setattr__(self, "txn_id", VAR_21)
VAR_44 = self.destination.encode("ascii")
VAR_45 = self.path.encode("ascii")
if self.query:
VAR_58 = encode_query_args(self.query)
else:
VAR_58 = b""
VAR_22 = urllib.parse.urlunparse(
(b"matrix", VAR_44, VAR_45, None, VAR_58, b"")
)
object.__setattr__(self, "uri", VAR_22)
def FUNC_3(self) -> Optional[JsonDict]:
if self.json_callback:
return self.json_callback()
return self.json
async def FUNC_0(
VAR_8: IReactorTime,
VAR_9: float,
VAR_10: CLASS_0,
VAR_11: IResponse,
VAR_12: int,
) -> JsonDict:
try:
FUNC_2(VAR_11.headers)
VAR_46 = treq.text_content(VAR_11, encoding="utf-8")
VAR_46.addCallback(json_decoder.decode)
VAR_46 = timeout_deferred(VAR_46, VAR_29=VAR_9, VAR_8=reactor)
VAR_47 = await make_deferred_yieldable(VAR_46)
except defer.TimeoutError as VAR_13:
VAR_0.warning(
"{%s} [%s] Timed out reading VAR_11 - %s %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_10.uri.decode("ascii"),
)
raise RequestSendFailed(VAR_13, can_retry=True) from VAR_13
except Exception as VAR_13:
VAR_0.warning(
"{%s} [%s] Error reading VAR_11 %s %s: %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_10.uri.decode("ascii"),
VAR_13,
)
raise
VAR_23 = VAR_8.seconds() - VAR_12 / 1000
VAR_0.info(
"{%s} [%s] Completed VAR_10: %VAR_46 %s in %.2f secs - %s %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_11.code,
VAR_11.phrase.decode("ascii", errors="replace"),
VAR_23,
VAR_10.method,
VAR_10.uri.decode("ascii"),
)
return VAR_47
class CLASS_1:
def __init__(self, VAR_24, VAR_25):
self.hs = VAR_24
self.signing_key = VAR_24.signing_key
self.server_name = VAR_24.hostname
VAR_48 = VAR_24.get_reactor()
VAR_49 = IPBlacklistingResolver(
VAR_48, None, VAR_24.config.federation_ip_range_blacklist
)
@implementer(IReactorPluggableNameResolver)
class CLASS_2:
def __getattr__(VAR_59, VAR_60):
if VAR_60 == "nameResolver":
return VAR_49
else:
return getattr(VAR_48, VAR_60)
self.reactor = CLASS_2()
VAR_50 = VAR_24.version_string
if VAR_24.config.user_agent_suffix:
VAR_50 = "%s %s" % (VAR_50, VAR_24.config.user_agent_suffix)
VAR_50 = user_agent.encode("ascii")
self.agent = MatrixFederationAgent(
self.reactor, VAR_25, VAR_50
)
self.agent = BlacklistingAgentWrapper(
self.agent, ip_blacklist=VAR_24.config.federation_ip_range_blacklist,
)
self.clock = VAR_24.get_clock()
self._store = VAR_24.get_datastore()
self.version_string_bytes = VAR_24.version_string.encode("ascii")
self.default_timeout = 60
def FUNC_11(VAR_51):
self.reactor.callLater(_EPSILON, VAR_51)
self._cooperator = Cooperator(scheduler=FUNC_11)
async def FUNC_4(
self,
VAR_10: CLASS_0,
VAR_26: bool = False,
**VAR_27
) -> IResponse:
try:
VAR_11 = await self._send_request(VAR_10, **VAR_27)
except HttpResponseException as VAR_13:
if not VAR_26:
raise
if VAR_13.code != 400 or VAR_13.to_synapse_error().errcode != "M_UNRECOGNIZED":
raise
VAR_0.info("Retrying VAR_10 with trailing slash")
VAR_10 = VAR_60.evolve(VAR_10, VAR_16=VAR_10.path + "/")
VAR_11 = await self._send_request(VAR_10, **VAR_27)
return VAR_11
async def FUNC_5(
self,
VAR_10: CLASS_0,
VAR_28: bool = True,
VAR_29: Optional[int] = None,
VAR_30: bool = False,
VAR_31: bool = False,
VAR_32: bool = False,
) -> IResponse:
if VAR_29:
VAR_61 = VAR_29 / 1000
else:
VAR_61 = self.default_timeout
if (
self.hs.config.federation_domain_whitelist is not None
and VAR_10.destination not in self.hs.config.federation_domain_whitelist
):
raise FederationDeniedError(VAR_10.destination)
VAR_52 = await synapse.util.retryutils.get_retry_limiter(
VAR_10.destination,
self.clock,
self._store,
VAR_32=backoff_on_404,
VAR_31=ignore_backoff,
)
VAR_53 = VAR_10.method.encode("ascii")
VAR_44 = VAR_10.destination.encode("ascii")
VAR_45 = VAR_10.path.encode("ascii")
if VAR_10.query:
VAR_58 = encode_query_args(VAR_10.query)
else:
VAR_58 = b""
VAR_54 = start_active_span(
"outgoing-federation-request",
tags={
tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
tags.PEER_ADDRESS: VAR_10.destination,
tags.HTTP_METHOD: VAR_10.method,
tags.HTTP_URL: VAR_10.path,
},
finish_on_close=True,
)
VAR_55 = {} # type: Dict[bytes, List[bytes]]
inject_active_span_byte_dict(VAR_55, VAR_10.destination)
VAR_55[b"User-Agent"] = [self.version_string_bytes]
with VAR_52, VAR_54:
if VAR_30:
VAR_65 = VAR_3
else:
VAR_65 = VAR_4
VAR_33 = VAR_10.uri
VAR_62 = VAR_33.decode("ascii")
VAR_63 = urllib.parse.urlunparse(
(b"", b"", VAR_45, None, VAR_58, b"")
)
while True:
try:
VAR_18 = VAR_10.get_json()
if VAR_18:
VAR_55[b"Content-Type"] = [b"application/json"]
VAR_56 = self.build_auth_headers(
VAR_44, VAR_53, VAR_63, VAR_18
)
VAR_37 = encode_canonical_json(VAR_18)
VAR_67 = QuieterFileBodyProducer(
BytesIO(VAR_37), cooperator=self._cooperator
) # type: Optional[IBodyProducer]
else:
VAR_67 = None
VAR_56 = self.build_auth_headers(
VAR_44, VAR_53, VAR_63
)
VAR_55[b"Authorization"] = VAR_56
VAR_0.debug(
"{%s} [%s] Sending VAR_10: %s %s; VAR_29 %fs",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_62,
VAR_61,
)
VAR_1.labels(VAR_10.method).inc()
try:
with Measure(self.clock, "outbound_request"):
VAR_69 = self.agent.request(
VAR_53,
VAR_33,
VAR_14=Headers(VAR_55),
bodyProducer=VAR_67,
)
VAR_69 = timeout_deferred(
VAR_69,
VAR_29=VAR_61,
VAR_8=self.reactor,
)
VAR_11 = await VAR_69
except DNSLookupError as VAR_13:
raise RequestSendFailed(VAR_13, can_retry=VAR_28) from VAR_13
except Exception as VAR_13:
raise RequestSendFailed(VAR_13, can_retry=True) from VAR_13
VAR_2.labels(
VAR_10.method, VAR_11.code
).inc()
set_tag(tags.HTTP_STATUS_CODE, VAR_11.code)
VAR_66 = VAR_11.phrase.decode("ascii", errors="replace")
if 200 <= VAR_11.code < 300:
VAR_0.debug(
"{%s} [%s] Got VAR_11 VAR_14: %VAR_46 %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_11.code,
VAR_66,
)
pass
else:
VAR_0.info(
"{%s} [%s] Got VAR_11 VAR_14: %VAR_46 %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_11.code,
VAR_66,
)
VAR_46 = treq.content(VAR_11)
VAR_46 = timeout_deferred(
VAR_46, VAR_29=VAR_61, VAR_8=self.reactor
)
try:
VAR_47 = await make_deferred_yieldable(VAR_46)
except Exception as VAR_13:
VAR_0.warning(
"{%s} [%s] Failed to get error VAR_11: %s %s: %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_62,
FUNC_1(VAR_13),
)
VAR_47 = None
VAR_68 = HttpResponseException(
VAR_11.code, VAR_66, VAR_47
)
if VAR_11.code == 429:
raise RequestSendFailed(VAR_68, can_retry=True) from VAR_68
else:
raise VAR_68
break
except RequestSendFailed as VAR_13:
VAR_0.info(
"{%s} [%s] Request failed: %s %s: %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_62,
FUNC_1(VAR_13.inner_exception),
)
if not VAR_13.can_retry:
raise
if VAR_65 and not VAR_29:
if VAR_30:
VAR_70 = 4 ** (VAR_3 + 1 - VAR_65)
VAR_70 = min(VAR_70, 60)
VAR_70 *= random.uniform(0.8, 1.4)
else:
VAR_70 = 0.5 * 2 ** (VAR_4 - VAR_65)
VAR_70 = min(VAR_70, 2)
VAR_70 *= random.uniform(0.8, 1.4)
VAR_0.debug(
"{%s} [%s] Waiting %ss before re-sending...",
VAR_10.txn_id,
VAR_10.destination,
VAR_70,
)
await self.clock.sleep(VAR_70)
VAR_65 -= 1
else:
raise
except Exception as VAR_13:
VAR_0.warning(
"{%s} [%s] Request failed: %s %s: %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_62,
FUNC_1(VAR_13),
)
raise
return VAR_11
def FUNC_6(
self,
VAR_17: Optional[bytes],
VAR_15: bytes,
VAR_33: bytes,
VAR_34: Optional[JsonDict] = None,
VAR_35: Optional[bytes] = None,
) -> List[bytes]:
VAR_10 = {
"method": VAR_15.decode("ascii"),
"uri": VAR_33.decode("ascii"),
"origin": self.server_name,
}
if VAR_17 is not None:
VAR_10["destination"] = VAR_17.decode("ascii")
if VAR_35 is not None:
VAR_10["destination_is"] = VAR_35.decode("ascii")
if VAR_34 is not None:
VAR_10["content"] = VAR_34
VAR_10 = sign_json(VAR_10, self.server_name, self.signing_key)
VAR_56 = []
for key, sig in VAR_10["signatures"][self.server_name].items():
VAR_56.append(
(
'X-Matrix origin=%s,key="%s",sig="%s"'
% (self.server_name, key, sig)
).encode("ascii")
)
return VAR_56
async def FUNC_7(
self,
VAR_17: str,
VAR_16: str,
VAR_36: Optional[VAR_7] = None,
VAR_37: Optional[JsonDict] = None,
VAR_38: Optional[Callable[[], JsonDict]] = None,
VAR_30: bool = False,
VAR_29: Optional[int] = None,
VAR_31: bool = False,
VAR_32: bool = False,
VAR_26: bool = False,
) -> Union[JsonDict, list]:
VAR_10 = CLASS_0(
VAR_15="PUT",
VAR_17=destination,
VAR_16=path,
VAR_20=VAR_36,
VAR_19=VAR_38,
VAR_18=VAR_37,
)
VAR_12 = self.clock.time_msec()
VAR_11 = await self._send_request_with_optional_trailing_slash(
VAR_10,
VAR_26,
VAR_32=backoff_on_404,
VAR_31=ignore_backoff,
VAR_30=long_retries,
VAR_29=timeout,
)
if VAR_29 is not None:
VAR_61 = VAR_29 / 1000
else:
VAR_61 = self.default_timeout
VAR_47 = await FUNC_0(
self.reactor, VAR_61, VAR_10, VAR_11, VAR_12
)
return VAR_47
async def FUNC_8(
self,
VAR_17: str,
VAR_16: str,
VAR_37: Optional[JsonDict] = None,
VAR_30: bool = False,
VAR_29: Optional[int] = None,
VAR_31: bool = False,
VAR_36: Optional[VAR_7] = None,
) -> Union[JsonDict, list]:
VAR_10 = CLASS_0(
VAR_15="POST", VAR_17=destination, VAR_16=path, VAR_20=VAR_36, VAR_18=VAR_37
)
VAR_12 = self.clock.time_msec()
VAR_11 = await self._send_request(
VAR_10,
VAR_30=long_retries,
VAR_29=timeout,
VAR_31=ignore_backoff,
)
if VAR_29:
VAR_61 = VAR_29 / 1000
else:
VAR_61 = self.default_timeout
VAR_47 = await FUNC_0(
self.reactor, VAR_61, VAR_10, VAR_11, VAR_12,
)
return VAR_47
async def FUNC_3(
self,
VAR_17: str,
VAR_16: str,
VAR_36: Optional[VAR_7] = None,
VAR_28: bool = True,
VAR_29: Optional[int] = None,
VAR_31: bool = False,
VAR_26: bool = False,
) -> Union[JsonDict, list]:
VAR_10 = CLASS_0(
VAR_15="GET", VAR_17=destination, VAR_16=path, VAR_20=VAR_36
)
VAR_12 = self.clock.time_msec()
VAR_11 = await self._send_request_with_optional_trailing_slash(
VAR_10,
VAR_26,
VAR_32=False,
VAR_31=ignore_backoff,
VAR_28=retry_on_dns_fail,
VAR_29=timeout,
)
if VAR_29 is not None:
VAR_61 = VAR_29 / 1000
else:
VAR_61 = self.default_timeout
VAR_47 = await FUNC_0(
self.reactor, VAR_61, VAR_10, VAR_11, VAR_12
)
return VAR_47
async def FUNC_9(
self,
VAR_17: str,
VAR_16: str,
VAR_30: bool = False,
VAR_29: Optional[int] = None,
VAR_31: bool = False,
VAR_36: Optional[VAR_7] = None,
) -> Union[JsonDict, list]:
VAR_10 = CLASS_0(
VAR_15="DELETE", VAR_17=destination, VAR_16=path, VAR_20=VAR_36
)
VAR_12 = self.clock.time_msec()
VAR_11 = await self._send_request(
VAR_10,
VAR_30=long_retries,
VAR_29=timeout,
VAR_31=ignore_backoff,
)
if VAR_29 is not None:
VAR_61 = VAR_29 / 1000
else:
VAR_61 = self.default_timeout
VAR_47 = await FUNC_0(
self.reactor, VAR_61, VAR_10, VAR_11, VAR_12
)
return VAR_47
async def FUNC_10(
self,
VAR_17: str,
VAR_16: str,
VAR_39,
VAR_36: Optional[VAR_7] = None,
VAR_28: bool = True,
VAR_40: Optional[int] = None,
VAR_31: bool = False,
) -> Tuple[int, Dict[bytes, List[bytes]]]:
VAR_10 = CLASS_0(
VAR_15="GET", VAR_17=destination, VAR_16=path, VAR_20=VAR_36
)
VAR_11 = await self._send_request(
VAR_10, VAR_28=retry_on_dns_fail, VAR_31=ignore_backoff
)
VAR_14 = dict(VAR_11.headers.getAllRawHeaders())
try:
VAR_46 = readBodyToFile(VAR_11, VAR_39, VAR_40)
VAR_46.addTimeout(self.default_timeout, self.reactor)
VAR_64 = await make_deferred_yieldable(VAR_46)
except Exception as VAR_13:
VAR_0.warning(
"{%s} [%s] Error reading VAR_11: %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_13,
)
raise
VAR_0.info(
"{%s} [%s] Completed: %VAR_46 %s [%VAR_46 bytes] %s %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_11.code,
VAR_11.phrase.decode("ascii", errors="replace"),
VAR_64,
VAR_10.method,
VAR_10.uri.decode("ascii"),
)
return (VAR_64, VAR_14)
def FUNC_1(VAR_13):
if hasattr(VAR_13, "reasons"):
VAR_57 = ", ".join(
FUNC_1(f.value) for f in VAR_13.reasons
)
return "%s:[%s]" % (type(VAR_13).__name__, VAR_57)
else:
return repr(VAR_13)
def FUNC_2(VAR_14: Headers) -> None:
VAR_41 = VAR_14.getRawHeaders(b"Content-Type")
if VAR_41 is None:
raise RequestSendFailed(
RuntimeError("No Content-Type header received from remote server"),
can_retry=False,
)
VAR_41 = c_type[0].decode("ascii") # only the first header
VAR_42, VAR_43 = cgi.parse_header(VAR_41)
if VAR_42 != "application/json":
raise RequestSendFailed(
RuntimeError(
"Remote server sent Content-Type header of '%s', not 'application/json'"
% VAR_41,
),
can_retry=False,
)
|
import cgi
import logging
import random
import sys
import urllib.parse
from io import BytesIO
from typing import Callable, Dict, List, Optional, Tuple, Union
import attr
import treq
from canonicaljson import .encode_canonical_json
from prometheus_client import Counter
from signedjson.sign import sign_json
from twisted.internet import .defer
from twisted.internet.error import DNSLookupError
from twisted.internet.interfaces import IReactorTime
from twisted.internet.task import _EPSILON, Cooperator
from twisted.web.http_headers import Headers
from twisted.web.iweb import IBodyProducer, IResponse
import synapse.metrics
import synapse.util.retryutils
from synapse.api.errors import (
FederationDeniedError,
HttpResponseException,
RequestSendFailed,
)
from synapse.http import QuieterFileBodyProducer
from synapse.http.client import (
BlacklistingAgentWrapper,
BlacklistingReactorWrapper,
encode_query_args,
readBodyToFile,
)
from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import (
inject_active_span_byte_dict,
set_tag,
start_active_span,
tags,
)
from synapse.types import JsonDict
from synapse.util import .json_decoder
from synapse.util.async_helpers import .timeout_deferred
from synapse.util.metrics import Measure
VAR_0 = logging.getLogger(__name__)
VAR_1 = Counter(
"synapse_http_matrixfederationclient_requests", "", ["method"]
)
VAR_2 = Counter(
"synapse_http_matrixfederationclient_responses", "", ["method", "code"]
)
VAR_3 = 10
VAR_4 = 3
VAR_5 = sys.maxsize
VAR_6 = 1
VAR_7 = Dict[str, Union[str, List[str]]]
@attr.s(slots=True, frozen=True)
class CLASS_0:
VAR_15 = attr.ib(type=str)
"""HTTP VAR_15
"""
VAR_16 = attr.ib(type=str)
"""HTTP VAR_16
"""
VAR_17 = attr.ib(type=str)
"""The remote server to send the HTTP VAR_10 to.
"""
VAR_18 = attr.ib(default=None, type=Optional[JsonDict])
"""JSON to send in the VAR_47.
"""
VAR_19 = attr.ib(default=None, type=Optional[Callable[[], JsonDict]])
"""A callback to generate the JSON.
"""
VAR_20 = attr.ib(default=None, type=Optional[dict])
"""Query arguments.
"""
VAR_21 = attr.ib(default=None, type=Optional[str])
"""Unique ID for this VAR_10 (for logging)
"""
VAR_22 = attr.ib(init=False, type=bytes)
"""The URI of this VAR_10
"""
def __attrs_post_init__(self) -> None:
global VAR_6
VAR_21 = "%s-O-%s" % (self.method, VAR_6)
_next_id = (VAR_6 + 1) % (VAR_5 - 1)
object.__setattr__(self, "txn_id", VAR_21)
VAR_44 = self.destination.encode("ascii")
VAR_45 = self.path.encode("ascii")
if self.query:
VAR_56 = encode_query_args(self.query)
else:
VAR_56 = b""
VAR_22 = urllib.parse.urlunparse(
(b"matrix", VAR_44, VAR_45, None, VAR_56, b"")
)
object.__setattr__(self, "uri", VAR_22)
def FUNC_3(self) -> Optional[JsonDict]:
if self.json_callback:
return self.json_callback()
return self.json
async def FUNC_0(
VAR_8: IReactorTime,
VAR_9: float,
VAR_10: CLASS_0,
VAR_11: IResponse,
VAR_12: int,
) -> JsonDict:
try:
FUNC_2(VAR_11.headers)
VAR_46 = treq.text_content(VAR_11, encoding="utf-8")
VAR_46.addCallback(json_decoder.decode)
VAR_46 = timeout_deferred(VAR_46, VAR_29=VAR_9, VAR_8=reactor)
VAR_47 = await make_deferred_yieldable(VAR_46)
except defer.TimeoutError as VAR_13:
VAR_0.warning(
"{%s} [%s] Timed out reading VAR_11 - %s %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_10.uri.decode("ascii"),
)
raise RequestSendFailed(VAR_13, can_retry=True) from VAR_13
except Exception as VAR_13:
VAR_0.warning(
"{%s} [%s] Error reading VAR_11 %s %s: %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_10.uri.decode("ascii"),
VAR_13,
)
raise
VAR_23 = VAR_8.seconds() - VAR_12 / 1000
VAR_0.info(
"{%s} [%s] Completed VAR_10: %VAR_46 %s in %.2f secs - %s %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_11.code,
VAR_11.phrase.decode("ascii", errors="replace"),
VAR_23,
VAR_10.method,
VAR_10.uri.decode("ascii"),
)
return VAR_47
class CLASS_1:
def __init__(self, VAR_24, VAR_25):
self.hs = VAR_24
self.signing_key = VAR_24.signing_key
self.server_name = VAR_24.hostname
self.reactor = BlacklistingReactorWrapper(
VAR_24.get_reactor(), None, VAR_24.config.federation_ip_range_blacklist
)
VAR_48 = VAR_24.version_string
if VAR_24.config.user_agent_suffix:
VAR_48 = "%s %s" % (VAR_48, VAR_24.config.user_agent_suffix)
VAR_48 = user_agent.encode("ascii")
self.agent = MatrixFederationAgent(
self.reactor,
VAR_25,
VAR_48,
VAR_24.config.federation_ip_range_blacklist,
)
self.agent = BlacklistingAgentWrapper(
self.agent, ip_blacklist=VAR_24.config.federation_ip_range_blacklist,
)
self.clock = VAR_24.get_clock()
self._store = VAR_24.get_datastore()
self.version_string_bytes = VAR_24.version_string.encode("ascii")
self.default_timeout = 60
def FUNC_11(VAR_49):
self.reactor.callLater(_EPSILON, VAR_49)
self._cooperator = Cooperator(scheduler=FUNC_11)
async def FUNC_4(
self,
VAR_10: CLASS_0,
VAR_26: bool = False,
**VAR_27
) -> IResponse:
try:
VAR_11 = await self._send_request(VAR_10, **VAR_27)
except HttpResponseException as VAR_13:
if not VAR_26:
raise
if VAR_13.code != 400 or VAR_13.to_synapse_error().errcode != "M_UNRECOGNIZED":
raise
VAR_0.info("Retrying VAR_10 with trailing slash")
VAR_10 = attr.evolve(VAR_10, VAR_16=VAR_10.path + "/")
VAR_11 = await self._send_request(VAR_10, **VAR_27)
return VAR_11
async def FUNC_5(
self,
VAR_10: CLASS_0,
VAR_28: bool = True,
VAR_29: Optional[int] = None,
VAR_30: bool = False,
VAR_31: bool = False,
VAR_32: bool = False,
) -> IResponse:
if VAR_29:
VAR_57 = VAR_29 / 1000
else:
VAR_57 = self.default_timeout
if (
self.hs.config.federation_domain_whitelist is not None
and VAR_10.destination not in self.hs.config.federation_domain_whitelist
):
raise FederationDeniedError(VAR_10.destination)
VAR_50 = await synapse.util.retryutils.get_retry_limiter(
VAR_10.destination,
self.clock,
self._store,
VAR_32=backoff_on_404,
VAR_31=ignore_backoff,
)
VAR_51 = VAR_10.method.encode("ascii")
VAR_44 = VAR_10.destination.encode("ascii")
VAR_45 = VAR_10.path.encode("ascii")
if VAR_10.query:
VAR_56 = encode_query_args(VAR_10.query)
else:
VAR_56 = b""
VAR_52 = start_active_span(
"outgoing-federation-request",
tags={
tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
tags.PEER_ADDRESS: VAR_10.destination,
tags.HTTP_METHOD: VAR_10.method,
tags.HTTP_URL: VAR_10.path,
},
finish_on_close=True,
)
VAR_53 = {} # type: Dict[bytes, List[bytes]]
inject_active_span_byte_dict(VAR_53, VAR_10.destination)
VAR_53[b"User-Agent"] = [self.version_string_bytes]
with VAR_50, VAR_52:
if VAR_30:
VAR_61 = VAR_3
else:
VAR_61 = VAR_4
VAR_33 = VAR_10.uri
VAR_58 = VAR_33.decode("ascii")
VAR_59 = urllib.parse.urlunparse(
(b"", b"", VAR_45, None, VAR_56, b"")
)
while True:
try:
VAR_18 = VAR_10.get_json()
if VAR_18:
VAR_53[b"Content-Type"] = [b"application/json"]
VAR_54 = self.build_auth_headers(
VAR_44, VAR_51, VAR_59, VAR_18
)
VAR_37 = encode_canonical_json(VAR_18)
VAR_63 = QuieterFileBodyProducer(
BytesIO(VAR_37), cooperator=self._cooperator
) # type: Optional[IBodyProducer]
else:
VAR_63 = None
VAR_54 = self.build_auth_headers(
VAR_44, VAR_51, VAR_59
)
VAR_53[b"Authorization"] = VAR_54
VAR_0.debug(
"{%s} [%s] Sending VAR_10: %s %s; VAR_29 %fs",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_58,
VAR_57,
)
VAR_1.labels(VAR_10.method).inc()
try:
with Measure(self.clock, "outbound_request"):
VAR_65 = self.agent.request(
VAR_51,
VAR_33,
VAR_14=Headers(VAR_53),
bodyProducer=VAR_63,
)
VAR_65 = timeout_deferred(
VAR_65,
VAR_29=VAR_57,
VAR_8=self.reactor,
)
VAR_11 = await VAR_65
except DNSLookupError as VAR_13:
raise RequestSendFailed(VAR_13, can_retry=VAR_28) from VAR_13
except Exception as VAR_13:
raise RequestSendFailed(VAR_13, can_retry=True) from VAR_13
VAR_2.labels(
VAR_10.method, VAR_11.code
).inc()
set_tag(tags.HTTP_STATUS_CODE, VAR_11.code)
VAR_62 = VAR_11.phrase.decode("ascii", errors="replace")
if 200 <= VAR_11.code < 300:
VAR_0.debug(
"{%s} [%s] Got VAR_11 VAR_14: %VAR_46 %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_11.code,
VAR_62,
)
pass
else:
VAR_0.info(
"{%s} [%s] Got VAR_11 VAR_14: %VAR_46 %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_11.code,
VAR_62,
)
VAR_46 = treq.content(VAR_11)
VAR_46 = timeout_deferred(
VAR_46, VAR_29=VAR_57, VAR_8=self.reactor
)
try:
VAR_47 = await make_deferred_yieldable(VAR_46)
except Exception as VAR_13:
VAR_0.warning(
"{%s} [%s] Failed to get error VAR_11: %s %s: %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_58,
FUNC_1(VAR_13),
)
VAR_47 = None
VAR_64 = HttpResponseException(
VAR_11.code, VAR_62, VAR_47
)
if VAR_11.code == 429:
raise RequestSendFailed(VAR_64, can_retry=True) from VAR_64
else:
raise VAR_64
break
except RequestSendFailed as VAR_13:
VAR_0.info(
"{%s} [%s] Request failed: %s %s: %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_58,
FUNC_1(VAR_13.inner_exception),
)
if not VAR_13.can_retry:
raise
if VAR_61 and not VAR_29:
if VAR_30:
VAR_66 = 4 ** (VAR_3 + 1 - VAR_61)
VAR_66 = min(VAR_66, 60)
VAR_66 *= random.uniform(0.8, 1.4)
else:
VAR_66 = 0.5 * 2 ** (VAR_4 - VAR_61)
VAR_66 = min(VAR_66, 2)
VAR_66 *= random.uniform(0.8, 1.4)
VAR_0.debug(
"{%s} [%s] Waiting %ss before re-sending...",
VAR_10.txn_id,
VAR_10.destination,
VAR_66,
)
await self.clock.sleep(VAR_66)
VAR_61 -= 1
else:
raise
except Exception as VAR_13:
VAR_0.warning(
"{%s} [%s] Request failed: %s %s: %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_10.method,
VAR_58,
FUNC_1(VAR_13),
)
raise
return VAR_11
def FUNC_6(
self,
VAR_17: Optional[bytes],
VAR_15: bytes,
VAR_33: bytes,
VAR_34: Optional[JsonDict] = None,
VAR_35: Optional[bytes] = None,
) -> List[bytes]:
VAR_10 = {
"method": VAR_15.decode("ascii"),
"uri": VAR_33.decode("ascii"),
"origin": self.server_name,
}
if VAR_17 is not None:
VAR_10["destination"] = VAR_17.decode("ascii")
if VAR_35 is not None:
VAR_10["destination_is"] = VAR_35.decode("ascii")
if VAR_34 is not None:
VAR_10["content"] = VAR_34
VAR_10 = sign_json(VAR_10, self.server_name, self.signing_key)
VAR_54 = []
for key, sig in VAR_10["signatures"][self.server_name].items():
VAR_54.append(
(
'X-Matrix origin=%s,key="%s",sig="%s"'
% (self.server_name, key, sig)
).encode("ascii")
)
return VAR_54
async def FUNC_7(
self,
VAR_17: str,
VAR_16: str,
VAR_36: Optional[VAR_7] = None,
VAR_37: Optional[JsonDict] = None,
VAR_38: Optional[Callable[[], JsonDict]] = None,
VAR_30: bool = False,
VAR_29: Optional[int] = None,
VAR_31: bool = False,
VAR_32: bool = False,
VAR_26: bool = False,
) -> Union[JsonDict, list]:
VAR_10 = CLASS_0(
VAR_15="PUT",
VAR_17=destination,
VAR_16=path,
VAR_20=VAR_36,
VAR_19=VAR_38,
VAR_18=VAR_37,
)
VAR_12 = self.clock.time_msec()
VAR_11 = await self._send_request_with_optional_trailing_slash(
VAR_10,
VAR_26,
VAR_32=backoff_on_404,
VAR_31=ignore_backoff,
VAR_30=long_retries,
VAR_29=timeout,
)
if VAR_29 is not None:
VAR_57 = VAR_29 / 1000
else:
VAR_57 = self.default_timeout
VAR_47 = await FUNC_0(
self.reactor, VAR_57, VAR_10, VAR_11, VAR_12
)
return VAR_47
async def FUNC_8(
self,
VAR_17: str,
VAR_16: str,
VAR_37: Optional[JsonDict] = None,
VAR_30: bool = False,
VAR_29: Optional[int] = None,
VAR_31: bool = False,
VAR_36: Optional[VAR_7] = None,
) -> Union[JsonDict, list]:
VAR_10 = CLASS_0(
VAR_15="POST", VAR_17=destination, VAR_16=path, VAR_20=VAR_36, VAR_18=VAR_37
)
VAR_12 = self.clock.time_msec()
VAR_11 = await self._send_request(
VAR_10,
VAR_30=long_retries,
VAR_29=timeout,
VAR_31=ignore_backoff,
)
if VAR_29:
VAR_57 = VAR_29 / 1000
else:
VAR_57 = self.default_timeout
VAR_47 = await FUNC_0(
self.reactor, VAR_57, VAR_10, VAR_11, VAR_12,
)
return VAR_47
async def FUNC_3(
self,
VAR_17: str,
VAR_16: str,
VAR_36: Optional[VAR_7] = None,
VAR_28: bool = True,
VAR_29: Optional[int] = None,
VAR_31: bool = False,
VAR_26: bool = False,
) -> Union[JsonDict, list]:
VAR_10 = CLASS_0(
VAR_15="GET", VAR_17=destination, VAR_16=path, VAR_20=VAR_36
)
VAR_12 = self.clock.time_msec()
VAR_11 = await self._send_request_with_optional_trailing_slash(
VAR_10,
VAR_26,
VAR_32=False,
VAR_31=ignore_backoff,
VAR_28=retry_on_dns_fail,
VAR_29=timeout,
)
if VAR_29 is not None:
VAR_57 = VAR_29 / 1000
else:
VAR_57 = self.default_timeout
VAR_47 = await FUNC_0(
self.reactor, VAR_57, VAR_10, VAR_11, VAR_12
)
return VAR_47
async def FUNC_9(
self,
VAR_17: str,
VAR_16: str,
VAR_30: bool = False,
VAR_29: Optional[int] = None,
VAR_31: bool = False,
VAR_36: Optional[VAR_7] = None,
) -> Union[JsonDict, list]:
VAR_10 = CLASS_0(
VAR_15="DELETE", VAR_17=destination, VAR_16=path, VAR_20=VAR_36
)
VAR_12 = self.clock.time_msec()
VAR_11 = await self._send_request(
VAR_10,
VAR_30=long_retries,
VAR_29=timeout,
VAR_31=ignore_backoff,
)
if VAR_29 is not None:
VAR_57 = VAR_29 / 1000
else:
VAR_57 = self.default_timeout
VAR_47 = await FUNC_0(
self.reactor, VAR_57, VAR_10, VAR_11, VAR_12
)
return VAR_47
async def FUNC_10(
self,
VAR_17: str,
VAR_16: str,
VAR_39,
VAR_36: Optional[VAR_7] = None,
VAR_28: bool = True,
VAR_40: Optional[int] = None,
VAR_31: bool = False,
) -> Tuple[int, Dict[bytes, List[bytes]]]:
VAR_10 = CLASS_0(
VAR_15="GET", VAR_17=destination, VAR_16=path, VAR_20=VAR_36
)
VAR_11 = await self._send_request(
VAR_10, VAR_28=retry_on_dns_fail, VAR_31=ignore_backoff
)
VAR_14 = dict(VAR_11.headers.getAllRawHeaders())
try:
VAR_46 = readBodyToFile(VAR_11, VAR_39, VAR_40)
VAR_46.addTimeout(self.default_timeout, self.reactor)
VAR_60 = await make_deferred_yieldable(VAR_46)
except Exception as VAR_13:
VAR_0.warning(
"{%s} [%s] Error reading VAR_11: %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_13,
)
raise
VAR_0.info(
"{%s} [%s] Completed: %VAR_46 %s [%VAR_46 bytes] %s %s",
VAR_10.txn_id,
VAR_10.destination,
VAR_11.code,
VAR_11.phrase.decode("ascii", errors="replace"),
VAR_60,
VAR_10.method,
VAR_10.uri.decode("ascii"),
)
return (VAR_60, VAR_14)
def FUNC_1(VAR_13):
if hasattr(VAR_13, "reasons"):
VAR_55 = ", ".join(
FUNC_1(f.value) for f in VAR_13.reasons
)
return "%s:[%s]" % (type(VAR_13).__name__, VAR_55)
else:
return repr(VAR_13)
def FUNC_2(VAR_14: Headers) -> None:
VAR_41 = VAR_14.getRawHeaders(b"Content-Type")
if VAR_41 is None:
raise RequestSendFailed(
RuntimeError("No Content-Type header received from remote server"),
can_retry=False,
)
VAR_41 = c_type[0].decode("ascii") # only the first header
VAR_42, VAR_43 = cgi.parse_header(VAR_41)
if VAR_42 != "application/json":
raise RequestSendFailed(
RuntimeError(
"Remote server sent Content-Type header of '%s', not 'application/json'"
% VAR_41,
),
can_retry=False,
)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
23,
30,
37,
64,
66,
73,
74,
78,
79,
81,
82,
84,
85,
91,
95,
99,
103,
107,
111,
115,
119,
124,
126,
133,
134,
139,
144,
145,
155,
162,
168,
169,
173,
194,
196,
208,
209,
213,
218,
223,
225,
226,
227,
231,
239,
241,
246,
250,
251,
252,
256,
261,
264,
266,
277,
284,
288,
295,
296,
299,
302,
303,
304,
305,
307,
308,
310,
312,
314,
326,
329,
331,
335,
337,
340,
343,
345,
348,
351,
354,
356,
359,
374,
380,
388,
396,
407,
408,
411,
413,
415,
416,
421,
424,
428,
446,
448,
457,
459,
462,
463,
470,
476,
482,
486,
489,
507,
508,
513,
517,
518,
528,
532,
533,
534,
539,
550,
553,
563,
570,
575,
587,
607,
616,
619,
622,
625,
627,
629,
638,
653,
662,
665,
668,
673,
684,
688,
707,
709,
718,
723,
727,
729,
741,
744,
746,
749,
752,
755,
760,
763,
768,
779,
783,
785,
792,
797,
802,
814,
817,
819,
822,
825,
830,
833,
840,
854,
856,
865,
870,
874,
876,
887,
891,
894,
897,
902,
905,
910,
924,
926,
933,
938,
943,
962,
966,
980,
984,
986,
1010,
1011,
1017,
1021,
1022,
1027,
1030,
1033,
1041,
1052,
153,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
211,
212,
213,
214,
215,
216,
217,
1024,
1025,
1026,
1027,
1028,
1029,
1030,
1031,
1032,
1033,
1034,
273,
274,
275,
276,
277,
278,
279,
280,
281,
282,
283,
284,
285,
286,
287,
288,
289,
290,
291,
324,
325,
326,
327,
328,
329,
330,
331,
332,
333,
334,
335,
336,
337,
338,
339,
340,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
358,
359,
360,
361,
362,
363,
364,
365,
366,
367,
368,
369,
596,
597,
598,
599,
600,
601,
602,
603,
604,
605,
606,
607,
608,
609,
610,
652,
653,
654,
655,
656,
657,
658,
659,
660,
661,
662,
663,
664,
665,
666,
667,
668,
669,
670,
671,
672,
673,
674,
675,
676,
677,
678,
679,
680,
681,
682,
683,
684,
685,
686,
687,
688,
689,
690,
691,
692,
693,
694,
695,
696,
697,
698,
740,
741,
742,
743,
744,
745,
746,
747,
748,
749,
750,
751,
752,
753,
754,
755,
756,
757,
758,
759,
760,
761,
762,
763,
764,
765,
766,
767,
768,
769,
770,
771,
772,
773,
774,
775,
776,
777,
778,
813,
814,
815,
816,
817,
818,
819,
820,
821,
822,
823,
824,
825,
826,
827,
828,
829,
830,
831,
832,
833,
834,
835,
836,
837,
838,
839,
840,
841,
842,
843,
844,
845,
846,
847,
848,
849,
850,
886,
887,
888,
889,
890,
891,
892,
893,
894,
895,
896,
897,
898,
899,
900,
901,
902,
903,
904,
905,
906,
907,
908,
909,
910,
911,
912,
913,
914,
915,
916,
917,
918,
919,
920,
954,
955,
956,
957,
958,
959,
960,
961,
962,
963,
964,
965,
966,
967,
968,
969,
970,
971,
972,
973,
974,
975,
976
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
23,
29,
36,
63,
65,
72,
73,
77,
78,
80,
81,
83,
84,
90,
94,
98,
102,
106,
110,
114,
118,
123,
125,
132,
133,
138,
143,
144,
154,
161,
167,
168,
172,
193,
195,
207,
208,
212,
217,
222,
223,
224,
228,
233,
240,
241,
242,
246,
251,
254,
256,
267,
274,
278,
285,
286,
289,
292,
293,
294,
295,
297,
298,
300,
302,
304,
316,
319,
321,
325,
327,
330,
333,
335,
338,
341,
344,
346,
349,
364,
370,
378,
386,
397,
398,
401,
403,
405,
406,
411,
414,
418,
436,
438,
447,
449,
452,
453,
460,
466,
472,
476,
479,
497,
498,
503,
507,
508,
518,
522,
523,
524,
529,
540,
543,
553,
560,
565,
577,
597,
606,
609,
612,
615,
617,
619,
628,
643,
652,
655,
658,
663,
674,
678,
697,
699,
708,
713,
717,
719,
731,
734,
736,
739,
742,
745,
750,
753,
758,
769,
773,
775,
782,
787,
792,
804,
807,
809,
812,
815,
820,
823,
830,
844,
846,
855,
860,
864,
866,
877,
881,
884,
887,
892,
895,
900,
914,
916,
923,
928,
933,
952,
956,
970,
974,
976,
1000,
1001,
1007,
1011,
1012,
1017,
1020,
1023,
1031,
1042,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
210,
211,
212,
213,
214,
215,
216,
1014,
1015,
1016,
1017,
1018,
1019,
1020,
1021,
1022,
1023,
1024,
263,
264,
265,
266,
267,
268,
269,
270,
271,
272,
273,
274,
275,
276,
277,
278,
279,
280,
281,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
326,
327,
328,
329,
330,
331,
332,
333,
334,
335,
336,
337,
338,
339,
340,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
358,
359,
586,
587,
588,
589,
590,
591,
592,
593,
594,
595,
596,
597,
598,
599,
600,
642,
643,
644,
645,
646,
647,
648,
649,
650,
651,
652,
653,
654,
655,
656,
657,
658,
659,
660,
661,
662,
663,
664,
665,
666,
667,
668,
669,
670,
671,
672,
673,
674,
675,
676,
677,
678,
679,
680,
681,
682,
683,
684,
685,
686,
687,
688,
730,
731,
732,
733,
734,
735,
736,
737,
738,
739,
740,
741,
742,
743,
744,
745,
746,
747,
748,
749,
750,
751,
752,
753,
754,
755,
756,
757,
758,
759,
760,
761,
762,
763,
764,
765,
766,
767,
768,
803,
804,
805,
806,
807,
808,
809,
810,
811,
812,
813,
814,
815,
816,
817,
818,
819,
820,
821,
822,
823,
824,
825,
826,
827,
828,
829,
830,
831,
832,
833,
834,
835,
836,
837,
838,
839,
840,
876,
877,
878,
879,
880,
881,
882,
883,
884,
885,
886,
887,
888,
889,
890,
891,
892,
893,
894,
895,
896,
897,
898,
899,
900,
901,
902,
903,
904,
905,
906,
907,
908,
909,
910,
944,
945,
946,
947,
948,
949,
950,
951,
952,
953,
954,
955,
956,
957,
958,
959,
960,
961,
962,
963,
964,
965,
966
] |
2CWE-601
| # Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='security_monkey',
version='0.8.0',
long_description=__doc__,
packages=['security_monkey'],
include_package_data=True,
zip_safe=False,
install_requires=[
'APScheduler==2.1.2',
'Flask==0.10.1',
'Flask-Login==0.2.10',
'Flask-Mail==0.9.0',
'Flask-Migrate==1.3.1',
'Flask-Principal==0.4.0',
'Flask-RESTful==0.3.3',
'Flask-SQLAlchemy==1.0',
'Flask-Script==0.6.3',
'Flask-Security==1.7.4',
'Flask-WTF==0.9.5',
'Jinja2==2.8',
'SQLAlchemy==0.9.2',
'boto>=2.41.0',
'ipaddr==2.1.11',
'itsdangerous==0.23',
'psycopg2==2.5.2',
'bcrypt==2.0.0',
'Sphinx==1.2.2',
'gunicorn==18.0',
'cryptography==1.3.2',
'boto3>=1.4.2',
'botocore>=1.4.81',
'dpath==1.3.2',
'pyyaml==3.11',
'jira==0.32',
'cloudaux>=1.0.6',
'joblib>=0.9.4',
'pyjwt>=1.01',
],
extras_require = {
'onelogin': ['python-saml>=2.2.0'],
'tests': [
'nose==1.3.0',
'mock==1.0.1',
'moto==0.4.30',
'freezegun>=0.3.7'
]
}
)
| # Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='security_monkey',
version='0.8.0',
long_description=__doc__,
packages=['security_monkey'],
include_package_data=True,
zip_safe=False,
install_requires=[
'APScheduler==2.1.2',
'Flask==0.10.1',
'Flask-Mail==0.9.0',
'Flask-Migrate==1.3.1',
'Flask-Principal==0.4.0',
'Flask-RESTful==0.3.3',
'Flask-SQLAlchemy==1.0',
'Flask-Script==0.6.3',
# 'Flask-Security==1.7.4',
'Flask-Security-Fork==1.8.2',
'Jinja2==2.8',
'SQLAlchemy==0.9.2',
'boto>=2.41.0',
'ipaddr==2.1.11',
'itsdangerous==0.23',
'psycopg2==2.6.2',
'bcrypt==3.1.2',
'Sphinx==1.2.2',
'gunicorn==18.0',
'cryptography==1.7.1',
'boto3>=1.4.2',
'botocore>=1.4.81',
'dpath==1.3.2',
'pyyaml==3.11',
'jira==0.32',
'cloudaux>=1.0.6',
'joblib>=0.9.4',
'pyjwt>=1.01',
],
extras_require = {
'onelogin': ['python-saml>=2.2.0'],
'tests': [
'nose==1.3.0',
'mock==1.0.1',
'moto==0.4.30',
'freezegun>=0.3.7'
]
}
)
| open_redirect | {
"code": [
" 'Flask-Login==0.2.10',",
" 'Flask-Security==1.7.4',",
" 'Flask-WTF==0.9.5',",
" 'psycopg2==2.5.2',",
" 'bcrypt==2.0.0',",
" 'cryptography==1.3.2',"
],
"line_no": [
26,
33,
34,
40,
41,
44
]
} | {
"code": [
" 'Flask-Security-Fork==1.8.2',",
" 'psycopg2==2.6.2',",
" 'bcrypt==3.1.2',",
" 'cryptography==1.7.1',"
],
"line_no": [
33,
39,
40,
43
]
} |
from setuptools import setup
setup(
name='security_monkey',
version='0.8.0',
long_description=__doc__,
packages=['security_monkey'],
include_package_data=True,
zip_safe=False,
install_requires=[
'APScheduler==2.1.2',
'Flask==0.10.1',
'Flask-Login==0.2.10',
'Flask-Mail==0.9.0',
'Flask-Migrate==1.3.1',
'Flask-Principal==0.4.0',
'Flask-RESTful==0.3.3',
'Flask-SQLAlchemy==1.0',
'Flask-Script==0.6.3',
'Flask-Security==1.7.4',
'Flask-WTF==0.9.5',
'Jinja2==2.8',
'SQLAlchemy==0.9.2',
'boto>=2.41.0',
'ipaddr==2.1.11',
'itsdangerous==0.23',
'psycopg2==2.5.2',
'bcrypt==2.0.0',
'Sphinx==1.2.2',
'gunicorn==18.0',
'cryptography==1.3.2',
'boto3>=1.4.2',
'botocore>=1.4.81',
'dpath==1.3.2',
'pyyaml==3.11',
'jira==0.32',
'cloudaux>=1.0.6',
'joblib>=0.9.4',
'pyjwt>=1.01',
],
extras_require = {
'onelogin': ['python-saml>=2.2.0'],
'tests': [
'nose==1.3.0',
'mock==1.0.1',
'moto==0.4.30',
'freezegun>=0.3.7'
]
}
)
|
from setuptools import setup
setup(
name='security_monkey',
version='0.8.0',
long_description=__doc__,
packages=['security_monkey'],
include_package_data=True,
zip_safe=False,
install_requires=[
'APScheduler==2.1.2',
'Flask==0.10.1',
'Flask-Mail==0.9.0',
'Flask-Migrate==1.3.1',
'Flask-Principal==0.4.0',
'Flask-RESTful==0.3.3',
'Flask-SQLAlchemy==1.0',
'Flask-Script==0.6.3',
'Flask-Security-Fork==1.8.2',
'Jinja2==2.8',
'SQLAlchemy==0.9.2',
'boto>=2.41.0',
'ipaddr==2.1.11',
'itsdangerous==0.23',
'psycopg2==2.6.2',
'bcrypt==3.1.2',
'Sphinx==1.2.2',
'gunicorn==18.0',
'cryptography==1.7.1',
'boto3>=1.4.2',
'botocore>=1.4.81',
'dpath==1.3.2',
'pyyaml==3.11',
'jira==0.32',
'cloudaux>=1.0.6',
'joblib>=0.9.4',
'pyjwt>=1.01',
],
extras_require = {
'onelogin': ['python-saml>=2.2.0'],
'tests': [
'nose==1.3.0',
'mock==1.0.1',
'moto==0.4.30',
'freezegun>=0.3.7'
]
}
)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
15,
64
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
15,
32,
63
] |
0CWE-22
| """ Regenerate golden-master """
import shutil
from pathlib import Path
from typer.testing import CliRunner
from openapi_python_client.cli import app
if __name__ == "__main__":
from .fastapi_app import generate_openapi_json
generate_openapi_json()
runner = CliRunner()
openapi_path = Path(__file__).parent / "fastapi_app" / "openapi.json"
gm_path = Path(__file__).parent / "golden-master"
shutil.rmtree(gm_path, ignore_errors=True)
output_path = Path.cwd() / "my-test-api-client"
shutil.rmtree(output_path, ignore_errors=True)
config_path = Path(__file__).parent / "config.yml"
result = runner.invoke(app, [f"--config={config_path}", "generate", f"--path={openapi_path}"])
if result.stdout:
print(result.stdout)
if result.exception:
raise result.exception
output_path.rename(gm_path)
| """ Regenerate golden-master """
import shutil
from pathlib import Path
from typer.testing import CliRunner
from openapi_python_client.cli import app
if __name__ == "__main__":
runner = CliRunner()
openapi_path = Path(__file__).parent / "fastapi_app" / "openapi.json"
gm_path = Path(__file__).parent / "golden-master"
shutil.rmtree(gm_path, ignore_errors=True)
output_path = Path.cwd() / "my-test-api-client"
shutil.rmtree(output_path, ignore_errors=True)
config_path = Path(__file__).parent / "config.yml"
result = runner.invoke(app, [f"--config={config_path}", "generate", f"--path={openapi_path}"])
if result.stdout:
print(result.stdout)
if result.exception:
raise result.exception
output_path.rename(gm_path)
| path_disclosure | {
"code": [
" from .fastapi_app import generate_openapi_json",
" generate_openapi_json()"
],
"line_no": [
10,
12
]
} | {
"code": [],
"line_no": []
} |
import shutil
from pathlib import Path
from typer.testing import CliRunner
from openapi_python_client.cli import app
if __name__ == "__main__":
from .fastapi_app import generate_openapi_json
generate_openapi_json()
VAR_0 = CliRunner()
VAR_1 = Path(__file__).parent / "fastapi_app" / "openapi.json"
VAR_2 = Path(__file__).parent / "golden-master"
shutil.rmtree(VAR_2, ignore_errors=True)
VAR_3 = Path.cwd() / "my-test-api-client"
shutil.rmtree(VAR_3, ignore_errors=True)
VAR_4 = Path(__file__).parent / "config.yml"
VAR_5 = VAR_0.invoke(app, [f"--config={VAR_4}", "generate", f"--path={VAR_1}"])
if VAR_5.stdout:
print(VAR_5.stdout)
if VAR_5.exception:
raise VAR_5.exception
VAR_3.rename(VAR_2)
|
import shutil
from pathlib import Path
from typer.testing import CliRunner
from openapi_python_client.cli import app
if __name__ == "__main__":
VAR_0 = CliRunner()
VAR_1 = Path(__file__).parent / "fastapi_app" / "openapi.json"
VAR_2 = Path(__file__).parent / "golden-master"
shutil.rmtree(VAR_2, ignore_errors=True)
VAR_3 = Path.cwd() / "my-test-api-client"
shutil.rmtree(VAR_3, ignore_errors=True)
VAR_4 = Path(__file__).parent / "config.yml"
VAR_5 = VAR_0.invoke(app, [f"--config={VAR_4}", "generate", f"--path={VAR_1}"])
if VAR_5.stdout:
print(VAR_5.stdout)
if VAR_5.exception:
raise VAR_5.exception
VAR_3.rename(VAR_2)
| [
4,
6,
8,
11,
20,
27,
1
] | [
4,
6,
8,
17,
24,
1
] |
2CWE-601
| import io
import os
import re
from setuptools import find_packages, setup
with io.open("flask_appbuilder/__init__.py", "rt", encoding="utf8") as f:
version = re.search(r"__version__ = \"(.*?)\"", f.read()).group(1)
def fpath(name):
return os.path.join(os.path.dirname(__file__), name)
def read(fname):
return open(fpath(fname)).read()
def desc():
return read("README.rst")
setup(
name="Flask-AppBuilder",
version=version,
url="https://github.com/dpgaspar/flask-appbuilder/",
license="BSD",
author="Daniel Vaz Gaspar",
author_email="danielvazgaspar@gmail.com",
description=(
"Simple and rapid application development framework, built on top of Flask."
" includes detailed security, auto CRUD generation for your models,"
" google charts and much more."
),
long_description=desc(),
long_description_content_type="text/x-rst",
packages=find_packages(),
package_data={"": ["LICENSE"]},
entry_points={
"flask.commands": ["fab=flask_appbuilder.cli:fab"],
"console_scripts": ["fabmanager = flask_appbuilder.console:cli"],
},
include_package_data=True,
zip_safe=False,
platforms="any",
install_requires=[
"apispec[yaml]>=3.3, <4",
"colorama>=0.3.9, <1",
"click>=6.7, <9",
"email_validator>=1.0.5, <2",
"Flask>=0.12, <2",
"Flask-Babel>=1, <2",
"Flask-Login>=0.3, <0.5",
"Flask-OpenID>=1.2.5, <2",
# SQLAlchemy 1.4.0 breaks flask-sqlalchemy and sqlalchemy-utils
"SQLAlchemy<1.4.0",
"Flask-SQLAlchemy>=2.4, <3",
"Flask-WTF>=0.14.2, <0.15.0",
"Flask-JWT-Extended>=3.18, <4",
"jsonschema>=3.0.1, <4",
"marshmallow>=3, <4",
"marshmallow-enum>=1.5.1, <2",
"marshmallow-sqlalchemy>=0.22.0, <0.24.0",
"python-dateutil>=2.3, <3",
"prison>=0.1.3, <1.0.0",
"PyJWT>=1.7.1, <2.0.0",
"sqlalchemy-utils>=0.32.21, <1",
],
extras_require={"jmespath": ["jmespath>=0.9.5"]},
tests_require=["nose>=1.0", "mockldap>=0.3.0"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Software Development :: Libraries :: Python Modules",
],
python_requires="~=3.6",
test_suite="nose.collector",
)
| import io
import os
import re
from setuptools import find_packages, setup
with io.open("flask_appbuilder/__init__.py", "rt", encoding="utf8") as f:
version = re.search(r"__version__ = \"(.*?)\"", f.read()).group(1)
def fpath(name):
return os.path.join(os.path.dirname(__file__), name)
def read(fname):
return open(fpath(fname)).read()
def desc():
return read("README.rst")
setup(
name="Flask-AppBuilder",
version=version,
url="https://github.com/dpgaspar/flask-appbuilder/",
license="BSD",
author="Daniel Vaz Gaspar",
author_email="danielvazgaspar@gmail.com",
description=(
"Simple and rapid application development framework, built on top of Flask."
" includes detailed security, auto CRUD generation for your models,"
" google charts and much more."
),
long_description=desc(),
long_description_content_type="text/x-rst",
packages=find_packages(),
package_data={"": ["LICENSE"]},
entry_points={
"flask.commands": ["fab=flask_appbuilder.cli:fab"],
"console_scripts": ["fabmanager = flask_appbuilder.console:cli"],
},
include_package_data=True,
zip_safe=False,
platforms="any",
install_requires=[
"apispec[yaml]>=3.3, <4",
"colorama>=0.3.9, <1",
"click>=6.7, <9",
"email_validator>=1.0.5, <2",
"Flask>=0.12, <2",
"Flask-Babel>=1, <2",
"Flask-Login>=0.3, <0.5",
"Flask-OpenID>=1.2.5, <2",
# SQLAlchemy 1.4.0 breaks flask-sqlalchemy and sqlalchemy-utils
"SQLAlchemy<1.4.0",
"Flask-SQLAlchemy>=2.4, <3",
"Flask-WTF>=0.14.2, <0.15.0",
"Flask-JWT-Extended>=3.18, <4",
"jsonschema>=3.0.1, <4",
"marshmallow>=3, <4",
"marshmallow-enum>=1.5.1, <2",
"marshmallow-sqlalchemy>=0.22.0, <0.24.0",
"python-dateutil>=2.3, <3",
"prison>=0.1.3, <1.0.0",
"PyJWT>=1.7.1, <2.0.0",
"sqlalchemy-utils>=0.32.21, <1",
],
extras_require={
"jmespath": ["jmespath>=0.9.5"],
"oauth": ["Authlib>=0.14, <1.0.0"],
},
tests_require=["nose>=1.0", "mockldap>=0.3.0"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Software Development :: Libraries :: Python Modules",
],
python_requires="~=3.6",
test_suite="nose.collector",
)
| open_redirect | {
"code": [
" extras_require={\"jmespath\": [\"jmespath>=0.9.5\"]},"
],
"line_no": [
70
]
} | {
"code": [
" extras_require={",
" \"jmespath\": [\"jmespath>=0.9.5\"],",
" \"oauth\": [\"Authlib>=0.14, <1.0.0\"],",
" },"
],
"line_no": [
70,
71,
72,
73
]
} | import io
import os
import re
from setuptools import find_packages, setup
with io.open("flask_appbuilder/__init__.py", "rt", encoding="utf8") as f:
VAR_2 = re.search(r"__version__ = \"(.*?)\"", f.read()).group(1)
def FUNC_0(VAR_0):
return os.path.join(os.path.dirname(__file__), VAR_0)
def FUNC_1(VAR_1):
return open(FUNC_0(VAR_1)).read()
def FUNC_2():
return FUNC_1("README.rst")
setup(
VAR_0="Flask-AppBuilder",
VAR_2=version,
url="https://github.com/dpgaspar/flask-appbuilder/",
license="BSD",
author="Daniel Vaz Gaspar",
author_email="danielvazgaspar@gmail.com",
description=(
"Simple and rapid application development framework, built on top of Flask."
" includes detailed security, auto CRUD generation for your models,"
" google charts and much more."
),
long_description=FUNC_2(),
long_description_content_type="text/x-rst",
packages=find_packages(),
package_data={"": ["LICENSE"]},
entry_points={
"flask.commands": ["fab=flask_appbuilder.cli:fab"],
"console_scripts": ["fabmanager = flask_appbuilder.console:cli"],
},
include_package_data=True,
zip_safe=False,
platforms="any",
install_requires=[
"apispec[yaml]>=3.3, <4",
"colorama>=0.3.9, <1",
"click>=6.7, <9",
"email_validator>=1.0.5, <2",
"Flask>=0.12, <2",
"Flask-Babel>=1, <2",
"Flask-Login>=0.3, <0.5",
"Flask-OpenID>=1.2.5, <2",
"SQLAlchemy<1.4.0",
"Flask-SQLAlchemy>=2.4, <3",
"Flask-WTF>=0.14.2, <0.15.0",
"Flask-JWT-Extended>=3.18, <4",
"jsonschema>=3.0.1, <4",
"marshmallow>=3, <4",
"marshmallow-enum>=1.5.1, <2",
"marshmallow-sqlalchemy>=0.22.0, <0.24.0",
"python-dateutil>=2.3, <3",
"prison>=0.1.3, <1.0.0",
"PyJWT>=1.7.1, <2.0.0",
"sqlalchemy-utils>=0.32.21, <1",
],
extras_require={"jmespath": ["jmespath>=0.9.5"]},
tests_require=["nose>=1.0", "mockldap>=0.3.0"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Software Development :: Libraries :: Python Modules",
],
python_requires="~=3.6",
test_suite="nose.collector",
)
| import io
import os
import re
from setuptools import find_packages, setup
with io.open("flask_appbuilder/__init__.py", "rt", encoding="utf8") as f:
VAR_2 = re.search(r"__version__ = \"(.*?)\"", f.read()).group(1)
def FUNC_0(VAR_0):
return os.path.join(os.path.dirname(__file__), VAR_0)
def FUNC_1(VAR_1):
return open(FUNC_0(VAR_1)).read()
def FUNC_2():
return FUNC_1("README.rst")
setup(
VAR_0="Flask-AppBuilder",
VAR_2=version,
url="https://github.com/dpgaspar/flask-appbuilder/",
license="BSD",
author="Daniel Vaz Gaspar",
author_email="danielvazgaspar@gmail.com",
description=(
"Simple and rapid application development framework, built on top of Flask."
" includes detailed security, auto CRUD generation for your models,"
" google charts and much more."
),
long_description=FUNC_2(),
long_description_content_type="text/x-rst",
packages=find_packages(),
package_data={"": ["LICENSE"]},
entry_points={
"flask.commands": ["fab=flask_appbuilder.cli:fab"],
"console_scripts": ["fabmanager = flask_appbuilder.console:cli"],
},
include_package_data=True,
zip_safe=False,
platforms="any",
install_requires=[
"apispec[yaml]>=3.3, <4",
"colorama>=0.3.9, <1",
"click>=6.7, <9",
"email_validator>=1.0.5, <2",
"Flask>=0.12, <2",
"Flask-Babel>=1, <2",
"Flask-Login>=0.3, <0.5",
"Flask-OpenID>=1.2.5, <2",
"SQLAlchemy<1.4.0",
"Flask-SQLAlchemy>=2.4, <3",
"Flask-WTF>=0.14.2, <0.15.0",
"Flask-JWT-Extended>=3.18, <4",
"jsonschema>=3.0.1, <4",
"marshmallow>=3, <4",
"marshmallow-enum>=1.5.1, <2",
"marshmallow-sqlalchemy>=0.22.0, <0.24.0",
"python-dateutil>=2.3, <3",
"prison>=0.1.3, <1.0.0",
"PyJWT>=1.7.1, <2.0.0",
"sqlalchemy-utils>=0.32.21, <1",
],
extras_require={
"jmespath": ["jmespath>=0.9.5"],
"oauth": ["Authlib>=0.14, <1.0.0"],
},
tests_require=["nose>=1.0", "mockldap>=0.3.0"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Software Development :: Libraries :: Python Modules",
],
python_requires="~=3.6",
test_suite="nose.collector",
)
| [
4,
6,
7,
10,
11,
14,
15,
18,
19,
22,
23,
56,
86
] | [
4,
6,
7,
10,
11,
14,
15,
18,
19,
22,
23,
56,
89
] |
1CWE-79
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
"""
Frappe - Low Code Open Source Framework in Python and JS
Frappe, pronounced fra-pay, is a full stack, batteries-included, web
framework written in Python and Javascript with MariaDB as the database.
It is the framework which powers ERPNext. It is pretty generic and can
be used to build database driven apps.
Read the documentation: https://frappeframework.com/docs
"""
from __future__ import unicode_literals, print_function
from six import iteritems, binary_type, text_type, string_types, PY2
from werkzeug.local import Local, release_local
import os, sys, importlib, inspect, json
from past.builtins import cmp
import click
# Local application imports
from .exceptions import *
from .utils.jinja import (get_jenv, get_template, render_template, get_email_from_template, get_jloader)
from .utils.lazy_loader import lazy_import
# Lazy imports
faker = lazy_import('faker')
# Harmless for Python 3
# For Python 2 set default encoding to utf-8
if PY2:
reload(sys)
sys.setdefaultencoding("utf-8")
__version__ = '13.0.0-dev'
__title__ = "Frappe Framework"
local = Local()
controllers = {}
class _dict(dict):
"""dict like object that exposes keys as attributes"""
def __getattr__(self, key):
ret = self.get(key)
if not ret and key.startswith("__"):
raise AttributeError()
return ret
def __setattr__(self, key, value):
self[key] = value
def __getstate__(self):
return self
def __setstate__(self, d):
self.update(d)
def update(self, d):
"""update and return self -- the missing dict feature in python"""
super(_dict, self).update(d)
return self
def copy(self):
return _dict(dict(self).copy())
def _(msg, lang=None, context=None):
"""Returns translated string in current lang, if exists.
Usage:
_('Change')
_('Change', context='Coins')
"""
from frappe.translate import get_full_dict
from frappe.utils import strip_html_tags, is_html
if not hasattr(local, 'lang'):
local.lang = lang or 'en'
if not lang:
lang = local.lang
non_translated_string = msg
if is_html(msg):
msg = strip_html_tags(msg)
# msg should always be unicode
msg = as_unicode(msg).strip()
translated_string = ''
if context:
string_key = '{msg}:{context}'.format(msg=msg, context=context)
translated_string = get_full_dict(lang).get(string_key)
if not translated_string:
translated_string = get_full_dict(lang).get(msg)
# return lang_full_dict according to lang passed parameter
return translated_string or non_translated_string
def as_unicode(text, encoding='utf-8'):
'''Convert to unicode if required'''
if isinstance(text, text_type):
return text
elif text==None:
return ''
elif isinstance(text, binary_type):
return text_type(text, encoding)
else:
return text_type(text)
def get_lang_dict(fortype, name=None):
"""Returns the translated language dict for the given type and name.
:param fortype: must be one of `doctype`, `page`, `report`, `include`, `jsfile`, `boot`
:param name: name of the document for which assets are to be returned."""
from frappe.translate import get_dict
return get_dict(fortype, name)
def set_user_lang(user, user_language=None):
"""Guess and set user language for the session. `frappe.local.lang`"""
from frappe.translate import get_user_lang
local.lang = get_user_lang(user)
# local-globals
db = local("db")
conf = local("conf")
form = form_dict = local("form_dict")
request = local("request")
response = local("response")
session = local("session")
user = local("user")
flags = local("flags")
error_log = local("error_log")
debug_log = local("debug_log")
message_log = local("message_log")
lang = local("lang")
def init(site, sites_path=None, new_site=False):
"""Initialize frappe for the current site. Reset thread locals `frappe.local`"""
if getattr(local, "initialised", None):
return
if not sites_path:
sites_path = '.'
local.error_log = []
local.message_log = []
local.debug_log = []
local.realtime_log = []
local.flags = _dict({
"currently_saving": [],
"redirect_location": "",
"in_install_db": False,
"in_install_app": False,
"in_import": False,
"in_test": False,
"mute_messages": False,
"ignore_links": False,
"mute_emails": False,
"has_dataurl": False,
"new_site": new_site
})
local.rollback_observers = []
local.before_commit = []
local.test_objects = {}
local.site = site
local.sites_path = sites_path
local.site_path = os.path.join(sites_path, site)
local.all_apps = None
local.request_ip = None
local.response = _dict({"docs":[]})
local.task_id = None
local.conf = _dict(get_site_config())
local.lang = local.conf.lang or "en"
local.lang_full_dict = None
local.module_app = None
local.app_modules = None
local.system_settings = _dict()
local.user = None
local.user_perms = None
local.session = None
local.role_permissions = {}
local.valid_columns = {}
local.new_doc_templates = {}
local.link_count = {}
local.jenv = None
local.jloader =None
local.cache = {}
local.document_cache = {}
local.meta_cache = {}
local.form_dict = _dict()
local.session = _dict()
local.dev_server = os.environ.get('DEV_SERVER', False)
setup_module_map()
local.initialised = True
def connect(site=None, db_name=None, set_admin_as_user=True):
	"""Open a database connection for the (optionally initialized) site.

	:param site: when given, `frappe.init` is run for it first.
	:param db_name: database user; falls back to `site_config.json`.
	:param set_admin_as_user: when truthy, switch the session to Administrator.
	"""
	from frappe.database import get_db

	if site:
		init(site)

	connection_user = db_name or local.conf.db_name
	local.db = get_db(user=connection_user)

	if set_admin_as_user:
		set_user("Administrator")
def connect_replica():
	"""Open a read-replica connection and make it the active `local.db`.

	The primary connection is kept on `local.primary_db` so callers (see
	`read_only`) can restore it afterwards.
	"""
	from frappe.database import get_db

	replica_user = local.conf.db_name
	replica_password = local.conf.db_password
	if local.conf.different_credentials_for_replica:
		replica_user = local.conf.replica_db_name
		replica_password = local.conf.replica_db_password

	local.replica_db = get_db(
		host=local.conf.replica_host, user=replica_user, password=replica_password)

	# swap db connections
	local.primary_db = local.db
	local.db = local.replica_db
def get_site_config(sites_path=None, site_path=None):
	"""Returns `site_config.json` combined with `sites/common_site_config.json`.
	`site_config` is a set of site wide settings like database name, password, email etc."""
	config = {}

	sites_path = sites_path or getattr(local, "sites_path", None)
	site_path = site_path or getattr(local, "site_path", None)

	# common config is loaded first so site-specific keys override it below
	if sites_path:
		common_site_config = os.path.join(sites_path, "common_site_config.json")
		if os.path.exists(common_site_config):
			try:
				config.update(get_file_json(common_site_config))
			except Exception as error:
				# invalid JSON is reported but not fatal here
				click.secho("common_site_config.json is invalid", fg="red")
				print(error)

	if site_path:
		site_config = os.path.join(site_path, "site_config.json")
		if os.path.exists(site_config):
			try:
				config.update(get_file_json(site_config))
			except Exception as error:
				click.secho("{0}/site_config.json is invalid".format(local.site), fg="red")
				print(error)
		elif local.site and not local.flags.new_site:
			# a named existing site must have a site_config.json
			raise IncorrectSitePath("{0} does not exist".format(local.site))

	return _dict(config)
def get_conf(site=None):
	"""Return the active site configuration, bootstrapping a site when none is set up."""
	if hasattr(local, 'conf'):
		return local.conf

	# if no site, get from common_site_config.json
	with init_site(site):
		return local.conf
class init_site:
	"""Context manager that initializes `frappe.local` for a site and tears it down on exit."""

	def __init__(self, site=None):
		'''If site==None, initialize it for empty site ('') to load common_site_config.json'''
		self.site = site or ''

	def __enter__(self):
		init(self.site)
		return local

	def __exit__(self, type, value, traceback):
		# release thread-local state regardless of errors in the body
		destroy()
def destroy():
	"""Closes connection and releases werkzeug local."""
	if db:
		db.close()

	# drop everything stored on `frappe.local` for this thread/request
	release_local(local)
# memcache
# lazily-created redis connection, shared by cache() below
redis_server = None
def cache():
	"""Returns redis connection."""
	global redis_server

	if not redis_server:
		from frappe.utils.redis_wrapper import RedisWrapper
		cache_url = conf.get('redis_cache') or "redis://localhost:11311"
		redis_server = RedisWrapper.from_url(cache_url)

	return redis_server
def get_traceback():
	"""Returns error traceback."""
	# thin delegate; the local import shadows this function's own name on purpose
	from frappe.utils import get_traceback
	return get_traceback()
def errprint(msg):
	"""Log error. This is sent back as `exc` in response.

	:param msg: Message."""
	msg = as_unicode(msg)

	# echo to stdout outside web requests, outside cmd calls, or in developer mode
	if not request or ("cmd" not in local.form_dict) or conf.developer_mode:
		print(msg)

	error_log.append({"exc": msg})
def print_sql(enable=True):
	# toggle the cached flag that makes the database layer echo queries
	return cache().set_value('flag_print_sql', enable)
def log(msg):
	"""Add to `debug_log`.

	:param msg: Message."""
	# console context with site logging enabled: echo the message too
	if not request and (conf.get("logging") or False):
		print(repr(msg))

	debug_log.append(as_unicode(msg))
def msgprint(msg, title=None, raise_exception=0, as_table=False, as_list=False, indicator=None, alert=False, primary_action=None, is_minimizable=None, wide=None):
	"""Print a message to the user (via HTTP response).
	Messages are sent in the `__server_messages` property in the
	response JSON and shown in a pop-up / modal.

	:param msg: Message.
	:param title: [optional] Message title.
	:param raise_exception: [optional] Raise given exception and show message.
	:param as_table: [optional] If `msg` is a list of lists, render as HTML table.
	:param as_list: [optional] If `msg` is a list, render as un-ordered list.
	:param primary_action: [optional] Bind a primary server/client side action.
	:param is_minimizable: [optional] Allow users to minimize the modal
	:param wide: [optional] Show wide modal
	"""
	from frappe.utils import strip_html_tags

	msg = safe_decode(msg)
	out = _dict(message=msg)

	def _raise_exception():
		# raise the configured exception (class or truthy flag) after
		# optionally rolling back the transaction
		if raise_exception:
			if flags.rollback_on_exception:
				db.rollback()
			import inspect

			if inspect.isclass(raise_exception) and issubclass(raise_exception, Exception):
				raise raise_exception(msg)
			else:
				raise ValidationError(msg)

	# muted: skip queueing the message entirely, but still honor the exception
	if flags.mute_messages:
		_raise_exception()
		return

	if as_table and type(msg) in (list, tuple):
		out.as_table = 1

	if as_list and type(msg) in (list, tuple) and len(msg) > 1:
		out.as_list = 1

	if flags.print_messages and out.message:
		print(f"Message: {strip_html_tags(out.message)}")

	if title:
		out.title = title

	# errors default to a red indicator
	if not indicator and raise_exception:
		indicator = 'red'

	if indicator:
		out.indicator = indicator

	if is_minimizable:
		out.is_minimizable = is_minimizable

	if alert:
		out.alert = 1

	if raise_exception:
		out.raise_exception = 1

	if primary_action:
		out.primary_action = primary_action

	if wide:
		out.wide = wide

	message_log.append(json.dumps(out))

	if raise_exception and hasattr(raise_exception, '__name__'):
		local.response['exc_type'] = raise_exception.__name__

	_raise_exception()
def clear_messages():
	# drop all queued user-facing messages for this request
	local.message_log = []
def get_message_log():
	"""Return the queued user-facing messages, decoded from JSON."""
	return [json.loads(raw_message) for raw_message in local.message_log]
def clear_last_message():
	"""Drop the most recently queued user-facing message, if any."""
	if local.message_log:
		local.message_log = local.message_log[:-1]
def throw(msg, exc=ValidationError, title=None, is_minimizable=None, wide=None, as_list=False):
	"""Throw exception and show message (`msgprint`).

	:param msg: Message.
	:param exc: Exception class. Default `frappe.ValidationError`"""
	msgprint(msg, raise_exception=exc, title=title, indicator='red', is_minimizable=is_minimizable, wide=wide, as_list=as_list)
def emit_js(js, user=False, **kwargs):
	"""Push a javascript snippet to be eval'd in the target user's browser (realtime).

	:param js: javascript source to run client-side.
	:param user: target user; the default `False` means the session user.
	"""
	# NOTE(review): `user == False` also matches 0 — presumably only the
	# default sentinel is intended; confirm callers never pass falsy users.
	if user == False:
		user = session.user
	publish_realtime('eval_js', js, user=user, **kwargs)
def create_folder(path, with_init=False):
	"""Create a folder in the given path and add an `__init__.py` file (optional).

	:param path: Folder path.
	:param with_init: Create `__init__.py` in the new folder."""
	from frappe.utils import touch_file

	if not os.path.exists(path):
		os.makedirs(path)

		# only a freshly created folder gets the __init__.py marker
		if with_init:
			touch_file(os.path.join(path, "__init__.py"))
def set_user(username):
	"""Set current user.

	:param username: **User** name to set as current user."""
	local.session.user = username
	local.session.sid = username
	# invalidate every per-user cache so nothing leaks between users
	local.cache = {}
	local.form_dict = _dict()
	local.jenv = None
	local.session.data = _dict()
	local.role_permissions = {}
	local.new_doc_templates = {}
	local.user_perms = None
def get_user():
	"""Return the session user's `UserPermissions`, creating it on first use."""
	from frappe.utils.user import UserPermissions

	if not local.user_perms:
		local.user_perms = UserPermissions(local.session.user)

	return local.user_perms
def get_roles(username=None):
	"""Return the roles of `username` (default: session user); Guest when no session."""
	if not local.session:
		return ["Guest"]

	import frappe.permissions
	target_user = username or local.session.user
	return frappe.permissions.get_roles(target_user)
def get_request_header(key, default=None):
	"""Return HTTP request header.

	:param key: HTTP header key.
	:param default: Default value."""
	# requires an active HTTP request on frappe.local
	return request.headers.get(key, default)
def sendmail(recipients=None, sender="", subject="No Subject", message="No Message",
		as_markdown=False, delayed=True, reference_doctype=None, reference_name=None,
		unsubscribe_method=None, unsubscribe_params=None, unsubscribe_message=None, add_unsubscribe_link=1,
		attachments=None, content=None, doctype=None, name=None, reply_to=None, queue_separately=False,
		cc=None, bcc=None, message_id=None, in_reply_to=None, send_after=None, expose_recipients=None,
		send_priority=1, communication=None, retry=1, now=None, read_receipt=None, is_notification=False,
		inline_images=None, template=None, args=None, header=None, print_letterhead=False, with_container=False):
	"""Send email using user's default **Email Account** or global default **Email Account**.

	:param recipients: List of recipients.
	:param sender: Email sender. Default is current user or default outgoing account.
	:param subject: Email Subject.
	:param message: (or `content`) Email Content.
	:param as_markdown: Convert content markdown to HTML.
	:param delayed: Send via scheduled email sender **Email Queue**. Don't send immediately. Default is true
	:param send_priority: Priority for Email Queue, default 1.
	:param reference_doctype: (or `doctype`) Append as communication to this DocType.
	:param reference_name: (or `name`) Append as communication to this document name.
	:param unsubscribe_method: Unsubscribe url with options email, doctype, name. e.g. `/api/method/unsubscribe`
	:param unsubscribe_params: Unsubscribe paramaters to be loaded on the unsubscribe_method [optional] (dict).
	:param attachments: List of attachments.
	:param reply_to: Reply-To Email Address.
	:param message_id: Used for threading. If a reply is received to this email, Message-Id is sent back as In-Reply-To in received email.
	:param in_reply_to: Used to send the Message-Id of a received email back as In-Reply-To.
	:param send_after: Send after the given datetime.
	:param expose_recipients: Display all recipients in the footer message - "This email was sent to"
	:param communication: Communication link to be set in Email Queue record
	:param inline_images: List of inline images as {"filename", "filecontent"}. All src properties will be replaced with random Content-Id
	:param template: Name of html template from templates/emails folder
	:param args: Arguments for rendering the template
	:param header: Append header in email
	:param with_container: Wraps email inside a styled container
	"""
	# fix: recipients/cc/bcc previously used shared mutable `[]` defaults;
	# use None and create a fresh list per call instead
	if recipients is None:
		recipients = []
	if cc is None:
		cc = []
	if bcc is None:
		bcc = []

	text_content = None
	if template:
		message, text_content = get_email_from_template(template, args)

	# explicit `content` wins over `message`/template output
	message = content or message

	if as_markdown:
		from frappe.utils import md_to_html
		message = md_to_html(message)

	# immediate delivery bypasses the scheduler
	if not delayed:
		now = True

	from frappe.email import queue
	queue.send(recipients=recipients, sender=sender,
		subject=subject, message=message, text_content=text_content,
		reference_doctype = doctype or reference_doctype, reference_name = name or reference_name, add_unsubscribe_link=add_unsubscribe_link,
		unsubscribe_method=unsubscribe_method, unsubscribe_params=unsubscribe_params, unsubscribe_message=unsubscribe_message,
		attachments=attachments, reply_to=reply_to, cc=cc, bcc=bcc, message_id=message_id, in_reply_to=in_reply_to,
		send_after=send_after, expose_recipients=expose_recipients, send_priority=send_priority, queue_separately=queue_separately,
		communication=communication, now=now, read_receipt=read_receipt, is_notification=is_notification,
		inline_images=inline_images, header=header, print_letterhead=print_letterhead, with_container=with_container)
# registries populated by the @frappe.whitelist decorator below
whitelisted = []  # all HTTP-exposed functions
guest_methods = []  # subset callable without login
xss_safe_methods = []  # subset whose string arguments are not sanitized
allowed_http_methods_for_whitelisted_func = {}  # fn -> list of allowed HTTP verbs
def whitelist(allow_guest=False, xss_safe=False, methods=None):
	"""
	Decorator for whitelisting a function and making it accessible via HTTP.
	Standard request will be `/api/method/[path.to.method]`

	:param allow_guest: Allow non logged-in user to access this method.
	:param methods: Allowed http method to access the method.

	Use as:

		@frappe.whitelist()
		def myfunc(param1, param2):
			pass
	"""
	if not methods:
		methods = ['GET', 'POST', 'PUT', 'DELETE']

	def innerfn(fn):
		global whitelisted, guest_methods, xss_safe_methods, allowed_http_methods_for_whitelisted_func

		# register the function and its allowed HTTP verbs
		whitelisted.append(fn)
		allowed_http_methods_for_whitelisted_func[fn] = methods

		if allow_guest:
			guest_methods.append(fn)

			# xss_safe only has effect together with allow_guest
			if xss_safe:
				xss_safe_methods.append(fn)

		return fn

	return innerfn
def read_only():
	"""Decorator: route the wrapped function's queries to the read replica.

	When `conf.read_from_replica` is set, a replica connection is opened and
	becomes the active `local.db`; the primary connection is always restored
	(and the replica closed) afterwards, even when the call raises.
	"""
	def innfn(fn):
		def wrapper_fn(*args, **kwargs):
			if conf.read_from_replica:
				connect_replica()

			# fix: dropped the redundant bare `except: raise` — `finally`
			# already runs on the exception path and the exception propagates
			try:
				retval = fn(*args, **get_newargs(fn, kwargs))
			finally:
				if local and hasattr(local, 'primary_db'):
					# close the replica and restore the primary connection
					local.db.close()
					local.db = local.primary_db

			return retval
		return wrapper_fn
	return innfn
def only_for(roles, message=False):
	"""Raise `frappe.PermissionError` if the user does not have any of the given **Roles**.

	:param roles: List of roles to check."""
	if local.flags.in_test:
		return

	required = set(roles) if isinstance(roles, (tuple, list)) else {roles}
	user_roles = set(get_roles())

	if required.intersection(user_roles):
		return

	if message:
		msgprint(_('This action is only allowed for {}').format(bold(', '.join(required))), _('Not Permitted'))

	raise PermissionError
def get_domain_data(module):
	"""Return the `data` dict registered by `module` via the `domains` hook.

	Returns an empty `_dict` when the module registers no domain, or (in
	tests) when the hook target cannot be imported.
	"""
	try:
		domain_data = get_hooks('domains')
		if module in domain_data:
			# reuse the already-fetched hooks instead of calling get_hooks again
			return _dict(get_attr(domain_data[module][0] + '.data'))
		return _dict()
	except ImportError:
		if local.flags.in_test:
			return _dict()
		raise
def clear_cache(user=None, doctype=None):
	"""Clear **User**, **DocType** or global cache.

	:param user: If user is given, only user cache is cleared.
	:param doctype: If doctype is given, only DocType cache is cleared."""
	import frappe.cache_manager

	if doctype:
		frappe.cache_manager.clear_doctype_cache(doctype)
		reset_metadata_version()
	elif user:
		frappe.cache_manager.clear_user_cache(user)
	else: # everything
		from frappe import translate
		frappe.cache_manager.clear_user_cache()
		frappe.cache_manager.clear_domain_cache()
		translate.clear_cache()
		reset_metadata_version()
		local.cache = {}
		local.new_doc_templates = {}

	# app-registered clear_cache hooks run for every variant
	for fn in get_hooks("clear_cache"):
		get_attr(fn)()

	local.role_permissions = {}
def only_has_select_perm(doctype, user=None, ignore_permissions=False):
	"""True when `user` has only `select` (and not `read`) permission on `doctype`."""
	if ignore_permissions:
		return False

	if not user:
		user = local.session.user

	import frappe.permissions
	role_perms = frappe.permissions.get_role_permissions(doctype, user=user)
	return bool(role_perms.get('select') and not role_perms.get('read'))
def has_permission(doctype=None, ptype="read", doc=None, user=None, verbose=False, throw=False):
	"""Raises `frappe.PermissionError` if not permitted.

	:param doctype: DocType for which permission is to be check.
	:param ptype: Permission type (`read`, `write`, `create`, `submit`, `cancel`, `amend`). Default: `read`.
	:param doc: [optional] Checks User permissions for given doc.
	:param user: [optional] Check for given user. Default: current user.
	:param throw: [optional] raise instead of returning False."""
	# doctype can be inferred from the document
	if not doctype and doc:
		doctype = doc.doctype

	import frappe.permissions
	out = frappe.permissions.has_permission(doctype, ptype, doc=doc, verbose=verbose, user=user, raise_exception=throw)
	if throw and not out:
		if doc:
			frappe.throw(_("No permission for {0}").format(doc.doctype + " " + doc.name))
		else:
			frappe.throw(_("No permission for {0}").format(doctype))

	return out
def has_website_permission(doc=None, ptype='read', user=None, verbose=False, doctype=None):
	"""Raises `frappe.PermissionError` if not permitted.

	:param doctype: DocType for which permission is to be check.
	:param ptype: Permission type (`read`, `write`, `create`, `submit`, `cancel`, `amend`). Default: `read`.
	:param doc: Checks User permissions for given doc.
	:param user: [optional] Check for given user. Default: current user."""
	if not user:
		user = session.user

	if doc:
		if isinstance(doc, string_types):
			doc = get_doc(doctype, doc)

		doctype = doc.doctype

		if doc.flags.ignore_permissions:
			return True

		# check permission in controller
		if hasattr(doc, 'has_website_permission'):
			return doc.has_website_permission(ptype, user, verbose=verbose)

	hooks = (get_hooks("has_website_permission") or {}).get(doctype, [])
	if hooks:
		for method in hooks:
			result = call(method, doc=doc, ptype=ptype, user=user, verbose=verbose)
			# if even a single permission check is Falsy
			if not result:
				return False

		# else it is Truthy
		return True

	else:
		return False
def is_table(doctype):
	"""Returns True if `istable` property (indicating child Table) is set for given DocType."""
	def fetch_child_doctypes():
		return db.sql_list("select name from tabDocType where istable=1")

	child_doctypes = cache().get_value("is_table", fetch_child_doctypes)
	return doctype in child_doctypes
def get_precision(doctype, fieldname, currency=None, doc=None):
	"""Get precision for a given field"""
	from frappe.model.meta import get_field_precision

	docfield = get_meta(doctype).get_field(fieldname)
	return get_field_precision(docfield, doc, currency)
def generate_hash(txt=None, length=None):
	"""Generates random hash for given text + current timestamp + random string."""
	import hashlib, time
	from .utils import random_string

	seed = (txt or "") + repr(time.time()) + repr(random_string(8))
	digest = hashlib.sha224(seed.encode()).hexdigest()

	return digest[:length] if length else digest
def reset_metadata_version():
	"""Reset `metadata_version` (Client (Javascript) build ID) hash."""
	new_version = generate_hash()
	cache().set_value("metadata_version", new_version)
	return new_version
def new_doc(doctype, parent_doc=None, parentfield=None, as_dict=False):
	"""Returns a new document of the given DocType with defaults set.

	:param doctype: DocType of the new document.
	:param parent_doc: [optional] add to parent document.
	:param parentfield: [optional] add against this `parentfield`."""
	from frappe.model.create_new import get_new_doc
	return get_new_doc(doctype, parent_doc, parentfield, as_dict=as_dict)
def set_value(doctype, docname, fieldname, value=None):
	"""Set document value. Calls `frappe.client.set_value`"""
	import frappe.client
	return frappe.client.set_value(doctype, docname, fieldname, value)
def get_cached_doc(*args, **kwargs):
	"""Fetch a document, trying the request-local cache, then redis, then the database.

	Caching only applies when called as (doctype, name) with a string name.
	"""
	if args and len(args) > 1 and isinstance(args[1], text_type):
		key = get_document_cache_key(args[0], args[1])

		# local cache
		doc = local.document_cache.get(key)
		if doc:
			return doc

		# redis cache
		doc = cache().hget('document_cache', key)
		if doc:
			doc = get_doc(doc)
			# warm the request-local cache for subsequent hits
			local.document_cache[key] = doc
			return doc

	# database
	doc = get_doc(*args, **kwargs)

	return doc
def get_document_cache_key(doctype, name):
	"""Build the `doctype::name` key used by the document caches."""
	return '{0}::{1}'.format(doctype, name)
def clear_document_cache(doctype, name):
	"""Evict a document from the request-local and redis document caches."""
	cache().hdel("last_modified", doctype)

	key = get_document_cache_key(doctype, name)
	local.document_cache.pop(key, None)
	cache().hdel('document_cache', key)
def get_cached_value(doctype, name, fieldname, as_dict=False):
	"""Read one field (or several fields) from a cached document."""
	doc = get_cached_doc(doctype, name)

	# single fieldname: return the bare value
	if isinstance(fieldname, string_types):
		if as_dict:
			throw('Cannot make dict for single fieldname')
		return doc.get(fieldname)

	values = [doc.get(field) for field in fieldname]
	if as_dict:
		return _dict(zip(fieldname, values))
	return values
def get_doc(*args, **kwargs):
	"""Return a `frappe.model.document.Document` object of the given type and name.

	:param arg1: DocType name as string **or** document JSON.
	:param arg2: [optional] Document name as string.

	Examples:

		# insert a new document
		todo = frappe.get_doc({"doctype":"ToDo", "description": "test"})
		todo.insert()

		# open an existing document
		todo = frappe.get_doc("ToDo", "TD0001")

	"""
	import frappe.model.document
	doc = frappe.model.document.get_doc(*args, **kwargs)

	# set in cache
	if args and len(args) > 1:
		key = get_document_cache_key(args[0], args[1])
		local.document_cache[key] = doc
		cache().hset('document_cache', key, doc.as_dict())

	return doc
def get_last_doc(doctype, filters=None, order_by="creation desc"):
	"""Get last created document of this type."""
	matches = get_all(
		doctype,
		filters=filters,
		limit_page_length=1,
		order_by=order_by,
		pluck="name"
	)

	if not matches:
		raise DoesNotExistError

	return get_doc(doctype, matches[0])
def get_single(doctype):
	"""Return a `frappe.model.document.Document` object of the given Single doctype."""
	# singles are stored under their own doctype name
	return get_doc(doctype, doctype)
def get_meta(doctype, cached=True):
	"""Get `frappe.model.meta.Meta` instance of given doctype name."""
	import frappe.model.meta
	return frappe.model.meta.get_meta(doctype, cached=cached)
def get_meta_module(doctype):
	# return the python module that defines the given doctype's controller
	import frappe.modules
	return frappe.modules.load_doctype_module(doctype)
def delete_doc(doctype=None, name=None, force=0, ignore_doctypes=None, for_reload=False,
	ignore_permissions=False, flags=None, ignore_on_trash=False, ignore_missing=True, delete_permanently=False):
	"""Delete a document. Calls `frappe.model.delete_doc.delete_doc`.

	:param doctype: DocType of document to be delete.
	:param name: Name of document to be delete.
	:param force: Allow even if document is linked. Warning: This may lead to data integrity errors.
	:param ignore_doctypes: Ignore if child table is one of these.
	:param for_reload: Call `before_reload` trigger before deleting.
	:param ignore_permissions: Ignore user permissions.
	:param delete_permanently: Do not create a Deleted Document for the document."""
	import frappe.model.delete_doc
	frappe.model.delete_doc.delete_doc(doctype, name, force, ignore_doctypes, for_reload,
		ignore_permissions, flags, ignore_on_trash, ignore_missing, delete_permanently)
def delete_doc_if_exists(doctype, name, force=0):
	"""Delete document if exists."""
	if not db.exists(doctype, name):
		return
	delete_doc(doctype, name, force=force)
def reload_doctype(doctype, force=False, reset_permissions=False):
	"""Reload DocType from model (`[module]/[doctype]/[name]/[name].json`) files."""
	# module is looked up from the database; names are sluggified for the path
	reload_doc(scrub(db.get_value("DocType", doctype, "module")), "doctype", scrub(doctype),
		force=force, reset_permissions=reset_permissions)
def reload_doc(module, dt=None, dn=None, force=False, reset_permissions=False):
	"""Reload Document from model (`[module]/[doctype]/[name]/[name].json`) files.

	:param module: Module name.
	:param dt: DocType name.
	:param dn: Document name.
	:param force: Reload even if `modified` timestamp matches.
	"""
	import frappe.modules
	return frappe.modules.reload_doc(module, dt, dn, force=force, reset_permissions=reset_permissions)
@whitelist()
def rename_doc(*args, **kwargs):
	"""
	Renames a doc(dt, old) to doc(dt, new) and updates all linked fields of type "Link"

	Calls `frappe.model.rename_doc.rename_doc`
	"""
	# strip client-supplied keys that must not reach the model layer
	kwargs.pop('ignore_permissions', None)
	kwargs.pop('cmd', None)

	from frappe.model.rename_doc import rename_doc
	return rename_doc(*args, **kwargs)
def get_module(modulename):
	"""Returns a module object for given Python module name using `importlib.import_module`."""
	module = importlib.import_module(modulename)
	return module
def scrub(txt):
	"""Sluggify a label: spaces and dashes become underscores, lowercased.

	e.g. `Sales Order` becomes `sales_order`.
	"""
	slug = txt.replace(' ', '_')
	slug = slug.replace('-', '_')
	return slug.lower()
def unscrub(txt):
	"""Titlify a slug: underscores and dashes become spaces, title-cased.

	e.g. `sales_order` becomes `Sales Order`.
	"""
	label = txt.replace('_', ' ')
	label = label.replace('-', ' ')
	return label.title()
def get_module_path(module, *joins):
	"""Get the path of the given module name.

	:param module: Module name.
	:param *joins: Join additional path elements using `os.path.join`."""
	module = scrub(module)
	owning_app = local.module_app[module]
	return get_pymodule_path(owning_app + "." + module, *joins)
def get_app_path(app_name, *joins):
	"""Return path of given app.

	:param app: App name.
	:param *joins: Join additional path elements using `os.path.join`."""
	return get_pymodule_path(app_name, *joins)
def get_site_path(*joins):
	"""Return path of current site.

	:param *joins: Join additional path elements using `os.path.join`."""
	return os.path.join(local.site_path, *joins)
def get_pymodule_path(modulename, *joins):
	"""Return path of given Python module name.

	:param modulename: Python module name.
	:param *joins: Join additional path elements using `os.path.join`."""
	# path segments are sluggified unless pointing into "public" (static assets)
	if "public" not in joins:
		joins = [scrub(part) for part in joins]

	module_dir = os.path.dirname(get_module(scrub(modulename)).__file__)
	return os.path.join(module_dir, *joins)
def get_module_list(app_name):
	"""Get list of modules for given all via `app/modules.txt`."""
	# modules.txt lives next to the app package's __init__.py
	return get_file_items(os.path.join(os.path.dirname(get_module(app_name).__file__), "modules.txt"))
def get_all_apps(with_internal_apps=True, sites_path=None):
	"""Get list of all apps via `sites/apps.txt`."""
	if not sites_path:
		sites_path = local.sites_path

	apps = get_file_items(os.path.join(sites_path, "apps.txt"), raise_not_found=True)

	if with_internal_apps:
		# the site-local apps.txt may list apps missing from the bench-wide file
		for internal_app in get_file_items(os.path.join(local.site_path, "apps.txt")):
			if internal_app not in apps:
				apps.append(internal_app)

	# frappe always comes first
	if "frappe" in apps:
		apps.remove("frappe")
	apps.insert(0, 'frappe')

	return apps
def get_installed_apps(sort=False, frappe_last=False):
	"""Get list of installed apps in current site."""
	# NOTE(review): default True means a missing `in_install_db` flag also
	# short-circuits to [] — looks intended for early bootstrap, but confirm.
	if getattr(flags, "in_install_db", True):
		return []

	if not db:
		connect()

	if not local.all_apps:
		local.all_apps = cache().get_value('all_apps', get_all_apps)

	installed = json.loads(db.get_global("installed_apps") or "[]")

	if sort:
		# preserve apps.txt ordering
		installed = [app for app in local.all_apps if app in installed]

	if frappe_last:
		if 'frappe' in installed:
			installed.remove('frappe')
		installed.append('frappe')

	return installed
def get_doc_hooks():
	'''Returns hooked methods for given doc. It will expand the dict tuple if required.'''
	# built once per request and memoized on frappe.local
	if not hasattr(local, 'doc_events_hooks'):
		hooks = get_hooks('doc_events', {})
		out = {}
		for key, value in iteritems(hooks):
			if isinstance(key, tuple):
				# a tuple key registers the same handlers for several doctypes
				for doctype in key:
					append_hook(out, doctype, value)
			else:
				append_hook(out, key, value)

		local.doc_events_hooks = out

	return local.doc_events_hooks
def get_hooks(hook=None, default=None, app_name=None):
	"""Get hooks via `app/hooks.py`

	:param hook: Name of the hook. Will gather all hooks for this name and return as a list.
	:param default: Default if no hook found.
	:param app_name: Filter by app."""
	def load_app_hooks(app_name=None):
		hooks = {}
		for app in [app_name] if app_name else get_installed_apps(sort=True):
			app = "frappe" if app=="webnotes" else app
			try:
				app_hooks = get_module(app + ".hooks")
			except ImportError:
				if local.flags.in_install_app:
					# if app is not installed while restoring
					# ignore it
					pass
				# NOTE(review): the `pass` above still falls through to this
				# print/raise, and the message uses `app_name` (often None)
				# rather than the failing `app` — confirm against upstream.
				print('Could not find app "{0}"'.format(app_name))
				if not request:
					sys.exit(1)
				raise
			# every public attribute of the hooks module is a hook
			for key in dir(app_hooks):
				if not key.startswith("_"):
					append_hook(hooks, key, getattr(app_hooks, key))
		return hooks

	no_cache = conf.developer_mode or False

	if app_name:
		hooks = _dict(load_app_hooks(app_name))
	else:
		if no_cache:
			hooks = _dict(load_app_hooks())
		else:
			hooks = _dict(cache().get_value("app_hooks", load_app_hooks))

	if hook:
		return hooks.get(hook) or (default if default is not None else [])
	else:
		return hooks
def append_hook(target, key, value):
	"""Append a hook `value` under `key` in `target`.

	Dict values (like `doc_events`) are merged recursively so that every
	leaf ends up as a list against its key; scalar values are collected
	into a list under `key`.
	"""
	if isinstance(value, dict):
		# dict? recurse into each inner key
		bucket = target.setdefault(key, {})
		for inner_key in value:
			append_hook(bucket, inner_key, value[inner_key])
	else:
		# make a list
		values = value if isinstance(value, list) else [value]
		target.setdefault(key, []).extend(values)
def setup_module_map():
	"""Rebuild map of all modules (internal)."""
	_cache = cache()

	if conf.db_name:
		# try the persisted maps first
		local.app_modules = _cache.get_value("app_modules")
		local.module_app = _cache.get_value("module_app")

	if not (local.app_modules and local.module_app):
		local.module_app, local.app_modules = {}, {}
		for app in get_all_apps(True):
			# legacy alias for the framework app
			if app == "webnotes":
				app = "frappe"
			local.app_modules.setdefault(app, [])
			for module in get_module_list(app):
				module = scrub(module)
				local.module_app[module] = app
				local.app_modules[app].append(module)

		if conf.db_name:
			_cache.set_value("app_modules", local.app_modules)
			_cache.set_value("module_app", local.module_app)
def get_file_items(path, raise_not_found=False, ignore_empty_lines=True):
	"""Returns items from text file as a list. Ignores empty lines."""
	import frappe.utils

	content = read_file(path, raise_not_found=raise_not_found)
	if content:
		content = frappe.utils.strip(content)

		# with ignore_empty_lines (the default) blank lines and `#` comments
		# are dropped; otherwise every line is kept verbatim
		return [
			p.strip() for p in content.splitlines()
			if (not ignore_empty_lines) or (p.strip() and not p.startswith("#"))
		]
	else:
		return []
def get_file_json(path):
	"""Parse the file at `path` as JSON and return the resulting object."""
	with open(path, 'r') as json_file:
		parsed = json.load(json_file)
	return parsed
def read_file(path, raise_not_found=False):
	"""Open a file and return its content as Unicode."""
	# legacy py2/3 shim: text paths are encoded to bytes before use.
	# NOTE(review): on python 3 open() accepts str paths directly — the encode
	# looks vestigial; confirm before removing.
	if isinstance(path, text_type):
		path = path.encode("utf-8")

	if os.path.exists(path):
		with open(path, "r") as f:
			return as_unicode(f.read())
	elif raise_not_found:
		raise IOError("{} Not Found".format(path))
	else:
		return None
def get_attr(method_string):
	"""Get python method object from its name."""
	parts = method_string.split(".")

	# refuse to resolve into apps that are not installed on this site
	app_name = parts[0]
	if not local.flags.in_install and app_name not in get_installed_apps():
		throw(_("App {0} is not installed").format(app_name), AppNotInstalledError)

	modulename = '.'.join(parts[:-1])
	methodname = parts[-1]
	return getattr(get_module(modulename), methodname)
def call(fn, *args, **kwargs):
	"""Call a function and match arguments."""
	if isinstance(fn, string_types):
		fn = get_attr(fn)

	matched_kwargs = get_newargs(fn, kwargs)
	return fn(*args, **matched_kwargs)
def get_newargs(fn, kwargs):
	"""Filter `kwargs` down to the arguments `fn` actually accepts.

	A key is kept when it is a named parameter of `fn`, or when `fn` takes
	`**kwargs`. The framework-internal keys `ignore_permissions` and `flags`
	are always stripped.

	:param fn: function, or object carrying a precomputed `fnargs` list.
	:param kwargs: candidate keyword arguments.
	:return: dict of accepted keyword arguments.
	"""
	varkw = None
	if hasattr(fn, 'fnargs'):
		# precomputed argument list; fix: `varkw` was previously left
		# undefined on this path, raising NameError for non-empty kwargs
		fnargs = fn.fnargs
	else:
		# fix: inspect.getargspec was removed in Python 3.11; getfullargspec
		# also handles keyword-only arguments — and is now called once,
		# not once per field
		spec = inspect.getfullargspec(fn)
		fnargs = spec.args
		varkw = spec.varkw

	newargs = {}
	for key in kwargs:
		if (key in fnargs) or varkw:
			newargs[key] = kwargs.get(key)

	# framework-internal keys must never reach the target function
	newargs.pop("ignore_permissions", None)
	newargs.pop("flags", None)

	return newargs
def make_property_setter(args, ignore_validate=False, validate_fields_for_doctype=True):
	"""Create a new **Property Setter** (for overriding DocType and DocField properties).

	If doctype is not specified, it will create a property setter for all fields with the
	given fieldname"""
	args = _dict(args)
	if not args.doctype_or_field:
		args.doctype_or_field = 'DocField'
		if not args.property_type:
			# infer the property's fieldtype from the DocField definition
			args.property_type = db.get_value('DocField',
				{'parent': 'DocField', 'fieldname': args.property}, 'fieldtype') or 'Data'

	if not args.doctype:
		# no doctype: apply to every doctype that has this fieldname
		doctype_list = db.sql_list('select distinct parent from tabDocField where fieldname=%s', args.fieldname)
	else:
		doctype_list = [args.doctype]

	for doctype in doctype_list:
		if not args.property_type:
			args.property_type = db.get_value('DocField',
				{'parent': doctype, 'fieldname': args.fieldname}, 'fieldtype') or 'Data'

		ps = get_doc({
			'doctype': "Property Setter",
			'doctype_or_field': args.doctype_or_field,
			'doc_type': doctype,
			'field_name': args.fieldname,
			'row_name': args.row_name,
			'property': args.property,
			'value': args.value,
			'property_type': args.property_type or "Data",
			'__islocal': 1
		})
		ps.flags.ignore_validate = ignore_validate
		ps.flags.validate_fields_for_doctype = validate_fields_for_doctype
		ps.validate_fieldtype_change()
		ps.insert()
def import_doc(path):
	"""Import a file using Data Import."""
	from frappe.core.doctype.data_import.data_import import import_doc
	import_doc(path)
def copy_doc(doc, ignore_no_copy=True):
	""" No_copy fields also get copied."""
	# NOTE(review): with the default ignore_no_copy=True, no_copy fields are
	# kept on the copy; pass ignore_no_copy=False to blank them instead.
	import copy

	def remove_no_copy_fields(d):
		# blank out every field flagged no_copy on the doctype
		for df in d.meta.get("fields", {"no_copy": 1}):
			if hasattr(d, df.fieldname):
				d.set(df.fieldname, None)

	fields_to_clear = ['name', 'owner', 'creation', 'modified', 'modified_by']

	if not local.flags.in_test:
		fields_to_clear.append("docstatus")

	if not isinstance(doc, dict):
		d = doc.as_dict()
	else:
		d = doc

	newdoc = get_doc(copy.deepcopy(d))
	newdoc.set("__islocal", 1)
	for fieldname in (fields_to_clear + ['amended_from', 'amendment_date']):
		newdoc.set(fieldname, None)

	if not ignore_no_copy:
		remove_no_copy_fields(newdoc)

	# reset child rows the same way as the parent
	for i, d in enumerate(newdoc.get_all_children()):
		d.set("__islocal", 1)

		for fieldname in fields_to_clear:
			d.set(fieldname, None)

		if not ignore_no_copy:
			remove_no_copy_fields(d)

	return newdoc
def compare(val1, condition, val2):
	"""Compare two values using `frappe.utils.compare`

	`condition` could be:
	- "^"
	- "in"
	- "not in"
	- "="
	- "!="
	- ">"
	- "<"
	- ">="
	- "<="
	- "not None"
	- "None"
	"""
	import frappe.utils
	return frappe.utils.compare(val1, condition, val2)
def respond_as_web_page(title, html, success=None, http_status_code=None, context=None,
	indicator_color=None, primary_action='/', primary_label = None, fullpage=False,
	width=None, template='message'):
	"""Send response as a web page with a message rather than JSON. Used to show permission errors etc.
	:param title: Page title and heading.
	:param message: Message to be shown.
	:param success: Alert message.
	:param http_status_code: HTTP status code
	:param context: web template context
	:param indicator_color: color of indicator in title
	:param primary_action: route on primary button (default is `/`)
	:param primary_label: label on primary button (default is "Home")
	:param fullpage: hide header / footer
	:param width: Width of message in pixels
	:param template: Optionally pass view template
	"""
	local.message_title = title
	local.message = html
	# switch the response type to render a page instead of JSON
	local.response['type'] = 'page'
	local.response['route'] = template
	local.no_cache = 1
	if http_status_code:
		local.response['http_status_code'] = http_status_code
	if not context:
		context = {}
	if not indicator_color:
		# default indicator: green on success, red on HTTP errors, blue otherwise
		if success:
			indicator_color = 'green'
		elif http_status_code and http_status_code > 300:
			indicator_color = 'red'
		else:
			indicator_color = 'blue'
	context['indicator_color'] = indicator_color
	context['primary_label'] = primary_label
	context['primary_action'] = primary_action
	context['error_code'] = http_status_code
	context['fullpage'] = fullpage
	if width:
		context['card_width'] = width
	local.response['context'] = context
def redirect_to_message(title, html, http_status_code=None, context=None, indicator_color=None):
	"""Redirects to /message?id=random
	Similar to respond_as_web_page, but used to 'redirect' and show message pages like success, failure, etc. with a detailed message
	:param title: Page title and heading.
	:param message: Message to be shown.
	:param http_status_code: HTTP status code.
	Example Usage:
	frappe.redirect_to_message(_('Thank you'), "<div><p>You will receive an email at test@example.com</p></div>")
	"""
	message_id = generate_hash(length=8)
	message = {
		'context': context or {},
		'http_status_code': http_status_code or 200
	}
	message['context'].update({
		'header': title,
		'title': title,
		'message': html
	})
	if indicator_color:
		message['context'].update({
			"indicator_color": indicator_color
		})
	# stash the message in redis for 60s; the /message page fetches it by id
	cache().set_value("message_id:{0}".format(message_id), message, expires_in_sec=60)
	location = '/message?id={0}'.format(message_id)
	if not getattr(local, 'is_ajax', False):
		local.response["type"] = "redirect"
		local.response["location"] = location
	else:
		# ajax callers receive the location and redirect client-side
		return location
def build_match_conditions(doctype, as_condition=True):
	"""Return match (User permissions) for given doctype as list or SQL.

	:param doctype: DocType to build the permission conditions for.
	:param as_condition: When True return an SQL condition string, else a list.
	"""
	import frappe.desk.reportview
	return frappe.desk.reportview.build_match_conditions(doctype, as_condition=as_condition)
def get_list(doctype, *args, **kwargs):
	"""List database query via `frappe.model.db_query`. Will also check for permissions.
	:param doctype: DocType on which query is to be made.
	:param fields: List of fields or `*`.
	:param filters: List of filters (see example).
	:param order_by: Order By e.g. `modified desc`.
	:param limit_page_start: Start results at record #. Default 0.
	:param limit_page_length: No of records in the page. Default 20.
	Example usage:
	# simple dict filter
	frappe.get_list("ToDo", fields=["name", "description"], filters = {"owner":"test@example.com"})
	# filter as a list of lists
	frappe.get_list("ToDo", fields="*", filters = [["modified", ">", "2014-01-01"]])
	# filter as a list of dicts
	frappe.get_list("ToDo", fields="*", filters = {"description": ("like", "test%")})
	"""
	import frappe.model.db_query
	# DatabaseQuery enforces user permissions; see get_all for the unchecked variant
	return frappe.model.db_query.DatabaseQuery(doctype).execute(None, *args, **kwargs)
def get_all(doctype, *args, **kwargs):
	"""List database query via `frappe.model.db_query`. Will **not** check for permissions.
	Parameters are same as `frappe.get_list`
	:param doctype: DocType on which query is to be made.
	:param fields: List of fields or `*`. Default is: `["name"]`.
	:param filters: List of filters (see example).
	:param order_by: Order By e.g. `modified desc`.
	:param limit_start: Start results at record #. Default 0.
	:param limit_page_length: No of records in the page. Default 20.
	Example usage:
	# simple dict filter
	frappe.get_all("ToDo", fields=["name", "description"], filters = {"owner":"test@example.com"})
	# filter as a list of lists
	frappe.get_all("ToDo", fields=["*"], filters = [["modified", ">", "2014-01-01"]])
	# filter as a list of dicts
	frappe.get_all("ToDo", fields=["*"], filters = {"description": ("like", "test%")})
	"""
	# bypass user-permission checks (this is the trusted/server-side variant)
	kwargs["ignore_permissions"] = True
	# default to an unlimited page unless the caller asked for paging
	if "limit_page_length" not in kwargs:
		kwargs["limit_page_length"] = 0
	return get_list(doctype, *args, **kwargs)
def get_value(*args, **kwargs):
	"""Returns a document property or list of properties.
	Alias for `frappe.db.get_value`
	:param doctype: DocType name.
	:param filters: Filters like `{"x":"y"}` or name of the document. `None` if Single DocType.
	:param fieldname: Column name.
	:param ignore: Don't raise exception if table, column is missing.
	:param as_dict: Return values as dict.
	:param debug: Print query in error log.
	"""
	# pure delegation to the thread-local database connection
	return db.get_value(*args, **kwargs)
def as_json(obj, indent=1):
	"""Serialize `obj` to a sorted, indented JSON string using Frappe's JSON handler."""
	from frappe.utils.response import json_handler
	dump_options = dict(indent=indent, sort_keys=True, default=json_handler, separators=(',', ': '))
	return json.dumps(obj, **dump_options)
def are_emails_muted():
	"""Return a truthy value when outgoing email is muted via flags or site config."""
	from frappe.utils import cint
	muted = flags.mute_emails or cint(conf.get("mute_emails") or 0)
	return muted or False
def get_test_records(doctype):
	"""Returns list of objects from `test_records.json` in the given doctype's folder.

	:param doctype: DocType whose bundled test records should be loaded.
	Returns an empty list when the doctype ships no `test_records.json`.
	"""
	from frappe.modules import get_doctype_module, get_module_path
	# scrub() normalizes the doctype name into its folder name.
	# NOTE(review): doctype is assumed to be a trusted DocType name, not
	# user-controlled input — confirm callers never pass arbitrary strings.
	path = os.path.join(get_module_path(get_doctype_module(doctype)), "doctype", scrub(doctype), "test_records.json")
	if os.path.exists(path):
		with open(path, "r") as f:
			# stream-parse directly from the file handle
			return json.load(f)
	else:
		return []
def format_value(*args, **kwargs):
	"""Format value with given field properties.
	:param value: Value to be formatted.
	:param df: (Optional) DocField object with properties `fieldtype`, `options` etc."""
	# delegation wrapper; same implementation is also exposed as `frappe.format`
	import frappe.utils.formatters
	return frappe.utils.formatters.format_value(*args, **kwargs)
def format(*args, **kwargs):
	"""Format value with given field properties.
	:param value: Value to be formatted.
	:param df: (Optional) DocField object with properties `fieldtype`, `options` etc."""
	# NOTE: duplicate of format_value, kept as a shorter public alias
	# (intentionally shadows the builtin `format` inside this module)
	import frappe.utils.formatters
	return frappe.utils.formatters.format_value(*args, **kwargs)
def get_print(doctype=None, name=None, print_format=None, style=None,
	html=None, as_pdf=False, doc=None, output=None, no_letterhead=0, password=None):
	"""Get Print Format for given document.
	:param doctype: DocType of document.
	:param name: Name of document.
	:param print_format: Print Format name. Default 'Standard',
	:param style: Print Format style.
	:param as_pdf: Return as PDF. Default False.
	:param password: Password to encrypt the pdf with. Default None"""
	from frappe.website.render import build_page
	from frappe.utils.pdf import get_pdf
	# the printview page reads its inputs from local.form_dict, so the
	# request-local form_dict is mutated here before rendering
	local.form_dict.doctype = doctype
	local.form_dict.name = name
	local.form_dict.format = print_format
	local.form_dict.style = style
	local.form_dict.doc = doc
	local.form_dict.no_letterhead = no_letterhead
	options = None
	if password:
		# passed through to the PDF generator to encrypt the output
		options = {'password': password}
	if not html:
		html = build_page("printview")
	if as_pdf:
		return get_pdf(html, output = output, options = options)
	else:
		return html
def attach_print(doctype, name, file_name=None, print_format=None,
	style=None, html=None, doc=None, lang=None, print_letterhead=True, password=None):
	"""Render a document's print view and return it as an email attachment dict
	with keys `fname` and `fcontent` (PDF or HTML per Print Settings)."""
	from frappe.utils import scrub_urls
	if not file_name: file_name = name
	# strip spaces and replace '/' so the attachment name cannot contain
	# path separators (prevents directory traversal via the file name)
	file_name = file_name.replace(' ','').replace('/','-')
	print_settings = db.get_singles_dict("Print Settings")
	_lang = local.lang
	#set lang as specified in print format attachment
	if lang: local.lang = lang
	local.flags.ignore_print_permissions = True
	no_letterhead = not print_letterhead
	kwargs = dict(
		print_format=print_format,
		style=style,
		html=html,
		doc=doc,
		no_letterhead=no_letterhead,
		password=password
	)
	content = ''
	if int(print_settings.send_print_as_pdf or 0):
		ext = ".pdf"
		kwargs["as_pdf"] = True
		content = get_print(doctype, name, **kwargs)
	else:
		ext = ".html"
		content = scrub_urls(get_print(doctype, name, **kwargs)).encode('utf-8')
	out = {
		"fname": file_name + ext,
		"fcontent": content
	}
	local.flags.ignore_print_permissions = False
	#reset lang to original local lang
	local.lang = _lang
	return out
def publish_progress(*args, **kwargs):
	"""Show the user progress for a long request
	:param percent: Percent progress
	:param title: Title
	:param doctype: Optional, for document type
	:param docname: Optional, for document name
	:param description: Optional description
	"""
	# delegation wrapper over the realtime (socketio) layer
	import frappe.realtime
	return frappe.realtime.publish_progress(*args, **kwargs)
def publish_realtime(*args, **kwargs):
	"""Publish real-time updates
	:param event: Event name, like `task_progress` etc.
	:param message: JSON message object. For async must contain `task_id`
	:param room: Room in which to publish update (default entire site)
	:param user: Transmit to user
	:param doctype: Transmit to doctype, docname
	:param docname: Transmit to doctype, docname
	:param after_commit: (default False) will emit after current transaction is committed
	"""
	# delegation wrapper over the realtime (socketio) layer
	import frappe.realtime
	return frappe.realtime.publish_realtime(*args, **kwargs)
def local_cache(namespace, key, generator, regenerate_if_none=False):
	"""A key value store for caching within a request
	:param namespace: frappe.local.cache[namespace]
	:param key: frappe.local.cache[namespace][key] used to retrieve value
	:param generator: method to generate a value if not found in store
	:param regenerate_if_none: re-run `generator` if the cached value is None
	"""
	if namespace not in local.cache:
		local.cache[namespace] = {}
	if key not in local.cache[namespace]:
		local.cache[namespace][key] = generator()
	elif local.cache[namespace][key] is None and regenerate_if_none:
		# if key exists but the previous result was None
		local.cache[namespace][key] = generator()
	return local.cache[namespace][key]
def enqueue(*args, **kwargs):
	'''
	Enqueue method to be executed using a background worker
	:param method: method string or method object
	:param queue: (optional) should be either long, default or short
	:param timeout: (optional) should be set according to the functions
	:param event: this is passed to enable clearing of jobs from queues
	:param is_async: (optional) if is_async=False, the method is executed immediately, else via a worker
	:param job_name: (optional) can be used to name an enqueue call, which can be used to prevent duplicate calls
	:param kwargs: keyword arguments to be passed to the method
	'''
	# delegation wrapper over the RQ-based background job layer
	import frappe.utils.background_jobs
	return frappe.utils.background_jobs.enqueue(*args, **kwargs)
def enqueue_doc(*args, **kwargs):
	'''
	Enqueue method to be executed using a background worker
	:param doctype: DocType of the document on which you want to run the event
	:param name: Name of the document on which you want to run the event
	:param method: method string or method object
	:param queue: (optional) should be either long, default or short
	:param timeout: (optional) should be set according to the functions
	:param kwargs: keyword arguments to be passed to the method
	'''
	# delegation wrapper over the RQ-based background job layer
	import frappe.utils.background_jobs
	return frappe.utils.background_jobs.enqueue_doc(*args, **kwargs)
def get_doctype_app(doctype):
	"""Return the app name that provides the given doctype (request-cached)."""
	def _get_doctype_app():
		# resolve doctype -> module -> app via the module_app map
		doctype_module = local.db.get_value("DocType", doctype, "module")
		return local.module_app[scrub(doctype_module)]
	return local_cache("doctype_app", doctype, generator=_get_doctype_app)
# process-wide registry of configured loggers and the active log level
loggers = {}
log_level = None
def logger(module=None, with_more_info=False, allow_site=True, filter=None, max_size=100_000, file_count=20):
	'''Returns a python logger that uses StreamHandler

	:param module: logger name (usually the calling module)
	:param max_size: max log file size in bytes before rotation
	:param file_count: number of rotated files to keep'''
	from frappe.utils.logger import get_logger
	return get_logger(module=module, with_more_info=with_more_info, allow_site=allow_site, filter=filter, max_size=max_size, file_count=file_count)
def log_error(message=None, title=_("Error")):
	'''Log error to Error Log

	:param message: error text (defaults to the current traceback)
	:param title: short title; see swap heuristic below
	Returns the inserted Error Log document.'''
	# AI ALERT:
	# the title and message may be swapped
	# the better API for this is log_error(title, message), and used in many cases this way
	# this hack tries to be smart about whats a title (single line ;-)) and fixes it
	if message:
		if '\n' in title:
			# a multi-line "title" is really the message; swap the two
			error, title = title, message
		else:
			error = message
	else:
		# no message given: capture the active exception's traceback
		error = get_traceback()
	return get_doc(dict(doctype='Error Log', error=as_unicode(error),
		method=title)).insert(ignore_permissions=True)
def get_desk_link(doctype, name):
	"""Return an HTML anchor linking to the desk form view of the given document."""
	template = '<a href="/app/Form/{doctype}/{name}" style="font-weight: bold;">{doctype_local} {name}</a>'
	values = {
		'doctype': doctype,
		'name': name,
		'doctype_local': _(doctype),
	}
	return template.format(**values)
def bold(text):
	"""Return *text* wrapped in HTML bold tags."""
	wrapped = '<b>{0}</b>'.format(text)
	return wrapped
def safe_eval(code, eval_globals=None, eval_locals=None):
	'''A safer `eval`

	Evaluates `code` with builtins stripped and only a small whitelist of
	numeric helpers available. SECURITY NOTE: this still calls `eval` on the
	given string; the only input filter is rejecting "__", so it must not be
	treated as a sandbox for fully untrusted input.
	'''
	# names made available to the evaluated expression
	whitelisted_globals = {
		"int": int,
		"float": float,
		"long": int,
		"round": round
	}
	if '__' in code:
		# block dunder access (e.g. __class__, __import__) in the source text
		throw('Illegal rule {0}. Cannot use "__"'.format(bold(code)))
	if not eval_globals:
		eval_globals = {}
	# remove builtins, then expose only the whitelist above
	eval_globals['__builtins__'] = {}
	eval_globals.update(whitelisted_globals)
	return eval(code, eval_globals, eval_locals)
def get_system_settings(key):
	"""Return a single System Settings value, cached on the request-local."""
	if key not in local.system_settings:
		# first access: fetch from DB and memoize for this request
		local.system_settings.update({key: db.get_single_value('System Settings', key)})
	return local.system_settings.get(key)
def get_active_domains():
	"""Return the list of active domains (delegates to Domain Settings)."""
	# alias the import so it does not shadow this wrapper's own name
	from frappe.core.doctype.domain_settings.domain_settings import get_active_domains as _get_active_domains
	return _get_active_domains()
def get_version(doctype, name, limit=None, head=False, raise_err=True):
	'''
	Returns a list of version information of a given DocType.
	Note: Applicable only if DocType has changes tracked.
	Example
	>>> frappe.get_version('User', 'foobar@gmail.com')
	>>>
	[
		{
			"version": [version.data], # Refer Version DocType get_diff method and data attribute
			"user": "admin@gmail.com", # User that created this version
			"creation": <datetime.datetime> # Creation timestamp of that object.
		}
	]

	:param limit: max number of versions to fetch
	:param head: order by creation when True
	:param raise_err: raise ValueError if the DocType does not track changes
	(returns None implicitly when tracking is off and raise_err is False)
	'''
	meta = get_meta(doctype)
	if meta.track_changes:
		names = db.get_all('Version', filters={
			'ref_doctype': doctype,
			'docname': name,
			'order_by': 'creation' if head else None,
			'limit': limit
		}, as_list=1)
		from frappe.chat.util import squashify, dictify, safe_json_loads
		versions = []
		for name in names:
			# each row is a 1-tuple; squashify unwraps it to the bare name
			name = squashify(name)
			doc = get_doc('Version', name)
			data = doc.data
			# data is stored as a JSON string on the Version doc
			data = safe_json_loads(data)
			data = dictify(dict(
				version=data,
				user=doc.owner,
				creation=doc.creation
			))
			versions.append(data)
		return versions
	else:
		if raise_err:
			raise ValueError(_('{0} has no versions tracked.').format(doctype))
@whitelist(allow_guest=True)
def ping():
	"""Health-check endpoint, accessible to guests; always returns "pong"."""
	return "pong"
def safe_encode(param, encoding='utf-8'):
	"""Best-effort encode `param` to bytes; return the input unchanged on failure."""
	try:
		return param.encode(encoding)
	except Exception:
		return param
def safe_decode(param, encoding='utf-8'):
	"""Best-effort decode `param` to text; return the input unchanged on failure."""
	try:
		return param.decode(encoding)
	except Exception:
		return param
def parse_json(val):
	"""Parse `val` as JSON (delegates to `frappe.utils.parse_json`)."""
	from frappe.utils import parse_json as _parse_json
	return _parse_json(val)
def mock(type, size=1, locale='en'):
	"""Generate `size` fake value(s) of the given Faker provider `type`.

	Raises ValueError when `type` is not a Faker provider. A single result
	is unwrapped (squashified) rather than returned as a one-element list."""
	fake = faker.Faker(locale)
	if type not in dir(fake):
		raise ValueError('Not a valid mock type.')
	results = [getattr(fake, type)() for _ in range(size)]
	from frappe.chat.util import squashify
	return squashify(results)
def validate_and_sanitize_search_inputs(fn):
	"""Decorator that validates/sanitizes search-query arguments of `fn`
	(delegates to `frappe.desk.search`)."""
	from frappe.desk.search import validate_and_sanitize_search_inputs as func
	return func(fn)
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
"""
Frappe - Low Code Open Source Framework in Python and JS
Frappe, pronounced fra-pay, is a full stack, batteries-included, web
framework written in Python and Javascript with MariaDB as the database.
It is the framework which powers ERPNext. It is pretty generic and can
be used to build database driven apps.
Read the documentation: https://frappeframework.com/docs
"""
from __future__ import unicode_literals, print_function
from six import iteritems, binary_type, text_type, string_types, PY2
from werkzeug.local import Local, release_local
import os, sys, importlib, inspect, json
from past.builtins import cmp
import click
# Local application imports
from .exceptions import *
from .utils.jinja import (get_jenv, get_template, render_template, get_email_from_template, get_jloader)
from .utils.lazy_loader import lazy_import
# Lazy imports
# faker is only needed by `mock`; load it lazily to keep import time down
faker = lazy_import('faker')
# Harmless for Python 3
# For Python 2 set default encoding to utf-8
if PY2:
	reload(sys)
	sys.setdefaultencoding("utf-8")
__version__ = '13.0.0-dev'
__title__ = "Frappe Framework"
# thread/greenlet-local storage holding per-request state (db, site, user, flags, ...)
local = Local()
# registry of controller classes, populated elsewhere in the framework
controllers = {}
class _dict(dict):
	"""dict like object that exposes keys as attributes"""
	def __getattr__(self, key):
		# missing keys resolve to None rather than raising, EXCEPT dunder
		# names, which must raise so protocols (pickle etc.) work correctly
		ret = self.get(key)
		if not ret and key.startswith("__"):
			raise AttributeError()
		return ret
	def __setattr__(self, key, value):
		# attribute assignment writes straight into the dict
		self[key] = value
	def __getstate__(self):
		return self
	def __setstate__(self, d):
		self.update(d)
	def update(self, d):
		"""update and return self -- the missing dict feature in python"""
		super(_dict, self).update(d)
		return self
	def copy(self):
		# return a _dict (not a plain dict) so attribute access survives copying
		return _dict(dict(self).copy())
def _(msg, lang=None, context=None):
	"""Returns translated string in current lang, if exists.
	Usage:
		_('Change')
		_('Change', context='Coins')
	"""
	from frappe.translate import get_full_dict
	from frappe.utils import strip_html_tags, is_html
	if not hasattr(local, 'lang'):
		# translation may be called before init(); fall back to English
		local.lang = lang or 'en'
	if not lang:
		lang = local.lang
	non_translated_string = msg
	if is_html(msg):
		# translation keys are stored without markup
		msg = strip_html_tags(msg)
	# msg should always be unicode
	msg = as_unicode(msg).strip()
	translated_string = ''
	if context:
		# context-specific translation takes precedence ("msg:context" key)
		string_key = '{msg}:{context}'.format(msg=msg, context=context)
		translated_string = get_full_dict(lang).get(string_key)
	if not translated_string:
		translated_string = get_full_dict(lang).get(msg)
	# return lang_full_dict according to lang passed parameter
	return translated_string or non_translated_string
def as_unicode(text, encoding='utf-8'):
	'''Convert to unicode if required

	:param text: value to coerce; None becomes the empty string,
		bytes are decoded with `encoding`, anything else is str()-ified.'''
	if isinstance(text, text_type):
		return text
	elif text is None:
		return ''
	elif isinstance(text, binary_type):
		return text_type(text, encoding)
	else:
		return text_type(text)
def get_lang_dict(fortype, name=None):
	"""Returns the translated language dict for the given type and name.
	:param fortype: must be one of `doctype`, `page`, `report`, `include`, `jsfile`, `boot`
	:param name: name of the document for which assets are to be returned."""
	# delegation wrapper over the translation subsystem
	from frappe.translate import get_dict
	return get_dict(fortype, name)
def set_user_lang(user, user_language=None):
	"""Guess and set user language for the session. `frappe.local.lang`

	NOTE: `user_language` is accepted for API compatibility but is not
	referenced here — the language is resolved from the user record."""
	from frappe.translate import get_user_lang
	local.lang = get_user_lang(user)
# local-globals
# Module-level proxies into the werkzeug thread-local: each name below
# resolves, at access time, to the attribute of the same name on `local`
# for the current request/site.
db = local("db")
conf = local("conf")
form = form_dict = local("form_dict")
request = local("request")
response = local("response")
session = local("session")
user = local("user")
flags = local("flags")
error_log = local("error_log")
debug_log = local("debug_log")
message_log = local("message_log")
lang = local("lang")
def init(site, sites_path=None, new_site=False):
	"""Initialize frappe for the current site. Reset thread locals `frappe.local`

	:param site: site folder name under `sites_path`.
	:param sites_path: base directory containing all sites (default: cwd).
	:param new_site: mark flags.new_site so a missing site_config is tolerated."""
	if getattr(local, "initialised", None):
		# idempotent: a second init() in the same thread-local is a no-op
		return
	if not sites_path:
		sites_path = '.'
	# per-request log buffers
	local.error_log = []
	local.message_log = []
	local.debug_log = []
	local.realtime_log = []
	# framework-wide behavior flags, reset for each request
	local.flags = _dict({
		"currently_saving": [],
		"redirect_location": "",
		"in_install_db": False,
		"in_install_app": False,
		"in_import": False,
		"in_test": False,
		"mute_messages": False,
		"ignore_links": False,
		"mute_emails": False,
		"has_dataurl": False,
		"new_site": new_site
	})
	local.rollback_observers = []
	local.before_commit = []
	local.test_objects = {}
	# site/file-system context
	local.site = site
	local.sites_path = sites_path
	local.site_path = os.path.join(sites_path, site)
	local.all_apps = None
	local.request_ip = None
	local.response = _dict({"docs":[]})
	local.task_id = None
	# merged common + site configuration
	local.conf = _dict(get_site_config())
	local.lang = local.conf.lang or "en"
	local.lang_full_dict = None
	local.module_app = None
	local.app_modules = None
	local.system_settings = _dict()
	# session/user state is populated later by connect()/set_user()
	local.user = None
	local.user_perms = None
	local.session = None
	local.role_permissions = {}
	local.valid_columns = {}
	local.new_doc_templates = {}
	local.link_count = {}
	# jinja environment is built lazily
	local.jenv = None
	local.jloader =None
	local.cache = {}
	local.document_cache = {}
	local.meta_cache = {}
	local.form_dict = _dict()
	local.session = _dict()
	local.dev_server = os.environ.get('DEV_SERVER', False)
	setup_module_map()
	local.initialised = True
def connect(site=None, db_name=None, set_admin_as_user=True):
	"""Connect to site database instance.
	:param site: If site is given, calls `frappe.init`.
	:param db_name: Optional. Will use from `site_config.json`.
	:param set_admin_as_user: Set Administrator as current user.
	"""
	from frappe.database import get_db
	if site:
		init(site)
	# bind the DB connection to the thread-local
	local.db = get_db(user=db_name or local.conf.db_name)
	if set_admin_as_user:
		set_user("Administrator")
def connect_replica():
	"""Open a connection to the read replica and make it the active `local.db`.

	The original connection is kept on `local.primary_db` so it can be
	restored later (see the `read_only` decorator)."""
	from frappe.database import get_db
	user = local.conf.db_name
	password = local.conf.db_password
	if local.conf.different_credentials_for_replica:
		# replica may use its own credentials per site config
		user = local.conf.replica_db_name
		password = local.conf.replica_db_password
	local.replica_db = get_db(host=local.conf.replica_host, user=user, password=password)
	# swap db connections
	local.primary_db = local.db
	local.db = local.replica_db
def get_site_config(sites_path=None, site_path=None):
	"""Returns `site_config.json` combined with `sites/common_site_config.json`.
	`site_config` is a set of site wide settings like database name, password, email etc.

	Site-level keys override common keys (applied second)."""
	config = {}
	sites_path = sites_path or getattr(local, "sites_path", None)
	site_path = site_path or getattr(local, "site_path", None)
	if sites_path:
		# common config first, so the site config can override it below
		common_site_config = os.path.join(sites_path, "common_site_config.json")
		if os.path.exists(common_site_config):
			try:
				config.update(get_file_json(common_site_config))
			except Exception as error:
				click.secho("common_site_config.json is invalid", fg="red")
				print(error)
	if site_path:
		site_config = os.path.join(site_path, "site_config.json")
		if os.path.exists(site_config):
			try:
				config.update(get_file_json(site_config))
			except Exception as error:
				click.secho("{0}/site_config.json is invalid".format(local.site), fg="red")
				print(error)
		elif local.site and not local.flags.new_site:
			# an existing site must have a site_config.json
			raise IncorrectSitePath("{0} does not exist".format(local.site))
	return _dict(config)
def get_conf(site=None):
	"""Return the active site config, initializing a temporary site context if needed."""
	if hasattr(local, 'conf'):
		return local.conf
	else:
		# if no site, get from common_site_config.json
		with init_site(site):
			return local.conf
class init_site:
	"""Context manager that runs init() on enter and destroy() on exit."""
	def __init__(self, site=None):
		'''If site==None, initialize it for empty site ('') to load common_site_config.json'''
		self.site = site or ''
	def __enter__(self):
		init(self.site)
		return local
	def __exit__(self, type, value, traceback):
		# tear down thread-locals regardless of success or failure
		destroy()
def destroy():
	"""Closes connection and releases werkzeug local."""
	if db:
		db.close()
	# drop all thread-local state set up by init()
	release_local(local)
# memcache
# process-wide redis connection, created lazily by cache()
redis_server = None
def cache():
	"""Returns redis connection."""
	global redis_server
	if not redis_server:
		from frappe.utils.redis_wrapper import RedisWrapper
		# fall back to the default local redis port when not configured
		redis_server = RedisWrapper.from_url(conf.get('redis_cache')
			or "redis://localhost:11311")
	return redis_server
def get_traceback():
	"""Returns error traceback."""
	# delegation wrapper over frappe.utils
	from frappe.utils import get_traceback
	return get_traceback()
def errprint(msg):
	"""Log error. This is sent back as `exc` in response.
	:param msg: Message."""
	msg = as_unicode(msg)
	# echo to stdout for CLI usage or when developer_mode is on
	if not request or (not "cmd" in local.form_dict) or conf.developer_mode:
		print(msg)
	error_log.append({"exc": msg})
def print_sql(enable=True):
	"""Toggle the redis flag that makes the DB layer print executed SQL."""
	return cache().set_value('flag_print_sql', enable)
def log(msg):
	"""Add to `debug_log`.
	:param msg: Message."""
	if not request:
		# CLI context: echo when site config enables logging
		if conf.get("logging") or False:
			print(repr(msg))
	debug_log.append(as_unicode(msg))
def msgprint(msg, title=None, raise_exception=0, as_table=False, as_list=False, indicator=None, alert=False, primary_action=None, is_minimizable=None, wide=None):
	"""Print a message to the user (via HTTP response).
	Messages are sent in the `__server_messages` property in the
	response JSON and shown in a pop-up / modal.
	:param msg: Message.
	:param title: [optional] Message title.
	:param raise_exception: [optional] Raise given exception and show message.
	:param as_table: [optional] If `msg` is a list of lists, render as HTML table.
	:param as_list: [optional] If `msg` is a list, render as un-ordered list.
	:param primary_action: [optional] Bind a primary server/client side action.
	:param is_minimizable: [optional] Allow users to minimize the modal
	:param wide: [optional] Show wide modal
	"""
	from frappe.utils import strip_html_tags
	msg = safe_decode(msg)
	out = _dict(message=msg)
	def _raise_exception():
		# raise_exception may be truthy (use ValidationError) or an Exception class
		if raise_exception:
			if flags.rollback_on_exception:
				db.rollback()
			import inspect
			if inspect.isclass(raise_exception) and issubclass(raise_exception, Exception):
				raise raise_exception(msg)
			else:
				raise ValidationError(msg)
	if flags.mute_messages:
		# suppressed output still raises when requested
		_raise_exception()
		return
	if as_table and type(msg) in (list, tuple):
		out.as_table = 1
	if as_list and type(msg) in (list, tuple) and len(msg) > 1:
		out.as_list = 1
	if flags.print_messages and out.message:
		print(f"Message: {strip_html_tags(out.message)}")
	if title:
		out.title = title
	if not indicator and raise_exception:
		indicator = 'red'
	if indicator:
		out.indicator = indicator
	if is_minimizable:
		out.is_minimizable = is_minimizable
	if alert:
		out.alert = 1
	if raise_exception:
		out.raise_exception = 1
	if primary_action:
		out.primary_action = primary_action
	if wide:
		out.wide = wide
	# serialized message is queued for the response's __server_messages
	message_log.append(json.dumps(out))
	if raise_exception and hasattr(raise_exception, '__name__'):
		local.response['exc_type'] = raise_exception.__name__
	_raise_exception()
def clear_messages():
	"""Drop all messages queued for the current response."""
	local.message_log = []
def get_message_log():
	"""Return the messages queued for this request as parsed dicts.

	`local.message_log` holds JSON strings (see `msgprint`); each entry is
	decoded back into a dict."""
	return [json.loads(msg_out) for msg_out in local.message_log]
def clear_last_message():
	"""Remove the most recently queued message, if any."""
	if len(local.message_log) > 0:
		local.message_log = local.message_log[:-1]
def throw(msg, exc=ValidationError, title=None, is_minimizable=None, wide=None, as_list=False):
	"""Throw execption and show message (`msgprint`).
	:param msg: Message.
	:param exc: Exception class. Default `frappe.ValidationError`"""
	# msgprint raises `exc` because raise_exception is set; indicator is forced red
	msgprint(msg, raise_exception=exc, title=title, indicator='red', is_minimizable=is_minimizable, wide=wide, as_list=as_list)
def emit_js(js, user=False, **kwargs):
	"""Send a JS snippet to be eval'd in the client via realtime ('eval_js').

	NOTE(review): the `user == False` check also matches 0/empty values —
	presumably only the literal default False is intended; confirm."""
	if user == False:
		user = session.user
	publish_realtime('eval_js', js, user=user, **kwargs)
def create_folder(path, with_init=False):
	"""Create a folder in the given path and add an `__init__.py` file (optional).
	:param path: Folder path.
	:param with_init: Create `__init__.py` in the new folder."""
	from frappe.utils import touch_file
	if not os.path.exists(path):
		# creates intermediate directories as needed
		os.makedirs(path)
		if with_init:
			touch_file(os.path.join(path, "__init__.py"))
def set_user(username):
	"""Set current user.
	:param username: **User** name to set as current user."""
	local.session.user = username
	local.session.sid = username
	# reset all per-user caches so nothing leaks between users
	local.cache = {}
	local.form_dict = _dict()
	local.jenv = None
	local.session.data = _dict()
	local.role_permissions = {}
	local.new_doc_templates = {}
	local.user_perms = None
def get_user():
	"""Return (and lazily build) the UserPermissions object for the session user."""
	from frappe.utils.user import UserPermissions
	if not local.user_perms:
		local.user_perms = UserPermissions(local.session.user)
	return local.user_perms
def get_roles(username=None):
	"""Returns roles of current user.

	:param username: optional user; defaults to the session user.
	Before a session exists, everyone is a Guest."""
	if not local.session:
		return ["Guest"]
	import frappe.permissions
	return frappe.permissions.get_roles(username or local.session.user)
def get_request_header(key, default=None):
	"""Return HTTP request header.
	:param key: HTTP header key.
	:param default: Default value."""
	return request.headers.get(key, default)
def sendmail(recipients=None, sender="", subject="No Subject", message="No Message",
		as_markdown=False, delayed=True, reference_doctype=None, reference_name=None,
		unsubscribe_method=None, unsubscribe_params=None, unsubscribe_message=None, add_unsubscribe_link=1,
		attachments=None, content=None, doctype=None, name=None, reply_to=None, queue_separately=False,
		cc=None, bcc=None, message_id=None, in_reply_to=None, send_after=None, expose_recipients=None,
		send_priority=1, communication=None, retry=1, now=None, read_receipt=None, is_notification=False,
		inline_images=None, template=None, args=None, header=None, print_letterhead=False, with_container=False):
	"""Send email using user's default **Email Account** or global default **Email Account**.
	:param recipients: List of recipients.
	:param sender: Email sender. Default is current user or default outgoing account.
	:param subject: Email Subject.
	:param message: (or `content`) Email Content.
	:param as_markdown: Convert content markdown to HTML.
	:param delayed: Send via scheduled email sender **Email Queue**. Don't send immediately. Default is true
	:param send_priority: Priority for Email Queue, default 1.
	:param reference_doctype: (or `doctype`) Append as communication to this DocType.
	:param reference_name: (or `name`) Append as communication to this document name.
	:param unsubscribe_method: Unsubscribe url with options email, doctype, name. e.g. `/api/method/unsubscribe`
	:param unsubscribe_params: Unsubscribe paramaters to be loaded on the unsubscribe_method [optional] (dict).
	:param attachments: List of attachments.
	:param reply_to: Reply-To Email Address.
	:param message_id: Used for threading. If a reply is received to this email, Message-Id is sent back as In-Reply-To in received email.
	:param in_reply_to: Used to send the Message-Id of a received email back as In-Reply-To.
	:param send_after: Send after the given datetime.
	:param expose_recipients: Display all recipients in the footer message - "This email was sent to"
	:param communication: Communication link to be set in Email Queue record
	:param inline_images: List of inline images as {"filename", "filecontent"}. All src properties will be replaced with random Content-Id
	:param template: Name of html template from templates/emails folder
	:param args: Arguments for rendering the template
	:param header: Append header in email
	:param with_container: Wraps email inside a styled container
	"""
	# normalize the list arguments: the previous `=[]` defaults were shared
	# mutable objects across calls; None defaults avoid that pitfall
	recipients = recipients if recipients is not None else []
	cc = cc if cc is not None else []
	bcc = bcc if bcc is not None else []
	text_content = None
	if template:
		message, text_content = get_email_from_template(template, args)
	# explicit `content` wins over a template-rendered or default message
	message = content or message
	if as_markdown:
		from frappe.utils import md_to_html
		message = md_to_html(message)
	if not delayed:
		# immediate delivery is implemented as "send now" through the queue
		now = True
	from frappe.email import queue
	queue.send(recipients=recipients, sender=sender,
		subject=subject, message=message, text_content=text_content,
		reference_doctype = doctype or reference_doctype, reference_name = name or reference_name, add_unsubscribe_link=add_unsubscribe_link,
		unsubscribe_method=unsubscribe_method, unsubscribe_params=unsubscribe_params, unsubscribe_message=unsubscribe_message,
		attachments=attachments, reply_to=reply_to, cc=cc, bcc=bcc, message_id=message_id, in_reply_to=in_reply_to,
		send_after=send_after, expose_recipients=expose_recipients, send_priority=send_priority, queue_separately=queue_separately,
		communication=communication, now=now, read_receipt=read_receipt, is_notification=is_notification,
		inline_images=inline_images, header=header, print_letterhead=print_letterhead, with_container=with_container)
# registries populated by the @whitelist decorator below:
# functions callable over HTTP, those open to guests, those exempt from
# XSS sanitization, and the HTTP verbs each function accepts
whitelisted = []
guest_methods = []
xss_safe_methods = []
allowed_http_methods_for_whitelisted_func = {}
def whitelist(allow_guest=False, xss_safe=False, methods=None):
	"""
	Decorator for whitelisting a function and making it accessible via HTTP.
	Standard request will be `/api/method/[path.to.method]`
	:param allow_guest: Allow non logged-in user to access this method.
	:param xss_safe: Skip form_dict sanitization for guest calls to this method.
	:param methods: Allowed http method to access the method.
	Use as:
		@frappe.whitelist()
		def myfunc(param1, param2):
			pass
	"""
	if not methods:
		# default: accept all common verbs
		methods = ['GET', 'POST', 'PUT', 'DELETE']
	def innerfn(fn):
		global whitelisted, guest_methods, xss_safe_methods, allowed_http_methods_for_whitelisted_func
		# get function from the unbound / bound method
		# this is needed because functions can be compared, but not methods
		if hasattr(fn, '__func__'):
			fn = fn.__func__
		whitelisted.append(fn)
		allowed_http_methods_for_whitelisted_func[fn] = methods
		if allow_guest:
			guest_methods.append(fn)
			if xss_safe:
				xss_safe_methods.append(fn)
		return fn
	return innerfn
def is_whitelisted(method):
	"""Gatekeeper for HTTP calls: raise PermissionError unless `method` is
	whitelisted (and guest-accessible when the session user is Guest)."""
	from frappe.utils import sanitize_html
	is_guest = session['user'] == 'Guest'
	# `and` binds tighter than `or`: deny if not whitelisted at all, or if a
	# guest calls a method that is not guest-enabled
	if method not in whitelisted or is_guest and method not in guest_methods:
		throw(_("Not permitted"), PermissionError)
	if is_guest and method not in xss_safe_methods:
		# strictly sanitize form_dict
		# escapes html characters like <> except for predefined tags like a, b, ul etc.
		for key, value in form_dict.items():
			if isinstance(value, string_types):
				form_dict[key] = sanitize_html(value)
def read_only():
	"""Decorator factory: route the wrapped call through the read replica.

	If `conf.read_from_replica` is set, connects to the replica before the
	call; afterwards the primary connection is always restored.
	"""
	def innfn(fn):
		def wrapper_fn(*args, **kwargs):
			if conf.read_from_replica:
				connect_replica()

			# The original wrapped this in a bare `except: raise`, which is a
			# no-op; try/finally alone gives the same cleanup guarantee.
			try:
				retval = fn(*args, **get_newargs(fn, kwargs))
			finally:
				# always switch back to the primary connection, even on error
				if local and hasattr(local, 'primary_db'):
					local.db.close()
					local.db = local.primary_db

			return retval
		return wrapper_fn
	return innfn
def only_for(roles, message=False):
	"""Raise `frappe.PermissionError` if the user does not have any of the given **Roles**.
	:param roles: List of roles to check.
	:param message: Show a "Not Permitted" message before raising."""
	if local.flags.in_test:
		return

	if isinstance(roles, (tuple, list)):
		required = set(roles)
	else:
		required = {roles}

	if required.isdisjoint(get_roles()):
		if message:
			msgprint(_('This action is only allowed for {}').format(bold(', '.join(required))), _('Not Permitted'))
		raise PermissionError
def get_domain_data(module):
	"""Return the `data` dict declared by the 'domains' hook for `module`;
	empty `_dict` when the module declares no domain data."""
	try:
		domain_hooks = get_hooks('domains')
		if module not in domain_hooks:
			return _dict()
		return _dict(get_attr(get_hooks('domains')[module][0] + '.data'))
	except ImportError:
		# in tests, missing domain modules are tolerated
		if local.flags.in_test:
			return _dict()
		raise
def clear_cache(user=None, doctype=None):
	"""Clear **User**, **DocType** or global cache.
	:param user: If user is given, only user cache is cleared.
	:param doctype: If doctype is given, only DocType cache is cleared."""
	import frappe.cache_manager
	if doctype:
		frappe.cache_manager.clear_doctype_cache(doctype)
		# doctype changes invalidate the client-side metadata build id
		reset_metadata_version()
	elif user:
		frappe.cache_manager.clear_user_cache(user)
	else: # everything
		from frappe import translate
		frappe.cache_manager.clear_user_cache()
		frappe.cache_manager.clear_domain_cache()
		translate.clear_cache()
		reset_metadata_version()
		# drop request-local caches as well
		local.cache = {}
		local.new_doc_templates = {}
		# let apps clear their own caches via the clear_cache hook
		for fn in get_hooks("clear_cache"):
			get_attr(fn)()
	# role permissions are recomputed lazily on next access
	local.role_permissions = {}
def only_has_select_perm(doctype, user=None, ignore_permissions=False):
	"""Return True when `user` (default: session user) holds only `select`
	permission — and not `read` — on `doctype`."""
	if ignore_permissions:
		return False

	user = user or local.session.user

	import frappe.permissions
	role_perms = frappe.permissions.get_role_permissions(doctype, user=user)
	return bool(role_perms.get('select') and not role_perms.get('read'))
def has_permission(doctype=None, ptype="read", doc=None, user=None, verbose=False, throw=False):
	"""Return True if permitted; raises `frappe.PermissionError` when `throw` is set and not permitted.
	:param doctype: DocType for which permission is to be check.
	:param ptype: Permission type (`read`, `write`, `create`, `submit`, `cancel`, `amend`). Default: `read`.
	:param doc: [optional] Checks User permissions for given doc.
	:param user: [optional] Check for given user. Default: current user."""
	import frappe.permissions

	if doc and not doctype:
		doctype = doc.doctype

	out = frappe.permissions.has_permission(doctype, ptype, doc=doc, verbose=verbose, user=user, raise_exception=throw)

	if throw and not out:
		subject = doc.doctype + " " + doc.name if doc else doctype
		frappe.throw(_("No permission for {0}").format(subject))

	return out
def has_website_permission(doc=None, ptype='read', user=None, verbose=False, doctype=None):
	"""Return True if the website user is permitted; False otherwise.
	:param doctype: DocType for which permission is to be check.
	:param ptype: Permission type (`read`, `write`, `create`, `submit`, `cancel`, `amend`). Default: `read`.
	:param doc: Checks User permissions for given doc.
	:param user: [optional] Check for given user. Default: current user."""
	user = user or session.user

	if doc:
		if isinstance(doc, string_types):
			doc = get_doc(doctype, doc)
		doctype = doc.doctype

		if doc.flags.ignore_permissions:
			return True

		# a controller-level check takes precedence over hooks
		if hasattr(doc, 'has_website_permission'):
			return doc.has_website_permission(ptype, user, verbose=verbose)

	hooks = (get_hooks("has_website_permission") or {}).get(doctype, [])
	if not hooks:
		return False

	# every registered hook must allow access; a single falsy result denies
	for method in hooks:
		if not call(method, doc=doc, ptype=ptype, user=user, verbose=verbose):
			return False
	return True
def is_table(doctype):
	"""Returns True if `istable` property (indicating child Table) is set for given DocType."""
	def _load_table_names():
		return db.sql_list("select name from tabDocType where istable=1")

	# the full set of child-table doctypes is cached under "is_table"
	return doctype in cache().get_value("is_table", _load_table_names)
def get_precision(doctype, fieldname, currency=None, doc=None):
	"""Get precision for a given field.

	:param doctype: DocType owning the field.
	:param fieldname: Field whose precision is wanted.
	:param currency: [optional] currency context passed to `get_field_precision`.
	:param doc: [optional] document context passed to `get_field_precision`.
	"""
	from frappe.model.meta import get_field_precision
	return get_field_precision(get_meta(doctype).get_field(fieldname), doc, currency)
def generate_hash(txt=None, length=None):
	"""Generate a random hexadecimal hash from `txt` + current timestamp + random bytes.

	:param txt: [optional] text mixed into the digest.
	:param length: [optional] truncate the 56-char sha224 hex digest to this length.
	"""
	import hashlib
	import time
	from secrets import token_hex

	# `secrets` is a CSPRNG; the previous helper drew from the non-cryptographic
	# `random` module, which is predictable in principle.
	digest = hashlib.sha224(((txt or "") + repr(time.time()) + token_hex(8)).encode()).hexdigest()
	if length:
		digest = digest[:length]
	return digest
def reset_metadata_version():
	"""Reset `metadata_version` (Client (Javascript) build ID) hash."""
	new_version = generate_hash()
	cache().set_value("metadata_version", new_version)
	return new_version
def new_doc(doctype, parent_doc=None, parentfield=None, as_dict=False):
	"""Returns a new document of the given DocType with defaults set.

	:param doctype: DocType of the new document.
	:param parent_doc: [optional] add to parent document.
	:param parentfield: [optional] add against this `parentfield`.
	:param as_dict: return a plain dict instead of a Document object.
	"""
	from frappe.model.create_new import get_new_doc
	return get_new_doc(doctype, parent_doc, parentfield, as_dict=as_dict)
def set_value(doctype, docname, fieldname, value=None):
	"""Set document value. Calls `frappe.client.set_value`.

	:param doctype: DocType of the document.
	:param docname: Name of the document.
	:param fieldname: Field to set (or a dict of field/value pairs — see `frappe.client.set_value`).
	:param value: Value to set.
	"""
	import frappe.client
	return frappe.client.set_value(doctype, docname, fieldname, value)
def get_cached_doc(*args, **kwargs):
	"""Fetch a document, preferring the request-local cache, then redis,
	falling back to the database."""
	if args and len(args) > 1 and isinstance(args[1], text_type):
		key = get_document_cache_key(args[0], args[1])

		# 1) request-local cache
		cached = local.document_cache.get(key)
		if cached:
			return cached

		# 2) redis cache — stored as a dict, rebuild the Document
		cached = cache().hget('document_cache', key)
		if cached:
			document = get_doc(cached)
			local.document_cache[key] = document
			return document

	# 3) database (get_doc also repopulates the caches)
	return get_doc(*args, **kwargs)
def get_document_cache_key(doctype, name):
	"""Build the `<doctype>::<name>` key used by the document caches."""
	return '%s::%s' % (doctype, name)
def clear_document_cache(doctype, name):
	"""Drop the cached copy of document `doctype`/`name` from both the
	request-local and redis caches."""
	cache().hdel("last_modified", doctype)
	key = get_document_cache_key(doctype, name)
	local.document_cache.pop(key, None)
	cache().hdel('document_cache', key)
def get_cached_value(doctype, name, fieldname, as_dict=False):
	"""Return one or more field values from the (cached) document `doctype`/`name`.

	:param fieldname: a single field name, or an iterable of field names.
	:param as_dict: with multiple fields, return a `_dict` instead of a list.
	"""
	doc = get_cached_doc(doctype, name)

	if isinstance(fieldname, string_types):
		if as_dict:
			throw('Cannot make dict for single fieldname')
		return doc.get(fieldname)

	values = [doc.get(f) for f in fieldname]
	return _dict(zip(fieldname, values)) if as_dict else values
def get_doc(*args, **kwargs):
	"""Return a `frappe.model.document.Document` object of the given type and name.
	:param arg1: DocType name as string **or** document JSON.
	:param arg2: [optional] Document name as string.
	Examples:
		# insert a new document
		todo = frappe.get_doc({"doctype":"ToDo", "description": "test"})
		todo.insert()
		# open an existing document
		todo = frappe.get_doc("ToDo", "TD0001")
	"""
	import frappe.model.document
	doc = frappe.model.document.get_doc(*args, **kwargs)
	# when fetched by (doctype, name), populate the request-local and redis
	# caches so subsequent get_cached_doc calls hit the cache
	if args and len(args) > 1:
		key = get_document_cache_key(args[0], args[1])
		local.document_cache[key] = doc
		cache().hset('document_cache', key, doc.as_dict())
	return doc
def get_last_doc(doctype, filters=None, order_by="creation desc"):
	"""Get last created document of this type."""
	names = get_all(
		doctype,
		filters=filters,
		limit_page_length=1,
		order_by=order_by,
		pluck="name"
	)
	if not names:
		raise DoesNotExistError
	return get_doc(doctype, names[0])
def get_single(doctype):
	"""Return a `frappe.model.document.Document` object of the given Single doctype.

	Singles are stored under their own doctype name, hence (doctype, doctype).
	"""
	return get_doc(doctype, doctype)
def get_meta(doctype, cached=True):
	"""Get `frappe.model.meta.Meta` instance of given doctype name.

	:param cached: serve from the meta cache when possible.
	"""
	import frappe.model.meta
	return frappe.model.meta.get_meta(doctype, cached=cached)
def get_meta_module(doctype):
	"""Return the Python controller module for the given doctype."""
	import frappe.modules
	return frappe.modules.load_doctype_module(doctype)
def delete_doc(doctype=None, name=None, force=0, ignore_doctypes=None, for_reload=False,
	ignore_permissions=False, flags=None, ignore_on_trash=False, ignore_missing=True, delete_permanently=False):
	"""Delete a document. Calls `frappe.model.delete_doc.delete_doc`.
	:param doctype: DocType of document to be delete.
	:param name: Name of document to be delete.
	:param force: Allow even if document is linked. Warning: This may lead to data integrity errors.
	:param ignore_doctypes: Ignore if child table is one of these.
	:param for_reload: Call `before_reload` trigger before deleting.
	:param ignore_permissions: Ignore user permissions.
	:param ignore_on_trash: Skip the `on_trash` controller hook.
	:param ignore_missing: Do not raise if the document does not exist.
	:param delete_permanently: Do not create a Deleted Document for the document."""
	import frappe.model.delete_doc
	# arguments are forwarded positionally; keep order in sync with
	# frappe.model.delete_doc.delete_doc
	frappe.model.delete_doc.delete_doc(doctype, name, force, ignore_doctypes, for_reload,
		ignore_permissions, flags, ignore_on_trash, ignore_missing, delete_permanently)
def delete_doc_if_exists(doctype, name, force=0):
	"""Delete document if exists."""
	if not db.exists(doctype, name):
		return
	delete_doc(doctype, name, force=force)
def reload_doctype(doctype, force=False, reset_permissions=False):
	"""Reload DocType from model (`[module]/[doctype]/[name]/[name].json`) files."""
	module = scrub(db.get_value("DocType", doctype, "module"))
	reload_doc(module, "doctype", scrub(doctype),
		force=force, reset_permissions=reset_permissions)
def reload_doc(module, dt=None, dn=None, force=False, reset_permissions=False):
	"""Reload Document from model (`[module]/[doctype]/[name]/[name].json`) files.
	:param module: Module name.
	:param dt: DocType name.
	:param dn: Document name.
	:param force: Reload even if `modified` timestamp matches.
	:param reset_permissions: Reset the doctype's permissions from the model file.
	"""
	import frappe.modules
	return frappe.modules.reload_doc(module, dt, dn, force=force, reset_permissions=reset_permissions)
@whitelist()
def rename_doc(*args, **kwargs):
	"""
	Renames a doc(dt, old) to doc(dt, new) and updates all linked fields of type "Link"
	Calls `frappe.model.rename_doc.rename_doc`
	"""
	from frappe.model.rename_doc import rename_doc

	# strip request artefacts / unsupported options before delegating
	for ignored in ('ignore_permissions', 'cmd'):
		kwargs.pop(ignored, None)

	return rename_doc(*args, **kwargs)
def get_module(modulename):
	"""Import and return the module named `modulename` (via `importlib.import_module`)."""
	module = importlib.import_module(modulename)
	return module
def scrub(txt):
	"""Returns sluggified string. e.g. `Sales Order` becomes `sales_order`."""
	# map both spaces and hyphens to underscores, then lowercase
	return txt.translate(str.maketrans({' ': '_', '-': '_'})).lower()
def unscrub(txt):
	"""Returns titlified string. e.g. `sales_order` becomes `Sales Order`."""
	# map underscores and hyphens to spaces, then title-case each word
	return txt.translate(str.maketrans({'_': ' ', '-': ' '})).title()
def get_module_path(module, *joins):
	"""Get the path of the given module name.
	:param module: Module name.
	:param *joins: Join additional path elements using `os.path.join`."""
	scrubbed = scrub(module)
	app = local.module_app[scrubbed]
	return get_pymodule_path(app + "." + scrubbed, *joins)
def get_app_path(app_name, *joins):
	"""Return path of given app.
	:param app_name: App name.
	:param *joins: Join additional path elements using `os.path.join`."""
	return get_pymodule_path(app_name, *joins)
def get_site_path(*joins):
	"""Return path of current site.
	:param *joins: Join additional path elements using `os.path.join`."""
	return os.path.join(local.site_path, *joins)
def get_pymodule_path(modulename, *joins):
	"""Return path of given Python module name.

	:param modulename: Python module name.
	:param *joins: Join additional path elements using `os.path.join`.

	Path elements are sluggified via `scrub` unless one of them is
	"public" (asset paths keep their original spelling).
	"""
	# idiomatic membership test (was `not "public" in joins`)
	if "public" not in joins:
		joins = [scrub(part) for part in joins]
	return os.path.join(os.path.dirname(get_module(scrub(modulename)).__file__), *joins)
def get_module_list(app_name):
	"""Get list of modules for given all via `app/modules.txt`."""
	# modules.txt lives next to the app package's __init__.py
	return get_file_items(os.path.join(os.path.dirname(get_module(app_name).__file__), "modules.txt"))
def get_all_apps(with_internal_apps=True, sites_path=None):
	"""Get list of all apps via `sites/apps.txt`."""
	sites_path = sites_path or local.sites_path

	apps = get_file_items(os.path.join(sites_path, "apps.txt"), raise_not_found=True)

	if with_internal_apps:
		# site-local apps.txt may add apps not present bench-wide
		for app in get_file_items(os.path.join(local.site_path, "apps.txt")):
			if app not in apps:
				apps.append(app)

	# frappe is always first
	if "frappe" in apps:
		apps.remove("frappe")
	apps.insert(0, 'frappe')

	return apps
def get_installed_apps(sort=False, frappe_last=False):
	"""Get list of installed apps in current site.

	:param sort: Order the result like `local.all_apps`.
	:param frappe_last: Move "frappe" to the end of the list.
	"""
	# NOTE(review): the default of True means a *missing* in_install_db flag
	# short-circuits to [] — confirm this is intended and not a typo for False.
	if getattr(flags, "in_install_db", True):
		return []
	if not db:
		connect()
	if not local.all_apps:
		local.all_apps = cache().get_value('all_apps', get_all_apps)
	installed = json.loads(db.get_global("installed_apps") or "[]")
	if sort:
		installed = [app for app in local.all_apps if app in installed]
	if frappe_last:
		if 'frappe' in installed:
			installed.remove('frappe')
		installed.append('frappe')
	return installed
def get_doc_hooks():
	'''Returns hooked methods for given doc. It will expand the dict tuple if required.'''
	if not hasattr(local, 'doc_events_hooks'):
		expanded = {}
		for key, value in iteritems(get_hooks('doc_events', {})):
			if isinstance(key, tuple):
				# a tuple key registers the same hooks for several doctypes
				for doctype in key:
					append_hook(expanded, doctype, value)
			else:
				append_hook(expanded, key, value)
		local.doc_events_hooks = expanded
	return local.doc_events_hooks
def get_hooks(hook=None, default=None, app_name=None):
	"""Get hooks via `app/hooks.py`
	:param hook: Name of the hook. Will gather all hooks for this name and return as a list.
	:param default: Default if no hook found.
	:param app_name: Filter by app."""
	def load_app_hooks(app_name=None):
		hooks = {}
		for app in [app_name] if app_name else get_installed_apps(sort=True):
			# legacy alias: "webnotes" was renamed to "frappe"
			app = "frappe" if app=="webnotes" else app
			try:
				app_hooks = get_module(app + ".hooks")
			except ImportError:
				if local.flags.in_install_app:
					# if app is not installed while restoring
					# ignore it
					# NOTE(review): this `pass` does NOT skip the app — control
					# still falls through to the print/exit/raise below. If the
					# intent is to ignore the app, this should be `continue`.
					pass
				print('Could not find app "{0}"'.format(app_name))
				if not request:
					sys.exit(1)
				raise
			# every public attribute of hooks.py is a hook
			for key in dir(app_hooks):
				if not key.startswith("_"):
					append_hook(hooks, key, getattr(app_hooks, key))
		return hooks
	# developer mode bypasses the redis-cached hook map
	no_cache = conf.developer_mode or False
	if app_name:
		hooks = _dict(load_app_hooks(app_name))
	else:
		if no_cache:
			hooks = _dict(load_app_hooks())
		else:
			hooks = _dict(cache().get_value("app_hooks", load_app_hooks))
	if hook:
		return hooks.get(hook) or (default if default is not None else [])
	else:
		return hooks
def append_hook(target, key, value):
	'''Append hook `value` into `target[key]`.

	Dict values (like doc_events) are merged recursively, listifying the
	leaves; scalar and list values are collected into a list under `key`.
	'''
	if isinstance(value, dict):
		# merge nested hooks key by key
		bucket = target.setdefault(key, {})
		for inner_key, inner_value in value.items():
			append_hook(bucket, inner_key, inner_value)
	else:
		items = value if isinstance(value, list) else [value]
		target.setdefault(key, []).extend(items)
def setup_module_map():
	"""Rebuild map of all modules (internal).

	Populates `local.app_modules` (app -> [modules]) and `local.module_app`
	(module -> app), serving from the site cache when available.
	"""
	_cache = cache()

	if conf.db_name:
		local.app_modules = _cache.get_value("app_modules")
		local.module_app = _cache.get_value("module_app")

	if not (local.app_modules and local.module_app):
		# cache miss (or no db configured): rebuild from each app's modules.txt
		local.module_app, local.app_modules = {}, {}
		for app in get_all_apps(True):
			# legacy alias: "webnotes" was renamed to "frappe"
			if app == "webnotes":
				app = "frappe"
			local.app_modules.setdefault(app, [])
			for module in get_module_list(app):
				module = scrub(module)
				local.module_app[module] = app
				local.app_modules[app].append(module)

		if conf.db_name:
			_cache.set_value("app_modules", local.app_modules)
			_cache.set_value("module_app", local.module_app)
def get_file_items(path, raise_not_found=False, ignore_empty_lines=True):
	"""Returns items from text file as a list. Ignores empty lines."""
	import frappe.utils

	content = read_file(path, raise_not_found=raise_not_found)
	if not content:
		return []

	content = frappe.utils.strip(content)

	items = []
	for line in content.splitlines():
		stripped = line.strip()
		# with ignore_empty_lines, skip blanks and '#' comment lines
		if ignore_empty_lines and (not stripped or line.startswith("#")):
			continue
		items.append(stripped)
	return items
def get_file_json(path):
	"""Read a file and return parsed JSON object."""
	with open(path, 'r') as json_file:
		parsed = json.load(json_file)
	return parsed
def read_file(path, raise_not_found=False):
	"""Open a file and return its content as Unicode.

	:param path: File path (str or bytes).
	:param raise_not_found: Raise IOError if the file is missing, instead of returning None.
	"""
	if isinstance(path, text_type):
		# NOTE(review): py2-era compat — open() accepts bytes paths on py3 so
		# this works, but the encode looks unnecessary there; confirm before removing.
		path = path.encode("utf-8")
	if os.path.exists(path):
		with open(path, "r") as f:
			return as_unicode(f.read())
	elif raise_not_found:
		raise IOError("{} Not Found".format(path))
	else:
		return None
def get_attr(method_string):
	"""Get python method object from its name."""
	app_name = method_string.split(".")[0]
	if not local.flags.in_install and app_name not in get_installed_apps():
		throw(_("App {0} is not installed").format(app_name), AppNotInstalledError)

	# split "pkg.module.attr" into module path and attribute name
	modulename, _sep, methodname = method_string.rpartition('.')
	return getattr(get_module(modulename), methodname)
def call(fn, *args, **kwargs):
	"""Call a function and match arguments."""
	if isinstance(fn, string_types):
		fn = get_attr(fn)
	# drop keyword arguments the target does not accept
	return fn(*args, **get_newargs(fn, kwargs))
def get_newargs(fn, kwargs):
	"""Return the subset of `kwargs` that `fn` accepts.

	Honors an `fn.fnargs` attribute (set by frappe for whitelisted callables)
	as the argument list; otherwise inspects the signature. Framework-internal
	keys `ignore_permissions` and `flags` are always stripped.
	"""
	varkw = None  # previously unbound on the fnargs branch -> NameError
	if hasattr(fn, 'fnargs'):
		fnargs = fn.fnargs
	else:
		# inspect.getargspec was removed in Python 3.11 (AttributeError, not
		# the previously-caught ValueError); use getfullargspec directly and
		# call it once instead of four times.
		spec = inspect.getfullargspec(fn)
		fnargs = spec.args
		varkw = spec.varkw

	newargs = {}
	for a in kwargs:
		# a **kwargs parameter accepts any keyword
		if (a in fnargs) or varkw:
			newargs[a] = kwargs.get(a)

	newargs.pop("ignore_permissions", None)
	newargs.pop("flags", None)

	return newargs
def make_property_setter(args, ignore_validate=False, validate_fields_for_doctype=True):
	"""Create a new **Property Setter** (for overriding DocType and DocField properties).
	If doctype is not specified, it will create a property setter for all fields with the
	given fieldname"""
	args = _dict(args)
	if not args.doctype_or_field:
		args.doctype_or_field = 'DocField'
		if not args.property_type:
			# infer the property's fieldtype from the DocField definition
			args.property_type = db.get_value('DocField',
				{'parent': 'DocField', 'fieldname': args.property}, 'fieldtype') or 'Data'

	if not args.doctype:
		# no doctype: apply to every doctype that has this fieldname
		doctype_list = db.sql_list('select distinct parent from tabDocField where fieldname=%s', args.fieldname)
	else:
		doctype_list = [args.doctype]

	for doctype in doctype_list:
		if not args.property_type:
			args.property_type = db.get_value('DocField',
				{'parent': doctype, 'fieldname': args.fieldname}, 'fieldtype') or 'Data'

		ps = get_doc({
			'doctype': "Property Setter",
			'doctype_or_field': args.doctype_or_field,
			'doc_type': doctype,
			'field_name': args.fieldname,
			'row_name': args.row_name,
			'property': args.property,
			'value': args.value,
			'property_type': args.property_type or "Data",
			'__islocal': 1
		})
		ps.flags.ignore_validate = ignore_validate
		ps.flags.validate_fields_for_doctype = validate_fields_for_doctype
		ps.validate_fieldtype_change()
		ps.insert()
def import_doc(path):
	"""Import a file using Data Import.

	:param path: Path of the file to import (handled by the Data Import doctype).
	"""
	from frappe.core.doctype.data_import.data_import import import_doc
	import_doc(path)
def copy_doc(doc, ignore_no_copy=True):
	"""Return an unsaved copy of `doc` with identity fields cleared.

	:param doc: Document object or dict to copy.
	:param ignore_no_copy: when True (default), fields marked `no_copy`
		keep their values; when False they are blanked.
	"""
	import copy

	def remove_no_copy_fields(d):
		# blank every field flagged no_copy in the doctype meta
		for df in d.meta.get("fields", {"no_copy": 1}):
			if hasattr(d, df.fieldname):
				d.set(df.fieldname, None)

	fields_to_clear = ['name', 'owner', 'creation', 'modified', 'modified_by']

	if not local.flags.in_test:
		fields_to_clear.append("docstatus")

	if not isinstance(doc, dict):
		d = doc.as_dict()
	else:
		d = doc

	# deepcopy so the new document shares no mutable state with the original
	newdoc = get_doc(copy.deepcopy(d))
	newdoc.set("__islocal", 1)
	for fieldname in (fields_to_clear + ['amended_from', 'amendment_date']):
		newdoc.set(fieldname, None)

	if not ignore_no_copy:
		remove_no_copy_fields(newdoc)

	# children are reset the same way as the parent
	for i, d in enumerate(newdoc.get_all_children()):
		d.set("__islocal", 1)
		for fieldname in fields_to_clear:
			d.set(fieldname, None)

		if not ignore_no_copy:
			remove_no_copy_fields(d)

	return newdoc
def compare(val1, condition, val2):
	"""Compare two values using `frappe.utils.compare`
	`condition` could be:
	- "^"
	- "in"
	- "not in"
	- "="
	- "!="
	- ">"
	- "<"
	- ">="
	- "<="
	- "not None"
	- "None"
	"""
	import frappe.utils
	return frappe.utils.compare(val1, condition, val2)
def respond_as_web_page(title, html, success=None, http_status_code=None, context=None,
	indicator_color=None, primary_action='/', primary_label = None, fullpage=False,
	width=None, template='message'):
	"""Send response as a web page with a message rather than JSON. Used to show permission errors etc.
	:param title: Page title and heading.
	:param html: Message to be shown.
	:param success: Alert message.
	:param http_status_code: HTTP status code
	:param context: web template context
	:param indicator_color: color of indicator in title
	:param primary_action: route on primary button (default is `/`)
	:param primary_label: label on primary button (default is "Home")
	:param fullpage: hide header / footer
	:param width: Width of message in pixels
	:param template: Optionally pass view template
	"""
	local.message_title = title
	local.message = html
	local.response['type'] = 'page'
	local.response['route'] = template
	local.no_cache = 1

	if http_status_code:
		local.response['http_status_code'] = http_status_code

	context = context or {}

	if not indicator_color:
		# green for success, red for error statuses, blue otherwise
		if success:
			indicator_color = 'green'
		elif http_status_code and http_status_code > 300:
			indicator_color = 'red'
		else:
			indicator_color = 'blue'

	context.update({
		'indicator_color': indicator_color,
		'primary_label': primary_label,
		'primary_action': primary_action,
		'error_code': http_status_code,
		'fullpage': fullpage,
	})
	if width:
		context['card_width'] = width

	local.response['context'] = context
def redirect_to_message(title, html, http_status_code=None, context=None, indicator_color=None):
	"""Redirects to /message?id=random
	Similar to respond_as_web_page, but used to 'redirect' and show message pages like success, failure, etc. with a detailed message
	:param title: Page title and heading.
	:param html: Message to be shown.
	:param http_status_code: HTTP status code.
	Example Usage:
		frappe.redirect_to_message(_('Thank you'), "<div><p>You will receive an email at test@example.com</p></div>")
	"""
	message_id = generate_hash(length=8)

	payload = {
		'context': context or {},
		'http_status_code': http_status_code or 200
	}
	payload['context'].update({
		'header': title,
		'title': title,
		'message': html
	})
	if indicator_color:
		payload['context']['indicator_color'] = indicator_color

	# the message page reads this back by id; keep it for a minute only
	cache().set_value("message_id:{0}".format(message_id), payload, expires_in_sec=60)
	location = '/message?id={0}'.format(message_id)

	if getattr(local, 'is_ajax', False):
		return location

	local.response["type"] = "redirect"
	local.response["location"] = location
def build_match_conditions(doctype, as_condition=True):
	"""Return match (User permissions) for given doctype as list or SQL.

	:param as_condition: return a SQL condition string instead of a list.
	"""
	import frappe.desk.reportview
	return frappe.desk.reportview.build_match_conditions(doctype, as_condition=as_condition)
def get_list(doctype, *args, **kwargs):
	"""List database query via `frappe.model.db_query`. Will also check for permissions.
	:param doctype: DocType on which query is to be made.
	:param fields: List of fields or `*`.
	:param filters: List of filters (see example).
	:param order_by: Order By e.g. `modified desc`.
	:param limit_page_start: Start results at record #. Default 0.
	:param limit_page_length: No of records in the page. Default 20.
	Example usage:
		# simple dict filter
		frappe.get_list("ToDo", fields=["name", "description"], filters = {"owner":"test@example.com"})
		# filter as a list of lists
		frappe.get_list("ToDo", fields="*", filters = [["modified", ">", "2014-01-01"]])
		# filter as a list of dicts
		frappe.get_list("ToDo", fields="*", filters = {"description": ("like", "test%")})
	"""
	import frappe.model.db_query
	return frappe.model.db_query.DatabaseQuery(doctype).execute(None, *args, **kwargs)
def get_all(doctype, *args, **kwargs):
	"""List database query via `frappe.model.db_query`. Will **not** check for permissions.
	Parameters are same as `frappe.get_list`
	:param doctype: DocType on which query is to be made.
	:param fields: List of fields or `*`. Default is: `["name"]`.
	:param filters: List of filters (see example).
	:param order_by: Order By e.g. `modified desc`.
	:param limit_start: Start results at record #. Default 0.
	:param limit_page_length: No of records in the page. Default 20.
	Example usage:
		# simple dict filter
		frappe.get_all("ToDo", fields=["name", "description"], filters = {"owner":"test@example.com"})
		# filter as a list of lists
		frappe.get_all("ToDo", fields=["*"], filters = [["modified", ">", "2014-01-01"]])
		# filter as a list of dicts
		frappe.get_all("ToDo", fields=["*"], filters = {"description": ("like", "test%")})
	"""
	kwargs["ignore_permissions"] = True
	# unlike get_list, default to an unlimited page
	kwargs.setdefault("limit_page_length", 0)
	return get_list(doctype, *args, **kwargs)
def get_value(*args, **kwargs):
	"""Returns a document property or list of properties.
	Alias for `frappe.db.get_value`
	:param doctype: DocType name.
	:param filters: Filters like `{"x":"y"}` or name of the document. `None` if Single DocType.
	:param fieldname: Column name.
	:param ignore: Don't raise exception if table, column is missing.
	:param as_dict: Return values as dict.
	:param debug: Print query in error log.
	"""
	return db.get_value(*args, **kwargs)
def as_json(obj, indent=1):
	"""Serialize `obj` to a sorted, indented JSON string using frappe's
	`json_handler` for non-standard types (dates, Documents, ...)."""
	from frappe.utils.response import json_handler
	return json.dumps(obj, indent=indent, sort_keys=True, default=json_handler, separators=(',', ': '))
def are_emails_muted():
	"""Truthy when outgoing email is disabled, via `flags.mute_emails` or the
	site config key `mute_emails`."""
	from frappe.utils import cint
	muted = flags.mute_emails or cint(conf.get("mute_emails") or 0)
	return muted or False
def get_test_records(doctype):
	"""Returns list of objects from `test_records.json` in the given doctype's folder."""
	from frappe.modules import get_doctype_module, get_module_path

	path = os.path.join(get_module_path(get_doctype_module(doctype)), "doctype", scrub(doctype), "test_records.json")
	if not os.path.exists(path):
		return []

	with open(path, "r") as records_file:
		return json.loads(records_file.read())
def format_value(*args, **kwargs):
	"""Format value with given field properties.
	:param value: Value to be formatted.
	:param df: (Optional) DocField object with properties `fieldtype`, `options` etc."""
	import frappe.utils.formatters
	return frappe.utils.formatters.format_value(*args, **kwargs)
def format(*args, **kwargs):
	"""Format value with given field properties. Alias of `format_value`.

	(Deliberately shadows the builtin `format` as part of the public frappe API.)
	:param value: Value to be formatted.
	:param df: (Optional) DocField object with properties `fieldtype`, `options` etc."""
	import frappe.utils.formatters
	return frappe.utils.formatters.format_value(*args, **kwargs)
def get_print(doctype=None, name=None, print_format=None, style=None,
	html=None, as_pdf=False, doc=None, output=None, no_letterhead=0, password=None):
	"""Get Print Format for given document.
	:param doctype: DocType of document.
	:param name: Name of document.
	:param print_format: Print Format name. Default 'Standard',
	:param style: Print Format style.
	:param as_pdf: Return as PDF. Default False.
	:param password: Password to encrypt the pdf with. Default None"""
	from frappe.website.render import build_page
	from frappe.utils.pdf import get_pdf

	# the printview page reads its parameters from form_dict; set them here
	local.form_dict.doctype = doctype
	local.form_dict.name = name
	local.form_dict.format = print_format
	local.form_dict.style = style
	local.form_dict.doc = doc
	local.form_dict.no_letterhead = no_letterhead

	options = None
	if password:
		options = {'password': password}

	if not html:
		html = build_page("printview")

	if as_pdf:
		return get_pdf(html, output = output, options = options)
	else:
		return html
def attach_print(doctype, name, file_name=None, print_format=None,
	style=None, html=None, doc=None, lang=None, print_letterhead=True, password=None):
	"""Build an attachment dict `{"fname": ..., "fcontent": ...}` with the
	rendered print of `doctype`/`name` — PDF or HTML depending on Print Settings.

	:param lang: render in this language (temporarily switches `local.lang`).
	:param password: optional password for PDF encryption.
	"""
	from frappe.utils import scrub_urls

	if not file_name: file_name = name
	# strip spaces and path separators so the result is a safe file name
	file_name = file_name.replace(' ','').replace('/','-')

	print_settings = db.get_singles_dict("Print Settings")

	_lang = local.lang

	#set lang as specified in print format attachment
	if lang: local.lang = lang
	local.flags.ignore_print_permissions = True

	no_letterhead = not print_letterhead

	kwargs = dict(
		print_format=print_format,
		style=style,
		html=html,
		doc=doc,
		no_letterhead=no_letterhead,
		password=password
	)

	content = ''
	if int(print_settings.send_print_as_pdf or 0):
		ext = ".pdf"
		kwargs["as_pdf"] = True
		content = get_print(doctype, name, **kwargs)
	else:
		ext = ".html"
		content = scrub_urls(get_print(doctype, name, **kwargs)).encode('utf-8')

	out = {
		"fname": file_name + ext,
		"fcontent": content
	}

	local.flags.ignore_print_permissions = False
	#reset lang to original local lang
	local.lang = _lang

	return out
def publish_progress(*args, **kwargs):
	"""Show the user progress for a long request
	:param percent: Percent progress
	:param title: Title
	:param doctype: Optional, for document type
	:param docname: Optional, for document name
	:param description: Optional description
	"""
	# function-level import (frappe convention for submodules)
	import frappe.realtime
	return frappe.realtime.publish_progress(*args, **kwargs)
def publish_realtime(*args, **kwargs):
	"""Publish real-time updates
	:param event: Event name, like `task_progress` etc.
	:param message: JSON message object. For async must contain `task_id`
	:param room: Room in which to publish update (default entire site)
	:param user: Transmit to user
	:param doctype: Transmit to doctype, docname
	:param docname: Transmit to doctype, docname
	:param after_commit: (default False) will emit after current transaction is committed
	"""
	# function-level import (frappe convention for submodules)
	import frappe.realtime
	return frappe.realtime.publish_realtime(*args, **kwargs)
def local_cache(namespace, key, generator, regenerate_if_none=False):
	"""A key value store for caching within a request
	:param namespace: frappe.local.cache[namespace]
	:param key: frappe.local.cache[namespace][key] used to retrieve value
	:param generator: method to generate a value if not found in store
	:param regenerate_if_none: re-run the generator when the cached value is None
	"""
	cache_store = local.cache.setdefault(namespace, {})

	if key not in cache_store:
		cache_store[key] = generator()
	# `is None` instead of `== None`: identity check, and safe against
	# cached objects with a custom __eq__
	elif cache_store[key] is None and regenerate_if_none:
		# if key exists but the previous result was None
		cache_store[key] = generator()

	return cache_store[key]
def enqueue(*args, **kwargs):
	'''
	Enqueue method to be executed using a background worker
	:param method: method string or method object
	:param queue: (optional) should be either long, default or short
	:param timeout: (optional) should be set according to the functions
	:param event: this is passed to enable clearing of jobs from queues
	:param is_async: (optional) if is_async=False, the method is executed immediately, else via a worker
	:param job_name: (optional) can be used to name an enqueue call, which can be used to prevent duplicate calls
	:param kwargs: keyword arguments to be passed to the method
	'''
	# function-level import (frappe convention for submodules)
	import frappe.utils.background_jobs
	return frappe.utils.background_jobs.enqueue(*args, **kwargs)
def enqueue_doc(*args, **kwargs):
	'''
	Enqueue method to be executed using a background worker
	:param doctype: DocType of the document on which you want to run the event
	:param name: Name of the document on which you want to run the event
	:param method: method string or method object
	:param queue: (optional) should be either long, default or short
	:param timeout: (optional) should be set according to the functions
	:param kwargs: keyword arguments to be passed to the method
	'''
	# function-level import (frappe convention for submodules)
	import frappe.utils.background_jobs
	return frappe.utils.background_jobs.enqueue_doc(*args, **kwargs)
def get_doctype_app(doctype):
	"""Return the name of the app that provides `doctype` (request-cached)."""
	def _lookup():
		doctype_module = local.db.get_value("DocType", doctype, "module")
		return local.module_app[scrub(doctype_module)]

	return local_cache("doctype_app", doctype, generator=_lookup)
# Cache of logger instances — presumably keyed by logger/site name, populated
# by frappe.utils.logger.get_logger; verify against that module.
loggers = {}
# Global log level override; None means "use the default".
log_level = None
def logger(module=None, with_more_info=False, allow_site=True, filter=None, max_size=100_000, file_count=20):
	'''Returns a python logger that uses StreamHandler.

	:param module: logger name (usually the module name).
	:param with_more_info: include extra request context in log records.
	:param allow_site: site-specific log file when a site is active.
	:param filter: optional logging filter callable (shadows the builtin by API design).
	:param max_size: max log file size in bytes before rotation.
	:param file_count: number of rotated files to keep.
	'''
	from frappe.utils.logger import get_logger
	return get_logger(module=module, with_more_info=with_more_info, allow_site=allow_site, filter=filter, max_size=max_size, file_count=file_count)
def log_error(message=None, title=_("Error")):
'''Log error to Error Log'''
# AI ALERT:
# the title and message may be swapped
# the better API for this is log_error(title, message), and used in many cases this way
# this hack tries to be smart about whats a title (single line ;-)) and fixes it
if message:
if '\n' in title:
error, title = title, message
else:
error = message
else:
error = get_traceback()
return get_doc(dict(doctype='Error Log', error=as_unicode(error),
method=title)).insert(ignore_permissions=True)
def get_desk_link(doctype, name):
html = '<a href="/app/Form/{doctype}/{name}" style="font-weight: bold;">{doctype_local} {name}</a>'
return html.format(
doctype=doctype,
name=name,
doctype_local=_(doctype)
)
def bold(text):
return '<b>{0}</b>'.format(text)
def safe_eval(code, eval_globals=None, eval_locals=None):
'''A safer `eval`'''
whitelisted_globals = {
"int": int,
"float": float,
"long": int,
"round": round
}
if '__' in code:
throw('Illegal rule {0}. Cannot use "__"'.format(bold(code)))
if not eval_globals:
eval_globals = {}
eval_globals['__builtins__'] = {}
eval_globals.update(whitelisted_globals)
return eval(code, eval_globals, eval_locals)
def get_system_settings(key):
if key not in local.system_settings:
local.system_settings.update({key: db.get_single_value('System Settings', key)})
return local.system_settings.get(key)
def get_active_domains():
from frappe.core.doctype.domain_settings.domain_settings import get_active_domains
return get_active_domains()
def get_version(doctype, name, limit=None, head=False, raise_err=True):
'''
Returns a list of version information of a given DocType.
Note: Applicable only if DocType has changes tracked.
Example
>>> frappe.get_version('User', 'foobar@gmail.com')
>>>
[
{
"version": [version.data], # Refer Version DocType get_diff method and data attribute
"user": "admin@gmail.com", # User that created this version
"creation": <datetime.datetime> # Creation timestamp of that object.
}
]
'''
meta = get_meta(doctype)
if meta.track_changes:
names = db.get_all('Version', filters={
'ref_doctype': doctype,
'docname': name,
'order_by': 'creation' if head else None,
'limit': limit
}, as_list=1)
from frappe.chat.util import squashify, dictify, safe_json_loads
versions = []
for name in names:
name = squashify(name)
doc = get_doc('Version', name)
data = doc.data
data = safe_json_loads(data)
data = dictify(dict(
version=data,
user=doc.owner,
creation=doc.creation
))
versions.append(data)
return versions
else:
if raise_err:
raise ValueError(_('{0} has no versions tracked.').format(doctype))
@whitelist(allow_guest=True)
def ping():
return "pong"
def safe_encode(param, encoding='utf-8'):
try:
param = param.encode(encoding)
except Exception:
pass
return param
def safe_decode(param, encoding='utf-8'):
try:
param = param.decode(encoding)
except Exception:
pass
return param
def parse_json(val):
from frappe.utils import parse_json
return parse_json(val)
def mock(type, size=1, locale='en'):
results = []
fake = faker.Faker(locale)
if type not in dir(fake):
raise ValueError('Not a valid mock type.')
else:
for i in range(size):
data = getattr(fake, type)()
results.append(data)
from frappe.chat.util import squashify
return squashify(results)
def validate_and_sanitize_search_inputs(fn):
from frappe.desk.search import validate_and_sanitize_search_inputs as func
return func(fn)
| xss | {
"code": [
"\t\twhitelisted.append(fn)"
],
"line_no": [
558
]
} | {
"code": [
"\t\t\tfn = fn.__func__",
"def is_whitelisted(method):",
"\tfrom frappe.utils import sanitize_html",
"\tis_guest = session['user'] == 'Guest'",
"\tif method not in whitelisted or is_guest and method not in guest_methods:",
"\t\tthrow(_(\"Not permitted\"), PermissionError)",
"\tif is_guest and method not in xss_safe_methods:",
"\t\tfor key, value in form_dict.items():",
"\t\t\tif isinstance(value, string_types):",
"\t\t\t\tform_dict[key] = sanitize_html(value)"
],
"line_no": [
562,
577,
578,
580,
581,
582,
584,
587,
588,
589
]
} |
from __future__ import unicode_literals, print_function
from six import iteritems, binary_type, text_type, string_types, PY2
from werkzeug.local import Local, release_local
import os, sys, importlib, inspect, json
from past.builtins import cmp
import click
from .exceptions import *
from .utils.jinja import (get_jenv, get_template, render_template, get_email_from_template, get_jloader)
from .utils.lazy_loader import lazy_import
VAR_0 = lazy_import('faker')
if PY2:
reload(sys)
sys.setdefaultencoding("utf-8")
__version__ = '13.0.0-dev'
__title__ = "Frappe Framework"
VAR_1 = Local()
VAR_2 = {}
class CLASS_0(dict):
def __getattr__(self, VAR_46):
VAR_205 = self.get(VAR_46)
if not VAR_205 and VAR_46.startswith("__"):
raise AttributeError()
return VAR_205
def __setattr__(self, VAR_46, VAR_105):
self[VAR_46] = VAR_105
def __getstate__(self):
return self
def __setstate__(self, VAR_174):
self.update(VAR_174)
def FUNC_112(self, VAR_174):
super(CLASS_0, self).update(VAR_174)
return self
def FUNC_113(self):
return CLASS_0(dict(self).copy())
def FUNC_0(VAR_3, VAR_4=None, VAR_5=None):
from frappe.translate import get_full_dict
from frappe.utils import strip_html_tags, is_html
if not hasattr(VAR_1, 'lang'):
VAR_1.lang = VAR_4 or 'en'
if not VAR_4:
lang = VAR_1.lang
VAR_175 = VAR_3
if is_html(VAR_3):
msg = strip_html_tags(VAR_3)
VAR_3 = FUNC_1(VAR_3).strip()
VAR_176 = ''
if VAR_5:
VAR_206 = '{VAR_3}:{VAR_5}'.format(VAR_3=msg, VAR_5=context)
VAR_176 = get_full_dict(VAR_4).get(VAR_206)
if not VAR_176:
translated_string = get_full_dict(VAR_4).get(VAR_3)
return VAR_176 or VAR_175
def FUNC_1(VAR_6, VAR_7='utf-8'):
if isinstance(VAR_6, text_type):
return VAR_6
elif VAR_6==None:
return ''
elif isinstance(VAR_6, binary_type):
return text_type(VAR_6, VAR_7)
else:
return text_type(VAR_6)
def FUNC_2(VAR_8, VAR_9=None):
from frappe.translate import get_dict
return get_dict(VAR_8, VAR_9)
def FUNC_3(VAR_10, VAR_11=None):
from frappe.translate import .get_user_lang
VAR_1.lang = get_user_lang(VAR_10)
VAR_12 = VAR_1("db")
VAR_13 = VAR_1("conf")
VAR_14 = VAR_15 = VAR_1("form_dict")
VAR_16 = VAR_1("request")
VAR_17 = VAR_1("response")
VAR_18 = VAR_1("session")
VAR_10 = VAR_1("user")
VAR_19 = VAR_1("flags")
VAR_20 = VAR_1("error_log")
VAR_21 = VAR_1("debug_log")
VAR_22 = VAR_1("message_log")
VAR_4 = VAR_1("lang")
def FUNC_4(VAR_23, VAR_24=None, VAR_25=False):
if getattr(VAR_1, "initialised", None):
return
if not VAR_24:
sites_path = '.'
VAR_1.error_log = []
VAR_1.message_log = []
VAR_1.debug_log = []
VAR_1.realtime_log = []
VAR_1.flags = CLASS_0({
"currently_saving": [],
"redirect_location": "",
"in_install_db": False,
"in_install_app": False,
"in_import": False,
"in_test": False,
"mute_messages": False,
"ignore_links": False,
"mute_emails": False,
"has_dataurl": False,
"new_site": VAR_25
})
VAR_1.rollback_observers = []
VAR_1.before_commit = []
VAR_1.test_objects = {}
VAR_1.site = VAR_23
VAR_1.sites_path = VAR_24
VAR_1.site_path = os.path.join(VAR_24, VAR_23)
VAR_1.all_apps = None
VAR_1.request_ip = None
VAR_1.response = CLASS_0({"docs":[]})
VAR_1.task_id = None
VAR_1.conf = CLASS_0(FUNC_7())
VAR_1.lang = VAR_1.conf.lang or "en"
VAR_1.lang_full_dict = None
VAR_1.module_app = None
VAR_1.app_modules = None
VAR_1.system_settings = CLASS_0()
VAR_1.user = None
VAR_1.user_perms = None
VAR_1.session = None
VAR_1.role_permissions = {}
VAR_1.valid_columns = {}
VAR_1.new_doc_templates = {}
VAR_1.link_count = {}
VAR_1.jenv = None
VAR_1.jloader =None
VAR_1.cache = {}
VAR_1.document_cache = {}
VAR_1.meta_cache = {}
VAR_1.form_dict = CLASS_0()
VAR_1.session = CLASS_0()
VAR_1.dev_server = os.environ.get('DEV_SERVER', False)
FUNC_68()
VAR_1.initialised = True
def FUNC_5(VAR_23=None, VAR_26=None, VAR_27=True):
from frappe.database import get_db
if VAR_23:
FUNC_4(VAR_23)
VAR_1.db = get_db(VAR_10=VAR_26 or VAR_1.conf.db_name)
if VAR_27:
FUNC_22("Administrator")
def FUNC_6():
from frappe.database import get_db
VAR_10 = VAR_1.conf.db_name
VAR_151 = VAR_1.conf.db_password
if VAR_1.conf.different_credentials_for_replica:
VAR_10 = VAR_1.conf.replica_db_name
VAR_151 = VAR_1.conf.replica_db_password
VAR_1.replica_db = get_db(host=VAR_1.conf.replica_host, VAR_10=user, VAR_151=password)
VAR_1.primary_db = VAR_1.db
VAR_1.db = VAR_1.replica_db
def FUNC_7(VAR_24=None, VAR_28=None):
VAR_177 = {}
VAR_24 = sites_path or getattr(VAR_1, "sites_path", None)
VAR_28 = site_path or getattr(VAR_1, "site_path", None)
if VAR_24:
VAR_207 = os.path.join(VAR_24, "common_site_config.json")
if os.path.exists(VAR_207):
try:
VAR_177.update(FUNC_70(VAR_207))
except Exception as VAR_215:
click.secho("common_site_config.json is invalid", fg="red")
print(VAR_215)
if VAR_28:
VAR_208 = os.path.join(VAR_28, "site_config.json")
if os.path.exists(VAR_208):
try:
VAR_177.update(FUNC_70(VAR_208))
except Exception as VAR_215:
click.secho("{0}/VAR_208.json is invalid".format(VAR_1.site), fg="red")
print(VAR_215)
elif VAR_1.site and not VAR_1.flags.new_site:
raise IncorrectSitePath("{0} does not exist".format(VAR_1.site))
return CLASS_0(VAR_177)
def FUNC_8(VAR_23=None):
if hasattr(VAR_1, 'conf'):
return VAR_1.conf
else:
with CLASS_1(VAR_23):
return VAR_1.conf
class CLASS_1:
def __init__(self, VAR_23=None):
self.site = VAR_23 or ''
def __enter__(self):
FUNC_4(self.site)
return VAR_1
def __exit__(self, VAR_171, VAR_105, VAR_178):
FUNC_9()
def FUNC_9():
if VAR_12:
db.close()
release_local(VAR_1)
VAR_29 = None
def FUNC_10():
global VAR_29
if not VAR_29:
from frappe.utils.redis_wrapper import RedisWrapper
VAR_29 = RedisWrapper.from_url(VAR_13.get('redis_cache')
or "redis://localhost:11311")
return VAR_29
def FUNC_11():
from frappe.utils import .get_traceback
return FUNC_11()
def FUNC_12(VAR_3):
VAR_3 = FUNC_1(VAR_3)
if not VAR_16 or (not "cmd" in VAR_1.form_dict) or VAR_13.developer_mode:
print(VAR_3)
VAR_20.append({"exc": VAR_3})
def FUNC_13(VAR_30=True):
return FUNC_10().set_value('flag_print_sql', VAR_30)
def VAR_180(VAR_3):
if not VAR_16:
if VAR_13.get("logging") or False:
print(repr(VAR_3))
VAR_21.append(FUNC_1(VAR_3))
def FUNC_15(VAR_3, VAR_31=None, VAR_32=0, VAR_33=False, VAR_34=False, VAR_35=None, VAR_36=False, VAR_37=None, VAR_38=None, VAR_39=None):
from frappe.utils import strip_html_tags
VAR_3 = FUNC_108(VAR_3)
VAR_179 = CLASS_0(VAR_51=VAR_3)
def FUNC_114():
if VAR_32:
if VAR_19.rollback_on_exception:
VAR_12.rollback()
import inspect
if inspect.isclass(VAR_32) and issubclass(VAR_32, Exception):
raise VAR_32(VAR_3)
else:
raise ValidationError(VAR_3)
if VAR_19.mute_messages:
FUNC_114()
return
if VAR_33 and VAR_171(VAR_3) in (list, tuple):
VAR_179.as_table = 1
if VAR_34 and VAR_171(VAR_3) in (list, tuple) and len(VAR_3) > 1:
VAR_179.as_list = 1
if VAR_19.print_messages and VAR_179.message:
print(f"Message: {strip_html_tags(VAR_179.message)}")
if VAR_31:
VAR_179.title = VAR_31
if not VAR_35 and VAR_32:
VAR_35 = 'red'
if VAR_35:
VAR_179.indicator = VAR_35
if VAR_38:
VAR_179.is_minimizable = VAR_38
if VAR_36:
VAR_179.alert = 1
if VAR_32:
VAR_179.raise_exception = 1
if VAR_37:
VAR_179.primary_action = VAR_37
if VAR_39:
VAR_179.wide = VAR_39
VAR_22.append(json.dumps(VAR_179))
if VAR_32 and hasattr(VAR_32, '__name__'):
VAR_1.response['exc_type'] = VAR_32.__name__
FUNC_114()
def FUNC_16():
VAR_1.message_log = []
def FUNC_17():
VAR_180 = []
for msg_out in VAR_1.message_log:
VAR_180.append(json.loads(msg_out))
return VAR_180
def FUNC_18():
if len(VAR_1.message_log) > 0:
VAR_1.message_log = VAR_1.message_log[:-1]
def VAR_96(VAR_3, VAR_40=ValidationError, VAR_31=None, VAR_38=None, VAR_39=None, VAR_34=False):
FUNC_15(VAR_3, VAR_32=VAR_40, VAR_31=title, VAR_35='red', VAR_38=is_minimizable, VAR_39=wide, VAR_34=as_list)
def FUNC_20(VAR_41, VAR_10=False, **VAR_42):
if VAR_10 == False:
VAR_10 = VAR_18.user
FUNC_93('eval_js', VAR_41, VAR_10=user, **VAR_42)
def FUNC_21(VAR_43, VAR_44=False):
from frappe.utils import touch_file
if not os.path.exists(VAR_43):
os.makedirs(VAR_43)
if VAR_44:
touch_file(os.path.join(VAR_43, "__init__.py"))
def FUNC_22(VAR_45):
VAR_1.session.user = VAR_45
VAR_1.session.sid = VAR_45
VAR_1.cache = {}
VAR_1.form_dict = CLASS_0()
VAR_1.jenv = None
VAR_1.session.data = CLASS_0()
VAR_1.role_permissions = {}
VAR_1.new_doc_templates = {}
VAR_1.user_perms = None
def FUNC_23():
from frappe.utils.user import UserPermissions
if not VAR_1.user_perms:
VAR_1.user_perms = UserPermissions(VAR_1.session.user)
return VAR_1.user_perms
def FUNC_24(VAR_45=None):
if not VAR_1.session:
return ["Guest"]
import frappe.permissions
return frappe.permissions.get_roles(VAR_45 or VAR_1.session.user)
def FUNC_25(VAR_46, VAR_47=None):
return VAR_16.headers.get(VAR_46, VAR_47)
def FUNC_26(VAR_48=[], VAR_49="", VAR_50="No Subject", VAR_51="No Message",
VAR_52=False, VAR_53=True, VAR_54=None, VAR_55=None,
VAR_56=None, VAR_57=None, VAR_58=None, VAR_59=1,
VAR_60=None, VAR_61=None, VAR_62=None, VAR_9=None, VAR_63=None, VAR_64=False,
VAR_65=[], VAR_66=[], VAR_67=None, VAR_68=None, VAR_69=None, VAR_70=None,
VAR_71=1, VAR_72=None, VAR_73=1, VAR_74=None, VAR_75=None, VAR_76=False,
VAR_77=None, VAR_78=None, VAR_79=None, VAR_80=None, VAR_81=False, VAR_82=False):
VAR_181 = None
if VAR_78:
VAR_51, VAR_181 = get_email_from_template(VAR_78, VAR_79)
VAR_51 = VAR_61 or VAR_51
if VAR_52:
from frappe.utils import md_to_html
VAR_51 = md_to_html(VAR_51)
if not VAR_53:
VAR_74 = True
from frappe.email import queue
queue.send(VAR_48=recipients, VAR_49=sender,
VAR_50=subject, VAR_51=message, VAR_181=text_content,
VAR_54 = VAR_62 or VAR_54, VAR_55 = VAR_9 or VAR_55, VAR_59=add_unsubscribe_link,
VAR_56=unsubscribe_method, VAR_57=unsubscribe_params, VAR_58=unsubscribe_message,
VAR_60=attachments, VAR_63=reply_to, VAR_65=cc, VAR_66=bcc, VAR_67=message_id, VAR_68=in_reply_to,
VAR_69=send_after, VAR_70=expose_recipients, VAR_71=send_priority, VAR_64=queue_separately,
VAR_72=communication, VAR_74=now, VAR_75=read_receipt, VAR_76=is_notification,
VAR_77=inline_images, VAR_80=header, VAR_81=print_letterhead, VAR_82=with_container)
VAR_83 = []
VAR_84 = []
VAR_85 = []
VAR_86 = {}
def FUNC_27(VAR_87=False, VAR_88=False, VAR_89=None):
if not VAR_89:
methods = ['GET', 'POST', 'PUT', 'DELETE']
def FUNC_115(VAR_129):
global VAR_83, VAR_84, VAR_85, VAR_86
VAR_83.append(VAR_129)
VAR_86[VAR_129] = VAR_89
if VAR_87:
VAR_84.append(VAR_129)
if VAR_88:
VAR_85.append(VAR_129)
return VAR_129
return FUNC_115
def FUNC_28():
def FUNC_116(VAR_129):
def FUNC_121(*VAR_79, **VAR_42):
if VAR_13.read_from_replica:
FUNC_6()
try:
VAR_225 = VAR_129(*VAR_79, **FUNC_74(VAR_129, VAR_42))
except:
raise
finally:
if VAR_1 and hasattr(VAR_1, 'primary_db'):
VAR_1.db.close()
VAR_1.db = VAR_1.primary_db
return VAR_225
return FUNC_121
return FUNC_116
def FUNC_29(VAR_90, VAR_51=False):
if VAR_1.flags.in_test:
return
if not isinstance(VAR_90, (tuple, list)):
VAR_90 = (roles,)
VAR_90 = set(VAR_90)
VAR_182 = set(FUNC_24())
if not VAR_90.intersection(VAR_182):
if VAR_51:
FUNC_15(FUNC_0('This action is only allowed for {}').format(FUNC_101(', '.join(VAR_90))), FUNC_0('Not Permitted'))
raise PermissionError
def FUNC_30(VAR_91):
try:
VAR_209 = FUNC_66('domains')
if VAR_91 in VAR_209:
return CLASS_0(FUNC_72(FUNC_66('domains')[VAR_91][0] + '.data'))
else:
return CLASS_0()
except ImportError:
if VAR_1.flags.in_test:
return CLASS_0()
else:
raise
def FUNC_31(VAR_10=None, VAR_62=None):
import frappe.cache_manager
if VAR_62:
frappe.cache_manager.clear_doctype_cache(VAR_62)
FUNC_38()
elif VAR_10:
frappe.cache_manager.clear_user_cache(VAR_10)
else: # everything
from frappe import translate
frappe.cache_manager.clear_user_cache()
frappe.cache_manager.clear_domain_cache()
translate.clear_cache()
FUNC_38()
VAR_1.cache = {}
VAR_1.new_doc_templates = {}
for VAR_129 in FUNC_66("clear_cache"):
FUNC_72(VAR_129)()
VAR_1.role_permissions = {}
def FUNC_32(VAR_62, VAR_10=None, VAR_92=False):
if VAR_92:
return False
if not VAR_10:
VAR_10 = VAR_1.session.user
import frappe.permissions
VAR_183 = frappe.permissions.get_role_permissions(VAR_62, VAR_10=user)
if VAR_183.get('select') and not VAR_183.get('read'):
return True
else:
return False
def FUNC_33(VAR_62=None, VAR_93="read", VAR_94=None, VAR_10=None, VAR_95=False, VAR_96=False):
if not VAR_62 and VAR_94:
VAR_62 = VAR_94.doctype
import frappe.permissions
VAR_179 = frappe.permissions.has_permission(VAR_62, VAR_93, VAR_94=doc, VAR_95=verbose, VAR_10=user, VAR_32=VAR_96)
if VAR_96 and not VAR_179:
if VAR_94:
frappe.throw(FUNC_0("No permission for {0}").format(VAR_94.doctype + " " + VAR_94.name))
else:
frappe.throw(FUNC_0("No permission for {0}").format(VAR_62))
return VAR_179
def FUNC_34(VAR_94=None, VAR_93='read', VAR_10=None, VAR_95=False, VAR_62=None):
if not VAR_10:
VAR_10 = VAR_18.user
if VAR_94:
if isinstance(VAR_94, string_types):
VAR_94 = FUNC_45(VAR_62, VAR_94)
VAR_62 = VAR_94.doctype
if VAR_94.flags.ignore_permissions:
return True
if hasattr(VAR_94, 'has_website_permission'):
return VAR_94.has_website_permission(VAR_93, VAR_10, VAR_95=verbose)
VAR_184 = (FUNC_66("has_website_permission") or {}).get(VAR_62, [])
if VAR_184:
for method in VAR_184:
VAR_218 = FUNC_73(method, VAR_94=doc, VAR_93=ptype, VAR_10=user, VAR_95=verbose)
if not VAR_218:
return False
return True
else:
return False
def FUNC_35(VAR_62):
def FUNC_117():
return VAR_12.sql_list("select VAR_9 from tabDocType where istable=1")
VAR_185 = FUNC_10().get_value("is_table", FUNC_117)
return VAR_62 in VAR_185
def FUNC_36(VAR_62, VAR_97, VAR_98=None, VAR_94=None):
from frappe.model.meta import get_field_precision
return get_field_precision(FUNC_48(VAR_62).get_field(VAR_97), VAR_94, VAR_98)
def FUNC_37(VAR_99=None, VAR_100=None):
import hashlib, time
from .utils import random_string
VAR_186 = hashlib.sha224(((VAR_99 or "") + repr(time.time()) + repr(random_string(8))).encode()).hexdigest()
if VAR_100:
VAR_186 = digest[:VAR_100]
return VAR_186
def FUNC_38():
VAR_187 = FUNC_37()
FUNC_10().set_value("metadata_version", VAR_187)
return VAR_187
def FUNC_39(VAR_62, VAR_101=None, VAR_102=None, VAR_103=False):
from frappe.model.create_new import get_new_doc
return get_new_doc(VAR_62, VAR_101, VAR_102, VAR_103=as_dict)
def FUNC_40(VAR_62, VAR_104, VAR_97, VAR_105=None):
import frappe.client
return frappe.client.set_value(VAR_62, VAR_104, VAR_97, VAR_105)
def FUNC_41(*VAR_79, **VAR_42):
if VAR_79 and len(VAR_79) > 1 and isinstance(VAR_79[1], text_type):
VAR_46 = FUNC_42(VAR_79[0], VAR_79[1])
VAR_94 = VAR_1.document_cache.get(VAR_46)
if VAR_94:
return VAR_94
VAR_94 = FUNC_10().hget('document_cache', VAR_46)
if VAR_94:
doc = FUNC_45(VAR_94)
VAR_1.document_cache[VAR_46] = VAR_94
return VAR_94
VAR_94 = FUNC_45(*VAR_79, **VAR_42)
return VAR_94
def FUNC_42(VAR_62, VAR_9):
return '{0}::{1}'.format(VAR_62, VAR_9)
def FUNC_43(VAR_62, VAR_9):
FUNC_10().hdel("last_modified", VAR_62)
VAR_46 = FUNC_42(VAR_62, VAR_9)
if VAR_46 in VAR_1.document_cache:
del VAR_1.document_cache[VAR_46]
FUNC_10().hdel('document_cache', VAR_46)
def FUNC_44(VAR_62, VAR_9, VAR_97, VAR_103=False):
VAR_94 = FUNC_41(VAR_62, VAR_9)
if isinstance(VAR_97, string_types):
if VAR_103:
VAR_96('Cannot make dict for single fieldname')
return VAR_94.get(VAR_97)
VAR_188 = [VAR_94.get(f) for f in VAR_97]
if VAR_103:
return CLASS_0(zip(VAR_97, VAR_188))
return VAR_188
def FUNC_45(*VAR_79, **VAR_42):
import frappe.model.document
VAR_94 = frappe.model.document.get_doc(*VAR_79, **VAR_42)
if VAR_79 and len(VAR_79) > 1:
VAR_46 = FUNC_42(VAR_79[0], VAR_79[1])
VAR_1.document_cache[VAR_46] = VAR_94
FUNC_10().hset('document_cache', VAR_46, VAR_94.as_dict())
return VAR_94
def FUNC_46(VAR_62, VAR_106=None, VAR_107="creation desc"):
VAR_174 = FUNC_83(
VAR_62,
VAR_106=filters,
limit_page_length=1,
VAR_107=order_by,
pluck="name"
)
if VAR_174:
return FUNC_45(VAR_62, VAR_174[0])
else:
raise DoesNotExistError
def FUNC_47(VAR_62):
return FUNC_45(VAR_62, doctype)
def FUNC_48(VAR_62, VAR_108=True):
import frappe.model.meta
return frappe.model.meta.get_meta(VAR_62, VAR_108=cached)
def FUNC_49(VAR_62):
import frappe.modules
return frappe.modules.load_doctype_module(VAR_62)
def FUNC_50(VAR_62=None, VAR_9=None, VAR_109=0, VAR_110=None, VAR_111=False,
VAR_92=False, VAR_19=None, VAR_112=False, VAR_113=True, VAR_114=False):
import frappe.model.delete_doc
frappe.model.delete_doc.delete_doc(VAR_62, VAR_9, VAR_109, VAR_110, VAR_111,
VAR_92, VAR_19, VAR_112, VAR_113, VAR_114)
def FUNC_51(VAR_62, VAR_9, VAR_109=0):
if VAR_12.exists(VAR_62, VAR_9):
FUNC_50(VAR_62, VAR_9, VAR_109=force)
def FUNC_52(VAR_62, VAR_109=False, VAR_115=False):
FUNC_53(FUNC_56(VAR_12.get_value("DocType", VAR_62, "module")), "doctype", FUNC_56(VAR_62),
VAR_109=force, VAR_115=reset_permissions)
def FUNC_53(VAR_91, VAR_116=None, VAR_117=None, VAR_109=False, VAR_115=False):
import frappe.modules
return frappe.modules.reload_doc(VAR_91, VAR_116, VAR_117, VAR_109=force, VAR_115=reset_permissions)
@FUNC_27()
def FUNC_54(*VAR_79, **VAR_42):
VAR_42.pop('ignore_permissions', None)
VAR_42.pop('cmd', None)
from frappe.model.rename_doc import .rename_doc
return FUNC_54(*VAR_79, **VAR_42)
def FUNC_55(VAR_118):
return importlib.import_module(VAR_118)
def FUNC_56(VAR_99):
return VAR_99.replace(' ', '_').replace('-', '_').lower()
def FUNC_57(VAR_99):
return VAR_99.replace('_', ' ').replace('-', ' ').title()
def FUNC_58(VAR_91, *VAR_119):
VAR_91 = FUNC_56(VAR_91)
return FUNC_61(VAR_1.module_app[VAR_91] + "." + VAR_91, *VAR_119)
def FUNC_59(VAR_120, *VAR_119):
return FUNC_61(VAR_120, *VAR_119)
def FUNC_60(*VAR_119):
return os.path.join(VAR_1.site_path, *VAR_119)
def FUNC_61(VAR_118, *VAR_119):
if not "public" in VAR_119:
joins = [FUNC_56(part) for part in VAR_119]
return os.path.join(os.path.dirname(FUNC_55(FUNC_56(VAR_118)).__file__), *VAR_119)
def FUNC_62(VAR_120):
return FUNC_69(os.path.join(os.path.dirname(FUNC_55(VAR_120).__file__), "modules.txt"))
def FUNC_63(VAR_121=True, VAR_24=None):
if not VAR_24:
sites_path = VAR_1.sites_path
VAR_189 = FUNC_69(os.path.join(VAR_24, "apps.txt"), VAR_126=True)
if VAR_121:
for VAR_219 in FUNC_69(os.path.join(VAR_1.site_path, "apps.txt")):
if VAR_219 not in VAR_189:
apps.append(VAR_219)
if "frappe" in VAR_189:
apps.remove("frappe")
VAR_189.insert(0, 'frappe')
return VAR_189
def FUNC_64(VAR_122=False, VAR_123=False):
if getattr(VAR_19, "in_install_db", True):
return []
if not VAR_12:
FUNC_5()
if not VAR_1.all_apps:
VAR_1.all_apps = FUNC_10().get_value('all_apps', FUNC_63)
VAR_190 = json.loads(VAR_12.get_global("installed_apps") or "[]")
if VAR_122:
VAR_190 = [VAR_219 for VAR_219 in VAR_1.all_apps if VAR_219 in VAR_190]
if VAR_123:
if 'frappe' in VAR_190:
installed.remove('frappe')
VAR_190.append('frappe')
return VAR_190
def FUNC_65():
if not hasattr(VAR_1, 'doc_events_hooks'):
VAR_184 = FUNC_66('doc_events', {})
VAR_179 = {}
for VAR_46, VAR_105 in iteritems(VAR_184):
if isinstance(VAR_46, tuple):
for VAR_62 in VAR_46:
FUNC_67(VAR_179, VAR_62, VAR_105)
else:
FUNC_67(VAR_179, VAR_46, VAR_105)
VAR_1.doc_events_hooks = VAR_179
return VAR_1.doc_events_hooks
def FUNC_66(VAR_124=None, VAR_47=None, VAR_120=None):
def FUNC_118(VAR_120=None):
VAR_184 = {}
for VAR_219 in [VAR_120] if VAR_120 else FUNC_64(VAR_122=True):
VAR_219 = "frappe" if VAR_219=="webnotes" else VAR_219
try:
VAR_226 = FUNC_55(VAR_219 + ".hooks")
except ImportError:
if VAR_1.flags.in_install_app:
pass
print('Could not find VAR_219 "{0}"'.format(VAR_120))
if not VAR_16:
sys.exit(1)
raise
for VAR_46 in dir(VAR_226):
if not VAR_46.startswith("_"):
FUNC_67(VAR_184, VAR_46, getattr(VAR_226, VAR_46))
return VAR_184
VAR_191 = VAR_13.developer_mode or False
if VAR_120:
VAR_184 = CLASS_0(FUNC_118(VAR_120))
else:
if VAR_191:
VAR_184 = CLASS_0(FUNC_118())
else:
VAR_184 = CLASS_0(FUNC_10().get_value("app_hooks", FUNC_118))
if VAR_124:
return VAR_184.get(VAR_124) or (VAR_47 if VAR_47 is not None else [])
else:
return VAR_184
def FUNC_67(VAR_125, VAR_46, VAR_105):
if isinstance(VAR_105, dict):
VAR_125.setdefault(VAR_46, {})
for inkey in VAR_105:
FUNC_67(VAR_125[VAR_46], inkey, VAR_105[inkey])
else:
VAR_125.setdefault(VAR_46, [])
if not isinstance(VAR_105, list):
VAR_105 = [value]
VAR_125[VAR_46].extend(VAR_105)
def FUNC_68():
VAR_192 = FUNC_10()
if VAR_13.db_name:
VAR_1.app_modules = VAR_192.get_value("app_modules")
VAR_1.module_app = VAR_192.get_value("module_app")
if not (VAR_1.app_modules and VAR_1.module_app):
VAR_1.module_app, VAR_1.app_modules = {}, {}
for VAR_219 in FUNC_63(True):
if VAR_219 == "webnotes":
VAR_219 = "frappe"
VAR_1.app_modules.setdefault(VAR_219, [])
for VAR_91 in FUNC_62(VAR_219):
VAR_91 = FUNC_56(VAR_91)
VAR_1.module_app[VAR_91] = VAR_219
VAR_1.app_modules[VAR_219].append(VAR_91)
if VAR_13.db_name:
VAR_192.set_value("app_modules", VAR_1.app_modules)
VAR_192.set_value("module_app", VAR_1.module_app)
def FUNC_69(VAR_43, VAR_126=False, VAR_127=True):
import frappe.utils
VAR_61 = FUNC_71(VAR_43, VAR_126=raise_not_found)
if VAR_61:
VAR_61 = frappe.utils.strip(VAR_61)
return [
p.strip() for p in VAR_61.splitlines()
if (not VAR_127) or (p.strip() and not p.startswith("#"))
]
else:
return []
def FUNC_70(VAR_43):
with open(VAR_43, 'r') as f:
return json.load(f)
def FUNC_71(VAR_43, VAR_126=False):
if isinstance(VAR_43, text_type):
VAR_43 = path.encode("utf-8")
if os.path.exists(VAR_43):
with open(VAR_43, "r") as f:
return FUNC_1(f.read())
elif VAR_126:
raise IOError("{} Not Found".format(VAR_43))
else:
return None
def FUNC_72(VAR_128):
VAR_120 = VAR_128.split(".")[0]
if not VAR_1.flags.in_install and VAR_120 not in FUNC_64():
VAR_96(FUNC_0("App {0} is not installed").format(VAR_120), AppNotInstalledError)
VAR_118 = '.'.join(VAR_128.split('.')[:-1])
VAR_193 = VAR_128.split('.')[-1]
return getattr(FUNC_55(VAR_118), VAR_193)
def FUNC_73(VAR_129, *VAR_79, **VAR_42):
if isinstance(VAR_129, string_types):
VAR_129 = FUNC_72(VAR_129)
VAR_194 = FUNC_74(VAR_129, VAR_42)
return VAR_129(*VAR_79, **VAR_194)
def FUNC_74(VAR_129, VAR_42):
if hasattr(VAR_129, 'fnargs'):
VAR_210 = VAR_129.fnargs
else:
try:
VAR_210, VAR_220, VAR_221, VAR_222 = inspect.getargspec(VAR_129)
except ValueError:
VAR_210 = inspect.getfullargspec(VAR_129).args
VAR_220 = inspect.getfullargspec(VAR_129).varargs
VAR_221 = inspect.getfullargspec(VAR_129).varkw
VAR_222 = inspect.getfullargspec(VAR_129).defaults
VAR_194 = {}
for VAR_223 in VAR_42:
if (VAR_223 in VAR_210) or VAR_221:
VAR_194[VAR_223] = VAR_42.get(VAR_223)
VAR_194.pop("ignore_permissions", None)
VAR_194.pop("flags", None)
return VAR_194
def FUNC_75(VAR_79, VAR_130=False, VAR_131=True):
VAR_79 = CLASS_0(VAR_79)
if not VAR_79.doctype_or_field:
VAR_79.doctype_or_field = 'DocField'
if not VAR_79.property_type:
VAR_79.property_type = VAR_12.get_value('DocField',
{'parent': 'DocField', 'fieldname': VAR_79.property}, 'fieldtype') or 'Data'
if not VAR_79.doctype:
VAR_211 = VAR_12.sql_list('select distinct parent from tabDocField where VAR_97=%s', VAR_79.fieldname)
else:
VAR_211 = [VAR_79.doctype]
for VAR_62 in VAR_211:
if not VAR_79.property_type:
VAR_79.property_type = VAR_12.get_value('DocField',
{'parent': VAR_62, 'fieldname': VAR_79.fieldname}, 'fieldtype') or 'Data'
VAR_212 = FUNC_45({
'doctype': "Property Setter",
'doctype_or_field': VAR_79.doctype_or_field,
'doc_type': VAR_62,
'field_name': VAR_79.fieldname,
'row_name': VAR_79.row_name,
'property': VAR_79.property,
'value': VAR_79.value,
'property_type': VAR_79.property_type or "Data",
'__islocal': 1
})
VAR_212.flags.ignore_validate = VAR_130
VAR_212.flags.validate_fields_for_doctype = VAR_131
VAR_212.validate_fieldtype_change()
VAR_212.insert()
def FUNC_76(VAR_43):
from frappe.core.doctype.data_import.data_import import .import_doc
FUNC_76(VAR_43)
def FUNC_77(VAR_94, VAR_132=True):
import .copy
def FUNC_119(VAR_174):
for df in VAR_174.meta.get("fields", {"no_copy": 1}):
if hasattr(VAR_174, df.fieldname):
VAR_174.set(df.fieldname, None)
VAR_195 = ['name', 'owner', 'creation', 'modified', 'modified_by']
if not VAR_1.flags.in_test:
VAR_195.append("docstatus")
if not isinstance(VAR_94, dict):
VAR_174 = VAR_94.as_dict()
else:
VAR_174 = VAR_94
VAR_196 = FUNC_45(FUNC_113.deepcopy(VAR_174))
VAR_196.set("__islocal", 1)
for VAR_97 in (VAR_195 + ['amended_from', 'amendment_date']):
VAR_196.set(VAR_97, None)
if not VAR_132:
FUNC_119(VAR_196)
for i, VAR_174 in enumerate(VAR_196.get_all_children()):
VAR_174.set("__islocal", 1)
for VAR_97 in VAR_195:
VAR_174.set(VAR_97, None)
if not VAR_132:
FUNC_119(VAR_174)
return VAR_196
def FUNC_78(VAR_133, VAR_134, VAR_135):
import frappe.utils
return frappe.utils.compare(VAR_133, VAR_134, VAR_135)
def FUNC_79(VAR_31, VAR_136, VAR_137=None, VAR_138=None, VAR_5=None,
VAR_139=None, VAR_37='/', VAR_140 = None, VAR_141=False,
VAR_142=None, VAR_78='message'):
VAR_1.message_title = VAR_31
VAR_1.message = VAR_136
VAR_1.response['type'] = 'page'
VAR_1.response['route'] = VAR_78
VAR_1.no_cache = 1
if VAR_138:
VAR_1.response['http_status_code'] = VAR_138
if not VAR_5:
context = {}
if not VAR_139:
if VAR_137:
VAR_139 = 'green'
elif VAR_138 and VAR_138 > 300:
VAR_139 = 'red'
else:
VAR_139 = 'blue'
VAR_5['indicator_color'] = VAR_139
VAR_5['primary_label'] = VAR_140
VAR_5['primary_action'] = VAR_37
VAR_5['error_code'] = VAR_138
VAR_5['fullpage'] = VAR_141
if VAR_142:
VAR_5['card_width'] = VAR_142
VAR_1.response['context'] = VAR_5
def FUNC_80(VAR_31, VAR_136, VAR_138=None, VAR_5=None, VAR_139=None):
VAR_67 = FUNC_37(VAR_100=8)
VAR_51 = {
'context': VAR_5 or {},
'http_status_code': VAR_138 or 200
}
VAR_51['context'].update({
'header': VAR_31,
'title': VAR_31,
'message': VAR_136
})
if VAR_139:
VAR_51['context'].update({
"indicator_color": VAR_139
})
FUNC_10().set_value("message_id:{0}".format(VAR_67), VAR_51, expires_in_sec=60)
VAR_197 = '/VAR_51?id={0}'.format(VAR_67)
if not getattr(VAR_1, 'is_ajax', False):
VAR_1.response["type"] = "redirect"
VAR_1.response["location"] = VAR_197
else:
return VAR_197
def FUNC_81(VAR_62, VAR_143=True):
import frappe.desk.reportview
return frappe.desk.reportview.build_match_conditions(VAR_62, VAR_143=as_condition)
def FUNC_82(VAR_62, *VAR_79, **VAR_42):
import frappe.model.db_query
return frappe.model.db_query.DatabaseQuery(VAR_62).execute(None, *VAR_79, **VAR_42)
def FUNC_83(VAR_62, *VAR_79, **VAR_42):
VAR_42["ignore_permissions"] = True
if not "limit_page_length" in VAR_42:
VAR_42["limit_page_length"] = 0
return FUNC_82(VAR_62, *VAR_79, **VAR_42)
def FUNC_84(*VAR_79, **VAR_42):
return VAR_12.get_value(*VAR_79, **VAR_42)
def FUNC_85(VAR_144, VAR_145=1):
from frappe.utils.response import .json_handler
return json.dumps(VAR_144, VAR_145=indent, sort_keys=True, VAR_47=json_handler, separators=(',', ': '))
def FUNC_86():
from frappe.utils import cint
return VAR_19.mute_emails or cint(VAR_13.get("mute_emails") or 0) or False
def FUNC_87(VAR_62):
from frappe.modules import .get_doctype_module, FUNC_58
VAR_43 = os.path.join(FUNC_58(get_doctype_module(VAR_62)), "doctype", FUNC_56(VAR_62), "test_records.json")
if os.path.exists(VAR_43):
with open(VAR_43, "r") as f:
return json.loads(f.read())
else:
return []
def FUNC_88(*VAR_79, **VAR_42):
import frappe.utils.formatters
return frappe.utils.formatters.format_value(*VAR_79, **VAR_42)
def FUNC_89(*VAR_79, **VAR_42):
import frappe.utils.formatters
return frappe.utils.formatters.format_value(*VAR_79, **VAR_42)
def FUNC_90(VAR_62=None, VAR_9=None, VAR_146=None, VAR_147=None,
VAR_136=None, VAR_148=False, VAR_94=None, VAR_149=None, VAR_150=0, VAR_151=None):
from frappe.website.render import build_page
from frappe.utils.pdf import get_pdf
VAR_1.form_dict.doctype = VAR_62
VAR_1.form_dict.name = VAR_9
VAR_1.form_dict.format = VAR_146
VAR_1.form_dict.style = VAR_147
VAR_1.form_dict.doc = VAR_94
VAR_1.form_dict.no_letterhead = VAR_150
VAR_198 = None
if VAR_151:
VAR_198 = {'password': VAR_151}
if not VAR_136:
html = build_page("printview")
if VAR_148:
return get_pdf(VAR_136, VAR_149 = output, VAR_198 = options)
else:
return VAR_136
def FUNC_91(VAR_62, VAR_9, VAR_152=None, VAR_146=None,
VAR_147=None, VAR_136=None, VAR_94=None, VAR_4=None, VAR_81=True, VAR_151=None):
from frappe.utils import .scrub_urls
if not VAR_152: file_name = VAR_9
VAR_152 = file_name.replace(' ','').replace('/','-')
VAR_199 = VAR_12.get_singles_dict("Print Settings")
VAR_200 = VAR_1.lang
if VAR_4: VAR_1.lang = VAR_4
VAR_1.flags.ignore_print_permissions = True
VAR_150 = not VAR_81
VAR_42 = dict(
VAR_146=print_format,
VAR_147=style,
VAR_136=html,
VAR_94=doc,
VAR_150=no_letterhead,
VAR_151=password
)
VAR_61 = ''
if int(VAR_199.send_print_as_pdf or 0):
VAR_213 = ".pdf"
VAR_42["as_pdf"] = True
VAR_61 = FUNC_90(VAR_62, VAR_9, **VAR_42)
else:
VAR_213 = ".html"
VAR_61 = scrub_urls(FUNC_90(VAR_62, VAR_9, **VAR_42)).encode('utf-8')
VAR_179 = {
"fname": VAR_152 + VAR_213,
"fcontent": VAR_61
}
VAR_1.flags.ignore_print_permissions = False
VAR_1.lang = VAR_200
return VAR_179
def FUNC_92(*VAR_79, **VAR_42):
import frappe.realtime
return frappe.realtime.publish_progress(*VAR_79, **VAR_42)
def FUNC_93(*VAR_79, **VAR_42):
import frappe.realtime
return frappe.realtime.publish_realtime(*VAR_79, **VAR_42)
def FUNC_94(VAR_153, VAR_46, VAR_154, VAR_155=False):
if VAR_153 not in VAR_1.cache:
VAR_1.cache[VAR_153] = {}
if VAR_46 not in VAR_1.cache[VAR_153]:
VAR_1.cache[VAR_153][VAR_46] = VAR_154()
elif VAR_1.cache[VAR_153][VAR_46]==None and VAR_155:
VAR_1.cache[VAR_153][VAR_46] = VAR_154()
return VAR_1.cache[VAR_153][VAR_46]
def FUNC_95(*VAR_79, **VAR_42):
import frappe.utils.background_jobs
return frappe.utils.background_jobs.enqueue(*VAR_79, **VAR_42)
def FUNC_96(*VAR_79, **VAR_42):
import frappe.utils.background_jobs
return frappe.utils.background_jobs.enqueue_doc(*VAR_79, **VAR_42)
def FUNC_97(VAR_62):
def FUNC_120():
VAR_214 = VAR_1.db.get_value("DocType", VAR_62, "module")
return VAR_1.module_app[FUNC_56(VAR_214)]
return FUNC_94("doctype_app", VAR_62, VAR_154=FUNC_120)
VAR_156 = {}
VAR_157 = None
def FUNC_98(VAR_91=None, VAR_158=False, VAR_159=True, VAR_160=None, VAR_161=100_000, VAR_162=20):
from frappe.utils.logger import get_logger
return get_logger(VAR_91=module, VAR_158=with_more_info, VAR_159=allow_site, VAR_160=filter, VAR_161=max_size, VAR_162=file_count)
def FUNC_99(VAR_51=None, VAR_31=FUNC_0("Error")):
if VAR_51:
if '\n' in VAR_31:
VAR_215, VAR_31 = title, VAR_51
else:
VAR_215 = VAR_51
else:
VAR_215 = FUNC_11()
return FUNC_45(dict(VAR_62='Error Log', VAR_215=FUNC_1(VAR_215),
method=VAR_31)).insert(VAR_92=True)
def FUNC_100(VAR_62, VAR_9):
VAR_136 = '<VAR_223 href="/VAR_219/Form/{VAR_62}/{VAR_9}" VAR_147="font-weight: FUNC_101;">{doctype_local} {VAR_9}</VAR_223>'
return VAR_136.format(
VAR_62=doctype,
VAR_9=name,
doctype_local=FUNC_0(VAR_62)
)
def FUNC_101(VAR_6):
return '<b>{0}</b>'.format(VAR_6)
def FUNC_102(VAR_163, VAR_164=None, VAR_165=None):
VAR_201 = {
"int": int,
"float": float,
"long": int,
"round": round
}
if '__' in VAR_163:
VAR_96('Illegal rule {0}. Cannot use "__"'.format(FUNC_101(VAR_163)))
if not VAR_164:
eval_globals = {}
VAR_164['__builtins__'] = {}
VAR_164.update(VAR_201)
return eval(VAR_163, VAR_164, VAR_165)
def FUNC_103(VAR_46):
if VAR_46 not in VAR_1.system_settings:
VAR_1.system_settings.update({VAR_46: VAR_12.get_single_value('System Settings', VAR_46)})
return VAR_1.system_settings.get(VAR_46)
def FUNC_104():
from frappe.core.doctype.domain_settings.domain_settings import .get_active_domains
return FUNC_104()
def FUNC_105(VAR_62, VAR_9, VAR_166=None, VAR_167=False, VAR_168=True):
VAR_202 = FUNC_48(VAR_62)
if VAR_202.track_changes:
VAR_216 = VAR_12.get_all('Version', VAR_106={
'ref_doctype': VAR_62,
'docname': VAR_9,
'order_by': 'creation' if VAR_167 else None,
'limit': VAR_166
}, VAR_34=1)
from frappe.chat.util import squashify, dictify, safe_json_loads
VAR_217 = []
for VAR_9 in VAR_216:
VAR_9 = squashify(VAR_9)
VAR_94 = FUNC_45('Version', VAR_9)
VAR_224 = VAR_94.data
VAR_224 = safe_json_loads(VAR_224)
VAR_224 = dictify(dict(
version=VAR_224,
VAR_10=VAR_94.owner,
creation=VAR_94.creation
))
VAR_217.append(VAR_224)
return VAR_217
else:
if VAR_168:
raise ValueError(FUNC_0('{0} has no VAR_217 tracked.').format(VAR_62))
@FUNC_27(VAR_87=True)
def FUNC_106():
return "pong"
def FUNC_107(VAR_169, VAR_7='utf-8'):
try:
VAR_169 = param.encode(VAR_7)
except Exception:
pass
return VAR_169
def FUNC_108(VAR_169, VAR_7='utf-8'):
try:
VAR_169 = param.decode(VAR_7)
except Exception:
pass
return VAR_169
def FUNC_109(VAR_170):
from frappe.utils import .parse_json
return FUNC_109(VAR_170)
def FUNC_110(VAR_171, VAR_172=1, VAR_173='en'):
VAR_203 = []
VAR_204 = VAR_0.Faker(VAR_173)
if VAR_171 not in dir(VAR_204):
raise ValueError('Not VAR_223 valid FUNC_110 VAR_171.')
else:
for i in range(VAR_172):
VAR_224 = getattr(VAR_204, VAR_171)()
VAR_203.append(VAR_224)
from frappe.chat.util import squashify
return squashify(VAR_203)
def FUNC_111(VAR_129):
from frappe.desk.search import .validate_and_sanitize_search_inputs as func
return func(VAR_129)
|
from __future__ import unicode_literals, print_function
from six import iteritems, binary_type, text_type, string_types, PY2
from werkzeug.local import Local, release_local
import os, sys, importlib, inspect, json
from past.builtins import cmp
import click
from .exceptions import *
from .utils.jinja import (get_jenv, get_template, render_template, get_email_from_template, get_jloader)
from .utils.lazy_loader import lazy_import
VAR_0 = lazy_import('faker')
if PY2:
reload(sys)
sys.setdefaultencoding("utf-8")
__version__ = '13.0.0-dev'
__title__ = "Frappe Framework"
VAR_1 = Local()
VAR_2 = {}
class CLASS_0(dict):
def __getattr__(self, VAR_46):
VAR_207 = self.get(VAR_46)
if not VAR_207 and VAR_46.startswith("__"):
raise AttributeError()
return VAR_207
def __setattr__(self, VAR_46, VAR_106):
self[VAR_46] = VAR_106
def __getstate__(self):
return self
def __setstate__(self, VAR_175):
self.update(VAR_175)
def FUNC_113(self, VAR_175):
super(CLASS_0, self).update(VAR_175)
return self
def FUNC_114(self):
return CLASS_0(dict(self).copy())
def FUNC_0(VAR_3, VAR_4=None, VAR_5=None):
from frappe.translate import get_full_dict
from frappe.utils import strip_html_tags, is_html
if not hasattr(VAR_1, 'lang'):
VAR_1.lang = VAR_4 or 'en'
if not VAR_4:
lang = VAR_1.lang
VAR_176 = VAR_3
if is_html(VAR_3):
msg = strip_html_tags(VAR_3)
VAR_3 = FUNC_1(VAR_3).strip()
VAR_177 = ''
if VAR_5:
VAR_208 = '{VAR_3}:{VAR_5}'.format(VAR_3=msg, VAR_5=context)
VAR_177 = get_full_dict(VAR_4).get(VAR_208)
if not VAR_177:
translated_string = get_full_dict(VAR_4).get(VAR_3)
return VAR_177 or VAR_176
def FUNC_1(VAR_6, VAR_7='utf-8'):
if isinstance(VAR_6, text_type):
return VAR_6
elif VAR_6==None:
return ''
elif isinstance(VAR_6, binary_type):
return text_type(VAR_6, VAR_7)
else:
return text_type(VAR_6)
def FUNC_2(VAR_8, VAR_9=None):
from frappe.translate import get_dict
return get_dict(VAR_8, VAR_9)
def FUNC_3(VAR_10, VAR_11=None):
from frappe.translate import .get_user_lang
VAR_1.lang = get_user_lang(VAR_10)
VAR_12 = VAR_1("db")
VAR_13 = VAR_1("conf")
VAR_14 = VAR_15 = VAR_1("form_dict")
VAR_16 = VAR_1("request")
VAR_17 = VAR_1("response")
VAR_18 = VAR_1("session")
VAR_10 = VAR_1("user")
VAR_19 = VAR_1("flags")
VAR_20 = VAR_1("error_log")
VAR_21 = VAR_1("debug_log")
VAR_22 = VAR_1("message_log")
VAR_4 = VAR_1("lang")
def FUNC_4(VAR_23, VAR_24=None, VAR_25=False):
if getattr(VAR_1, "initialised", None):
return
if not VAR_24:
sites_path = '.'
VAR_1.error_log = []
VAR_1.message_log = []
VAR_1.debug_log = []
VAR_1.realtime_log = []
VAR_1.flags = CLASS_0({
"currently_saving": [],
"redirect_location": "",
"in_install_db": False,
"in_install_app": False,
"in_import": False,
"in_test": False,
"mute_messages": False,
"ignore_links": False,
"mute_emails": False,
"has_dataurl": False,
"new_site": VAR_25
})
VAR_1.rollback_observers = []
VAR_1.before_commit = []
VAR_1.test_objects = {}
VAR_1.site = VAR_23
VAR_1.sites_path = VAR_24
VAR_1.site_path = os.path.join(VAR_24, VAR_23)
VAR_1.all_apps = None
VAR_1.request_ip = None
VAR_1.response = CLASS_0({"docs":[]})
VAR_1.task_id = None
VAR_1.conf = CLASS_0(FUNC_7())
VAR_1.lang = VAR_1.conf.lang or "en"
VAR_1.lang_full_dict = None
VAR_1.module_app = None
VAR_1.app_modules = None
VAR_1.system_settings = CLASS_0()
VAR_1.user = None
VAR_1.user_perms = None
VAR_1.session = None
VAR_1.role_permissions = {}
VAR_1.valid_columns = {}
VAR_1.new_doc_templates = {}
VAR_1.link_count = {}
VAR_1.jenv = None
VAR_1.jloader =None
VAR_1.cache = {}
VAR_1.document_cache = {}
VAR_1.meta_cache = {}
VAR_1.form_dict = CLASS_0()
VAR_1.session = CLASS_0()
VAR_1.dev_server = os.environ.get('DEV_SERVER', False)
FUNC_69()
VAR_1.initialised = True
def FUNC_5(VAR_23=None, VAR_26=None, VAR_27=True):
from frappe.database import get_db
if VAR_23:
FUNC_4(VAR_23)
VAR_1.db = get_db(VAR_10=VAR_26 or VAR_1.conf.db_name)
if VAR_27:
FUNC_22("Administrator")
def FUNC_6():
from frappe.database import get_db
VAR_10 = VAR_1.conf.db_name
VAR_152 = VAR_1.conf.db_password
if VAR_1.conf.different_credentials_for_replica:
VAR_10 = VAR_1.conf.replica_db_name
VAR_152 = VAR_1.conf.replica_db_password
VAR_1.replica_db = get_db(host=VAR_1.conf.replica_host, VAR_10=user, VAR_152=password)
VAR_1.primary_db = VAR_1.db
VAR_1.db = VAR_1.replica_db
def FUNC_7(VAR_24=None, VAR_28=None):
VAR_178 = {}
VAR_24 = sites_path or getattr(VAR_1, "sites_path", None)
VAR_28 = site_path or getattr(VAR_1, "site_path", None)
if VAR_24:
VAR_209 = os.path.join(VAR_24, "common_site_config.json")
if os.path.exists(VAR_209):
try:
VAR_178.update(FUNC_71(VAR_209))
except Exception as VAR_217:
click.secho("common_site_config.json is invalid", fg="red")
print(VAR_217)
if VAR_28:
VAR_210 = os.path.join(VAR_28, "site_config.json")
if os.path.exists(VAR_210):
try:
VAR_178.update(FUNC_71(VAR_210))
except Exception as VAR_217:
click.secho("{0}/VAR_210.json is invalid".format(VAR_1.site), fg="red")
print(VAR_217)
elif VAR_1.site and not VAR_1.flags.new_site:
raise IncorrectSitePath("{0} does not exist".format(VAR_1.site))
return CLASS_0(VAR_178)
def FUNC_8(VAR_23=None):
if hasattr(VAR_1, 'conf'):
return VAR_1.conf
else:
with CLASS_1(VAR_23):
return VAR_1.conf
class CLASS_1:
def __init__(self, VAR_23=None):
self.site = VAR_23 or ''
def __enter__(self):
FUNC_4(self.site)
return VAR_1
def __exit__(self, VAR_172, VAR_106, VAR_179):
FUNC_9()
def FUNC_9():
if VAR_12:
db.close()
release_local(VAR_1)
VAR_29 = None
def FUNC_10():
global VAR_29
if not VAR_29:
from frappe.utils.redis_wrapper import RedisWrapper
VAR_29 = RedisWrapper.from_url(VAR_13.get('redis_cache')
or "redis://localhost:11311")
return VAR_29
def FUNC_11():
from frappe.utils import .get_traceback
return FUNC_11()
def FUNC_12(VAR_3):
VAR_3 = FUNC_1(VAR_3)
if not VAR_16 or (not "cmd" in VAR_1.form_dict) or VAR_13.developer_mode:
print(VAR_3)
VAR_20.append({"exc": VAR_3})
def FUNC_13(VAR_30=True):
return FUNC_10().set_value('flag_print_sql', VAR_30)
def VAR_181(VAR_3):
if not VAR_16:
if VAR_13.get("logging") or False:
print(repr(VAR_3))
VAR_21.append(FUNC_1(VAR_3))
def FUNC_15(VAR_3, VAR_31=None, VAR_32=0, VAR_33=False, VAR_34=False, VAR_35=None, VAR_36=False, VAR_37=None, VAR_38=None, VAR_39=None):
from frappe.utils import strip_html_tags
VAR_3 = FUNC_109(VAR_3)
VAR_180 = CLASS_0(VAR_51=VAR_3)
def FUNC_115():
if VAR_32:
if VAR_19.rollback_on_exception:
VAR_12.rollback()
import inspect
if inspect.isclass(VAR_32) and issubclass(VAR_32, Exception):
raise VAR_32(VAR_3)
else:
raise ValidationError(VAR_3)
if VAR_19.mute_messages:
FUNC_115()
return
if VAR_33 and VAR_172(VAR_3) in (list, tuple):
VAR_180.as_table = 1
if VAR_34 and VAR_172(VAR_3) in (list, tuple) and len(VAR_3) > 1:
VAR_180.as_list = 1
if VAR_19.print_messages and VAR_180.message:
print(f"Message: {strip_html_tags(VAR_180.message)}")
if VAR_31:
VAR_180.title = VAR_31
if not VAR_35 and VAR_32:
VAR_35 = 'red'
if VAR_35:
VAR_180.indicator = VAR_35
if VAR_38:
VAR_180.is_minimizable = VAR_38
if VAR_36:
VAR_180.alert = 1
if VAR_32:
VAR_180.raise_exception = 1
if VAR_37:
VAR_180.primary_action = VAR_37
if VAR_39:
VAR_180.wide = VAR_39
VAR_22.append(json.dumps(VAR_180))
if VAR_32 and hasattr(VAR_32, '__name__'):
VAR_1.response['exc_type'] = VAR_32.__name__
FUNC_115()
def FUNC_16():
VAR_1.message_log = []
def FUNC_17():
VAR_181 = []
for msg_out in VAR_1.message_log:
VAR_181.append(json.loads(msg_out))
return VAR_181
def FUNC_18():
if len(VAR_1.message_log) > 0:
VAR_1.message_log = VAR_1.message_log[:-1]
def VAR_97(VAR_3, VAR_40=ValidationError, VAR_31=None, VAR_38=None, VAR_39=None, VAR_34=False):
FUNC_15(VAR_3, VAR_32=VAR_40, VAR_31=title, VAR_35='red', VAR_38=is_minimizable, VAR_39=wide, VAR_34=as_list)
def FUNC_20(VAR_41, VAR_10=False, **VAR_42):
if VAR_10 == False:
VAR_10 = VAR_18.user
FUNC_94('eval_js', VAR_41, VAR_10=user, **VAR_42)
def FUNC_21(VAR_43, VAR_44=False):
from frappe.utils import touch_file
if not os.path.exists(VAR_43):
os.makedirs(VAR_43)
if VAR_44:
touch_file(os.path.join(VAR_43, "__init__.py"))
def FUNC_22(VAR_45):
VAR_1.session.user = VAR_45
VAR_1.session.sid = VAR_45
VAR_1.cache = {}
VAR_1.form_dict = CLASS_0()
VAR_1.jenv = None
VAR_1.session.data = CLASS_0()
VAR_1.role_permissions = {}
VAR_1.new_doc_templates = {}
VAR_1.user_perms = None
def FUNC_23():
from frappe.utils.user import UserPermissions
if not VAR_1.user_perms:
VAR_1.user_perms = UserPermissions(VAR_1.session.user)
return VAR_1.user_perms
def FUNC_24(VAR_45=None):
if not VAR_1.session:
return ["Guest"]
import frappe.permissions
return frappe.permissions.get_roles(VAR_45 or VAR_1.session.user)
def FUNC_25(VAR_46, VAR_47=None):
return VAR_16.headers.get(VAR_46, VAR_47)
def FUNC_26(VAR_48=[], VAR_49="", VAR_50="No Subject", VAR_51="No Message",
VAR_52=False, VAR_53=True, VAR_54=None, VAR_55=None,
VAR_56=None, VAR_57=None, VAR_58=None, VAR_59=1,
VAR_60=None, VAR_61=None, VAR_62=None, VAR_9=None, VAR_63=None, VAR_64=False,
VAR_65=[], VAR_66=[], VAR_67=None, VAR_68=None, VAR_69=None, VAR_70=None,
VAR_71=1, VAR_72=None, VAR_73=1, VAR_74=None, VAR_75=None, VAR_76=False,
VAR_77=None, VAR_78=None, VAR_79=None, VAR_80=None, VAR_81=False, VAR_82=False):
VAR_182 = None
if VAR_78:
VAR_51, VAR_182 = get_email_from_template(VAR_78, VAR_79)
VAR_51 = VAR_61 or VAR_51
if VAR_52:
from frappe.utils import md_to_html
VAR_51 = md_to_html(VAR_51)
if not VAR_53:
VAR_74 = True
from frappe.email import queue
queue.send(VAR_48=recipients, VAR_49=sender,
VAR_50=subject, VAR_51=message, VAR_182=text_content,
VAR_54 = VAR_62 or VAR_54, VAR_55 = VAR_9 or VAR_55, VAR_59=add_unsubscribe_link,
VAR_56=unsubscribe_method, VAR_57=unsubscribe_params, VAR_58=unsubscribe_message,
VAR_60=attachments, VAR_63=reply_to, VAR_65=cc, VAR_66=bcc, VAR_67=message_id, VAR_68=in_reply_to,
VAR_69=send_after, VAR_70=expose_recipients, VAR_71=send_priority, VAR_64=queue_separately,
VAR_72=communication, VAR_74=now, VAR_75=read_receipt, VAR_76=is_notification,
VAR_77=inline_images, VAR_80=header, VAR_81=print_letterhead, VAR_82=with_container)
VAR_83 = []
VAR_84 = []
VAR_85 = []
VAR_86 = {}
def FUNC_27(VAR_87=False, VAR_88=False, VAR_89=None):
if not VAR_89:
methods = ['GET', 'POST', 'PUT', 'DELETE']
def FUNC_116(VAR_130):
global VAR_83, VAR_84, VAR_85, VAR_86
if hasattr(VAR_130, '__func__'):
VAR_130 = VAR_130.__func__
VAR_83.append(VAR_130)
VAR_86[VAR_130] = VAR_89
if VAR_87:
VAR_84.append(VAR_130)
if VAR_88:
VAR_85.append(VAR_130)
return VAR_130
return FUNC_116
def FUNC_28(VAR_90):
from frappe.utils import sanitize_html
VAR_183 = VAR_18['user'] == 'Guest'
if VAR_90 not in VAR_83 or VAR_183 and VAR_90 not in VAR_84:
VAR_97(FUNC_0("Not permitted"), PermissionError)
if VAR_183 and VAR_90 not in VAR_85:
for VAR_46, VAR_106 in VAR_15.items():
if isinstance(VAR_106, string_types):
VAR_15[VAR_46] = sanitize_html(VAR_106)
def FUNC_29():
def FUNC_117(VAR_130):
def FUNC_122(*VAR_79, **VAR_42):
if VAR_13.read_from_replica:
FUNC_6()
try:
VAR_227 = VAR_130(*VAR_79, **FUNC_75(VAR_130, VAR_42))
except:
raise
finally:
if VAR_1 and hasattr(VAR_1, 'primary_db'):
VAR_1.db.close()
VAR_1.db = VAR_1.primary_db
return VAR_227
return FUNC_122
return FUNC_117
def FUNC_30(VAR_91, VAR_51=False):
if VAR_1.flags.in_test:
return
if not isinstance(VAR_91, (tuple, list)):
VAR_91 = (roles,)
VAR_91 = set(VAR_91)
VAR_184 = set(FUNC_24())
if not VAR_91.intersection(VAR_184):
if VAR_51:
FUNC_15(FUNC_0('This action is only allowed for {}').format(FUNC_102(', '.join(VAR_91))), FUNC_0('Not Permitted'))
raise PermissionError
def FUNC_31(VAR_92):
try:
VAR_211 = FUNC_67('domains')
if VAR_92 in VAR_211:
return CLASS_0(FUNC_73(FUNC_67('domains')[VAR_92][0] + '.data'))
else:
return CLASS_0()
except ImportError:
if VAR_1.flags.in_test:
return CLASS_0()
else:
raise
def FUNC_32(VAR_10=None, VAR_62=None):
import frappe.cache_manager
if VAR_62:
frappe.cache_manager.clear_doctype_cache(VAR_62)
FUNC_39()
elif VAR_10:
frappe.cache_manager.clear_user_cache(VAR_10)
else: # everything
from frappe import translate
frappe.cache_manager.clear_user_cache()
frappe.cache_manager.clear_domain_cache()
translate.clear_cache()
FUNC_39()
VAR_1.cache = {}
VAR_1.new_doc_templates = {}
for VAR_130 in FUNC_67("clear_cache"):
FUNC_73(VAR_130)()
VAR_1.role_permissions = {}
def FUNC_33(VAR_62, VAR_10=None, VAR_93=False):
if VAR_93:
return False
if not VAR_10:
VAR_10 = VAR_1.session.user
import frappe.permissions
VAR_185 = frappe.permissions.get_role_permissions(VAR_62, VAR_10=user)
if VAR_185.get('select') and not VAR_185.get('read'):
return True
else:
return False
def FUNC_34(VAR_62=None, VAR_94="read", VAR_95=None, VAR_10=None, VAR_96=False, VAR_97=False):
if not VAR_62 and VAR_95:
VAR_62 = VAR_95.doctype
import frappe.permissions
VAR_180 = frappe.permissions.has_permission(VAR_62, VAR_94, VAR_95=doc, VAR_96=verbose, VAR_10=user, VAR_32=VAR_97)
if VAR_97 and not VAR_180:
if VAR_95:
frappe.throw(FUNC_0("No permission for {0}").format(VAR_95.doctype + " " + VAR_95.name))
else:
frappe.throw(FUNC_0("No permission for {0}").format(VAR_62))
return VAR_180
def FUNC_35(VAR_95=None, VAR_94='read', VAR_10=None, VAR_96=False, VAR_62=None):
if not VAR_10:
VAR_10 = VAR_18.user
if VAR_95:
if isinstance(VAR_95, string_types):
VAR_95 = FUNC_46(VAR_62, VAR_95)
VAR_62 = VAR_95.doctype
if VAR_95.flags.ignore_permissions:
return True
if hasattr(VAR_95, 'has_website_permission'):
return VAR_95.has_website_permission(VAR_94, VAR_10, VAR_96=verbose)
VAR_186 = (FUNC_67("has_website_permission") or {}).get(VAR_62, [])
if VAR_186:
for VAR_90 in VAR_186:
VAR_220 = FUNC_74(VAR_90, VAR_95=doc, VAR_94=ptype, VAR_10=user, VAR_96=verbose)
if not VAR_220:
return False
return True
else:
return False
def FUNC_36(VAR_62):
def FUNC_118():
return VAR_12.sql_list("select VAR_9 from tabDocType where istable=1")
VAR_187 = FUNC_10().get_value("is_table", FUNC_118)
return VAR_62 in VAR_187
def FUNC_37(VAR_62, VAR_98, VAR_99=None, VAR_95=None):
from frappe.model.meta import get_field_precision
return get_field_precision(FUNC_49(VAR_62).get_field(VAR_98), VAR_95, VAR_99)
def FUNC_38(VAR_100=None, VAR_101=None):
import hashlib, time
from .utils import random_string
VAR_188 = hashlib.sha224(((VAR_100 or "") + repr(time.time()) + repr(random_string(8))).encode()).hexdigest()
if VAR_101:
VAR_188 = digest[:VAR_101]
return VAR_188
def FUNC_39():
VAR_189 = FUNC_38()
FUNC_10().set_value("metadata_version", VAR_189)
return VAR_189
def FUNC_40(VAR_62, VAR_102=None, VAR_103=None, VAR_104=False):
from frappe.model.create_new import get_new_doc
return get_new_doc(VAR_62, VAR_102, VAR_103, VAR_104=as_dict)
def FUNC_41(VAR_62, VAR_105, VAR_98, VAR_106=None):
import frappe.client
return frappe.client.set_value(VAR_62, VAR_105, VAR_98, VAR_106)
def FUNC_42(*VAR_79, **VAR_42):
if VAR_79 and len(VAR_79) > 1 and isinstance(VAR_79[1], text_type):
VAR_46 = FUNC_43(VAR_79[0], VAR_79[1])
VAR_95 = VAR_1.document_cache.get(VAR_46)
if VAR_95:
return VAR_95
VAR_95 = FUNC_10().hget('document_cache', VAR_46)
if VAR_95:
doc = FUNC_46(VAR_95)
VAR_1.document_cache[VAR_46] = VAR_95
return VAR_95
VAR_95 = FUNC_46(*VAR_79, **VAR_42)
return VAR_95
def FUNC_43(VAR_62, VAR_9):
return '{0}::{1}'.format(VAR_62, VAR_9)
def FUNC_44(VAR_62, VAR_9):
FUNC_10().hdel("last_modified", VAR_62)
VAR_46 = FUNC_43(VAR_62, VAR_9)
if VAR_46 in VAR_1.document_cache:
del VAR_1.document_cache[VAR_46]
FUNC_10().hdel('document_cache', VAR_46)
def FUNC_45(VAR_62, VAR_9, VAR_98, VAR_104=False):
VAR_95 = FUNC_42(VAR_62, VAR_9)
if isinstance(VAR_98, string_types):
if VAR_104:
VAR_97('Cannot make dict for single fieldname')
return VAR_95.get(VAR_98)
VAR_190 = [VAR_95.get(f) for f in VAR_98]
if VAR_104:
return CLASS_0(zip(VAR_98, VAR_190))
return VAR_190
def FUNC_46(*VAR_79, **VAR_42):
import frappe.model.document
VAR_95 = frappe.model.document.get_doc(*VAR_79, **VAR_42)
if VAR_79 and len(VAR_79) > 1:
VAR_46 = FUNC_43(VAR_79[0], VAR_79[1])
VAR_1.document_cache[VAR_46] = VAR_95
FUNC_10().hset('document_cache', VAR_46, VAR_95.as_dict())
return VAR_95
def FUNC_47(VAR_62, VAR_107=None, VAR_108="creation desc"):
VAR_175 = FUNC_84(
VAR_62,
VAR_107=filters,
limit_page_length=1,
VAR_108=order_by,
pluck="name"
)
if VAR_175:
return FUNC_46(VAR_62, VAR_175[0])
else:
raise DoesNotExistError
def FUNC_48(VAR_62):
return FUNC_46(VAR_62, doctype)
def FUNC_49(VAR_62, VAR_109=True):
import frappe.model.meta
return frappe.model.meta.get_meta(VAR_62, VAR_109=cached)
def FUNC_50(VAR_62):
import frappe.modules
return frappe.modules.load_doctype_module(VAR_62)
def FUNC_51(VAR_62=None, VAR_9=None, VAR_110=0, VAR_111=None, VAR_112=False,
VAR_93=False, VAR_19=None, VAR_113=False, VAR_114=True, VAR_115=False):
import frappe.model.delete_doc
frappe.model.delete_doc.delete_doc(VAR_62, VAR_9, VAR_110, VAR_111, VAR_112,
VAR_93, VAR_19, VAR_113, VAR_114, VAR_115)
def FUNC_52(VAR_62, VAR_9, VAR_110=0):
if VAR_12.exists(VAR_62, VAR_9):
FUNC_51(VAR_62, VAR_9, VAR_110=force)
def FUNC_53(VAR_62, VAR_110=False, VAR_116=False):
FUNC_54(FUNC_57(VAR_12.get_value("DocType", VAR_62, "module")), "doctype", FUNC_57(VAR_62),
VAR_110=force, VAR_116=reset_permissions)
def FUNC_54(VAR_92, VAR_117=None, VAR_118=None, VAR_110=False, VAR_116=False):
import frappe.modules
return frappe.modules.reload_doc(VAR_92, VAR_117, VAR_118, VAR_110=force, VAR_116=reset_permissions)
@FUNC_27()
def FUNC_55(*VAR_79, **VAR_42):
VAR_42.pop('ignore_permissions', None)
VAR_42.pop('cmd', None)
from frappe.model.rename_doc import .rename_doc
return FUNC_55(*VAR_79, **VAR_42)
def FUNC_56(VAR_119):
return importlib.import_module(VAR_119)
def FUNC_57(VAR_100):
return VAR_100.replace(' ', '_').replace('-', '_').lower()
def FUNC_58(VAR_100):
return VAR_100.replace('_', ' ').replace('-', ' ').title()
def FUNC_59(VAR_92, *VAR_120):
VAR_92 = FUNC_57(VAR_92)
return FUNC_62(VAR_1.module_app[VAR_92] + "." + VAR_92, *VAR_120)
def FUNC_60(VAR_121, *VAR_120):
return FUNC_62(VAR_121, *VAR_120)
def FUNC_61(*VAR_120):
return os.path.join(VAR_1.site_path, *VAR_120)
def FUNC_62(VAR_119, *VAR_120):
if not "public" in VAR_120:
joins = [FUNC_57(part) for part in VAR_120]
return os.path.join(os.path.dirname(FUNC_56(FUNC_57(VAR_119)).__file__), *VAR_120)
def FUNC_63(VAR_121):
return FUNC_70(os.path.join(os.path.dirname(FUNC_56(VAR_121).__file__), "modules.txt"))
def FUNC_64(VAR_122=True, VAR_24=None):
if not VAR_24:
sites_path = VAR_1.sites_path
VAR_191 = FUNC_70(os.path.join(VAR_24, "apps.txt"), VAR_127=True)
if VAR_122:
for VAR_221 in FUNC_70(os.path.join(VAR_1.site_path, "apps.txt")):
if VAR_221 not in VAR_191:
apps.append(VAR_221)
if "frappe" in VAR_191:
apps.remove("frappe")
VAR_191.insert(0, 'frappe')
return VAR_191
def FUNC_65(VAR_123=False, VAR_124=False):
if getattr(VAR_19, "in_install_db", True):
return []
if not VAR_12:
FUNC_5()
if not VAR_1.all_apps:
VAR_1.all_apps = FUNC_10().get_value('all_apps', FUNC_64)
VAR_192 = json.loads(VAR_12.get_global("installed_apps") or "[]")
if VAR_123:
VAR_192 = [VAR_221 for VAR_221 in VAR_1.all_apps if VAR_221 in VAR_192]
if VAR_124:
if 'frappe' in VAR_192:
installed.remove('frappe')
VAR_192.append('frappe')
return VAR_192
def FUNC_66():
if not hasattr(VAR_1, 'doc_events_hooks'):
VAR_186 = FUNC_67('doc_events', {})
VAR_180 = {}
for VAR_46, VAR_106 in iteritems(VAR_186):
if isinstance(VAR_46, tuple):
for VAR_62 in VAR_46:
FUNC_68(VAR_180, VAR_62, VAR_106)
else:
FUNC_68(VAR_180, VAR_46, VAR_106)
VAR_1.doc_events_hooks = VAR_180
return VAR_1.doc_events_hooks
def FUNC_67(VAR_125=None, VAR_47=None, VAR_121=None):
def FUNC_119(VAR_121=None):
VAR_186 = {}
for VAR_221 in [VAR_121] if VAR_121 else FUNC_65(VAR_123=True):
VAR_221 = "frappe" if VAR_221=="webnotes" else VAR_221
try:
VAR_228 = FUNC_56(VAR_221 + ".hooks")
except ImportError:
if VAR_1.flags.in_install_app:
pass
print('Could not find VAR_221 "{0}"'.format(VAR_121))
if not VAR_16:
sys.exit(1)
raise
for VAR_46 in dir(VAR_228):
if not VAR_46.startswith("_"):
FUNC_68(VAR_186, VAR_46, getattr(VAR_228, VAR_46))
return VAR_186
VAR_193 = VAR_13.developer_mode or False
if VAR_121:
VAR_186 = CLASS_0(FUNC_119(VAR_121))
else:
if VAR_193:
VAR_186 = CLASS_0(FUNC_119())
else:
VAR_186 = CLASS_0(FUNC_10().get_value("app_hooks", FUNC_119))
if VAR_125:
return VAR_186.get(VAR_125) or (VAR_47 if VAR_47 is not None else [])
else:
return VAR_186
def FUNC_68(VAR_126, VAR_46, VAR_106):
if isinstance(VAR_106, dict):
VAR_126.setdefault(VAR_46, {})
for inkey in VAR_106:
FUNC_68(VAR_126[VAR_46], inkey, VAR_106[inkey])
else:
VAR_126.setdefault(VAR_46, [])
if not isinstance(VAR_106, list):
VAR_106 = [value]
VAR_126[VAR_46].extend(VAR_106)
def FUNC_69():
VAR_194 = FUNC_10()
if VAR_13.db_name:
VAR_1.app_modules = VAR_194.get_value("app_modules")
VAR_1.module_app = VAR_194.get_value("module_app")
if not (VAR_1.app_modules and VAR_1.module_app):
VAR_1.module_app, VAR_1.app_modules = {}, {}
for VAR_221 in FUNC_64(True):
if VAR_221 == "webnotes":
VAR_221 = "frappe"
VAR_1.app_modules.setdefault(VAR_221, [])
for VAR_92 in FUNC_63(VAR_221):
VAR_92 = FUNC_57(VAR_92)
VAR_1.module_app[VAR_92] = VAR_221
VAR_1.app_modules[VAR_221].append(VAR_92)
if VAR_13.db_name:
VAR_194.set_value("app_modules", VAR_1.app_modules)
VAR_194.set_value("module_app", VAR_1.module_app)
def FUNC_70(VAR_43, VAR_127=False, VAR_128=True):
import frappe.utils
VAR_61 = FUNC_72(VAR_43, VAR_127=raise_not_found)
if VAR_61:
VAR_61 = frappe.utils.strip(VAR_61)
return [
p.strip() for p in VAR_61.splitlines()
if (not VAR_128) or (p.strip() and not p.startswith("#"))
]
else:
return []
def FUNC_71(VAR_43):
with open(VAR_43, 'r') as f:
return json.load(f)
def FUNC_72(VAR_43, VAR_127=False):
if isinstance(VAR_43, text_type):
VAR_43 = path.encode("utf-8")
if os.path.exists(VAR_43):
with open(VAR_43, "r") as f:
return FUNC_1(f.read())
elif VAR_127:
raise IOError("{} Not Found".format(VAR_43))
else:
return None
def FUNC_73(VAR_129):
VAR_121 = VAR_129.split(".")[0]
if not VAR_1.flags.in_install and VAR_121 not in FUNC_65():
VAR_97(FUNC_0("App {0} is not installed").format(VAR_121), AppNotInstalledError)
VAR_119 = '.'.join(VAR_129.split('.')[:-1])
VAR_195 = VAR_129.split('.')[-1]
return getattr(FUNC_56(VAR_119), VAR_195)
def FUNC_74(VAR_130, *VAR_79, **VAR_42):
if isinstance(VAR_130, string_types):
VAR_130 = FUNC_73(VAR_130)
VAR_196 = FUNC_75(VAR_130, VAR_42)
return VAR_130(*VAR_79, **VAR_196)
def FUNC_75(VAR_130, VAR_42):
if hasattr(VAR_130, 'fnargs'):
VAR_212 = VAR_130.fnargs
else:
try:
VAR_212, VAR_222, VAR_223, VAR_224 = inspect.getargspec(VAR_130)
except ValueError:
VAR_212 = inspect.getfullargspec(VAR_130).args
VAR_222 = inspect.getfullargspec(VAR_130).varargs
VAR_223 = inspect.getfullargspec(VAR_130).varkw
VAR_224 = inspect.getfullargspec(VAR_130).defaults
VAR_196 = {}
for VAR_225 in VAR_42:
if (VAR_225 in VAR_212) or VAR_223:
VAR_196[VAR_225] = VAR_42.get(VAR_225)
VAR_196.pop("ignore_permissions", None)
VAR_196.pop("flags", None)
return VAR_196
def FUNC_76(VAR_79, VAR_131=False, VAR_132=True):
VAR_79 = CLASS_0(VAR_79)
if not VAR_79.doctype_or_field:
VAR_79.doctype_or_field = 'DocField'
if not VAR_79.property_type:
VAR_79.property_type = VAR_12.get_value('DocField',
{'parent': 'DocField', 'fieldname': VAR_79.property}, 'fieldtype') or 'Data'
if not VAR_79.doctype:
VAR_213 = VAR_12.sql_list('select distinct parent from tabDocField where VAR_98=%s', VAR_79.fieldname)
else:
VAR_213 = [VAR_79.doctype]
for VAR_62 in VAR_213:
if not VAR_79.property_type:
VAR_79.property_type = VAR_12.get_value('DocField',
{'parent': VAR_62, 'fieldname': VAR_79.fieldname}, 'fieldtype') or 'Data'
VAR_214 = FUNC_46({
'doctype': "Property Setter",
'doctype_or_field': VAR_79.doctype_or_field,
'doc_type': VAR_62,
'field_name': VAR_79.fieldname,
'row_name': VAR_79.row_name,
'property': VAR_79.property,
'value': VAR_79.value,
'property_type': VAR_79.property_type or "Data",
'__islocal': 1
})
VAR_214.flags.ignore_validate = VAR_131
VAR_214.flags.validate_fields_for_doctype = VAR_132
VAR_214.validate_fieldtype_change()
VAR_214.insert()
def FUNC_77(VAR_43):
from frappe.core.doctype.data_import.data_import import .import_doc
FUNC_77(VAR_43)
def FUNC_78(VAR_95, VAR_133=True):
import .copy
def FUNC_120(VAR_175):
for df in VAR_175.meta.get("fields", {"no_copy": 1}):
if hasattr(VAR_175, df.fieldname):
VAR_175.set(df.fieldname, None)
VAR_197 = ['name', 'owner', 'creation', 'modified', 'modified_by']
if not VAR_1.flags.in_test:
VAR_197.append("docstatus")
if not isinstance(VAR_95, dict):
VAR_175 = VAR_95.as_dict()
else:
VAR_175 = VAR_95
VAR_198 = FUNC_46(FUNC_114.deepcopy(VAR_175))
VAR_198.set("__islocal", 1)
for VAR_98 in (VAR_197 + ['amended_from', 'amendment_date']):
VAR_198.set(VAR_98, None)
if not VAR_133:
FUNC_120(VAR_198)
for i, VAR_175 in enumerate(VAR_198.get_all_children()):
VAR_175.set("__islocal", 1)
for VAR_98 in VAR_197:
VAR_175.set(VAR_98, None)
if not VAR_133:
FUNC_120(VAR_175)
return VAR_198
def FUNC_79(VAR_134, VAR_135, VAR_136):
import frappe.utils
return frappe.utils.compare(VAR_134, VAR_135, VAR_136)
def FUNC_80(VAR_31, VAR_137, VAR_138=None, VAR_139=None, VAR_5=None,
VAR_140=None, VAR_37='/', VAR_141 = None, VAR_142=False,
VAR_143=None, VAR_78='message'):
VAR_1.message_title = VAR_31
VAR_1.message = VAR_137
VAR_1.response['type'] = 'page'
VAR_1.response['route'] = VAR_78
VAR_1.no_cache = 1
if VAR_139:
VAR_1.response['http_status_code'] = VAR_139
if not VAR_5:
context = {}
if not VAR_140:
if VAR_138:
VAR_140 = 'green'
elif VAR_139 and VAR_139 > 300:
VAR_140 = 'red'
else:
VAR_140 = 'blue'
VAR_5['indicator_color'] = VAR_140
VAR_5['primary_label'] = VAR_141
VAR_5['primary_action'] = VAR_37
VAR_5['error_code'] = VAR_139
VAR_5['fullpage'] = VAR_142
if VAR_143:
VAR_5['card_width'] = VAR_143
VAR_1.response['context'] = VAR_5
def FUNC_81(VAR_31, VAR_137, VAR_139=None, VAR_5=None, VAR_140=None):
VAR_67 = FUNC_38(VAR_101=8)
VAR_51 = {
'context': VAR_5 or {},
'http_status_code': VAR_139 or 200
}
VAR_51['context'].update({
'header': VAR_31,
'title': VAR_31,
'message': VAR_137
})
if VAR_140:
VAR_51['context'].update({
"indicator_color": VAR_140
})
FUNC_10().set_value("message_id:{0}".format(VAR_67), VAR_51, expires_in_sec=60)
VAR_199 = '/VAR_51?id={0}'.format(VAR_67)
if not getattr(VAR_1, 'is_ajax', False):
VAR_1.response["type"] = "redirect"
VAR_1.response["location"] = VAR_199
else:
return VAR_199
def FUNC_82(VAR_62, VAR_144=True):
import frappe.desk.reportview
return frappe.desk.reportview.build_match_conditions(VAR_62, VAR_144=as_condition)
def FUNC_83(VAR_62, *VAR_79, **VAR_42):
import frappe.model.db_query
return frappe.model.db_query.DatabaseQuery(VAR_62).execute(None, *VAR_79, **VAR_42)
def FUNC_84(VAR_62, *VAR_79, **VAR_42):
VAR_42["ignore_permissions"] = True
if not "limit_page_length" in VAR_42:
VAR_42["limit_page_length"] = 0
return FUNC_83(VAR_62, *VAR_79, **VAR_42)
def FUNC_85(*VAR_79, **VAR_42):
return VAR_12.get_value(*VAR_79, **VAR_42)
def FUNC_86(VAR_145, VAR_146=1):
from frappe.utils.response import .json_handler
return json.dumps(VAR_145, VAR_146=indent, sort_keys=True, VAR_47=json_handler, separators=(',', ': '))
def FUNC_87():
from frappe.utils import cint
return VAR_19.mute_emails or cint(VAR_13.get("mute_emails") or 0) or False
def FUNC_88(VAR_62):
from frappe.modules import .get_doctype_module, FUNC_59
VAR_43 = os.path.join(FUNC_59(get_doctype_module(VAR_62)), "doctype", FUNC_57(VAR_62), "test_records.json")
if os.path.exists(VAR_43):
with open(VAR_43, "r") as f:
return json.loads(f.read())
else:
return []
def FUNC_89(*VAR_79, **VAR_42):
import frappe.utils.formatters
return frappe.utils.formatters.format_value(*VAR_79, **VAR_42)
def FUNC_90(*VAR_79, **VAR_42):
import frappe.utils.formatters
return frappe.utils.formatters.format_value(*VAR_79, **VAR_42)
def FUNC_91(VAR_62=None, VAR_9=None, VAR_147=None, VAR_148=None,
VAR_137=None, VAR_149=False, VAR_95=None, VAR_150=None, VAR_151=0, VAR_152=None):
from frappe.website.render import build_page
from frappe.utils.pdf import get_pdf
VAR_1.form_dict.doctype = VAR_62
VAR_1.form_dict.name = VAR_9
VAR_1.form_dict.format = VAR_147
VAR_1.form_dict.style = VAR_148
VAR_1.form_dict.doc = VAR_95
VAR_1.form_dict.no_letterhead = VAR_151
VAR_200 = None
if VAR_152:
VAR_200 = {'password': VAR_152}
if not VAR_137:
html = build_page("printview")
if VAR_149:
return get_pdf(VAR_137, VAR_150 = output, VAR_200 = options)
else:
return VAR_137
def FUNC_92(VAR_62, VAR_9, VAR_153=None, VAR_147=None,
VAR_148=None, VAR_137=None, VAR_95=None, VAR_4=None, VAR_81=True, VAR_152=None):
from frappe.utils import .scrub_urls
if not VAR_153: file_name = VAR_9
VAR_153 = file_name.replace(' ','').replace('/','-')
VAR_201 = VAR_12.get_singles_dict("Print Settings")
VAR_202 = VAR_1.lang
if VAR_4: VAR_1.lang = VAR_4
VAR_1.flags.ignore_print_permissions = True
VAR_151 = not VAR_81
VAR_42 = dict(
VAR_147=print_format,
VAR_148=style,
VAR_137=html,
VAR_95=doc,
VAR_151=no_letterhead,
VAR_152=password
)
VAR_61 = ''
if int(VAR_201.send_print_as_pdf or 0):
VAR_215 = ".pdf"
VAR_42["as_pdf"] = True
VAR_61 = FUNC_91(VAR_62, VAR_9, **VAR_42)
else:
VAR_215 = ".html"
VAR_61 = scrub_urls(FUNC_91(VAR_62, VAR_9, **VAR_42)).encode('utf-8')
VAR_180 = {
"fname": VAR_153 + VAR_215,
"fcontent": VAR_61
}
VAR_1.flags.ignore_print_permissions = False
VAR_1.lang = VAR_202
return VAR_180
def FUNC_93(*VAR_79, **VAR_42):
import frappe.realtime
return frappe.realtime.publish_progress(*VAR_79, **VAR_42)
def FUNC_94(*VAR_79, **VAR_42):
import frappe.realtime
return frappe.realtime.publish_realtime(*VAR_79, **VAR_42)
def FUNC_95(VAR_154, VAR_46, VAR_155, VAR_156=False):
if VAR_154 not in VAR_1.cache:
VAR_1.cache[VAR_154] = {}
if VAR_46 not in VAR_1.cache[VAR_154]:
VAR_1.cache[VAR_154][VAR_46] = VAR_155()
elif VAR_1.cache[VAR_154][VAR_46]==None and VAR_156:
VAR_1.cache[VAR_154][VAR_46] = VAR_155()
return VAR_1.cache[VAR_154][VAR_46]
def FUNC_96(*VAR_79, **VAR_42):
import frappe.utils.background_jobs
return frappe.utils.background_jobs.enqueue(*VAR_79, **VAR_42)
def FUNC_97(*VAR_79, **VAR_42):
import frappe.utils.background_jobs
return frappe.utils.background_jobs.enqueue_doc(*VAR_79, **VAR_42)
def FUNC_98(VAR_62):
def FUNC_121():
VAR_216 = VAR_1.db.get_value("DocType", VAR_62, "module")
return VAR_1.module_app[FUNC_57(VAR_216)]
return FUNC_95("doctype_app", VAR_62, VAR_155=FUNC_121)
VAR_157 = {}
VAR_158 = None
def FUNC_99(VAR_92=None, VAR_159=False, VAR_160=True, VAR_161=None, VAR_162=100_000, VAR_163=20):
from frappe.utils.logger import get_logger
return get_logger(VAR_92=module, VAR_159=with_more_info, VAR_160=allow_site, VAR_161=filter, VAR_162=max_size, VAR_163=file_count)
def FUNC_100(VAR_51=None, VAR_31=FUNC_0("Error")):
if VAR_51:
if '\n' in VAR_31:
VAR_217, VAR_31 = title, VAR_51
else:
VAR_217 = VAR_51
else:
VAR_217 = FUNC_11()
return FUNC_46(dict(VAR_62='Error Log', VAR_217=FUNC_1(VAR_217),
VAR_90=VAR_31)).insert(VAR_93=True)
def FUNC_101(VAR_62, VAR_9):
VAR_137 = '<VAR_225 href="/VAR_221/Form/{VAR_62}/{VAR_9}" VAR_148="font-weight: FUNC_102;">{doctype_local} {VAR_9}</VAR_225>'
return VAR_137.format(
VAR_62=doctype,
VAR_9=name,
doctype_local=FUNC_0(VAR_62)
)
def FUNC_102(VAR_6):
return '<b>{0}</b>'.format(VAR_6)
def FUNC_103(VAR_164, VAR_165=None, VAR_166=None):
VAR_203 = {
"int": int,
"float": float,
"long": int,
"round": round
}
if '__' in VAR_164:
VAR_97('Illegal rule {0}. Cannot use "__"'.format(FUNC_102(VAR_164)))
if not VAR_165:
eval_globals = {}
VAR_165['__builtins__'] = {}
VAR_165.update(VAR_203)
return eval(VAR_164, VAR_165, VAR_166)
def FUNC_104(VAR_46):
if VAR_46 not in VAR_1.system_settings:
VAR_1.system_settings.update({VAR_46: VAR_12.get_single_value('System Settings', VAR_46)})
return VAR_1.system_settings.get(VAR_46)
def FUNC_105():
from frappe.core.doctype.domain_settings.domain_settings import .get_active_domains
return FUNC_105()
def FUNC_106(VAR_62, VAR_9, VAR_167=None, VAR_168=False, VAR_169=True):
VAR_204 = FUNC_49(VAR_62)
if VAR_204.track_changes:
VAR_218 = VAR_12.get_all('Version', VAR_107={
'ref_doctype': VAR_62,
'docname': VAR_9,
'order_by': 'creation' if VAR_168 else None,
'limit': VAR_167
}, VAR_34=1)
from frappe.chat.util import squashify, dictify, safe_json_loads
VAR_219 = []
for VAR_9 in VAR_218:
VAR_9 = squashify(VAR_9)
VAR_95 = FUNC_46('Version', VAR_9)
VAR_226 = VAR_95.data
VAR_226 = safe_json_loads(VAR_226)
VAR_226 = dictify(dict(
version=VAR_226,
VAR_10=VAR_95.owner,
creation=VAR_95.creation
))
VAR_219.append(VAR_226)
return VAR_219
else:
if VAR_169:
raise ValueError(FUNC_0('{0} has no VAR_219 tracked.').format(VAR_62))
@FUNC_27(VAR_87=True)
def FUNC_107():
return "pong"
def FUNC_108(VAR_170, VAR_7='utf-8'):
try:
VAR_170 = param.encode(VAR_7)
except Exception:
pass
return VAR_170
def FUNC_109(VAR_170, VAR_7='utf-8'):
try:
VAR_170 = param.decode(VAR_7)
except Exception:
pass
return VAR_170
def FUNC_110(VAR_171):
from frappe.utils import .parse_json
return FUNC_110(VAR_171)
def FUNC_111(VAR_172, VAR_173=1, VAR_174='en'):
VAR_205 = []
VAR_206 = VAR_0.Faker(VAR_174)
if VAR_172 not in dir(VAR_206):
raise ValueError('Not VAR_225 valid FUNC_111 VAR_172.')
else:
for i in range(VAR_173):
VAR_226 = getattr(VAR_206, VAR_172)()
VAR_205.append(VAR_226)
from frappe.chat.util import squashify
return squashify(VAR_205)
def FUNC_112(VAR_130):
from frappe.desk.search import .validate_and_sanitize_search_inputs as func
return func(VAR_130)
| [
1,
2,
5,
10,
14,
20,
21,
25,
26,
28,
29,
30,
31,
35,
38,
41,
61,
70,
73,
76,
78,
81,
82,
84,
89,
92,
93,
95,
106,
109,
114,
119,
120,
129,
133,
135,
140,
143,
164,
169,
173,
177,
181,
189,
198,
200,
202,
205,
213,
217,
222,
226,
228,
229,
232,
237,
240,
249,
260,
262,
266,
268,
271,
276,
280,
283,
288,
290,
291,
301,
306,
309,
314,
316,
319,
322,
327,
329,
334,
345,
348,
354,
359,
363,
366,
369,
372,
375,
378,
381,
384,
387,
390,
393,
396,
398,
401,
403,
406,
411,
413,
417,
420,
424,
429,
432,
438,
441,
444,
455,
461,
468,
471,
475,
484,
485,
513,
515,
519,
522,
532,
537,
542,
545,
547,
552,
555,
559,
561,
564,
567,
569,
571,
577,
586,
590,
593,
597,
606,
619,
620,
623,
640,
643,
645,
649,
652,
655,
660,
663,
670,
678,
680,
683,
688,
691,
695,
697,
700,
701,
704,
709,
712,
713,
715,
718,
723,
726,
731,
740,
746,
749,
755,
760,
764,
768,
769,
775,
776,
778,
780,
783,
790,
797,
802,
805,
808,
810,
811,
814,
815,
817,
821,
822,
827,
829,
843,
847,
852,
856,
860,
871,
876,
881,
884,
890,
893,
898,
903,
906,
910,
914,
918,
921,
926,
929,
933,
936,
939,
942,
948,
952,
957,
959,
964,
968,
970,
975,
978,
981,
983,
986,
991,
993,
1005,
1007,
1009,
1012,
1024,
1025,
1035,
1037,
1045,
1050,
1053,
1055,
1060,
1065,
1070,
1074,
1078,
1089,
1093,
1097,
1101,
1108,
1113,
1118,
1126,
1132,
1136,
1141,
1143,
1145,
1157,
1162,
1165,
1167,
1170,
1179,
1184,
1189,
1205,
1210,
1214,
1219,
1221,
1224,
1229,
1234,
1237,
1240,
1243,
1246,
1248,
1251,
1267,
1272,
1290,
1293,
1296,
1304,
1312,
1314,
1318,
1322,
1325,
1327,
1338,
1343,
1346,
1350,
1353,
1358,
1361,
1368,
1370,
1371,
1373,
1374,
1376,
1377,
1382,
1386,
1393,
1395,
1396,
1398,
1399,
1401,
1402,
1409,
1412,
1414,
1423,
1427,
1431,
1441,
1444,
1449,
1452,
1457,
1461,
1470,
1477,
1481,
1484,
1489,
1493,
1496,
1498,
1500,
1501,
1504,
1506,
1515,
1524,
1529,
1531,
1533,
1535,
1538,
1547,
1550,
1560,
1562,
1565,
1569,
1573,
1576,
1578,
1580,
1582,
1586,
1597,
1601,
1611,
1616,
1618,
1625,
1628,
1629,
1630,
1631,
1632,
1633,
1641,
1644,
1652,
1655,
1664,
1667,
1670,
1674,
1679,
1683,
1687,
1689,
1709,
1711,
1713,
1717,
1725,
1727,
1732,
1736,
1737,
1744,
1745,
1752,
1756,
1766,
1769,
1773,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
43,
63,
64,
65,
66,
67,
97,
108,
109,
110,
111,
116,
137,
204,
205,
206,
207,
208,
209,
234,
235,
285,
294,
303,
308,
309,
310,
321,
322,
323,
331,
332,
333,
334,
335,
336,
337,
338,
339,
340,
341,
342,
343,
419,
420,
421,
422,
431,
432,
433,
434,
443,
444,
445,
463,
470,
471,
472,
473,
483,
484,
485,
486,
487,
488,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
499,
500,
501,
502,
503,
504,
505,
506,
507,
508,
509,
539,
540,
541,
542,
543,
544,
545,
546,
547,
548,
549,
550,
551,
592,
593,
594,
622,
623,
624,
625,
662,
663,
664,
665,
666,
667,
682,
683,
684,
685,
686,
687,
720,
728,
733,
742,
748,
749,
750,
751,
752,
757,
804,
805,
806,
807,
808,
809,
810,
811,
812,
813,
814,
815,
816,
817,
818,
831,
845,
849,
859,
860,
861,
862,
863,
864,
865,
866,
867,
873,
878,
883,
884,
885,
886,
887,
888,
889,
896,
897,
898,
899,
900,
908,
912,
916,
920,
921,
922,
923,
928,
929,
930,
931,
935,
936,
937,
941,
942,
943,
944,
950,
954,
972,
995,
1011,
1012,
1013,
1014,
1015,
1052,
1053,
1054,
1055,
1056,
1057,
1058,
1072,
1095,
1110,
1115,
1128,
1138,
1169,
1170,
1171,
1172,
1207,
1212,
1250,
1251,
1252,
1253,
1254,
1255,
1256,
1257,
1258,
1259,
1260,
1261,
1262,
1263,
1264,
1271,
1272,
1273,
1274,
1275,
1276,
1277,
1278,
1279,
1280,
1281,
1282,
1283,
1284,
1316,
1317,
1318,
1319,
1320,
1321,
1322,
1323,
1324,
1325,
1326,
1355,
1360,
1361,
1362,
1363,
1364,
1365,
1366,
1367,
1368,
1369,
1370,
1371,
1372,
1373,
1374,
1375,
1376,
1377,
1378,
1379,
1384,
1385,
1386,
1387,
1388,
1389,
1390,
1391,
1392,
1393,
1394,
1395,
1396,
1397,
1398,
1399,
1400,
1401,
1402,
1403,
1404,
1411,
1412,
1413,
1414,
1415,
1416,
1417,
1418,
1419,
1420,
1421,
1433,
1443,
1444,
1445,
1446,
1451,
1452,
1453,
1454,
1460,
1461,
1462,
1463,
1464,
1465,
1466,
1467,
1537,
1538,
1539,
1540,
1541,
1542,
1543,
1544,
1549,
1550,
1551,
1552,
1553,
1554,
1555,
1556,
1557,
1558,
1564,
1565,
1566,
1567,
1568,
1569,
1570,
1584,
1585,
1586,
1587,
1588,
1589,
1590,
1591,
1592,
1593,
1594,
1599,
1600,
1601,
1602,
1603,
1604,
1605,
1606,
1607,
1608,
1622,
1627,
1657,
1685,
1686,
1687,
1688,
1689,
1690,
1691,
1692,
1693,
1694,
1695,
1696,
1697,
1698,
1699,
1700,
56,
274
] | [
1,
2,
5,
10,
14,
20,
21,
25,
26,
28,
29,
30,
31,
35,
38,
41,
61,
70,
73,
76,
78,
81,
82,
84,
89,
92,
93,
95,
106,
109,
114,
119,
120,
129,
133,
135,
140,
143,
164,
169,
173,
177,
181,
189,
198,
200,
202,
205,
213,
217,
222,
226,
228,
229,
232,
237,
240,
249,
260,
262,
266,
268,
271,
276,
280,
283,
288,
290,
291,
301,
306,
309,
314,
316,
319,
322,
327,
329,
334,
345,
348,
354,
359,
363,
366,
369,
372,
375,
378,
381,
384,
387,
390,
393,
396,
398,
401,
403,
406,
411,
413,
417,
420,
424,
429,
432,
438,
441,
444,
455,
461,
468,
471,
475,
484,
485,
513,
515,
519,
522,
532,
537,
542,
545,
547,
552,
555,
558,
559,
560,
563,
566,
569,
572,
574,
576,
579,
583,
585,
586,
590,
596,
605,
609,
612,
616,
625,
638,
639,
642,
659,
662,
664,
668,
671,
674,
679,
682,
689,
697,
699,
702,
707,
710,
714,
716,
719,
720,
723,
728,
731,
732,
734,
737,
742,
745,
750,
759,
765,
768,
774,
779,
783,
787,
788,
794,
795,
797,
799,
802,
809,
816,
821,
824,
827,
829,
830,
833,
834,
836,
840,
841,
846,
848,
862,
866,
871,
875,
879,
890,
895,
900,
903,
909,
912,
917,
922,
925,
929,
933,
937,
940,
945,
948,
952,
955,
958,
961,
967,
971,
976,
978,
983,
987,
989,
994,
997,
1000,
1002,
1005,
1010,
1012,
1024,
1026,
1028,
1031,
1043,
1044,
1054,
1056,
1064,
1069,
1072,
1074,
1079,
1084,
1089,
1093,
1097,
1108,
1112,
1116,
1120,
1127,
1132,
1137,
1145,
1151,
1155,
1160,
1162,
1164,
1176,
1181,
1184,
1186,
1189,
1198,
1203,
1208,
1224,
1229,
1233,
1238,
1240,
1243,
1248,
1253,
1256,
1259,
1262,
1265,
1267,
1270,
1286,
1291,
1309,
1312,
1315,
1323,
1331,
1333,
1337,
1341,
1344,
1346,
1357,
1362,
1365,
1369,
1372,
1377,
1380,
1387,
1389,
1390,
1392,
1393,
1395,
1396,
1401,
1405,
1412,
1414,
1415,
1417,
1418,
1420,
1421,
1428,
1431,
1433,
1442,
1446,
1450,
1460,
1463,
1468,
1471,
1476,
1480,
1489,
1496,
1500,
1503,
1508,
1512,
1515,
1517,
1519,
1520,
1523,
1525,
1534,
1543,
1548,
1550,
1552,
1554,
1557,
1566,
1569,
1579,
1581,
1584,
1588,
1592,
1595,
1597,
1599,
1601,
1605,
1616,
1620,
1630,
1635,
1637,
1644,
1647,
1648,
1649,
1650,
1651,
1652,
1660,
1663,
1671,
1674,
1683,
1686,
1689,
1693,
1698,
1702,
1706,
1708,
1728,
1730,
1732,
1736,
1744,
1746,
1751,
1755,
1756,
1763,
1764,
1771,
1775,
1785,
1788,
1792,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
43,
63,
64,
65,
66,
67,
97,
108,
109,
110,
111,
116,
137,
204,
205,
206,
207,
208,
209,
234,
235,
285,
294,
303,
308,
309,
310,
321,
322,
323,
331,
332,
333,
334,
335,
336,
337,
338,
339,
340,
341,
342,
343,
419,
420,
421,
422,
431,
432,
433,
434,
443,
444,
445,
463,
470,
471,
472,
473,
483,
484,
485,
486,
487,
488,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
499,
500,
501,
502,
503,
504,
505,
506,
507,
508,
509,
539,
540,
541,
542,
543,
544,
545,
546,
547,
548,
549,
550,
551,
611,
612,
613,
641,
642,
643,
644,
681,
682,
683,
684,
685,
686,
701,
702,
703,
704,
705,
706,
739,
747,
752,
761,
767,
768,
769,
770,
771,
776,
823,
824,
825,
826,
827,
828,
829,
830,
831,
832,
833,
834,
835,
836,
837,
850,
864,
868,
878,
879,
880,
881,
882,
883,
884,
885,
886,
892,
897,
902,
903,
904,
905,
906,
907,
908,
915,
916,
917,
918,
919,
927,
931,
935,
939,
940,
941,
942,
947,
948,
949,
950,
954,
955,
956,
960,
961,
962,
963,
969,
973,
991,
1014,
1030,
1031,
1032,
1033,
1034,
1071,
1072,
1073,
1074,
1075,
1076,
1077,
1091,
1114,
1129,
1134,
1147,
1157,
1188,
1189,
1190,
1191,
1226,
1231,
1269,
1270,
1271,
1272,
1273,
1274,
1275,
1276,
1277,
1278,
1279,
1280,
1281,
1282,
1283,
1290,
1291,
1292,
1293,
1294,
1295,
1296,
1297,
1298,
1299,
1300,
1301,
1302,
1303,
1335,
1336,
1337,
1338,
1339,
1340,
1341,
1342,
1343,
1344,
1345,
1374,
1379,
1380,
1381,
1382,
1383,
1384,
1385,
1386,
1387,
1388,
1389,
1390,
1391,
1392,
1393,
1394,
1395,
1396,
1397,
1398,
1403,
1404,
1405,
1406,
1407,
1408,
1409,
1410,
1411,
1412,
1413,
1414,
1415,
1416,
1417,
1418,
1419,
1420,
1421,
1422,
1423,
1430,
1431,
1432,
1433,
1434,
1435,
1436,
1437,
1438,
1439,
1440,
1452,
1462,
1463,
1464,
1465,
1470,
1471,
1472,
1473,
1479,
1480,
1481,
1482,
1483,
1484,
1485,
1486,
1556,
1557,
1558,
1559,
1560,
1561,
1562,
1563,
1568,
1569,
1570,
1571,
1572,
1573,
1574,
1575,
1576,
1577,
1583,
1584,
1585,
1586,
1587,
1588,
1589,
1603,
1604,
1605,
1606,
1607,
1608,
1609,
1610,
1611,
1612,
1613,
1618,
1619,
1620,
1621,
1622,
1623,
1624,
1625,
1626,
1627,
1641,
1646,
1676,
1704,
1705,
1706,
1707,
1708,
1709,
1710,
1711,
1712,
1713,
1714,
1715,
1716,
1717,
1718,
1719,
56,
274
] |
0CWE-22
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.utils import safe_join
from opendiamond.dataretriever.util import read_file_list, write_data
BASEURL = 'augment'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local file path, otherwise http.
INDEXDIR = DATAROOT = None
ITEMS_PER_ITERATION = int(1e4)
KEYWORD = 'yellowthroat'
"""
Example url:
/augment/root/<ROOT_DIR>/distributed/<id>of<N>/ \
keywords/<d/r ([d]eterminant/[r]andom)>_<random_seed>_<base_rate>
/augment/root/STREAM/distributed/1of2/keywords/d_42_1.0
"""
def init(config):
    """Initialize module-level paths from the dataretriever *config*."""
    global INDEXDIR, DATAROOT  # pylint: disable=global-statement
    DATAROOT = config.dataroot
    INDEXDIR = 'STREAM'
scope_blueprint = Blueprint('augment_store', __name__)
_log = logging.getLogger(__name__)
@scope_blueprint.route('/root/<rootdir>/distributed/<int:index>of<int:total>' +
                       '/keywords/<params>')
@scope_blueprint.route('/root/<rootdir>/keywords/<params>')
@scope_blueprint.route('/root/<rootdir>/distributed/<int:index>of<int:total>' +
                       '/keywords/<params>/start/<int:start>/limit/<int:limit>')
@scope_blueprint.route('/root/<rootdir>/keywords/<params>' +
                       '/start/<int:start>/limit/<int:limit>')
def get_scope(rootdir, index=0, total=1, params=None, start=0, limit=sys.maxsize):
    """Stream an XML <objectlist> scope for this server's shard.

    *rootdir* "0" is an alias for the default INDEXDIR. *params* encodes
    mix type / seed / base rate (see decode_params); *index*/*total*
    select this server's slice; *start*/*limit* window the result.

    Side effect: updates the module-global KEYWORD with the positive
    class label derived by create_index.
    """
    global KEYWORD
    if rootdir == "0":
        rootdir = INDEXDIR
    rootdir = _get_obj_absolute_path(rootdir)
    seed = None
    percentage = 0.
    seed, percentage = decode_params(params)
    # Assuming the same positive list is present in all the servers
    # Always create a new index file
    base_list, KEYWORD = create_index(rootdir, percentage, seed, index, total)
    total_entries = len(base_list)
    # Clamp the requested window to [0, total_entries].
    start = start if start > 0 else 0
    end = min(total_entries, start + limit) if limit > 0 else total_entries
    base_list = base_list[start:end]
    total_entries = end - start

    def generate():
        # Lazily emit the XML so Flask can stream arbitrarily large lists.
        yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
        if STYLE:
            yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
        yield '<objectlist count="{:d}">\n'.format(total_entries)
        for path in base_list:
            path = path.strip()
            yield _get_object_element(object_path=path) + '\n'
        yield '</objectlist>\n'
    headers = Headers([('Content-Type', 'text/xml')])
    return Response(stream_with_context(generate()),
                    status="200 OK",
                    headers=headers)
def decode_params(params):
    """Decode the '_'-separated URL parameter string.

    Format: <[d]eterminant/[r]andom>_<random_seed>_<base_rate>.
    A random seed is drawn when the mix type is 'r' or no seed is given;
    the base rate defaults to 0.1 (percent) and is rounded to 4 places.
    """
    fields = params.split('_')
    mix_type = fields[0]
    chosen_seed = int(fields[1]) if len(fields) > 1 else None
    if chosen_seed is None or mix_type == 'r':
        chosen_seed = random.randrange(10000)
    base_rate = float(fields[2]) if len(fields) > 2 else 0.1
    return chosen_seed, round(base_rate, 4)
@scope_blueprint.route('/id/<path:object_path>')
def get_object_id(object_path):
    """Serve the XML <object> element describing a single object path."""
    headers = Headers([('Content-Type', 'text/xml')])
    return Response(_get_object_element(object_path=object_path),
                    "200 OK",
                    headers=headers)
def _get_object_element(object_path):
    """Build the <object> XML element for *object_path*.

    When the absolute path contains the current KEYWORD (i.e. the object
    is a positive example), a meta attribute pointing at the labeled
    get_object_meta endpoint is included.

    NOTE(review): _get_obj_absolute_path may return None for paths that
    escape DATAROOT (werkzeug safe_join); `KEYWORD in path` would then
    raise TypeError — confirm callers only pass vetted paths.
    """
    path = _get_obj_absolute_path(object_path)
    # (Removed unused local `meta` dict — it was never referenced.)
    if KEYWORD in path:
        return '<object id={} src={} meta={} />' \
            .format(quoteattr(url_for('.get_object_id', object_path=object_path)),
                    quoteattr(_get_object_src_uri(object_path)),
                    quoteattr(url_for('.get_object_meta', present=True)))
    return '<object id={} src={} />' \
        .format(quoteattr(url_for('.get_object_id', object_path=object_path)),
                quoteattr(_get_object_src_uri(object_path)))
@scope_blueprint.route('/meta/<path:present>')
def get_object_meta(present=False):
    """Return ground-truth attributes as JSON; labeled only when *present* is truthy."""
    attrs = {'_gt_label': KEYWORD} if present else {}
    return jsonify(attrs)
def _get_object_src_uri(object_path):
    # LOCAL_OBJ_URI=True means consumers share this filesystem, so hand
    # back a file:// URI; otherwise route through the HTTP object endpoint.
    if LOCAL_OBJ_URI:
        return 'file://' + _get_obj_absolute_path(object_path)
    return url_for('.get_object_src_http', obj_path=object_path)
def _get_obj_absolute_path(obj_path):
    # safe_join is the only guard against path traversal out of DATAROOT
    # (CWE-22). NOTE(review): this file imports safe_join from
    # werkzeug.utils; the hardened implementation lives in
    # werkzeug.security in the werkzeug versions this code targets —
    # confirm the import, and note callers do not check for a None
    # return on rejected paths.
    return safe_join(DATAROOT, obj_path)
@scope_blueprint.route('/obj/<path:obj_path>')
def get_object_src_http(obj_path):
    """Serve the raw object file over HTTP with a one-year cache lifetime."""
    path = _get_obj_absolute_path(obj_path)
    headers = Headers()
    # With add_etags=True, conditional=True
    # Flask should be smart enough to do 304 Not Modified
    response = send_file(path,
                         cache_timeout=datetime.timedelta(
                             days=365).total_seconds(),
                         add_etags=True,
                         conditional=True)
    response.headers.extend(headers)
    return response
def create_index(base_dir, base_rate=0.05, seed=42, rank=0, total_servers=1):
    """Create (or reuse) the shuffled index list for this server shard.

    Expects *base_dir* to contain POSITIVE and NEGATIVE list files.
    This server's slice (rank/total_servers) of the positives is mixed
    with enough negatives to make positives *base_rate* percent of the
    list, shuffled deterministically with *seed*.

    Returns:
        (list of object paths, keyword) — keyword is the parent
        directory name of the first positive entry.
    """
    filepath_split = ['STREAM', "{:.2f}".format(base_rate), str(rank), str(total_servers), str(seed)]
    filepath = '_'.join(filepath_split)
    filepath = os.path.join(base_dir, filepath)
    positive_path = os.path.join(base_dir, 'POSITIVE')
    negative_path = os.path.join(base_dir, 'NEGATIVE')
    # Use a context manager so the handle is closed instead of leaked.
    with open(positive_path) as positive_file:
        positive_firstline = positive_file.readline().rstrip()
    keyword = positive_firstline.split('/')[-2]  # Assuming all positives are in the same parent dir
    _log.info("Dir {} BR: {} Seed:{} FP{}".format(base_dir, base_rate, seed, filepath))
    sys.stdout.flush()
    if not os.path.exists(filepath):
        positive_data = read_file_list(positive_path)  # same across servers
        negative_data = read_file_list(negative_path)  # different across servers
        random.Random(seed).shuffle(positive_data)
        random.Random(seed).shuffle(negative_data)
        len_positive = len(positive_data)
        # Take this rank's contiguous slice of the shuffled positives.
        start_idx = int(rank * (1.0 / total_servers) * len_positive)
        end_idx = int((rank+1) * (1.0 / total_servers) * len_positive)
        positive_data = positive_data[start_idx:end_idx]
        len_positive = len(positive_data)
        # base_rate is a percentage: negatives = positives * (100/base_rate - 1).
        negative_sample = int(len_positive * (100./base_rate - 1))
        negative_data = negative_data[:negative_sample]
        return write_data(filepath, [negative_data, positive_data], seed), keyword
    return read_file_list(filepath), keyword
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
from opendiamond.dataretriever.util import read_file_list, write_data
BASEURL = 'augment'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local file path, otherwise http.
INDEXDIR = DATAROOT = None
ITEMS_PER_ITERATION = int(1e4)
KEYWORD = 'yellowthroat'
"""
Example url:
/augment/root/<ROOT_DIR>/distributed/<id>of<N>/ \
keywords/<d/r ([d]eterminant/[r]andom)>_<random_seed>_<base_rate>
/augment/root/STREAM/distributed/1of2/keywords/d_42_1.0
"""
def init(config):
global INDEXDIR, DATAROOT # pylint: disable=global-statement
INDEXDIR = 'STREAM'
DATAROOT = config.dataroot
scope_blueprint = Blueprint('augment_store', __name__)
_log = logging.getLogger(__name__)
@scope_blueprint.route('/root/<rootdir>/distributed/<int:index>of<int:total>' +
'/keywords/<params>')
@scope_blueprint.route('/root/<rootdir>/keywords/<params>')
@scope_blueprint.route('/root/<rootdir>/distributed/<int:index>of<int:total>' +
'/keywords/<params>/start/<int:start>/limit/<int:limit>')
@scope_blueprint.route('/root/<rootdir>/keywords/<params>' +
'/start/<int:start>/limit/<int:limit>')
def get_scope(rootdir, index=0, total=1, params=None, start=0, limit=sys.maxsize):
global KEYWORD
if rootdir == "0":
rootdir = INDEXDIR
rootdir = _get_obj_absolute_path(rootdir)
seed = None
percentage = 0.
seed, percentage = decode_params(params)
# Assuming the same positive list is present in all the servers
# Always create a new index file
base_list, KEYWORD = create_index(rootdir, percentage, seed, index, total)
total_entries = len(base_list)
start = start if start > 0 else 0
end = min(total_entries, start + limit) if limit > 0 else total_entries
base_list = base_list[start:end]
total_entries = end - start
def generate():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if STYLE:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(total_entries)
for path in base_list:
path = path.strip()
yield _get_object_element(object_path=path) + '\n'
yield '</objectlist>\n'
headers = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
def decode_params(params):
"""
Decodes the params which are '_' seperated
<[d]eterminant/[r]andom>_<random_seed>_<baserate>
"""
keywords = params.split('_')
mix_type = keywords[0]
seed = None
if len(keywords) > 1:
seed = int(keywords[1])
if mix_type == 'r' or seed is None:
seed = random.randrange(10000)
percentage = 0.1 # default base_rate = 0.1%
if len(keywords) > 2:
percentage = float(keywords[2])
return seed, round(percentage, 4)
@scope_blueprint.route('/id/<path:object_path>')
def get_object_id(object_path):
headers = Headers([('Content-Type', 'text/xml')])
return Response(_get_object_element(object_path=object_path),
"200 OK",
headers=headers)
def _get_object_element(object_path):
path = _get_obj_absolute_path(object_path)
meta = {'_gt_label': KEYWORD}
if KEYWORD in path:
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)),
quoteattr(url_for('.get_object_meta', present=True)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)))
@scope_blueprint.route('/meta/<path:present>')
def get_object_meta(present=False):
attrs = dict()
if present:
attrs['_gt_label'] = KEYWORD
return jsonify(attrs)
def _get_object_src_uri(object_path):
if LOCAL_OBJ_URI:
return 'file://' + _get_obj_absolute_path(object_path)
return url_for('.get_object_src_http', obj_path=object_path)
def _get_obj_absolute_path(obj_path):
    # safe_join (from werkzeug.security in this version) rejects paths
    # that would traverse out of DATAROOT (CWE-22 mitigation).
    # NOTE(review): it returns None for rejected paths and callers here
    # do not check for None — confirm downstream handling.
    return safe_join(DATAROOT, obj_path)
@scope_blueprint.route('/obj/<path:obj_path>')
def get_object_src_http(obj_path):
path = _get_obj_absolute_path(obj_path)
headers = Headers()
# With add_etags=True, conditional=True
# Flask should be smart enough to do 304 Not Modified
response = send_file(path,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
response.headers.extend(headers)
return response
def create_index(base_dir, base_rate=0.05, seed=42, rank=0, total_servers=1):
"""
Creates Index List File:
Assuming name of files NEGATIVE (e.g:subset YFCC), POSITIVE
"""
filepath_split = ['STREAM', "{:.2f}".format(base_rate), str(rank), str(total_servers), str(seed)]
filepath = '_'.join(filepath_split)
filepath = os.path.join(base_dir, filepath)
positive_path = os.path.join(base_dir, 'POSITIVE')
negative_path = os.path.join(base_dir, 'NEGATIVE')
positive_firstline = open(positive_path).readline().rstrip()
keyword = positive_firstline.split('/')[-2] # Assuming all positives are in the same parent dir
_log.info("Dir {} BR: {} Seed:{} FP{}".format(base_dir, base_rate, seed, filepath))
sys.stdout.flush()
if not os.path.exists(filepath):
positive_data = read_file_list(positive_path) # same across servers
negative_data = read_file_list(negative_path) # different across servers
random.Random(seed).shuffle(positive_data)
random.Random(seed).shuffle(negative_data)
len_positive = len(positive_data)
start_idx = int(rank * (1.0 / total_servers) * len_positive)
end_idx = int((rank+1) * (1.0 / total_servers) * len_positive)
positive_data = positive_data[start_idx:end_idx]
len_positive = len(positive_data)
negative_sample = int(len_positive * (100./base_rate -1))
negative_data = negative_data[:negative_sample]
return write_data(filepath, [negative_data, positive_data], seed), keyword
return read_file_list(filepath), keyword
| path_disclosure | {
"code": [
"from werkzeug.utils import safe_join"
],
"line_no": [
27
]
} | {
"code": [
"from werkzeug.security import safe_join"
],
"line_no": [
27
]
} |
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.utils import safe_join
from opendiamond.dataretriever.util import read_file_list, write_data
VAR_0 = 'augment'
VAR_1 = False
VAR_2 = True # if true, return local file VAR_31, otherwise http.
VAR_3 = VAR_4 = None
VAR_5 = int(1e4)
VAR_6 = 'yellowthroat'
"""
Example url:
/augment/root/<ROOT_DIR>/distributed/<id>of<N>/ \
VAR_29/<d/r ([d]eterminant/[r]andom)>_<random_seed>_<VAR_20>
/augment/root/STREAM/distributed/1of2/VAR_29/d_42_1.0
"""
def FUNC_0(VAR_7):
global VAR_3, VAR_4 # pylint: disable=global-statement
VAR_3 = 'STREAM'
VAR_4 = VAR_7.dataroot
VAR_8 = Blueprint('augment_store', __name__)
VAR_9 = logging.getLogger(__name__)
@VAR_8.route('/root/<VAR_10>/distributed/<int:VAR_11>of<int:VAR_12>' +
'/VAR_29/<VAR_13>')
@VAR_8.route('/root/<VAR_10>/VAR_29/<VAR_13>')
@VAR_8.route('/root/<VAR_10>/distributed/<int:VAR_11>of<int:VAR_12>' +
'/VAR_29/<VAR_13>/VAR_14/<int:VAR_14>/VAR_15/<int:VAR_15>')
@VAR_8.route('/root/<VAR_10>/VAR_29/<VAR_13>' +
'/VAR_14/<int:VAR_14>/VAR_15/<int:VAR_15>')
def FUNC_1(VAR_10, VAR_11=0, VAR_12=1, VAR_13=None, VAR_14=0, VAR_15=sys.maxsize):
global VAR_6
if VAR_10 == "0":
VAR_10 = VAR_3
VAR_10 = FUNC_7(VAR_10)
VAR_21 = None
VAR_24 = 0.
VAR_21, VAR_24 = FUNC_2(VAR_13)
VAR_25, VAR_6 = FUNC_9(VAR_10, VAR_24, VAR_21, VAR_11, VAR_12)
VAR_26 = len(VAR_25)
VAR_14 = start if VAR_14 > 0 else 0
VAR_27 = min(VAR_26, VAR_14 + VAR_15) if VAR_15 > 0 else VAR_26
VAR_25 = base_list[VAR_14:VAR_27]
VAR_26 = VAR_27 - VAR_14
def FUNC_10():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(VAR_26)
for VAR_31 in VAR_25:
VAR_31 = path.strip()
yield FUNC_4(VAR_16=VAR_31) + '\n'
yield '</objectlist>\n'
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_10()),
status="200 OK",
VAR_28=headers)
def FUNC_2(VAR_13):
VAR_29 = VAR_13.split('_')
VAR_30 = VAR_29[0]
VAR_21 = None
if len(VAR_29) > 1:
VAR_21 = int(VAR_29[1])
if VAR_30 == 'r' or VAR_21 is None:
VAR_21 = random.randrange(10000)
VAR_24 = 0.1 # default VAR_20 = 0.1%
if len(VAR_29) > 2:
VAR_24 = float(VAR_29[2])
return VAR_21, round(VAR_24, 4)
@VAR_8.route('/id/<VAR_31:VAR_16>')
def FUNC_3(VAR_16):
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_4(VAR_16=object_path),
"200 OK",
VAR_28=headers)
def FUNC_4(VAR_16):
VAR_31 = FUNC_7(VAR_16)
VAR_32 = {'_gt_label': VAR_6}
if VAR_6 in VAR_31:
return '<object id={} src={} VAR_32={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_16=object_path)),
quoteattr(FUNC_6(VAR_16)),
quoteattr(url_for('.get_object_meta', VAR_17=True)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_16=object_path)),
quoteattr(FUNC_6(VAR_16)))
@VAR_8.route('/VAR_32/<VAR_31:VAR_17>')
def FUNC_5(VAR_17=False):
VAR_33 = dict()
if VAR_17:
VAR_33['_gt_label'] = VAR_6
return jsonify(VAR_33)
def FUNC_6(VAR_16):
if VAR_2:
return 'file://' + FUNC_7(VAR_16)
return url_for('.get_object_src_http', VAR_18=VAR_16)
def FUNC_7(VAR_18):
return safe_join(VAR_4, VAR_18)
@VAR_8.route('/obj/<VAR_31:VAR_18>')
def FUNC_8(VAR_18):
VAR_31 = FUNC_7(VAR_18)
VAR_28 = Headers()
VAR_34 = send_file(VAR_31,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
VAR_34.headers.extend(VAR_28)
return VAR_34
def FUNC_9(VAR_19, VAR_20=0.05, VAR_21=42, VAR_22=0, VAR_23=1):
VAR_35 = ['STREAM', "{:.2f}".format(VAR_20), str(VAR_22), str(VAR_23), str(VAR_21)]
VAR_36 = '_'.join(VAR_35)
VAR_36 = os.path.join(VAR_19, VAR_36)
VAR_37 = os.path.join(VAR_19, 'POSITIVE')
VAR_38 = os.path.join(VAR_19, 'NEGATIVE')
VAR_39 = open(VAR_37).readline().rstrip()
VAR_40 = VAR_39.split('/')[-2] # Assuming all positives are in the same parent dir
VAR_9.info("Dir {} BR: {} Seed:{} FP{}".format(VAR_19, VAR_20, VAR_21, VAR_36))
sys.stdout.flush()
if not os.path.exists(VAR_36):
VAR_41 = read_file_list(VAR_37) # same across servers
VAR_42 = read_file_list(VAR_38) # different across servers
random.Random(VAR_21).shuffle(VAR_41)
random.Random(VAR_21).shuffle(VAR_42)
VAR_43 = len(VAR_41)
VAR_44 = int(VAR_22 * (1.0 / VAR_23) * VAR_43)
VAR_45 = int((VAR_22+1) * (1.0 / VAR_23) * VAR_43)
VAR_41 = positive_data[VAR_44:VAR_45]
VAR_43 = len(VAR_41)
VAR_46 = int(VAR_43 * (100./VAR_20 -1))
VAR_42 = negative_data[:VAR_46]
return write_data(VAR_36, [VAR_42, VAR_41], VAR_21), VAR_40
return read_file_list(VAR_36), VAR_40
|
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
from opendiamond.dataretriever.util import read_file_list, write_data
VAR_0 = 'augment'
VAR_1 = False
VAR_2 = True # if true, return local file VAR_31, otherwise http.
VAR_3 = VAR_4 = None
VAR_5 = int(1e4)
VAR_6 = 'yellowthroat'
"""
Example url:
/augment/root/<ROOT_DIR>/distributed/<id>of<N>/ \
VAR_29/<d/r ([d]eterminant/[r]andom)>_<random_seed>_<VAR_20>
/augment/root/STREAM/distributed/1of2/VAR_29/d_42_1.0
"""
def FUNC_0(VAR_7):
global VAR_3, VAR_4 # pylint: disable=global-statement
VAR_3 = 'STREAM'
VAR_4 = VAR_7.dataroot
VAR_8 = Blueprint('augment_store', __name__)
VAR_9 = logging.getLogger(__name__)
@VAR_8.route('/root/<VAR_10>/distributed/<int:VAR_11>of<int:VAR_12>' +
'/VAR_29/<VAR_13>')
@VAR_8.route('/root/<VAR_10>/VAR_29/<VAR_13>')
@VAR_8.route('/root/<VAR_10>/distributed/<int:VAR_11>of<int:VAR_12>' +
'/VAR_29/<VAR_13>/VAR_14/<int:VAR_14>/VAR_15/<int:VAR_15>')
@VAR_8.route('/root/<VAR_10>/VAR_29/<VAR_13>' +
'/VAR_14/<int:VAR_14>/VAR_15/<int:VAR_15>')
def FUNC_1(VAR_10, VAR_11=0, VAR_12=1, VAR_13=None, VAR_14=0, VAR_15=sys.maxsize):
global VAR_6
if VAR_10 == "0":
VAR_10 = VAR_3
VAR_10 = FUNC_7(VAR_10)
VAR_21 = None
VAR_24 = 0.
VAR_21, VAR_24 = FUNC_2(VAR_13)
VAR_25, VAR_6 = FUNC_9(VAR_10, VAR_24, VAR_21, VAR_11, VAR_12)
VAR_26 = len(VAR_25)
VAR_14 = start if VAR_14 > 0 else 0
VAR_27 = min(VAR_26, VAR_14 + VAR_15) if VAR_15 > 0 else VAR_26
VAR_25 = base_list[VAR_14:VAR_27]
VAR_26 = VAR_27 - VAR_14
def FUNC_10():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(VAR_26)
for VAR_31 in VAR_25:
VAR_31 = path.strip()
yield FUNC_4(VAR_16=VAR_31) + '\n'
yield '</objectlist>\n'
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_10()),
status="200 OK",
VAR_28=headers)
def FUNC_2(VAR_13):
VAR_29 = VAR_13.split('_')
VAR_30 = VAR_29[0]
VAR_21 = None
if len(VAR_29) > 1:
VAR_21 = int(VAR_29[1])
if VAR_30 == 'r' or VAR_21 is None:
VAR_21 = random.randrange(10000)
VAR_24 = 0.1 # default VAR_20 = 0.1%
if len(VAR_29) > 2:
VAR_24 = float(VAR_29[2])
return VAR_21, round(VAR_24, 4)
@VAR_8.route('/id/<VAR_31:VAR_16>')
def FUNC_3(VAR_16):
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_4(VAR_16=object_path),
"200 OK",
VAR_28=headers)
def FUNC_4(VAR_16):
VAR_31 = FUNC_7(VAR_16)
VAR_32 = {'_gt_label': VAR_6}
if VAR_6 in VAR_31:
return '<object id={} src={} VAR_32={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_16=object_path)),
quoteattr(FUNC_6(VAR_16)),
quoteattr(url_for('.get_object_meta', VAR_17=True)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_16=object_path)),
quoteattr(FUNC_6(VAR_16)))
@VAR_8.route('/VAR_32/<VAR_31:VAR_17>')
def FUNC_5(VAR_17=False):
VAR_33 = dict()
if VAR_17:
VAR_33['_gt_label'] = VAR_6
return jsonify(VAR_33)
def FUNC_6(VAR_16):
if VAR_2:
return 'file://' + FUNC_7(VAR_16)
return url_for('.get_object_src_http', VAR_18=VAR_16)
def FUNC_7(VAR_18):
return safe_join(VAR_4, VAR_18)
@VAR_8.route('/obj/<VAR_31:VAR_18>')
def FUNC_8(VAR_18):
VAR_31 = FUNC_7(VAR_18)
VAR_28 = Headers()
VAR_34 = send_file(VAR_31,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
VAR_34.headers.extend(VAR_28)
return VAR_34
def FUNC_9(VAR_19, VAR_20=0.05, VAR_21=42, VAR_22=0, VAR_23=1):
VAR_35 = ['STREAM', "{:.2f}".format(VAR_20), str(VAR_22), str(VAR_23), str(VAR_21)]
VAR_36 = '_'.join(VAR_35)
VAR_36 = os.path.join(VAR_19, VAR_36)
VAR_37 = os.path.join(VAR_19, 'POSITIVE')
VAR_38 = os.path.join(VAR_19, 'NEGATIVE')
VAR_39 = open(VAR_37).readline().rstrip()
VAR_40 = VAR_39.split('/')[-2] # Assuming all positives are in the same parent dir
VAR_9.info("Dir {} BR: {} Seed:{} FP{}".format(VAR_19, VAR_20, VAR_21, VAR_36))
sys.stdout.flush()
if not os.path.exists(VAR_36):
VAR_41 = read_file_list(VAR_37) # same across servers
VAR_42 = read_file_list(VAR_38) # different across servers
random.Random(VAR_21).shuffle(VAR_41)
random.Random(VAR_21).shuffle(VAR_42)
VAR_43 = len(VAR_41)
VAR_44 = int(VAR_22 * (1.0 / VAR_23) * VAR_43)
VAR_45 = int((VAR_22+1) * (1.0 / VAR_23) * VAR_43)
VAR_41 = positive_data[VAR_44:VAR_45]
VAR_43 = len(VAR_41)
VAR_46 = int(VAR_43 * (100./VAR_20 -1))
VAR_42 = negative_data[:VAR_46]
return write_data(VAR_36, [VAR_42, VAR_41], VAR_21), VAR_40
return read_file_list(VAR_36), VAR_40
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
19,
29,
30,
37,
42,
45,
46,
51,
52,
54,
56,
68,
73,
74,
75,
78,
83,
88,
90,
94,
96,
98,
102,
119,
126,
135,
139,
140,
146,
148,
152,
154,
157,
161,
163,
164,
172,
178,
186,
189,
203,
205,
104,
105,
106,
107,
174,
175,
176,
177
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
19,
29,
30,
37,
42,
45,
46,
51,
52,
54,
56,
68,
73,
74,
75,
78,
83,
88,
90,
94,
96,
98,
102,
119,
126,
135,
139,
140,
146,
148,
152,
154,
157,
161,
163,
164,
172,
178,
186,
189,
203,
205,
104,
105,
106,
107,
174,
175,
176,
177
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from netaddr import IPSet
from synapse.config._base import Config, ConfigError
from synapse.config._util import validate_config
class FederationConfig(Config):
section = "federation"
def read_config(self, config, **kwargs):
# FIXME: federation_domain_whitelist needs sytests
self.federation_domain_whitelist = None # type: Optional[dict]
federation_domain_whitelist = config.get("federation_domain_whitelist", None)
if federation_domain_whitelist is not None:
# turn the whitelist into a hash for speed of lookup
self.federation_domain_whitelist = {}
for domain in federation_domain_whitelist:
self.federation_domain_whitelist[domain] = True
self.federation_ip_range_blacklist = config.get(
"federation_ip_range_blacklist", []
)
# Attempt to create an IPSet from the given ranges
try:
self.federation_ip_range_blacklist = IPSet(
self.federation_ip_range_blacklist
)
# Always blacklist 0.0.0.0, ::
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
except Exception as e:
raise ConfigError(
"Invalid range(s) provided in federation_ip_range_blacklist: %s" % e
)
federation_metrics_domains = config.get("federation_metrics_domains") or []
validate_config(
_METRICS_FOR_DOMAINS_SCHEMA,
federation_metrics_domains,
("federation_metrics_domains",),
)
self.federation_metrics_domains = set(federation_metrics_domains)
def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """\
## Federation ##
# Restrict federation to the following whitelist of domains.
# N.B. we recommend also firewalling your federation listener to limit
# inbound federation traffic as early as possible, rather than relying
# purely on this application-layer restriction. If not specified, the
# default is to whitelist everything.
#
#federation_domain_whitelist:
# - lon.example.com
# - nyc.example.com
# - syd.example.com
# Prevent federation requests from being sent to the following
# blacklist IP address CIDR ranges. If this option is not specified, or
# specified with an empty list, no ip range blacklist will be enforced.
#
# As of Synapse v1.4.0 this option also affects any outbound requests to identity
# servers provided by user input.
#
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
# listed here, since they correspond to unroutable addresses.)
#
federation_ip_range_blacklist:
- '127.0.0.0/8'
- '10.0.0.0/8'
- '172.16.0.0/12'
- '192.168.0.0/16'
- '100.64.0.0/10'
- '169.254.0.0/16'
- '::1/128'
- 'fe80::/64'
- 'fc00::/7'
# Report prometheus metrics on the age of PDUs being sent to and received from
# the following domains. This can be used to give an idea of "delay" on inbound
# and outbound federation, though be aware that any delay can be due to problems
# at either end or with the intermediate network.
#
# By default, no domains are monitored in this way.
#
#federation_metrics_domains:
# - matrix.org
# - example.com
"""
_METRICS_FOR_DOMAINS_SCHEMA = {"type": "array", "items": {"type": "string"}}
| # -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from netaddr import IPSet
from synapse.config._base import Config, ConfigError
from synapse.config._util import validate_config
class FederationConfig(Config):
section = "federation"
def read_config(self, config, **kwargs):
# FIXME: federation_domain_whitelist needs sytests
self.federation_domain_whitelist = None # type: Optional[dict]
federation_domain_whitelist = config.get("federation_domain_whitelist", None)
if federation_domain_whitelist is not None:
# turn the whitelist into a hash for speed of lookup
self.federation_domain_whitelist = {}
for domain in federation_domain_whitelist:
self.federation_domain_whitelist[domain] = True
ip_range_blacklist = config.get("ip_range_blacklist", [])
# Attempt to create an IPSet from the given ranges
try:
self.ip_range_blacklist = IPSet(ip_range_blacklist)
except Exception as e:
raise ConfigError("Invalid range(s) provided in ip_range_blacklist: %s" % e)
# Always blacklist 0.0.0.0, ::
self.ip_range_blacklist.update(["0.0.0.0", "::"])
# The federation_ip_range_blacklist is used for backwards-compatibility
# and only applies to federation and identity servers. If it is not given,
# default to ip_range_blacklist.
federation_ip_range_blacklist = config.get(
"federation_ip_range_blacklist", ip_range_blacklist
)
try:
self.federation_ip_range_blacklist = IPSet(federation_ip_range_blacklist)
except Exception as e:
raise ConfigError(
"Invalid range(s) provided in federation_ip_range_blacklist: %s" % e
)
# Always blacklist 0.0.0.0, ::
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
federation_metrics_domains = config.get("federation_metrics_domains") or []
validate_config(
_METRICS_FOR_DOMAINS_SCHEMA,
federation_metrics_domains,
("federation_metrics_domains",),
)
self.federation_metrics_domains = set(federation_metrics_domains)
def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """\
## Federation ##
# Restrict federation to the following whitelist of domains.
# N.B. we recommend also firewalling your federation listener to limit
# inbound federation traffic as early as possible, rather than relying
# purely on this application-layer restriction. If not specified, the
# default is to whitelist everything.
#
#federation_domain_whitelist:
# - lon.example.com
# - nyc.example.com
# - syd.example.com
# Prevent outgoing requests from being sent to the following blacklisted IP address
# CIDR ranges. If this option is not specified, or specified with an empty list,
# no IP range blacklist will be enforced.
#
# The blacklist applies to the outbound requests for federation, identity servers,
# push servers, and for checking key validitity for third-party invite events.
#
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
# listed here, since they correspond to unroutable addresses.)
#
# This option replaces federation_ip_range_blacklist in Synapse v1.24.0.
#
ip_range_blacklist:
- '127.0.0.0/8'
- '10.0.0.0/8'
- '172.16.0.0/12'
- '192.168.0.0/16'
- '100.64.0.0/10'
- '169.254.0.0/16'
- '::1/128'
- 'fe80::/64'
- 'fc00::/7'
# Report prometheus metrics on the age of PDUs being sent to and received from
# the following domains. This can be used to give an idea of "delay" on inbound
# and outbound federation, though be aware that any delay can be due to problems
# at either end or with the intermediate network.
#
# By default, no domains are monitored in this way.
#
#federation_metrics_domains:
# - matrix.org
# - example.com
"""
_METRICS_FOR_DOMAINS_SCHEMA = {"type": "array", "items": {"type": "string"}}
| open_redirect | {
"code": [
" self.federation_ip_range_blacklist = config.get(",
" \"federation_ip_range_blacklist\", []",
" )",
" self.federation_ip_range_blacklist = IPSet(",
" self.federation_ip_range_blacklist",
" )",
" self.federation_ip_range_blacklist.update([\"0.0.0.0\", \"::\"])",
" federation_ip_range_blacklist:"
],
"line_no": [
39,
40,
41,
45,
46,
47,
50,
89
]
} | {
"code": [
" ip_range_blacklist = config.get(\"ip_range_blacklist\", [])",
" except Exception as e:",
" raise ConfigError(\"Invalid range(s) provided in ip_range_blacklist: %s\" % e)",
" self.ip_range_blacklist.update([\"0.0.0.0\", \"::\"])",
" federation_ip_range_blacklist = config.get(",
" \"federation_ip_range_blacklist\", ip_range_blacklist",
" )",
" self.federation_ip_range_blacklist = IPSet(federation_ip_range_blacklist)",
" self.federation_ip_range_blacklist.update([\"0.0.0.0\", \"::\"])"
],
"line_no": [
39,
44,
45,
47,
52,
53,
54,
56,
62
]
} |
from typing import Optional
from netaddr import IPSet
from synapse.config._base import Config, ConfigError
from synapse.config._util import validate_config
class CLASS_0(Config):
VAR_1 = "federation"
def FUNC_0(self, VAR_2, **VAR_3):
self.federation_domain_whitelist = None # type: Optional[dict]
VAR_6 = VAR_2.get("federation_domain_whitelist", None)
if VAR_6 is not None:
self.federation_domain_whitelist = {}
for VAR_8 in VAR_6:
self.federation_domain_whitelist[VAR_8] = True
self.federation_ip_range_blacklist = VAR_2.get(
"federation_ip_range_blacklist", []
)
try:
self.federation_ip_range_blacklist = IPSet(
self.federation_ip_range_blacklist
)
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
except Exception as e:
raise ConfigError(
"Invalid range(s) provided in federation_ip_range_blacklist: %s" % e
)
VAR_7 = VAR_2.get("federation_metrics_domains") or []
validate_config(
VAR_0,
VAR_7,
("federation_metrics_domains",),
)
self.federation_metrics_domains = set(VAR_7)
def FUNC_1(self, VAR_4, VAR_5, **VAR_3):
return """\
federation_ip_range_blacklist:
- '127.0.0.0/8'
- '10.0.0.0/8'
- '172.16.0.0/12'
- '192.168.0.0/16'
- '100.64.0.0/10'
- '169.254.0.0/16'
- '::1/128'
- 'fe80::/64'
- 'fc00::/7'
"""
VAR_0 = {"type": "array", "items": {"type": "string"}}
|
from typing import Optional
from netaddr import IPSet
from synapse.config._base import Config, ConfigError
from synapse.config._util import validate_config
class CLASS_0(Config):
VAR_1 = "federation"
def FUNC_0(self, VAR_2, **VAR_3):
self.federation_domain_whitelist = None # type: Optional[dict]
VAR_6 = VAR_2.get("federation_domain_whitelist", None)
if VAR_6 is not None:
self.federation_domain_whitelist = {}
for VAR_10 in VAR_6:
self.federation_domain_whitelist[VAR_10] = True
VAR_7 = VAR_2.get("ip_range_blacklist", [])
try:
self.ip_range_blacklist = IPSet(VAR_7)
except Exception as e:
raise ConfigError("Invalid range(s) provided in VAR_7: %s" % e)
self.ip_range_blacklist.update(["0.0.0.0", "::"])
VAR_8 = VAR_2.get(
"federation_ip_range_blacklist", VAR_7
)
try:
self.federation_ip_range_blacklist = IPSet(VAR_8)
except Exception as e:
raise ConfigError(
"Invalid range(s) provided in VAR_8: %s" % e
)
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
VAR_9 = VAR_2.get("federation_metrics_domains") or []
validate_config(
VAR_0,
VAR_9,
("federation_metrics_domains",),
)
self.federation_metrics_domains = set(VAR_9)
def FUNC_1(self, VAR_4, VAR_5, **VAR_3):
return """\
VAR_7:
- '127.0.0.0/8'
- '10.0.0.0/8'
- '172.16.0.0/12'
- '192.168.0.0/16'
- '100.64.0.0/10'
- '169.254.0.0/16'
- '::1/128'
- 'fe80::/64'
- 'fc00::/7'
"""
VAR_0 = {"type": "array", "items": {"type": "string"}}
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
19,
22,
23,
26,
28,
31,
33,
35,
38,
42,
43,
48,
49,
55,
63,
66,
67,
68,
69,
70,
71,
72,
73,
74,
75,
76,
77,
78,
79,
80,
81,
82,
83,
84,
85,
86,
87,
88,
99,
100,
101,
102,
103,
104,
105,
106,
107,
108,
109,
111,
112,
114
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
19,
22,
23,
26,
28,
31,
33,
35,
38,
40,
41,
46,
48,
49,
50,
51,
61,
63,
71,
74,
75,
76,
77,
78,
79,
80,
81,
82,
83,
84,
85,
86,
87,
88,
89,
90,
91,
92,
93,
94,
95,
96,
97,
98,
109,
110,
111,
112,
113,
114,
115,
116,
117,
118,
119,
121,
122,
124
] |
0CWE-22
| from __future__ import absolute_import
import cgi
import email.utils
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys
from pip._vendor import requests, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.requests.utils import get_netrc_auth
# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
# why we ignore the type on this import
from pip._vendor.six.moves import xmlrpc_client # type: ignore
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._vendor.urllib3.util import IS_PYOPENSSL
import pip
from pip._internal.exceptions import HashMismatch, InstallationError
from pip._internal.locations import write_delete_marker_file
from pip._internal.models.index import PyPI
from pip._internal.utils.encoding import auto_decode
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.glibc import libc_ver
from pip._internal.utils.misc import (
ARCHIVE_EXTENSIONS, ask, ask_input, ask_password, ask_path_exists,
backup_dir, consume, display_path, format_size, get_installed_version,
path_to_url, remove_auth_from_url, rmtree, split_auth_netloc_from_url,
splitext, unpack_file,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import DownloadProgressProvider
from pip._internal.vcs import vcs
if MYPY_CHECK_RUNNING:
from typing import (
Optional, Tuple, Dict, IO, Text, Union
)
from optparse import Values
from pip._internal.models.link import Link
from pip._internal.utils.hashes import Hashes
from pip._internal.vcs.versioncontrol import AuthInfo, VersionControl
try:
import ssl # noqa
except ImportError:
ssl = None
HAS_TLS = (ssl is not None) or IS_PYOPENSSL
__all__ = ['get_file_content',
'is_url', 'url_to_path', 'path_to_url',
'is_archive_file', 'unpack_vcs_link',
'unpack_file_url', 'is_vcs_url', 'is_file_url',
'unpack_http_url', 'unpack_url']
logger = logging.getLogger(__name__)
try:
import keyring # noqa
except ImportError:
keyring = None
except Exception as exc:
logger.warning("Keyring is skipped due to an exception: %s",
str(exc))
keyring = None
# These are environment variables present when running under various
# CI systems. For each variable, some CI systems that use the variable
# are indicated. The collection was chosen so that for each of a number
# of popular systems, at least one of the environment variables is used.
# This list is used to provide some indication of and lower bound for
# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive.
# For more background, see: https://github.com/pypa/pip/issues/5499
CI_ENVIRONMENT_VARIABLES = (
# Azure Pipelines
'BUILD_BUILDID',
# Jenkins
'BUILD_ID',
# AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
'CI',
# Explicit environment variable.
'PIP_IS_CI',
)
def looks_like_ci():
# type: () -> bool
"""
Return whether it looks like pip is running under CI.
"""
# We don't use the method of checking for a tty (e.g. using isatty())
# because some CI systems mimic a tty (e.g. Travis CI). Thus that
# method doesn't provide definitive information in either direction.
return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)
def user_agent():
"""
Return a string representing the user agent.
"""
data = {
"installer": {"name": "pip", "version": pip.__version__},
"python": platform.python_version(),
"implementation": {
"name": platform.python_implementation(),
},
}
if data["implementation"]["name"] == 'CPython':
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'PyPy':
if sys.pypy_version_info.releaselevel == 'final':
pypy_version_info = sys.pypy_version_info[:3]
else:
pypy_version_info = sys.pypy_version_info
data["implementation"]["version"] = ".".join(
[str(x) for x in pypy_version_info]
)
elif data["implementation"]["name"] == 'Jython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'IronPython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
if sys.platform.startswith("linux"):
from pip._vendor import distro
distro_infos = dict(filter(
lambda x: x[1],
zip(["name", "version", "id"], distro.linux_distribution()),
))
libc = dict(filter(
lambda x: x[1],
zip(["lib", "version"], libc_ver()),
))
if libc:
distro_infos["libc"] = libc
if distro_infos:
data["distro"] = distro_infos
if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
if platform.system():
data.setdefault("system", {})["name"] = platform.system()
if platform.release():
data.setdefault("system", {})["release"] = platform.release()
if platform.machine():
data["cpu"] = platform.machine()
if HAS_TLS:
data["openssl_version"] = ssl.OPENSSL_VERSION
setuptools_version = get_installed_version("setuptools")
if setuptools_version is not None:
data["setuptools_version"] = setuptools_version
# Use None rather than False so as not to give the impression that
# pip knows it is not being run under CI. Rather, it is a null or
# inconclusive result. Also, we include some value rather than no
# value to make it easier to know that the check has been run.
data["ci"] = True if looks_like_ci() else None
user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
if user_data is not None:
data["user_data"] = user_data
return "{data[installer][name]}/{data[installer][version]} {json}".format(
data=data,
json=json.dumps(data, separators=(",", ":"), sort_keys=True),
)
def _get_keyring_auth(url, username):
"""Return the tuple auth for a given url from keyring."""
if not url or not keyring:
return None
try:
try:
get_credential = keyring.get_credential
except AttributeError:
pass
else:
logger.debug("Getting credentials from keyring for %s", url)
cred = get_credential(url, username)
if cred is not None:
return cred.username, cred.password
return None
if username:
logger.debug("Getting password from keyring for %s", url)
password = keyring.get_password(url, username)
if password:
return username, password
except Exception as exc:
logger.warning("Keyring is skipped due to an exception: %s",
str(exc))
class MultiDomainBasicAuth(AuthBase):
def __init__(self, prompting=True, index_urls=None):
# type: (bool, Optional[Values]) -> None
self.prompting = prompting
self.index_urls = index_urls
self.passwords = {} # type: Dict[str, AuthInfo]
# When the user is prompted to enter credentials and keyring is
# available, we will offer to save them. If the user accepts,
# this value is set to the credentials they entered. After the
# request authenticates, the caller should call
# ``save_credentials`` to save these.
self._credentials_to_save = None # type: Tuple[str, str, str]
def _get_index_url(self, url):
"""Return the original index URL matching the requested URL.
Cached or dynamically generated credentials may work against
the original index URL rather than just the netloc.
The provided url should have had its username and password
removed already. If the original index url had credentials then
they will be included in the return value.
Returns None if no matching index was found, or if --no-index
was specified by the user.
"""
if not url or not self.index_urls:
return None
for u in self.index_urls:
prefix = remove_auth_from_url(u).rstrip("/") + "/"
if url.startswith(prefix):
return u
def _get_new_credentials(self, original_url, allow_netrc=True,
allow_keyring=True):
"""Find and return credentials for the specified URL."""
# Split the credentials and netloc from the url.
url, netloc, url_user_password = split_auth_netloc_from_url(
original_url)
# Start with the credentials embedded in the url
username, password = url_user_password
if username is not None and password is not None:
logger.debug("Found credentials in url for %s", netloc)
return url_user_password
# Find a matching index url for this request
index_url = self._get_index_url(url)
if index_url:
# Split the credentials from the url.
index_info = split_auth_netloc_from_url(index_url)
if index_info:
index_url, _, index_url_user_password = index_info
logger.debug("Found index url %s", index_url)
# If an index URL was found, try its embedded credentials
if index_url and index_url_user_password[0] is not None:
username, password = index_url_user_password
if username is not None and password is not None:
logger.debug("Found credentials in index url for %s", netloc)
return index_url_user_password
# Get creds from netrc if we still don't have them
if allow_netrc:
netrc_auth = get_netrc_auth(original_url)
if netrc_auth:
logger.debug("Found credentials in netrc for %s", netloc)
return netrc_auth
# If we don't have a password and keyring is available, use it.
if allow_keyring:
# The index url is more specific than the netloc, so try it first
kr_auth = (_get_keyring_auth(index_url, username) or
_get_keyring_auth(netloc, username))
if kr_auth:
logger.debug("Found credentials in keyring for %s", netloc)
return kr_auth
return None, None
def _get_url_and_credentials(self, original_url):
"""Return the credentials to use for the provided URL.
If allowed, netrc and keyring may be used to obtain the
correct credentials.
Returns (url_without_credentials, username, password). Note
that even if the original URL contains credentials, this
function may return a different username and password.
"""
url, netloc, _ = split_auth_netloc_from_url(original_url)
# Use any stored credentials that we have for this netloc
username, password = self.passwords.get(netloc, (None, None))
# If nothing cached, acquire new credentials without prompting
# the user (e.g. from netrc, keyring, or similar).
if username is None or password is None:
username, password = self._get_new_credentials(original_url)
if username is not None and password is not None:
# Store the username and password
self.passwords[netloc] = (username, password)
return url, username, password
def __call__(self, req):
# Get credentials for this request
url, username, password = self._get_url_and_credentials(req.url)
# Set the url of the request to the url without any credentials
req.url = url
if username is not None and password is not None:
# Send the basic auth with this request
req = HTTPBasicAuth(username, password)(req)
# Attach a hook to handle 401 responses
req.register_hook("response", self.handle_401)
return req
# Factored out to allow for easy patching in tests
def _prompt_for_password(self, netloc):
username = ask_input("User for %s: " % netloc)
if not username:
return None, None
auth = _get_keyring_auth(netloc, username)
if auth:
return auth[0], auth[1], False
password = ask_password("Password: ")
return username, password, True
# Factored out to allow for easy patching in tests
def _should_save_password_to_keyring(self):
if not keyring:
return False
return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
def handle_401(self, resp, **kwargs):
# We only care about 401 responses, anything else we want to just
# pass through the actual response
if resp.status_code != 401:
return resp
# We are not able to prompt the user so simply return the response
if not self.prompting:
return resp
parsed = urllib_parse.urlparse(resp.url)
# Prompt the user for a new username and password
username, password, save = self._prompt_for_password(parsed.netloc)
# Store the new username and password to use for future requests
self._credentials_to_save = None
if username is not None and password is not None:
self.passwords[parsed.netloc] = (username, password)
# Prompt to save the password to keyring
if save and self._should_save_password_to_keyring():
self._credentials_to_save = (parsed.netloc, username, password)
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
resp.raw.release_conn()
# Add our new username and password to the request
req = HTTPBasicAuth(username or "", password or "")(resp.request)
req.register_hook("response", self.warn_on_401)
# On successful request, save the credentials that were used to
# keyring. (Note that if the user responded "no" above, this member
# is not set and nothing will be saved.)
if self._credentials_to_save:
req.register_hook("response", self.save_credentials)
# Send our new request
new_resp = resp.connection.send(req, **kwargs)
new_resp.history.append(resp)
return new_resp
def warn_on_401(self, resp, **kwargs):
"""Response callback to warn about incorrect credentials."""
if resp.status_code == 401:
logger.warning('401 Error, Credentials not correct for %s',
resp.request.url)
def save_credentials(self, resp, **kwargs):
"""Response callback to save credentials on success."""
assert keyring is not None, "should never reach here without keyring"
if not keyring:
return
creds = self._credentials_to_save
self._credentials_to_save = None
if creds and resp.status_code < 400:
try:
logger.info('Saving credentials to keyring')
keyring.set_password(*creds)
except Exception:
logger.exception('Failed to save credentials')
class LocalFSAdapter(BaseAdapter):
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
pathname = url_to_path(request.url)
resp = Response()
resp.status_code = 200
resp.url = request.url
try:
stats = os.stat(pathname)
except OSError as exc:
resp.status_code = 404
resp.raw = exc
else:
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
resp.headers = CaseInsensitiveDict({
"Content-Type": content_type,
"Content-Length": stats.st_size,
"Last-Modified": modified,
})
resp.raw = open(pathname, "rb")
resp.close = resp.raw.close
return resp
def close(self):
pass
class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, *args, **kwargs):
        super(SafeFileCache, self).__init__(*args, **kwargs)
        # Only trust the cache directory when it (or its nearest existing
        # ancestor) is owned by the user running pip; otherwise disable
        # caching entirely rather than create root-owned files.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )
            # A None directory makes every cache operation below a no-op.
            self.directory = None

    def get(self, *args, **kwargs):
        """Fetch from the cache; any failure is treated as a cache miss."""
        if self.directory is None:
            return
        try:
            return super(SafeFileCache, self).get(*args, **kwargs)
        except (LockError, OSError, IOError):
            # Deliberately swallowed: if the cache is unusable we simply
            # proceed as though caching were disabled.
            pass

    def set(self, *args, **kwargs):
        """Store into the cache; failures are silently ignored."""
        if self.directory is None:
            return
        try:
            return super(SafeFileCache, self).set(*args, **kwargs)
        except (LockError, OSError, IOError):
            # Deliberately swallowed: an unwritable cache must not break
            # the request being processed.
            pass

    def delete(self, *args, **kwargs):
        """Evict from the cache; failures are silently ignored."""
        if self.directory is None:
            return
        try:
            return super(SafeFileCache, self).delete(*args, **kwargs)
        except (LockError, OSError, IOError):
            # Deliberately swallowed, same reasoning as get()/set().
            pass
class InsecureHTTPAdapter(HTTPAdapter):
    """HTTPAdapter variant that turns off TLS certificate verification."""

    def cert_verify(self, conn, url, verify, cert):
        # Override the normal verification setup: drop the CA bundle and
        # request no certificate checks, regardless of what the caller
        # passed for ``verify``/``cert``.
        conn.ca_certs = None
        conn.cert_reqs = 'CERT_NONE'
class PipSession(requests.Session):
    """requests.Session configured with pip's defaults: retry policy,
    response caching for securely-fetched origins, multi-index basic auth,
    file:// support, and per-host TLS-verification opt-outs."""
    # Default per-request timeout applied in request() unless overridden.
    timeout = None  # type: Optional[int]
    def __init__(self, *args, **kwargs):
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])
        index_urls = kwargs.pop("index_urls", None)
        super(PipSession, self).__init__(*args, **kwargs)
        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()
        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth(index_urls=index_urls)
        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,
            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],
            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )
        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)
        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)
        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())
        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.
        for host in insecure_hosts:
            self.mount("https://{}/".format(host), insecure_adapter)
    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)
        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)
def get_file_content(url, comes_from=None, session=None):
    # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text]
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL. Returns (location, content). Content is unicode.
    :param url: File path or url.
    :param comes_from: Origin description of requirements.
    :param session: Instance of pip.download.PipSession.
    """
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )
    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        # Refuse file: references coming from an http(s)-hosted requirements
        # file: a remote file must not be able to pull in local disk content.
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            # Convert the file: URL back into a local path, normalizing
            # backslashes and the Windows "/c|/..." drive-letter notation.
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                # Collapse repeated leading slashes to a single one.
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text
    # At this point ``url`` is a plain filesystem path.
    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content
# Matches the scheme prefix of http/https/file URLs (case-insensitive).
_scheme_re = re.compile(r'^(http|https|file):', re.I)
# Matches Windows drive letters written as "/c|" inside file: URLs.
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
def is_url(name):
    # type: (Union[str, Text]) -> bool
    """Return True when *name* looks like a URL rather than a local path."""
    scheme, sep, _ = name.partition(':')
    if not sep:
        # No colon at all: cannot be a URL.
        return False
    return scheme.lower() in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
def url_to_path(url):
    # type: (str) -> str
    """
    Convert a file: URL to a path.

    Raises ValueError for a non-local authority on non-Windows platforms.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    _, netloc, path, _, _ = urllib_parse.urlsplit(url)
    if not netloc or netloc == 'localhost':
        # According to RFC 8089, same as empty authority.
        netloc = ''
    elif sys.platform == 'win32':
        # If we have a UNC path, prepend UNC share notation.
        netloc = '\\\\' + netloc
    else:
        raise ValueError(
            'non-local file URIs are not supported on this platform: %r'
            % url
        )
    # url2pathname undoes percent-encoding and applies OS path separators.
    path = urllib_request.url2pathname(netloc + path)
    return path
def is_archive_file(name):
    # type: (str) -> bool
    """Return True when *name*'s extension marks it as an archive file."""
    return splitext(name)[1].lower() in ARCHIVE_EXTENSIONS
def unpack_vcs_link(link, location):
    # Resolve the VCS backend matching the link's scheme and let it
    # obtain (clone/export) the sources into ``location``.
    # NOTE(review): assumes a backend exists for the scheme — a non-VCS
    # link would make vcs_backend None and raise AttributeError here.
    vcs_backend = _get_used_vcs_backend(link)
    vcs_backend.unpack(location, url=link.url)
def _get_used_vcs_backend(link):
    # type: (Link) -> Optional[VersionControl]
    """
    Return the VersionControl backend handling this link's scheme, or None.
    """
    return next(
        (backend for backend in vcs.backends if link.scheme in backend.schemes),
        None,
    )
def is_vcs_url(link):
    # type: (Link) -> bool
    """Tell whether a version-control backend claims this link's scheme."""
    return _get_used_vcs_backend(link) is not None
def is_file_url(link):
    # type: (Link) -> bool
    """True when the link uses the file: scheme (case-insensitive)."""
    url = link.url.lower()
    return url.startswith('file:')
def is_dir_url(link):
    # type: (Link) -> bool
    """Return whether a file:// Link points to a directory.
    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.
    """
    return os.path.isdir(url_to_path(link.url_without_fragment))
def _progress_indicator(iterable, *args, **kwargs):
    # Identity pass-through used when no progress bar should be rendered;
    # mirrors the DownloadProgressProvider call signature so the two are
    # interchangeable in _download_url().
    return iterable
def _download_url(
    resp,  # type: Response
    link,  # type: Link
    content_file,  # type: IO
    hashes,  # type: Hashes
    progress_bar  # type: str
):
    # type: (...) -> None
    """Stream the body of ``resp`` into ``content_file``, optionally showing
    a progress bar and checking the downloaded bytes against ``hashes``."""
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0
    cached_resp = getattr(resp, "from_cache", False)
    # Progress is suppressed when logging is quieter than INFO or the
    # response came from the local cache; it is shown for large (>40KB)
    # downloads and for downloads of unknown length.
    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False
    show_url = link.show_url
    def resp_read(chunk_size):
        # Generator over the raw (undecoded) body chunks of the response.
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk
    def written_chunks(chunks):
        # Write each chunk to disk as it flows by, re-yielding it so the
        # hash checker can consume the very same byte stream.
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk
    progress_indicator = _progress_indicator
    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment
    if show_progress:  # We don't show progress on cached responses
        progress_indicator = DownloadProgressProvider(progress_bar,
                                                      max=total_length)
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
        else:
            logger.info("Downloading %s", url)
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)
    logger.debug('Downloading from URL %s', link)
    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        # Drain the lazy pipeline so the file actually gets written.
        consume(downloaded_chunks)
def _copy_file(filename, location, link):
    """Copy the downloaded archive into ``location``, interactively asking
    the user what to do when the destination already exists."""
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        # NOTE(review): "(a)abort" below looks like a typo for "(a)bort";
        # it is a runtime prompt string, so it is left untouched here.
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))
def unpack_http_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    session=None,  # type: Optional[PipSession]
    hashes=None,  # type: Optional[Hashes]
    progress_bar="on"  # type: str
):
    # type: (...) -> None
    """Fetch the archive behind ``link`` (or reuse a valid copy already in
    ``download_dir``) and unpack it into ``location``."""
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )
    with TempDirectory(kind="unpack") as temp_dir:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link,
                                                          download_dir,
                                                          hashes)
        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link,
                                                         session,
                                                         temp_dir.path,
                                                         hashes,
                                                         progress_bar)
        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse dependencies
        unpack_file(from_path, location, content_type, link)
        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)
        # Freshly downloaded archives live in the temp dir; remove the file
        # explicitly once it has been unpacked (and possibly copied).
        if not already_downloaded_path:
            os.unlink(from_path)
def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> None
    """Unpack link into location.
    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)
    # If it's a url to a local directory
    if is_dir_url(link):
        # Directories are copied wholesale; hash checking does not apply.
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return
    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)
    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)
    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path
    content_type = mimetypes.guess_type(from_path)[0]
    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)
    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """
    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        # Only the scheme of the index URL is kept; host and path come from
        # each individual request() call per the xmlrpc Transport contract.
        index_parts = urllib_parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session
    def request(self, host, handler, request_body, verbose=False):
        # Rebuild the full URL for this XML-RPC call and POST it through the
        # shared PipSession, so retries/auth/proxies match regular pip
        # traffic.
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            self.verbose = verbose
            # parse_response consumes the raw stream and returns the
            # deserialized XML-RPC result.
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise
def unpack_url(
    link,  # type: Optional[Link]
    location,  # type: Optional[str]
    download_dir=None,  # type: Optional[str]
    only_download=False,  # type: bool
    session=None,  # type: Optional[PipSession]
    hashes=None,  # type: Optional[Hashes]
    progress_bar="on"  # type: str
):
    # type: (...) -> None
    """Unpack link.
    If link is a VCS link:
      if only_download, export into download_dir and ignore location
      else unpack into location
    for other types of link:
      - unpack into location
      - if download_dir, copy the file into download_dir
      - if only_download, mark location for deletion
    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)
    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)
    # http urls
    else:
        if session is None:
            # Lazily create a default session when the caller supplied none.
            session = PipSession()
        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes,
            progress_bar=progress_bar
        )
    if only_download:
        write_delete_marker_file(location)
def sanitize_content_filename(filename):
    # type: (str) -> str
    """
    Sanitize the "filename" value from a Content-Disposition header.

    Reduces the value to its final path component so a hostile server
    cannot smuggle directory separators or ".." segments into the name.
    """
    return os.path.basename(filename)


def parse_content_disposition(content_disposition, default_filename):
    # type: (str, str) -> str
    """
    Parse the "filename" value from a Content-Disposition header, and
    return the default filename if the result is empty.
    """
    _type, params = cgi.parse_header(content_disposition)
    filename = params.get('filename')
    if filename:
        # Sanitize the server-supplied filename to prevent a path
        # traversal (CWE-22): without this, a response carrying e.g.
        # 'filename="../../../../etc/cron.d/evil"' would be written
        # outside of the intended download directory.
        filename = sanitize_content_filename(filename)
    return filename or default_filename


def _download_http_url(
    link,  # type: Link
    session,  # type: PipSession
    temp_dir,  # type: str
    hashes,  # type: Hashes
    progress_bar  # type: str
):
    # type: (...) -> Tuple[str, str]
    """Download link url into temp_dir using provided session.

    Returns (path_of_downloaded_file, content_type). Raises
    requests.HTTPError when the server answers with an error status.
    """
    # Strip any fragment (e.g. "#sha256=...") before issuing the request.
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding We're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise
    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        filename = parse_content_disposition(content_disposition, filename)
    ext = splitext(filename)[1]
    if not ext:
        # No extension in the name: derive one from the Content-Type ...
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        # ... or, failing that, from the final (post-redirect) URL.
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes, progress_bar)
    return file_path, content_type
def _check_download_dir(link, download_dir, hashes):
    # type: (Link, str, Hashes) -> Optional[str]
    """Look in ``download_dir`` for an already-downloaded copy of ``link``.

    Return the file's path when it exists and (if hashes were supplied)
    matches; otherwise return None. A file with a bad hash is deleted.
    """
    download_path = os.path.join(download_dir, link.filename)
    if not os.path.exists(download_path):
        return None
    # If already downloaded, does its hash match?
    logger.info('File was already downloaded %s', download_path)
    if not hashes:
        return download_path
    try:
        hashes.check_against_path(download_path)
    except HashMismatch:
        # A stale/bad copy is removed so it will be re-fetched.
        logger.warning(
            'Previously-downloaded file %s has bad hash. '
            'Re-downloading.',
            download_path
        )
        os.unlink(download_path)
        return None
    return download_path
| from __future__ import absolute_import
import cgi
import email.utils
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys
from pip._vendor import requests, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.requests.utils import get_netrc_auth
# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
# why we ignore the type on this import
from pip._vendor.six.moves import xmlrpc_client # type: ignore
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._vendor.urllib3.util import IS_PYOPENSSL
import pip
from pip._internal.exceptions import HashMismatch, InstallationError
from pip._internal.locations import write_delete_marker_file
from pip._internal.models.index import PyPI
from pip._internal.utils.encoding import auto_decode
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.glibc import libc_ver
from pip._internal.utils.misc import (
ARCHIVE_EXTENSIONS, ask, ask_input, ask_password, ask_path_exists,
backup_dir, consume, display_path, format_size, get_installed_version,
path_to_url, remove_auth_from_url, rmtree, split_auth_netloc_from_url,
splitext, unpack_file,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import DownloadProgressProvider
from pip._internal.vcs import vcs
if MYPY_CHECK_RUNNING:
from typing import (
Optional, Tuple, Dict, IO, Text, Union
)
from optparse import Values
from pip._internal.models.link import Link
from pip._internal.utils.hashes import Hashes
from pip._internal.vcs.versioncontrol import AuthInfo, VersionControl
# ``ssl`` can be missing from stripped-down Python builds; TLS support may
# still come from pyOpenSSL in that case (see HAS_TLS below).
try:
    import ssl  # noqa
except ImportError:
    ssl = None
HAS_TLS = (ssl is not None) or IS_PYOPENSSL
__all__ = ['get_file_content',
           'is_url', 'url_to_path', 'path_to_url',
           'is_archive_file', 'unpack_vcs_link',
           'unpack_file_url', 'is_vcs_url', 'is_file_url',
           'unpack_http_url', 'unpack_url',
           'parse_content_disposition', 'sanitize_content_filename']
logger = logging.getLogger(__name__)
# keyring is optional: silently absent when not installed; any other import
# failure is reported as a warning instead of breaking pip.
try:
    import keyring  # noqa
except ImportError:
    keyring = None
except Exception as exc:
    logger.warning("Keyring is skipped due to an exception: %s",
                   str(exc))
    keyring = None
# These are environment variables present when running under various
# CI systems. For each variable, some CI systems that use the variable
# are indicated. The collection was chosen so that for each of a number
# of popular systems, at least one of the environment variables is used.
# This list is used to provide some indication of and lower bound for
# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive.
# For more background, see: https://github.com/pypa/pip/issues/5499
CI_ENVIRONMENT_VARIABLES = (
    # Azure Pipelines
    'BUILD_BUILDID',
    # Jenkins
    'BUILD_ID',
    # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
    'CI',
    # Explicit environment variable.
    'PIP_IS_CI',
)
def looks_like_ci():
    # type: () -> bool
    """
    Return whether it looks like pip is running under CI.
    """
    # A tty check (isatty()) would be unreliable here because some CI
    # systems mimic a tty (e.g. Travis CI); probing well-known CI
    # environment variables gives a more definitive answer.
    for name in CI_ENVIRONMENT_VARIABLES:
        if name in os.environ:
            return True
    return False
def user_agent():
    """
    Return a string representing the user agent.

    The format is "pip/<version> <json>", where <json> is a compact,
    sorted JSON object describing the interpreter, OS and environment.
    """
    data = {
        "installer": {"name": "pip", "version": pip.__version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }
    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    if sys.platform.startswith("linux"):
        from pip._vendor import distro
        # Keep only the non-empty fields from the distro/libc probes.
        distro_infos = dict(filter(
            lambda x: x[1],
            zip(["name", "version", "id"], distro.linux_distribution()),
        ))
        libc = dict(filter(
            lambda x: x[1],
            zip(["lib", "version"], libc_ver()),
        ))
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos
    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()
    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()
    if platform.machine():
        data["cpu"] = platform.machine()
    if HAS_TLS:
        data["openssl_version"] = ssl.OPENSSL_VERSION
    setuptools_version = get_installed_version("setuptools")
    if setuptools_version is not None:
        data["setuptools_version"] = setuptools_version
    # Use None rather than False so as not to give the impression that
    # pip knows it is not being run under CI. Rather, it is a null or
    # inconclusive result. Also, we include some value rather than no
    # value to make it easier to know that the check has been run.
    data["ci"] = True if looks_like_ci() else None
    user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
    if user_data is not None:
        data["user_data"] = user_data
    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
def _get_keyring_auth(url, username):
    """Return the tuple auth for a given url from keyring.

    Returns (username, password) on success, None otherwise. Any keyring
    failure is downgraded to a warning.
    """
    if not url or not keyring:
        return None
    try:
        try:
            get_credential = keyring.get_credential
        except AttributeError:
            pass
        else:
            # Newer keyring releases expose get_credential(), which can
            # resolve the username as well as the password.
            logger.debug("Getting credentials from keyring for %s", url)
            cred = get_credential(url, username)
            if cred is not None:
                return cred.username, cred.password
            # get_credential() exists but found nothing: do not fall
            # through to the password-only API.
            return None
        # Older keyring: password-only lookup requires a username.
        if username:
            logger.debug("Getting password from keyring for %s", url)
            password = keyring.get_password(url, username)
            if password:
                return username, password
    except Exception as exc:
        logger.warning("Keyring is skipped due to an exception: %s",
                       str(exc))
class MultiDomainBasicAuth(AuthBase):
    """Basic-auth handler that caches credentials per netloc and can source
    them from the URL itself, configured index URLs, netrc, keyring, or an
    interactive prompt on a 401 response."""

    def __init__(self, prompting=True, index_urls=None):
        # type: (bool, Optional[Values]) -> None
        self.prompting = prompting
        self.index_urls = index_urls
        self.passwords = {}  # type: Dict[str, AuthInfo]
        # When the user is prompted to enter credentials and keyring is
        # available, we will offer to save them. If the user accepts,
        # this value is set to the credentials they entered. After the
        # request authenticates, the caller should call
        # ``save_credentials`` to save these.
        self._credentials_to_save = None  # type: Tuple[str, str, str]

    def _get_index_url(self, url):
        """Return the original index URL matching the requested URL.
        Cached or dynamically generated credentials may work against
        the original index URL rather than just the netloc.
        The provided url should have had its username and password
        removed already. If the original index url had credentials then
        they will be included in the return value.
        Returns None if no matching index was found, or if --no-index
        was specified by the user.
        """
        if not url or not self.index_urls:
            return None
        for u in self.index_urls:
            prefix = remove_auth_from_url(u).rstrip("/") + "/"
            if url.startswith(prefix):
                return u

    def _get_new_credentials(self, original_url, allow_netrc=True,
                             allow_keyring=True):
        """Find and return credentials for the specified URL."""
        # Split the credentials and netloc from the url.
        url, netloc, url_user_password = split_auth_netloc_from_url(
            original_url)
        # Start with the credentials embedded in the url
        username, password = url_user_password
        if username is not None and password is not None:
            logger.debug("Found credentials in url for %s", netloc)
            return url_user_password
        # Find a matching index url for this request
        index_url = self._get_index_url(url)
        if index_url:
            # Split the credentials from the url.
            index_info = split_auth_netloc_from_url(index_url)
            if index_info:
                index_url, _, index_url_user_password = index_info
                logger.debug("Found index url %s", index_url)
        # If an index URL was found, try its embedded credentials
        if index_url and index_url_user_password[0] is not None:
            username, password = index_url_user_password
            if username is not None and password is not None:
                logger.debug("Found credentials in index url for %s", netloc)
                return index_url_user_password
        # Get creds from netrc if we still don't have them
        if allow_netrc:
            netrc_auth = get_netrc_auth(original_url)
            if netrc_auth:
                logger.debug("Found credentials in netrc for %s", netloc)
                return netrc_auth
        # If we don't have a password and keyring is available, use it.
        if allow_keyring:
            # The index url is more specific than the netloc, so try it first
            kr_auth = (_get_keyring_auth(index_url, username) or
                       _get_keyring_auth(netloc, username))
            if kr_auth:
                logger.debug("Found credentials in keyring for %s", netloc)
                return kr_auth
        return None, None

    def _get_url_and_credentials(self, original_url):
        """Return the credentials to use for the provided URL.
        If allowed, netrc and keyring may be used to obtain the
        correct credentials.
        Returns (url_without_credentials, username, password). Note
        that even if the original URL contains credentials, this
        function may return a different username and password.
        """
        url, netloc, _ = split_auth_netloc_from_url(original_url)
        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))
        # If nothing cached, acquire new credentials without prompting
        # the user (e.g. from netrc, keyring, or similar).
        if username is None or password is None:
            username, password = self._get_new_credentials(original_url)
            if username is not None and password is not None:
                # Store the username and password
                self.passwords[netloc] = (username, password)
        return url, username, password

    def __call__(self, req):
        # Get credentials for this request
        url, username, password = self._get_url_and_credentials(req.url)
        # Set the url of the request to the url without any credentials
        req.url = url
        if username is not None and password is not None:
            # Send the basic auth with this request
            req = HTTPBasicAuth(username, password)(req)
        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)
        return req

    # Factored out to allow for easy patching in tests
    def _prompt_for_password(self, netloc):
        """Prompt for credentials; always return a (user, pass, save) triple."""
        username = ask_input("User for %s: " % netloc)
        if not username:
            # Bug fix: the caller (handle_401) unpacks three values, so
            # returning the old two-element ``None, None`` here crashed
            # with a ValueError whenever the user entered an empty
            # username. Return a full triple instead.
            return None, None, False
        auth = _get_keyring_auth(netloc, username)
        if auth:
            # Keyring already knows the password; no need to offer saving.
            return auth[0], auth[1], False
        password = ask_password("Password: ")
        return username, password, True

    # Factored out to allow for easy patching in tests
    def _should_save_password_to_keyring(self):
        if not keyring:
            return False
        return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"

    def handle_401(self, resp, **kwargs):
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp
        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp
        parsed = urllib_parse.urlparse(resp.url)
        # Prompt the user for a new username and password
        username, password, save = self._prompt_for_password(parsed.netloc)
        # Store the new username and password to use for future requests
        self._credentials_to_save = None
        if username is not None and password is not None:
            self.passwords[parsed.netloc] = (username, password)
            # Prompt to save the password to keyring
            if save and self._should_save_password_to_keyring():
                self._credentials_to_save = (parsed.netloc, username, password)
        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()
        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)
        req.register_hook("response", self.warn_on_401)
        # On successful request, save the credentials that were used to
        # keyring. (Note that if the user responded "no" above, this member
        # is not set and nothing will be saved.)
        if self._credentials_to_save:
            req.register_hook("response", self.save_credentials)
        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)
        return new_resp

    def warn_on_401(self, resp, **kwargs):
        """Response callback to warn about incorrect credentials."""
        if resp.status_code == 401:
            logger.warning('401 Error, Credentials not correct for %s',
                           resp.request.url)

    def save_credentials(self, resp, **kwargs):
        """Response callback to save credentials on success."""
        assert keyring is not None, "should never reach here without keyring"
        if not keyring:
            return
        creds = self._credentials_to_save
        self._credentials_to_save = None
        if creds and resp.status_code < 400:
            try:
                logger.info('Saving credentials to keyring')
                keyring.set_password(*creds)
            except Exception:
                logger.exception('Failed to save credentials')
class LocalFSAdapter(BaseAdapter):
    """Transport adapter that answers ``file://`` requests from local disk."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        local_path = url_to_path(request.url)
        response = Response()
        response.status_code = 200
        response.url = request.url

        try:
            stat_result = os.stat(local_path)
        except OSError as exc:
            # Missing/unreadable file is reported as a 404 with the
            # exception stashed in .raw for debugging.
            response.status_code = 404
            response.raw = exc
        else:
            mtime = email.utils.formatdate(stat_result.st_mtime, usegmt=True)
            guessed_type = mimetypes.guess_type(local_path)[0] or "text/plain"
            response.headers = CaseInsensitiveDict({
                "Content-Type": guessed_type,
                "Content-Length": stat_result.st_size,
                "Last-Modified": mtime,
            })
            response.raw = open(local_path, "rb")
            response.close = response.raw.close

        return response

    def close(self):
        # Nothing to clean up: file handles are owned by the responses.
        pass
class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, *args, **kwargs):
        super(SafeFileCache, self).__init__(*args, **kwargs)

        # Refuse to cache into a directory whose nearest existing ancestor
        # is not owned by the current user (e.g. a root-owned ~/.cache left
        # behind by `sudo pip`); caching is disabled instead.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )

            # A None directory turns every operation below into a no-op.
            self.directory = None

    def get(self, *args, **kwargs):
        # Disabled cache behaves as a transparent miss.
        if self.directory is not None:
            try:
                return super(SafeFileCache, self).get(*args, **kwargs)
            except (LockError, OSError, IOError):
                # An inaccessible cache just means the request proceeds
                # without caching; swallow the error deliberately.
                pass

    def set(self, *args, **kwargs):
        if self.directory is not None:
            try:
                return super(SafeFileCache, self).set(*args, **kwargs)
            except (LockError, OSError, IOError):
                # Best-effort write: ignore cache-storage failures.
                pass

    def delete(self, *args, **kwargs):
        if self.directory is not None:
            try:
                return super(SafeFileCache, self).delete(*args, **kwargs)
            except (LockError, OSError, IOError):
                # Best-effort delete: ignore cache-storage failures.
                pass
class InsecureHTTPAdapter(HTTPAdapter):
    """HTTPAdapter that skips TLS certificate verification entirely.

    Used for hosts the user explicitly marked as trusted/insecure; never
    combined with response caching (the response cannot be validated).
    """

    def cert_verify(self, conn, url, verify, cert):
        # Disable certificate requirements and drop the CA bundle so the
        # underlying urllib3 connection performs no verification.
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None
class PipSession(requests.Session):
    """requests.Session preconfigured for pip: custom User-Agent,
    index-aware basic auth, a retry policy, optional caching of securely
    fetched responses, and adapters for file:// and trusted-insecure hosts.
    """

    # Default per-request timeout; request() applies it when the caller
    # does not pass one explicitly.
    timeout = None  # type: Optional[int]

    def __init__(self, *args, **kwargs):
        # pip-specific options are popped here; anything left is forwarded
        # to requests.Session unchanged.
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])
        index_urls = kwargs.pop("index_urls", None)

        super(PipSession, self).__init__(*args, **kwargs)

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth(index_urls=index_urls)

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,
            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],
            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.  (Mounted last so the per-host prefix wins over
        # the generic "https://" mount above.)
        for host in insecure_hosts:
            self.mount("https://{}/".format(host), insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        """Forward to requests.Session.request, filling in the session's
        default timeout when the caller did not supply one."""
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)
def get_file_content(url, comes_from=None, session=None):
    # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text]
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content).  Content is unicode.

    :param url: File path or url.
    :param comes_from: Origin description of requirements.
    :param session: Instance of pip.download.PipSession.
    :raises TypeError: if no session is supplied.
    :raises InstallationError: if a remote requirements file references a
        local file: URL, or the file cannot be opened.
    """
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        # A requirements file fetched over HTTP must not reach into the
        # local filesystem via file: references.
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            path = url.split(':', 1)[1]
            # Normalize Windows backslashes so the drive-letter regex
            # below can match.
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                # Rewrite legacy "/c|/dir" drive notation to "c:/dir".
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                # Collapse repeated leading slashes to a single one.
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text
    # Anything without a recognized scheme (or a file: URL rewritten to a
    # plain path above) is read from disk.
    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content
# Matches a leading "http:", "https:" or "file:" scheme (case-insensitive).
_scheme_re = re.compile(r'^(http|https|file):', re.I)
# Matches a Windows drive letter written with a trailing pipe, e.g. "/c|/dir"
# (legacy file-URL drive notation handled in get_file_content).
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
def is_url(name):
    # type: (Union[str, Text]) -> bool
    """Return True when ``name`` looks like a URL with a known scheme."""
    scheme, sep, _ = name.partition(':')
    if not sep:
        # No colon at all: cannot be a URL.
        return False
    return scheme.lower() in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
def url_to_path(url):
    # type: (str) -> str
    """
    Convert a file: URL to a path.

    :raises ValueError: when the URL carries a non-local authority on a
        non-Windows platform.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    _, netloc, path, _, _ = urllib_parse.urlsplit(url)

    if not netloc or netloc == 'localhost':
        # According to RFC 8089, same as empty authority.
        netloc = ''
    elif sys.platform == 'win32':
        # If we have a UNC path, prepend UNC share notation.
        netloc = '\\\\' + netloc
    else:
        raise ValueError(
            'non-local file URIs are not supported on this platform: %r'
            % url
        )

    # url2pathname also percent-decodes and converts slashes as needed.
    path = urllib_request.url2pathname(netloc + path)
    return path
def is_archive_file(name):
    # type: (str) -> bool
    """Return True when ``name`` has a recognized archive extension."""
    extension = splitext(name)[1].lower()
    return extension in ARCHIVE_EXTENSIONS
def unpack_vcs_link(link, location):
    """Obtain the code behind a VCS ``link`` into ``location`` using the
    backend registered for the link's scheme."""
    backend = _get_used_vcs_backend(link)
    backend.unpack(location, url=link.url)
def _get_used_vcs_backend(link):
    # type: (Link) -> Optional[VersionControl]
    """
    Return the first registered VersionControl backend whose schemes
    include the link's scheme, or None when no backend matches.
    """
    return next(
        (backend for backend in vcs.backends
         if link.scheme in backend.schemes),
        None,
    )
def is_vcs_url(link):
    # type: (Link) -> bool
    """Return True when a registered VCS backend claims the link's scheme."""
    backend = _get_used_vcs_backend(link)
    return bool(backend)
def is_file_url(link):
    # type: (Link) -> bool
    """Return True when the link's URL uses the file: scheme."""
    lowered = link.url.lower()
    return lowered.startswith('file:')
def is_dir_url(link):
    # type: (Link) -> bool
    """Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://; callers should
    check is_file_url() first.
    """
    local_path = url_to_path(link.url_without_fragment)
    return os.path.isdir(local_path)
def _progress_indicator(iterable, *args, **kwargs):
    # No-op progress indicator: hands the iterable back unchanged. Used as
    # the default in _download_url when no progress bar should be shown;
    # extra args mirror DownloadProgressProvider's call signature.
    return iterable
def _download_url(
    resp,  # type: Response
    link,  # type: Link
    content_file,  # type: IO
    hashes,  # type: Hashes
    progress_bar  # type: str
):
    # type: (...) -> None
    """Stream the body of ``resp`` into ``content_file``.

    Shows a progress indicator when appropriate and, if ``hashes`` is
    non-empty, verifies the downloaded bytes against it (raising
    HashMismatch on failure) while they are written.
    """
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        # Missing or malformed header: treat the length as unknown.
        total_length = 0

    cached_resp = getattr(resp, "from_cache", False)
    # Progress is shown only at INFO verbosity, for non-cached responses,
    # and when the download is either large (>40KB) or of unknown size.
    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        # Yield raw (undecoded) chunks from the response body.
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        # Tee each chunk to the output file while passing it through, so
        # hash checking below sees exactly what was written.
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        progress_indicator = DownloadProgressProvider(progress_bar,
                                                      max=total_length)
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
        else:
            logger.info("Downloading %s", url)
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    # Either consumer below drives the generator pipeline; check_against_
    # chunks additionally raises HashMismatch on a failed verification.
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        consume(downloaded_chunks)
def _copy_file(filename, location, link):
    """Copy ``filename`` into ``location`` under the link's filename,
    prompting interactively when the destination already exists."""
    download_location = os.path.join(location, link.filename)
    should_copy = True
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            # Keep the existing file untouched.
            should_copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if should_copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))
def unpack_http_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    session=None,  # type: Optional[PipSession]
    hashes=None,  # type: Optional[Hashes]
    progress_bar="on"  # type: str
):
    # type: (...) -> None
    """Fetch the archive behind an http(s) ``link`` and unpack it into
    ``location``, reusing a hash-valid copy from ``download_dir`` when one
    exists and copying fresh downloads back there when requested.

    :raises TypeError: if no ``session`` is supplied.
    """
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    with TempDirectory(kind="unpack") as temp_dir:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link,
                                                          download_dir,
                                                          hashes)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link,
                                                         session,
                                                         temp_dir.path,
                                                         hashes,
                                                         progress_bar)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        # Fresh downloads lived in the temp dir; remove the file explicitly
        # (the TempDirectory itself is cleaned up by the context manager).
        if not already_downloaded_path:
            os.unlink(from_path)
def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> None
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.

    :raises HashMismatch: (via ``hashes.check_against_path``) when the
        local file does not match a required hash.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object, so XML-RPC calls share pip's auth, retries and proxies.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        self._scheme = urllib_parse.urlparse(index_url).scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        url = urllib_parse.urlunparse(
            (self._scheme, host, handler, None, None, None))
        try:
            response = self._session.post(url, data=request_body,
                                          headers={'Content-Type': 'text/xml'},
                                          stream=True)
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise
def unpack_url(
    link,  # type: Optional[Link]
    location,  # type: Optional[str]
    download_dir=None,  # type: Optional[str]
    only_download=False,  # type: bool
    session=None,  # type: Optional[PipSession]
    hashes=None,  # type: Optional[Hashes]
    progress_bar="on"  # type: str
):
    # type: (...) -> None
    """Unpack link.
    If link is a VCS link:
      if only_download, export into download_dir and ignore location
          else unpack into location
    for other types of link:
      - unpack into location
      - if download_dir, copy the file into download_dir
      - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # Dispatch on the link type, most specific first.
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            # Fallback session; callers normally supply a configured one.
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes,
            progress_bar=progress_bar
        )
    if only_download:
        write_delete_marker_file(location)
def sanitize_content_filename(filename):
    # type: (str) -> str
    """
    Sanitize the "filename" value from a Content-Disposition header.

    Reduces the server-supplied name to its final path component so a
    hostile server cannot smuggle ".." or directory separators into the
    download path (directory traversal, CWE-22).
    """
    return os.path.basename(filename)
def parse_content_disposition(content_disposition, default_filename):
    # type: (str, str) -> str
    """
    Parse the "filename" value from a Content-Disposition header, and
    return the default filename if the result is empty.
    """
    _type, params = cgi.parse_header(content_disposition)
    raw_name = params.get('filename')
    if not raw_name:
        return default_filename
    # Sanitize the server-supplied name to block directory traversal via
    # ".." or path separators; fall back when sanitizing leaves nothing.
    return sanitize_content_filename(raw_name) or default_filename
def _download_http_url(
    link,  # type: Link
    session,  # type: PipSession
    temp_dir,  # type: str
    hashes,  # type: Hashes
    progress_bar  # type: str
):
    # type: (...) -> Tuple[str, str]
    """Download link url into temp_dir using provided session.

    Returns ``(file_path, content_type)`` for the downloaded archive; the
    filename is taken from the link, refined via the Content-Disposition
    header (sanitized) and MIME/redirect-based extension guessing.
    """
    # Strip the fragment (e.g. an embedded #sha256=... hash) before fetching.
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding We're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        # parse_content_disposition sanitizes the name (basename only) to
        # prevent directory traversal from a hostile server.
        filename = parse_content_disposition(content_disposition, filename)
    ext = splitext(filename)[1]
    if not ext:
        # No extension on the chosen filename: derive one from the MIME type.
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        # Still nothing; a redirect may have landed on a URL whose path
        # carries the real extension.
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes, progress_bar)
    return file_path, content_type
def _check_download_dir(link, download_dir, hashes):
# type: (Link, str, Hashes) -> Optional[str]
""" Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
"""
download_path = os.path.join(download_dir, link.filename)
if os.path.exists(download_path):
# If already downloaded, does its hash match?
logger.info('File was already downloaded %s', download_path)
if hashes:
try:
hashes.check_against_path(download_path)
except HashMismatch:
logger.warning(
'Previously-downloaded file %s has bad hash. '
'Re-downloading.',
download_path
)
os.unlink(download_path)
return None
return download_path
return None
| path_disclosure | {
"code": [
" 'unpack_http_url', 'unpack_url']",
" type, params = cgi.parse_header(content_disposition)",
" filename = params.get('filename') or filename"
],
"line_no": [
69,
1100,
1103
]
} | {
"code": [
" 'unpack_http_url', 'unpack_url',",
"def sanitize_content_filename(filename):",
" \"\"\"",
" Sanitize the \"filename\" value from a Content-Disposition header.",
" \"\"\"",
" return os.path.basename(filename)",
"def parse_content_disposition(content_disposition, default_filename):",
" \"\"\"",
" Parse the \"filename\" value from a Content-Disposition header, and",
" \"\"\"",
" filename = params.get('filename')",
" filename = sanitize_content_filename(filename)",
" filename = parse_content_disposition(content_disposition, filename)"
],
"line_no": [
69,
1054,
1056,
1057,
1058,
1059,
1062,
1064,
1065,
1067,
1069,
1073,
1124
]
} | from __future__ import absolute_import
import cgi
import email.utils
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys
from pip._vendor import .requests, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.requests.utils import .get_netrc_auth
from pip._vendor.six.moves import xmlrpc_client # VAR_90: ignore
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import .request as urllib_request
from pip._vendor.urllib3.util import IS_PYOPENSSL
import pip
from pip._internal.exceptions import HashMismatch, InstallationError
from pip._internal.locations import write_delete_marker_file
from pip._internal.models.index import PyPI
from pip._internal.utils.encoding import auto_decode
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.glibc import .libc_ver
from pip._internal.utils.misc import (
ARCHIVE_EXTENSIONS, ask, ask_input, ask_password, ask_path_exists,
backup_dir, consume, display_path, format_size, get_installed_version,
path_to_url, remove_auth_from_url, rmtree, split_auth_netloc_from_url,
splitext, unpack_file,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import DownloadProgressProvider
from pip._internal.vcs import vcs
if MYPY_CHECK_RUNNING:
from typing import (
Optional, Tuple, Dict, IO, Text, Union
)
from optparse import Values
from pip._internal.models.link import Link
from pip._internal.utils.hashes import Hashes
from pip._internal.vcs.versioncontrol import AuthInfo, VersionControl
try:
import .ssl # noqa
except ImportError:
VAR_68 = None
VAR_0 = (VAR_68 is not None) or IS_PYOPENSSL
__all__ = ['get_file_content',
'is_url', 'url_to_path', 'path_to_url',
'is_archive_file', 'unpack_vcs_link',
'unpack_file_url', 'is_vcs_url', 'is_file_url',
'unpack_http_url', 'unpack_url']
VAR_1 = logging.getLogger(__name__)
try:
import .keyring # noqa
except ImportError:
VAR_69 = None
except Exception as exc:
VAR_1.warning("Keyring is skipped due to an exception: %s",
str(exc))
VAR_69 = None
VAR_2 = (
'BUILD_BUILDID',
'BUILD_ID',
'CI',
'PIP_IS_CI',
)
def FUNC_0():
return any(VAR_9 in os.environ for VAR_9 in VAR_2)
def FUNC_1():
VAR_23 = {
"installer": {"name": "pip", "version": pip.__version__},
"python": platform.python_version(),
"implementation": {
"name": platform.python_implementation(),
},
}
if VAR_23["implementation"]["name"] == 'CPython':
VAR_23["implementation"]["version"] = platform.python_version()
elif VAR_23["implementation"]["name"] == 'PyPy':
if sys.pypy_version_info.releaselevel == 'final':
VAR_103 = sys.pypy_version_info[:3]
else:
VAR_103 = sys.pypy_version_info
VAR_23["implementation"]["version"] = ".".join(
[str(x) for x in VAR_103]
)
elif VAR_23["implementation"]["name"] == 'Jython':
VAR_23["implementation"]["version"] = platform.python_version()
elif VAR_23["implementation"]["name"] == 'IronPython':
VAR_23["implementation"]["version"] = platform.python_version()
if sys.platform.startswith("linux"):
from pip._vendor import distro
VAR_70 = dict(filter(
lambda x: x[1],
zip(["name", "version", "id"], distro.linux_distribution()),
))
VAR_71 = dict(filter(
lambda x: x[1],
zip(["lib", "version"], libc_ver()),
))
if VAR_71:
VAR_70["libc"] = VAR_71
if VAR_70:
VAR_23["distro"] = VAR_70
if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
VAR_23["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
if platform.system():
VAR_23.setdefault("system", {})["name"] = platform.system()
if platform.release():
VAR_23.setdefault("system", {})["release"] = platform.release()
if platform.machine():
VAR_23["cpu"] = platform.machine()
if VAR_0:
VAR_23["openssl_version"] = VAR_68.OPENSSL_VERSION
VAR_24 = get_installed_version("setuptools")
if VAR_24 is not None:
VAR_23["setuptools_version"] = VAR_24
VAR_23["ci"] = True if FUNC_0() else None
VAR_25 = os.environ.get("PIP_USER_AGENT_USER_DATA")
if VAR_25 is not None:
VAR_23["user_data"] = VAR_25
return "{VAR_23[installer][VAR_9]}/{VAR_23[installer][version]} {json}".format(
VAR_23=data,
json=json.dumps(VAR_23, separators=(",", ":"), sort_keys=True),
)
def FUNC_2(VAR_3, VAR_4):
if not VAR_3 or not VAR_69:
return None
try:
try:
VAR_92 = VAR_69.get_credential
except AttributeError:
pass
else:
VAR_1.debug("Getting credentials from VAR_69 for %s", VAR_3)
VAR_93 = VAR_92(VAR_3, VAR_4)
if VAR_93 is not None:
return VAR_93.username, VAR_93.password
return None
if VAR_4:
VAR_1.debug("Getting VAR_73 from VAR_69 for %s", VAR_3)
VAR_73 = VAR_69.get_password(VAR_3, VAR_4)
if VAR_73:
return VAR_4, VAR_73
except Exception as exc:
VAR_1.warning("Keyring is skipped due to an exception: %s",
str(exc))
class CLASS_0(AuthBase):
def __init__(self, VAR_26=True, VAR_27=None):
self.prompting = VAR_26
self.index_urls = VAR_27
self.passwords = {} # VAR_90: Dict[str, AuthInfo]
self._credentials_to_save = None # VAR_90: Tuple[str, str, str]
def FUNC_20(self, VAR_3):
if not VAR_3 or not self.index_urls:
return None
for u in self.index_urls:
VAR_94 = remove_auth_from_url(u).rstrip("/") + "/"
if VAR_3.startswith(VAR_94):
return u
def FUNC_21(self, VAR_28, VAR_29=True,
VAR_30=True):
VAR_3, VAR_32, VAR_72 = split_auth_netloc_from_url(
VAR_28)
VAR_4, VAR_73 = VAR_72
if VAR_4 is not None and VAR_73 is not None:
VAR_1.debug("Found credentials in VAR_3 for %s", VAR_32)
return VAR_72
VAR_58 = self._get_index_url(VAR_3)
if VAR_58:
VAR_95 = split_auth_netloc_from_url(VAR_58)
if VAR_95:
VAR_58, VAR_43, VAR_104 = VAR_95
VAR_1.debug("Found index VAR_3 %s", VAR_58)
if VAR_58 and VAR_104[0] is not None:
VAR_4, VAR_73 = VAR_104
if VAR_4 is not None and VAR_73 is not None:
VAR_1.debug("Found credentials in index VAR_3 for %s", VAR_32)
return VAR_104
if VAR_29:
VAR_96 = get_netrc_auth(VAR_28)
if VAR_96:
VAR_1.debug("Found credentials in netrc for %s", VAR_32)
return VAR_96
if VAR_30:
VAR_97 = (FUNC_2(VAR_58, VAR_4) or
FUNC_2(VAR_32, VAR_4))
if VAR_97:
VAR_1.debug("Found credentials in VAR_69 for %s", VAR_32)
return VAR_97
return None, None
def FUNC_22(self, VAR_28):
VAR_3, VAR_32, VAR_43 = split_auth_netloc_from_url(VAR_28)
VAR_4, VAR_73 = self.passwords.get(VAR_32, (None, None))
if VAR_4 is None or VAR_73 is None:
VAR_4, VAR_73 = self._get_new_credentials(VAR_28)
if VAR_4 is not None and VAR_73 is not None:
self.passwords[VAR_32] = (VAR_4, VAR_73)
return VAR_3, VAR_4, VAR_73
def __call__(self, VAR_31):
VAR_3, VAR_4, VAR_73 = self._get_url_and_credentials(VAR_31.url)
VAR_31.url = VAR_3
if VAR_4 is not None and VAR_73 is not None:
VAR_31 = HTTPBasicAuth(VAR_4, VAR_73)(VAR_31)
VAR_31.register_hook("response", self.handle_401)
return VAR_31
def FUNC_23(self, VAR_32):
VAR_4 = ask_input("User for %s: " % VAR_32)
if not VAR_4:
return None, None
VAR_74 = FUNC_2(VAR_32, VAR_4)
if VAR_74:
return VAR_74[0], VAR_74[1], False
VAR_73 = ask_password("Password: ")
return VAR_4, VAR_73, True
def FUNC_24(self):
if not VAR_69:
return False
return ask("Save credentials to VAR_69 [y/N]: ", ["y", "n"]) == "y"
def FUNC_25(self, VAR_15, **VAR_14):
if VAR_15.status_code != 401:
return VAR_15
if not self.prompting:
return VAR_15
VAR_75 = urllib_parse.urlparse(VAR_15.url)
VAR_4, VAR_73, VAR_76 = self._prompt_for_password(VAR_75.netloc)
self._credentials_to_save = None
if VAR_4 is not None and VAR_73 is not None:
self.passwords[VAR_75.netloc] = (VAR_4, VAR_73)
if VAR_76 and self._should_save_password_to_keyring():
self._credentials_to_save = (VAR_75.netloc, VAR_4, VAR_73)
VAR_15.content
VAR_15.raw.release_conn()
VAR_31 = HTTPBasicAuth(VAR_4 or "", VAR_73 or "")(VAR_15.request)
VAR_31.register_hook("response", self.warn_on_401)
if self._credentials_to_save:
VAR_31.register_hook("response", self.save_credentials)
VAR_77 = VAR_15.connection.send(VAR_31, **VAR_14)
VAR_77.history.append(VAR_15)
return VAR_77
def FUNC_26(self, VAR_15, **VAR_14):
if VAR_15.status_code == 401:
VAR_1.warning('401 Error, Credentials not correct for %s',
VAR_15.request.url)
def FUNC_27(self, VAR_15, **VAR_14):
assert VAR_69 is not None, "should never reach here without keyring"
if not VAR_69:
return
VAR_78 = self._credentials_to_save
self._credentials_to_save = None
if VAR_78 and VAR_15.status_code < 400:
try:
VAR_1.info('Saving credentials to keyring')
VAR_69.set_password(*VAR_78)
except Exception:
VAR_1.exception('Failed to VAR_76 credentials')
class CLASS_1(BaseAdapter):
def FUNC_28(self, VAR_33, VAR_34=None, VAR_35=None, VAR_36=None, VAR_37=None,
VAR_38=None):
VAR_79 = FUNC_5(VAR_33.url)
VAR_15 = Response()
VAR_15.status_code = 200
VAR_15.url = VAR_33.url
try:
VAR_98 = os.stat(VAR_79)
except OSError as exc:
VAR_15.status_code = 404
VAR_15.raw = exc
else:
VAR_99 = email.utils.formatdate(VAR_98.st_mtime, usegmt=True)
VAR_57 = mimetypes.guess_type(VAR_79)[0] or "text/plain"
VAR_15.headers = CaseInsensitiveDict({
"Content-Type": VAR_57,
"Content-Length": VAR_98.st_size,
"Last-Modified": VAR_99,
})
VAR_15.raw = open(VAR_79, "rb")
VAR_15.close = VAR_15.raw.close
return VAR_15
def FUNC_29(self):
pass
class CLASS_2(FileCache):
def __init__(self, *VAR_13, **VAR_14):
super(CLASS_2, self).__init__(*VAR_13, **VAR_14)
if not check_path_owner(self.directory):
VAR_1.warning(
"The directory '%s' or its parent directory is not owned by "
"the current user and the VAR_81 has been disabled. Please "
"check the permissions and owner of that directory. If "
"executing pip with sudo, you may want sudo's -H flag.",
self.directory,
)
self.directory = None
def FUNC_30(self, *VAR_13, **VAR_14):
if self.directory is None:
return
try:
return super(CLASS_2, self).get(*VAR_13, **VAR_14)
except (LockError, OSError, IOError):
pass
def FUNC_31(self, *VAR_13, **VAR_14):
if self.directory is None:
return
try:
return super(CLASS_2, self).set(*VAR_13, **VAR_14)
except (LockError, OSError, IOError):
pass
def FUNC_32(self, *VAR_13, **VAR_14):
if self.directory is None:
return
try:
return super(CLASS_2, self).delete(*VAR_13, **VAR_14)
except (LockError, OSError, IOError):
pass
class CLASS_3(HTTPAdapter):
def FUNC_33(self, VAR_39, VAR_3, VAR_36, VAR_37):
VAR_39.cert_reqs = 'CERT_NONE'
VAR_39.ca_certs = None
class CLASS_4(requests.Session):
VAR_35 = None # VAR_90: Optional[int]
def __init__(self, *VAR_13, **VAR_14):
VAR_80 = VAR_14.pop("retries", 0)
VAR_81 = VAR_14.pop("cache", None)
VAR_82 = VAR_14.pop("insecure_hosts", [])
VAR_27 = VAR_14.pop("index_urls", None)
super(CLASS_4, self).__init__(*VAR_13, **VAR_14)
self.headers["User-Agent"] = FUNC_1()
self.auth = CLASS_0(VAR_27=index_urls)
VAR_80 = urllib3.Retry(
total=VAR_80,
status_forcelist=[500, 503, 520, 527],
backoff_factor=0.25,
)
if VAR_81:
VAR_100 = CacheControlAdapter(
VAR_81=CLASS_2(VAR_81, use_dir_lock=True),
max_retries=VAR_80,
)
else:
VAR_100 = HTTPAdapter(max_retries=VAR_80)
VAR_83 = CLASS_3(max_retries=VAR_80)
self.mount("https://", VAR_100)
self.mount("http://", VAR_83)
self.mount("file://", CLASS_1())
for VAR_60 in VAR_82:
self.mount("https://{}/".format(VAR_60), VAR_83)
def VAR_33(self, VAR_40, VAR_3, *VAR_13, **VAR_14):
VAR_14.setdefault("timeout", self.timeout)
return super(CLASS_4, self).request(VAR_40, VAR_3, *VAR_13, **VAR_14)
def FUNC_3(VAR_3, VAR_5=None, VAR_6=None):
if VAR_6 is None:
raise TypeError(
"get_file_content() missing 1 required keyword argument: 'session'"
)
VAR_41 = VAR_7.search(VAR_3)
if VAR_41:
VAR_42 = VAR_41.group(1).lower()
if (VAR_42 == 'file' and VAR_5 and
VAR_5.startswith('http')):
raise InstallationError(
'Requirements file %s references URL %s, which is local'
% (VAR_5, VAR_3))
if VAR_42 == 'file':
VAR_44 = VAR_3.split(':', 1)[1]
VAR_44 = VAR_44.replace('\\', '/')
VAR_41 = VAR_8.match(VAR_44)
if VAR_41:
VAR_44 = VAR_41.group(1) + ':' + VAR_44.split('|', 1)[1]
VAR_44 = urllib_parse.unquote(VAR_44)
if VAR_44.startswith('/'):
VAR_44 = '/' + VAR_44.lstrip('/')
VAR_3 = VAR_44
else:
VAR_15 = VAR_6.get(VAR_3)
VAR_15.raise_for_status()
return VAR_15.url, VAR_15.text
try:
with open(VAR_3, 'rb') as f:
VAR_101 = auto_decode(f.read())
except IOError as exc:
raise InstallationError(
'Could not open requirements file: %s' % str(exc)
)
return VAR_3, VAR_101
VAR_7 = re.compile(r'^(http|https|file):', re.I)
VAR_8 = re.compile(r'/*([a-z])\|', re.I)
def FUNC_4(VAR_9):
if ':' not in VAR_9:
return False
VAR_42 = VAR_9.split(':', 1)[0].lower()
return VAR_42 in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
def FUNC_5(VAR_3):
assert VAR_3.startswith('file:'), (
"You can only turn file: urls into filenames (not %r)" % VAR_3)
VAR_43, VAR_32, VAR_44, VAR_43, VAR_43 = urllib_parse.urlsplit(VAR_3)
if not VAR_32 or VAR_32 == 'localhost':
VAR_32 = ''
elif sys.platform == 'win32':
VAR_32 = '\\\\' + VAR_32
else:
raise ValueError(
'non-local file URIs are not supported on this platform: %r'
% VAR_3
)
VAR_44 = urllib_request.url2pathname(VAR_32 + VAR_44)
return VAR_44
def FUNC_6(VAR_9):
VAR_45 = splitext(VAR_9)[1].lower()
if VAR_45 in ARCHIVE_EXTENSIONS:
return True
return False
def FUNC_7(VAR_10, VAR_11):
VAR_46 = FUNC_8(VAR_10)
VAR_46.unpack(VAR_11, VAR_3=VAR_10.url)
def FUNC_8(VAR_10):
for VAR_46 in vcs.backends:
if VAR_10.scheme in VAR_46.schemes:
return VAR_46
return None
def FUNC_9(VAR_10):
return bool(FUNC_8(VAR_10))
def FUNC_10(VAR_10):
return VAR_10.url.lower().startswith('file:')
def FUNC_11(VAR_10):
VAR_47 = FUNC_5(VAR_10.url_without_fragment)
return os.path.isdir(VAR_47)
def FUNC_12(VAR_12, *VAR_13, **VAR_14):
return VAR_12
def FUNC_13(
VAR_15, # VAR_90: Response
VAR_10, # VAR_90: Link
VAR_16, # VAR_90: IO
VAR_17, # VAR_90: Hashes
VAR_18 # VAR_90: str
):
try:
VAR_84 = int(VAR_15.headers['content-length'])
except (ValueError, KeyError, TypeError):
VAR_84 = 0
VAR_48 = getattr(VAR_15, "from_cache", False)
if VAR_1.getEffectiveLevel() > logging.INFO:
VAR_85 = False
elif VAR_48:
VAR_85 = False
elif VAR_84 > (40 * 1000):
VAR_85 = True
elif not VAR_84:
VAR_85 = True
else:
VAR_85 = False
VAR_49 = VAR_10.show_url
def FUNC_35(VAR_50):
try:
for VAR_105 in VAR_15.raw.stream(
VAR_50,
decode_content=False):
yield VAR_105
except AttributeError:
while True:
VAR_105 = VAR_15.raw.read(VAR_50)
if not VAR_105:
break
yield VAR_105
def FUNC_36(VAR_51):
for VAR_105 in VAR_51:
VAR_16.write(VAR_105)
yield VAR_105
VAR_52 = FUNC_12
if VAR_10.netloc == PyPI.netloc:
VAR_3 = VAR_49
else:
VAR_3 = VAR_10.url_without_fragment
if VAR_85: # We don't show progress on cached responses
VAR_52 = DownloadProgressProvider(VAR_18,
max=VAR_84)
if VAR_84:
VAR_1.info("Downloading %s (%s)", VAR_3, format_size(VAR_84))
else:
VAR_1.info("Downloading %s", VAR_3)
elif VAR_48:
VAR_1.info("Using cached %s", VAR_3)
else:
VAR_1.info("Downloading %s", VAR_3)
VAR_1.debug('Downloading from URL %s', VAR_10)
VAR_53 = FUNC_36(
VAR_52(
FUNC_35(CONTENT_CHUNK_SIZE),
CONTENT_CHUNK_SIZE
)
)
if VAR_17:
hashes.check_against_chunks(VAR_53)
else:
consume(VAR_53)
def FUNC_14(VAR_19, VAR_11, VAR_10):
VAR_54 = True
VAR_55 = os.path.join(VAR_11, VAR_10.filename)
if os.path.exists(VAR_55):
VAR_86 = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort' %
display_path(VAR_55), ('i', 'w', 'b', 'a'))
if VAR_86 == 'i':
VAR_54 = False
elif VAR_86 == 'w':
VAR_1.warning('Deleting %s', display_path(VAR_55))
os.remove(VAR_55)
elif VAR_86 == 'b':
VAR_106 = backup_dir(VAR_55)
VAR_1.warning(
'Backing up %s to %s',
display_path(VAR_55),
display_path(VAR_106),
)
shutil.move(VAR_55, VAR_106)
elif VAR_86 == 'a':
sys.exit(-1)
if VAR_54:
shutil.copy(VAR_19, VAR_55)
VAR_1.info('Saved %s', display_path(VAR_55))
def FUNC_15(
VAR_10, # VAR_90: Link
VAR_11, # VAR_90: str
VAR_20=None, # VAR_90: Optional[str]
VAR_6=None, # VAR_90: Optional[CLASS_4]
VAR_17=None, # VAR_90: Optional[Hashes]
VAR_18="on" # VAR_90: str
):
if VAR_6 is None:
raise TypeError(
"unpack_http_url() missing 1 required keyword argument: 'session'"
)
with TempDirectory(kind="unpack") as VAR_22:
VAR_56 = None
if VAR_20:
VAR_56 = FUNC_19(VAR_10,
VAR_20,
VAR_17)
if VAR_56:
VAR_87 = VAR_56
VAR_57 = mimetypes.guess_type(VAR_87)[0]
else:
VAR_87, VAR_57 = FUNC_18(VAR_10,
VAR_6,
VAR_22.path,
VAR_17,
VAR_18)
unpack_file(VAR_87, VAR_11, VAR_57, VAR_10)
if VAR_20 and not VAR_56:
FUNC_14(VAR_87, VAR_20, VAR_10)
if not VAR_56:
os.unlink(VAR_87)
def FUNC_16(
VAR_10, # VAR_90: Link
VAR_11, # VAR_90: str
VAR_20=None, # VAR_90: Optional[str]
VAR_17=None # VAR_90: Optional[Hashes]
):
VAR_47 = FUNC_5(VAR_10.url_without_fragment)
if FUNC_11(VAR_10):
if os.path.isdir(VAR_11):
rmtree(VAR_11)
shutil.copytree(VAR_47, VAR_11, symlinks=True)
if VAR_20:
VAR_1.info('Link is a directory, ignoring download_dir')
return
if VAR_17:
hashes.check_against_path(VAR_47)
VAR_56 = None
if VAR_20:
VAR_56 = FUNC_19(VAR_10,
VAR_20,
VAR_17)
if VAR_56:
VAR_87 = VAR_56
else:
VAR_87 = VAR_47
VAR_57 = mimetypes.guess_type(VAR_87)[0]
unpack_file(VAR_87, VAR_11, VAR_57, VAR_10)
if VAR_20 and not VAR_56:
FUNC_14(VAR_87, VAR_20, VAR_10)
class CLASS_5(xmlrpc_client.Transport):
def __init__(self, VAR_58, VAR_6, VAR_59=False):
xmlrpc_client.Transport.__init__(self, VAR_59)
VAR_88 = urllib_parse.urlparse(VAR_58)
self._scheme = VAR_88.scheme
self._session = VAR_6
def VAR_33(self, VAR_60, VAR_61, VAR_62, VAR_63=False):
VAR_89 = (self._scheme, VAR_60, VAR_61, None, None, None)
VAR_3 = urllib_parse.urlunparse(VAR_89)
try:
VAR_102 = {'Content-Type': 'text/xml'}
VAR_86 = self._session.post(VAR_3, VAR_23=VAR_62,
VAR_102=headers, VAR_34=True)
VAR_86.raise_for_status()
self.verbose = VAR_63
return self.parse_response(VAR_86.raw)
except requests.HTTPError as exc:
VAR_1.critical(
"HTTP error %s while getting %s",
exc.response.status_code, VAR_3,
)
raise
def FUNC_17(
VAR_10, # VAR_90: Optional[Link]
VAR_11, # VAR_90: Optional[str]
VAR_20=None, # VAR_90: Optional[str]
VAR_21=False, # VAR_90: bool
VAR_6=None, # VAR_90: Optional[CLASS_4]
VAR_17=None, # VAR_90: Optional[Hashes]
VAR_18="on" # VAR_90: str
):
if FUNC_9(VAR_10):
FUNC_7(VAR_10, VAR_11)
elif FUNC_10(VAR_10):
FUNC_16(VAR_10, VAR_11, VAR_20, VAR_17=hashes)
else:
if VAR_6 is None:
VAR_6 = CLASS_4()
FUNC_15(
VAR_10,
VAR_11,
VAR_20,
VAR_6,
VAR_17=hashes,
VAR_18=progress_bar
)
if VAR_21:
write_delete_marker_file(VAR_11)
def FUNC_18(
VAR_10, # VAR_90: Link
VAR_6, # VAR_90: CLASS_4
VAR_22, # VAR_90: str
VAR_17, # VAR_90: Hashes
VAR_18 # VAR_90: str
):
VAR_64 = VAR_10.url.split('#', 1)[0]
try:
VAR_15 = VAR_6.get(
VAR_64,
VAR_102={"Accept-Encoding": "identity"},
VAR_34=True,
)
VAR_15.raise_for_status()
except requests.HTTPError as exc:
VAR_1.critical(
"HTTP error %s while getting %s", exc.response.status_code, VAR_10,
)
raise
VAR_57 = VAR_15.headers.get('content-type', '')
VAR_19 = VAR_10.filename # fallback
VAR_65 = VAR_15.headers.get('content-disposition')
if VAR_65:
VAR_90, VAR_91 = cgi.parse_header(VAR_65)
VAR_19 = VAR_91.get('filename') or VAR_19
VAR_45 = splitext(VAR_19)[1]
if not VAR_45:
VAR_45 = mimetypes.guess_extension(VAR_57)
if VAR_45:
VAR_19 += VAR_45
if not VAR_45 and VAR_10.url != VAR_15.url:
VAR_45 = os.path.splitext(VAR_15.url)[1]
if VAR_45:
VAR_19 += VAR_45
VAR_66 = os.path.join(VAR_22, VAR_19)
with open(VAR_66, 'wb') as VAR_16:
FUNC_13(VAR_15, VAR_10, VAR_16, VAR_17, VAR_18)
return VAR_66, VAR_57
def FUNC_19(VAR_10, VAR_20, VAR_17):
VAR_67 = os.path.join(VAR_20, VAR_10.filename)
if os.path.exists(VAR_67):
VAR_1.info('File was already downloaded %s', VAR_67)
if VAR_17:
try:
VAR_17.check_against_path(VAR_67)
except HashMismatch:
VAR_1.warning(
'Previously-downloaded file %s has bad hash. '
'Re-downloading.',
VAR_67
)
os.unlink(VAR_67)
return None
return VAR_67
return None
| from __future__ import absolute_import
import cgi
import email.utils
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys
from pip._vendor import .requests, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.requests.utils import .get_netrc_auth
from pip._vendor.six.moves import xmlrpc_client # type: ignore
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import .request as urllib_request
from pip._vendor.urllib3.util import IS_PYOPENSSL
import pip
from pip._internal.exceptions import HashMismatch, InstallationError
from pip._internal.locations import write_delete_marker_file
from pip._internal.models.index import PyPI
from pip._internal.utils.encoding import auto_decode
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.glibc import .libc_ver
from pip._internal.utils.misc import (
ARCHIVE_EXTENSIONS, ask, ask_input, ask_password, ask_path_exists,
backup_dir, consume, display_path, format_size, get_installed_version,
path_to_url, remove_auth_from_url, rmtree, split_auth_netloc_from_url,
splitext, unpack_file,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import DownloadProgressProvider
from pip._internal.vcs import vcs
if MYPY_CHECK_RUNNING:
from typing import (
Optional, Tuple, Dict, IO, Text, Union
)
from optparse import Values
from pip._internal.models.link import Link
from pip._internal.utils.hashes import Hashes
from pip._internal.vcs.versioncontrol import AuthInfo, VersionControl
try:
import .ssl # noqa
except ImportError:
VAR_71 = None
VAR_0 = (VAR_71 is not None) or IS_PYOPENSSL
__all__ = ['get_file_content',
'is_url', 'url_to_path', 'path_to_url',
'is_archive_file', 'unpack_vcs_link',
'unpack_file_url', 'is_vcs_url', 'is_file_url',
'unpack_http_url', 'unpack_url',
'parse_content_disposition', 'sanitize_content_filename']
VAR_1 = logging.getLogger(__name__)
try:
import .keyring # noqa
except ImportError:
VAR_72 = None
except Exception as exc:
VAR_1.warning("Keyring is skipped due to an exception: %s",
str(exc))
VAR_72 = None
VAR_2 = (
'BUILD_BUILDID',
'BUILD_ID',
'CI',
'PIP_IS_CI',
)
def FUNC_0():
return any(VAR_9 in os.environ for VAR_9 in VAR_2)
def FUNC_1():
VAR_25 = {
"installer": {"name": "pip", "version": pip.__version__},
"python": platform.python_version(),
"implementation": {
"name": platform.python_implementation(),
},
}
if VAR_25["implementation"]["name"] == 'CPython':
VAR_25["implementation"]["version"] = platform.python_version()
elif VAR_25["implementation"]["name"] == 'PyPy':
if sys.pypy_version_info.releaselevel == 'final':
VAR_104 = sys.pypy_version_info[:3]
else:
VAR_104 = sys.pypy_version_info
VAR_25["implementation"]["version"] = ".".join(
[str(x) for x in VAR_104]
)
elif VAR_25["implementation"]["name"] == 'Jython':
VAR_25["implementation"]["version"] = platform.python_version()
elif VAR_25["implementation"]["name"] == 'IronPython':
VAR_25["implementation"]["version"] = platform.python_version()
if sys.platform.startswith("linux"):
from pip._vendor import distro
VAR_73 = dict(filter(
lambda x: x[1],
zip(["name", "version", "id"], distro.linux_distribution()),
))
VAR_74 = dict(filter(
lambda x: x[1],
zip(["lib", "version"], libc_ver()),
))
if VAR_74:
VAR_73["libc"] = VAR_74
if VAR_73:
VAR_25["distro"] = VAR_73
if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
VAR_25["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
if platform.system():
VAR_25.setdefault("system", {})["name"] = platform.system()
if platform.release():
VAR_25.setdefault("system", {})["release"] = platform.release()
if platform.machine():
VAR_25["cpu"] = platform.machine()
if VAR_0:
VAR_25["openssl_version"] = VAR_71.OPENSSL_VERSION
VAR_26 = get_installed_version("setuptools")
if VAR_26 is not None:
VAR_25["setuptools_version"] = VAR_26
VAR_25["ci"] = True if FUNC_0() else None
VAR_27 = os.environ.get("PIP_USER_AGENT_USER_DATA")
if VAR_27 is not None:
VAR_25["user_data"] = VAR_27
return "{VAR_25[installer][VAR_9]}/{VAR_25[installer][version]} {json}".format(
VAR_25=data,
json=json.dumps(VAR_25, separators=(",", ":"), sort_keys=True),
)
def FUNC_2(VAR_3, VAR_4):
if not VAR_3 or not VAR_72:
return None
try:
try:
VAR_93 = VAR_72.get_credential
except AttributeError:
pass
else:
VAR_1.debug("Getting credentials from VAR_72 for %s", VAR_3)
VAR_94 = VAR_93(VAR_3, VAR_4)
if VAR_94 is not None:
return VAR_94.username, VAR_94.password
return None
if VAR_4:
VAR_1.debug("Getting VAR_76 from VAR_72 for %s", VAR_3)
VAR_76 = VAR_72.get_password(VAR_3, VAR_4)
if VAR_76:
return VAR_4, VAR_76
except Exception as exc:
VAR_1.warning("Keyring is skipped due to an exception: %s",
str(exc))
class CLASS_0(AuthBase):
def __init__(self, VAR_28=True, VAR_29=None):
self.prompting = VAR_28
self.index_urls = VAR_29
self.passwords = {} # type: Dict[str, AuthInfo]
self._credentials_to_save = None # type: Tuple[str, str, str]
def FUNC_22(self, VAR_3):
if not VAR_3 or not self.index_urls:
return None
for u in self.index_urls:
VAR_95 = remove_auth_from_url(u).rstrip("/") + "/"
if VAR_3.startswith(VAR_95):
return u
def FUNC_23(self, VAR_30, VAR_31=True,
VAR_32=True):
VAR_3, VAR_34, VAR_75 = split_auth_netloc_from_url(
VAR_30)
VAR_4, VAR_76 = VAR_75
if VAR_4 is not None and VAR_76 is not None:
VAR_1.debug("Found credentials in VAR_3 for %s", VAR_34)
return VAR_75
VAR_60 = self._get_index_url(VAR_3)
if VAR_60:
VAR_96 = split_auth_netloc_from_url(VAR_60)
if VAR_96:
VAR_60, VAR_45, VAR_105 = VAR_96
VAR_1.debug("Found index VAR_3 %s", VAR_60)
if VAR_60 and VAR_105[0] is not None:
VAR_4, VAR_76 = VAR_105
if VAR_4 is not None and VAR_76 is not None:
VAR_1.debug("Found credentials in index VAR_3 for %s", VAR_34)
return VAR_105
if VAR_31:
VAR_97 = get_netrc_auth(VAR_30)
if VAR_97:
VAR_1.debug("Found credentials in netrc for %s", VAR_34)
return VAR_97
if VAR_32:
VAR_98 = (FUNC_2(VAR_60, VAR_4) or
FUNC_2(VAR_34, VAR_4))
if VAR_98:
VAR_1.debug("Found credentials in VAR_72 for %s", VAR_34)
return VAR_98
return None, None
def FUNC_24(self, VAR_30):
VAR_3, VAR_34, VAR_45 = split_auth_netloc_from_url(VAR_30)
VAR_4, VAR_76 = self.passwords.get(VAR_34, (None, None))
if VAR_4 is None or VAR_76 is None:
VAR_4, VAR_76 = self._get_new_credentials(VAR_30)
if VAR_4 is not None and VAR_76 is not None:
self.passwords[VAR_34] = (VAR_4, VAR_76)
return VAR_3, VAR_4, VAR_76
def __call__(self, VAR_33):
VAR_3, VAR_4, VAR_76 = self._get_url_and_credentials(VAR_33.url)
VAR_33.url = VAR_3
if VAR_4 is not None and VAR_76 is not None:
VAR_33 = HTTPBasicAuth(VAR_4, VAR_76)(VAR_33)
VAR_33.register_hook("response", self.handle_401)
return VAR_33
def FUNC_25(self, VAR_34):
VAR_4 = ask_input("User for %s: " % VAR_34)
if not VAR_4:
return None, None
VAR_77 = FUNC_2(VAR_34, VAR_4)
if VAR_77:
return VAR_77[0], VAR_77[1], False
VAR_76 = ask_password("Password: ")
return VAR_4, VAR_76, True
def FUNC_26(self):
if not VAR_72:
return False
return ask("Save credentials to VAR_72 [y/N]: ", ["y", "n"]) == "y"
def FUNC_27(self, VAR_15, **VAR_14):
if VAR_15.status_code != 401:
return VAR_15
if not self.prompting:
return VAR_15
VAR_78 = urllib_parse.urlparse(VAR_15.url)
VAR_4, VAR_76, VAR_79 = self._prompt_for_password(VAR_78.netloc)
self._credentials_to_save = None
if VAR_4 is not None and VAR_76 is not None:
self.passwords[VAR_78.netloc] = (VAR_4, VAR_76)
if VAR_79 and self._should_save_password_to_keyring():
self._credentials_to_save = (VAR_78.netloc, VAR_4, VAR_76)
VAR_15.content
VAR_15.raw.release_conn()
VAR_33 = HTTPBasicAuth(VAR_4 or "", VAR_76 or "")(VAR_15.request)
VAR_33.register_hook("response", self.warn_on_401)
if self._credentials_to_save:
VAR_33.register_hook("response", self.save_credentials)
VAR_80 = VAR_15.connection.send(VAR_33, **VAR_14)
VAR_80.history.append(VAR_15)
return VAR_80
def FUNC_28(self, VAR_15, **VAR_14):
if VAR_15.status_code == 401:
VAR_1.warning('401 Error, Credentials not correct for %s',
VAR_15.request.url)
def FUNC_29(self, VAR_15, **VAR_14):
assert VAR_72 is not None, "should never reach here without keyring"
if not VAR_72:
return
VAR_81 = self._credentials_to_save
self._credentials_to_save = None
if VAR_81 and VAR_15.status_code < 400:
try:
VAR_1.info('Saving credentials to keyring')
VAR_72.set_password(*VAR_81)
except Exception:
VAR_1.exception('Failed to VAR_79 credentials')
class CLASS_1(BaseAdapter):
def FUNC_30(self, VAR_35, VAR_36=None, VAR_37=None, VAR_38=None, VAR_39=None,
VAR_40=None):
VAR_82 = FUNC_5(VAR_35.url)
VAR_15 = Response()
VAR_15.status_code = 200
VAR_15.url = VAR_35.url
try:
VAR_99 = os.stat(VAR_82)
except OSError as exc:
VAR_15.status_code = 404
VAR_15.raw = exc
else:
VAR_100 = email.utils.formatdate(VAR_99.st_mtime, usegmt=True)
VAR_59 = mimetypes.guess_type(VAR_82)[0] or "text/plain"
VAR_15.headers = CaseInsensitiveDict({
"Content-Type": VAR_59,
"Content-Length": VAR_99.st_size,
"Last-Modified": VAR_100,
})
VAR_15.raw = open(VAR_82, "rb")
VAR_15.close = VAR_15.raw.close
return VAR_15
def FUNC_31(self):
pass
class CLASS_2(FileCache):
def __init__(self, *VAR_13, **VAR_14):
super(CLASS_2, self).__init__(*VAR_13, **VAR_14)
if not check_path_owner(self.directory):
VAR_1.warning(
"The directory '%s' or its parent directory is not owned by "
"the current user and the VAR_84 has been disabled. Please "
"check the permissions and owner of that directory. If "
"executing pip with sudo, you may want sudo's -H flag.",
self.directory,
)
self.directory = None
def FUNC_32(self, *VAR_13, **VAR_14):
if self.directory is None:
return
try:
return super(CLASS_2, self).get(*VAR_13, **VAR_14)
except (LockError, OSError, IOError):
pass
def FUNC_33(self, *VAR_13, **VAR_14):
if self.directory is None:
return
try:
return super(CLASS_2, self).set(*VAR_13, **VAR_14)
except (LockError, OSError, IOError):
pass
def FUNC_34(self, *VAR_13, **VAR_14):
if self.directory is None:
return
try:
return super(CLASS_2, self).delete(*VAR_13, **VAR_14)
except (LockError, OSError, IOError):
pass
class CLASS_3(HTTPAdapter):
def FUNC_35(self, VAR_41, VAR_3, VAR_38, VAR_39):
VAR_41.cert_reqs = 'CERT_NONE'
VAR_41.ca_certs = None
class CLASS_4(requests.Session):
VAR_37 = None # type: Optional[int]
def __init__(self, *VAR_13, **VAR_14):
VAR_83 = VAR_14.pop("retries", 0)
VAR_84 = VAR_14.pop("cache", None)
VAR_85 = VAR_14.pop("insecure_hosts", [])
VAR_29 = VAR_14.pop("index_urls", None)
super(CLASS_4, self).__init__(*VAR_13, **VAR_14)
self.headers["User-Agent"] = FUNC_1()
self.auth = CLASS_0(VAR_29=index_urls)
VAR_83 = urllib3.Retry(
total=VAR_83,
status_forcelist=[500, 503, 520, 527],
backoff_factor=0.25,
)
if VAR_84:
VAR_101 = CacheControlAdapter(
VAR_84=CLASS_2(VAR_84, use_dir_lock=True),
max_retries=VAR_83,
)
else:
VAR_101 = HTTPAdapter(max_retries=VAR_83)
VAR_86 = CLASS_3(max_retries=VAR_83)
self.mount("https://", VAR_101)
self.mount("http://", VAR_86)
self.mount("file://", CLASS_1())
for VAR_62 in VAR_85:
self.mount("https://{}/".format(VAR_62), VAR_86)
def VAR_35(self, VAR_42, VAR_3, *VAR_13, **VAR_14):
VAR_14.setdefault("timeout", self.timeout)
return super(CLASS_4, self).request(VAR_42, VAR_3, *VAR_13, **VAR_14)
def FUNC_3(VAR_3, VAR_5=None, VAR_6=None):
if VAR_6 is None:
raise TypeError(
"get_file_content() missing 1 required keyword argument: 'session'"
)
VAR_43 = VAR_7.search(VAR_3)
if VAR_43:
VAR_44 = VAR_43.group(1).lower()
if (VAR_44 == 'file' and VAR_5 and
VAR_5.startswith('http')):
raise InstallationError(
'Requirements file %s references URL %s, which is local'
% (VAR_5, VAR_3))
if VAR_44 == 'file':
VAR_46 = VAR_3.split(':', 1)[1]
VAR_46 = VAR_46.replace('\\', '/')
VAR_43 = VAR_8.match(VAR_46)
if VAR_43:
VAR_46 = VAR_43.group(1) + ':' + VAR_46.split('|', 1)[1]
VAR_46 = urllib_parse.unquote(VAR_46)
if VAR_46.startswith('/'):
VAR_46 = '/' + VAR_46.lstrip('/')
VAR_3 = VAR_46
else:
VAR_15 = VAR_6.get(VAR_3)
VAR_15.raise_for_status()
return VAR_15.url, VAR_15.text
try:
with open(VAR_3, 'rb') as f:
VAR_102 = auto_decode(f.read())
except IOError as exc:
raise InstallationError(
'Could not open requirements file: %s' % str(exc)
)
return VAR_3, VAR_102
VAR_7 = re.compile(r'^(http|https|file):', re.I)
VAR_8 = re.compile(r'/*([a-z])\|', re.I)
def FUNC_4(VAR_9):
if ':' not in VAR_9:
return False
VAR_44 = VAR_9.split(':', 1)[0].lower()
return VAR_44 in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
def FUNC_5(VAR_3):
assert VAR_3.startswith('file:'), (
"You can only turn file: urls into filenames (not %r)" % VAR_3)
VAR_45, VAR_34, VAR_46, VAR_45, VAR_45 = urllib_parse.urlsplit(VAR_3)
if not VAR_34 or VAR_34 == 'localhost':
VAR_34 = ''
elif sys.platform == 'win32':
VAR_34 = '\\\\' + VAR_34
else:
raise ValueError(
'non-local file URIs are not supported on this platform: %r'
% VAR_3
)
VAR_46 = urllib_request.url2pathname(VAR_34 + VAR_46)
return VAR_46
def FUNC_6(VAR_9):
VAR_47 = splitext(VAR_9)[1].lower()
if VAR_47 in ARCHIVE_EXTENSIONS:
return True
return False
def FUNC_7(VAR_10, VAR_11):
VAR_48 = FUNC_8(VAR_10)
VAR_48.unpack(VAR_11, VAR_3=VAR_10.url)
def FUNC_8(VAR_10):
for VAR_48 in vcs.backends:
if VAR_10.scheme in VAR_48.schemes:
return VAR_48
return None
def FUNC_9(VAR_10):
return bool(FUNC_8(VAR_10))
def FUNC_10(VAR_10):
return VAR_10.url.lower().startswith('file:')
def FUNC_11(VAR_10):
VAR_49 = FUNC_5(VAR_10.url_without_fragment)
return os.path.isdir(VAR_49)
def FUNC_12(VAR_12, *VAR_13, **VAR_14):
return VAR_12
def FUNC_13(
VAR_15, # type: Response
VAR_10, # type: Link
VAR_16, # type: IO
VAR_17, # type: Hashes
VAR_18 # type: str
):
try:
VAR_87 = int(VAR_15.headers['content-length'])
except (ValueError, KeyError, TypeError):
VAR_87 = 0
VAR_50 = getattr(VAR_15, "from_cache", False)
if VAR_1.getEffectiveLevel() > logging.INFO:
VAR_88 = False
elif VAR_50:
VAR_88 = False
elif VAR_87 > (40 * 1000):
VAR_88 = True
elif not VAR_87:
VAR_88 = True
else:
VAR_88 = False
VAR_51 = VAR_10.show_url
def FUNC_37(VAR_52):
try:
for VAR_106 in VAR_15.raw.stream(
VAR_52,
decode_content=False):
yield VAR_106
except AttributeError:
while True:
VAR_106 = VAR_15.raw.read(VAR_52)
if not VAR_106:
break
yield VAR_106
def FUNC_38(VAR_53):
for VAR_106 in VAR_53:
VAR_16.write(VAR_106)
yield VAR_106
VAR_54 = FUNC_12
if VAR_10.netloc == PyPI.netloc:
VAR_3 = VAR_51
else:
VAR_3 = VAR_10.url_without_fragment
if VAR_88: # We don't show progress on cached responses
VAR_54 = DownloadProgressProvider(VAR_18,
max=VAR_87)
if VAR_87:
VAR_1.info("Downloading %s (%s)", VAR_3, format_size(VAR_87))
else:
VAR_1.info("Downloading %s", VAR_3)
elif VAR_50:
VAR_1.info("Using cached %s", VAR_3)
else:
VAR_1.info("Downloading %s", VAR_3)
VAR_1.debug('Downloading from URL %s', VAR_10)
VAR_55 = FUNC_38(
VAR_54(
FUNC_37(CONTENT_CHUNK_SIZE),
CONTENT_CHUNK_SIZE
)
)
if VAR_17:
hashes.check_against_chunks(VAR_55)
else:
consume(VAR_55)
def FUNC_14(VAR_19, VAR_11, VAR_10):
VAR_56 = True
VAR_57 = os.path.join(VAR_11, VAR_10.filename)
if os.path.exists(VAR_57):
VAR_89 = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort' %
display_path(VAR_57), ('i', 'w', 'b', 'a'))
if VAR_89 == 'i':
VAR_56 = False
elif VAR_89 == 'w':
VAR_1.warning('Deleting %s', display_path(VAR_57))
os.remove(VAR_57)
elif VAR_89 == 'b':
VAR_107 = backup_dir(VAR_57)
VAR_1.warning(
'Backing up %s to %s',
display_path(VAR_57),
display_path(VAR_107),
)
shutil.move(VAR_57, VAR_107)
elif VAR_89 == 'a':
sys.exit(-1)
if VAR_56:
shutil.copy(VAR_19, VAR_57)
VAR_1.info('Saved %s', display_path(VAR_57))
def FUNC_15(
VAR_10, # type: Link
VAR_11, # type: str
VAR_20=None, # type: Optional[str]
VAR_6=None, # type: Optional[CLASS_4]
VAR_17=None, # type: Optional[Hashes]
VAR_18="on" # type: str
):
if VAR_6 is None:
raise TypeError(
"unpack_http_url() missing 1 required keyword argument: 'session'"
)
with TempDirectory(kind="unpack") as VAR_24:
VAR_58 = None
if VAR_20:
VAR_58 = FUNC_21(VAR_10,
VAR_20,
VAR_17)
if VAR_58:
VAR_90 = VAR_58
VAR_59 = mimetypes.guess_type(VAR_90)[0]
else:
VAR_90, VAR_59 = FUNC_20(VAR_10,
VAR_6,
VAR_24.path,
VAR_17,
VAR_18)
unpack_file(VAR_90, VAR_11, VAR_59, VAR_10)
if VAR_20 and not VAR_58:
FUNC_14(VAR_90, VAR_20, VAR_10)
if not VAR_58:
os.unlink(VAR_90)
def FUNC_16(
VAR_10, # type: Link
VAR_11, # type: str
VAR_20=None, # type: Optional[str]
VAR_17=None # type: Optional[Hashes]
):
VAR_49 = FUNC_5(VAR_10.url_without_fragment)
if FUNC_11(VAR_10):
if os.path.isdir(VAR_11):
rmtree(VAR_11)
shutil.copytree(VAR_49, VAR_11, symlinks=True)
if VAR_20:
VAR_1.info('Link is a directory, ignoring download_dir')
return
if VAR_17:
hashes.check_against_path(VAR_49)
VAR_58 = None
if VAR_20:
VAR_58 = FUNC_21(VAR_10,
VAR_20,
VAR_17)
if VAR_58:
VAR_90 = VAR_58
else:
VAR_90 = VAR_49
VAR_59 = mimetypes.guess_type(VAR_90)[0]
unpack_file(VAR_90, VAR_11, VAR_59, VAR_10)
if VAR_20 and not VAR_58:
FUNC_14(VAR_90, VAR_20, VAR_10)
class CLASS_5(xmlrpc_client.Transport):
def __init__(self, VAR_60, VAR_6, VAR_61=False):
xmlrpc_client.Transport.__init__(self, VAR_61)
VAR_91 = urllib_parse.urlparse(VAR_60)
self._scheme = VAR_91.scheme
self._session = VAR_6
def VAR_35(self, VAR_62, VAR_63, VAR_64, VAR_65=False):
VAR_92 = (self._scheme, VAR_62, VAR_63, None, None, None)
VAR_3 = urllib_parse.urlunparse(VAR_92)
try:
VAR_103 = {'Content-Type': 'text/xml'}
VAR_89 = self._session.post(VAR_3, VAR_25=VAR_64,
VAR_103=headers, VAR_36=True)
VAR_89.raise_for_status()
self.verbose = VAR_65
return self.parse_response(VAR_89.raw)
except requests.HTTPError as exc:
VAR_1.critical(
"HTTP error %s while getting %s",
exc.response.status_code, VAR_3,
)
raise
def FUNC_17(
VAR_10, # type: Optional[Link]
VAR_11, # type: Optional[str]
VAR_20=None, # type: Optional[str]
VAR_21=False, # type: bool
VAR_6=None, # type: Optional[CLASS_4]
VAR_17=None, # type: Optional[Hashes]
VAR_18="on" # type: str
):
if FUNC_9(VAR_10):
FUNC_7(VAR_10, VAR_11)
elif FUNC_10(VAR_10):
FUNC_16(VAR_10, VAR_11, VAR_20, VAR_17=hashes)
else:
if VAR_6 is None:
VAR_6 = CLASS_4()
FUNC_15(
VAR_10,
VAR_11,
VAR_20,
VAR_6,
VAR_17=hashes,
VAR_18=progress_bar
)
if VAR_21:
write_delete_marker_file(VAR_11)
def FUNC_18(VAR_19):
return os.path.basename(VAR_19)
def FUNC_19(VAR_22, VAR_23):
VAR_66, VAR_67 = cgi.parse_header(VAR_22)
VAR_19 = VAR_67.get('filename')
if VAR_19:
VAR_19 = FUNC_18(VAR_19)
return VAR_19 or VAR_23
def FUNC_20(
VAR_10, # type: Link
VAR_6, # type: CLASS_4
VAR_24, # type: str
VAR_17, # type: Hashes
VAR_18 # type: str
):
VAR_68 = VAR_10.url.split('#', 1)[0]
try:
VAR_15 = VAR_6.get(
VAR_68,
VAR_103={"Accept-Encoding": "identity"},
VAR_36=True,
)
VAR_15.raise_for_status()
except requests.HTTPError as exc:
VAR_1.critical(
"HTTP error %s while getting %s", exc.response.status_code, VAR_10,
)
raise
VAR_59 = VAR_15.headers.get('content-type', '')
VAR_19 = VAR_10.filename # fallback
VAR_22 = VAR_15.headers.get('content-disposition')
if VAR_22:
VAR_19 = FUNC_19(VAR_22, VAR_19)
VAR_47 = splitext(VAR_19)[1]
if not VAR_47:
VAR_47 = mimetypes.guess_extension(VAR_59)
if VAR_47:
VAR_19 += VAR_47
if not VAR_47 and VAR_10.url != VAR_15.url:
VAR_47 = os.path.splitext(VAR_15.url)[1]
if VAR_47:
VAR_19 += VAR_47
VAR_69 = os.path.join(VAR_24, VAR_19)
with open(VAR_69, 'wb') as VAR_16:
FUNC_13(VAR_15, VAR_10, VAR_16, VAR_17, VAR_18)
return VAR_69, VAR_59
def FUNC_21(VAR_10, VAR_20, VAR_17):
VAR_70 = os.path.join(VAR_20, VAR_10.filename)
if os.path.exists(VAR_70):
VAR_1.info('File was already downloaded %s', VAR_70)
if VAR_17:
try:
VAR_17.check_against_path(VAR_70)
except HashMismatch:
VAR_1.warning(
'Previously-downloaded file %s has bad hash. '
'Re-downloading.',
VAR_70
)
os.unlink(VAR_70)
return None
return VAR_70
return None
| [
2,
13,
23,
24,
29,
47,
56,
61,
62,
64,
70,
71,
73,
74,
83,
84,
85,
86,
87,
88,
89,
90,
92,
94,
96,
98,
101,
102,
104,
108,
109,
110,
112,
113,
125,
137,
140,
142,
157,
160,
163,
166,
169,
172,
176,
177,
178,
179,
180,
182,
186,
191,
192,
197,
209,
215,
219,
220,
222,
224,
228,
229,
230,
231,
232,
234,
237,
240,
244,
250,
255,
259,
262,
263,
268,
269,
272,
277,
278,
284,
285,
291,
292,
294,
300,
302,
305,
308,
314,
315,
317,
318,
319,
322,
324,
326,
328,
330,
332,
333,
335,
337,
339,
340,
342,
344,
345,
355,
356,
361,
363,
364,
367,
368,
371,
373,
374,
376,
377,
381,
382,
385,
386,
387,
390,
391,
394,
395,
396,
397,
400,
401,
404,
406,
412,
418,
427,
428,
430,
434,
438,
452,
455,
457,
460,
461,
467,
470,
471,
472,
473,
474,
475,
484,
485,
487,
489,
492,
496,
497,
498,
500,
502,
505,
509,
510,
511,
513,
515,
518,
522,
523,
524,
526,
527,
529,
533,
534,
536,
538,
544,
546,
547,
549,
550,
552,
553,
554,
556,
557,
559,
560,
561,
562,
563,
564,
565,
567,
568,
569,
572,
573,
574,
575,
576,
584,
585,
586,
587,
588,
590,
593,
594,
596,
597,
598,
601,
603,
605,
606,
608,
609,
611,
614,
623,
643,
655,
656,
659,
660,
662,
668,
669,
671,
677,
679,
681,
684,
691,
694,
695,
697,
703,
704,
708,
709,
711,
719,
720,
722,
724,
725,
727,
729,
730,
732,
734,
737,
741,
742,
745,
746,
754,
759,
771,
773,
776,
779,
780,
781,
782,
783,
784,
785,
786,
787,
788,
789,
790,
791,
792,
793,
794,
795,
796,
797,
798,
799,
800,
804,
810,
815,
817,
822,
834,
836,
847,
848,
874,
875,
884,
889,
891,
897,
902,
908,
909,
910,
912,
913,
916,
919,
920,
927,
929,
934,
935,
943,
944,
945,
946,
947,
948,
951,
952,
958,
963,
965,
966,
967,
969,
970,
973,
974,
979,
985,
1002,
1003,
1013,
1022,
1028,
1031,
1032,
1035,
1036,
1040,
1051,
1052,
1060,
1066,
1067,
1068,
1069,
1070,
1071,
1072,
1073,
1074,
1075,
1076,
1077,
1078,
1079,
1080,
1081,
1082,
1083,
1084,
1094,
1097,
1101,
1102,
1117,
1118,
1120,
1126,
1141,
105,
106,
107,
115,
116,
117,
194,
463,
464,
465,
466,
612,
613,
614,
615,
616,
617,
618,
663,
672,
673,
674,
698,
712,
713,
714,
733,
734,
735,
736,
737,
738,
928,
929,
930,
931,
932,
976,
977,
978,
1014,
1015,
1016,
1017,
1018,
1019,
1020,
1021,
1022,
1023,
1024,
1025,
1026,
1027,
1061,
1121,
1122,
1123,
236,
237,
238,
239,
240,
241,
242,
243,
244,
245,
246,
247,
258,
304,
305,
306,
307,
308,
309,
310,
311,
312,
408,
414
] | [
2,
13,
23,
24,
29,
47,
56,
61,
62,
64,
71,
72,
74,
75,
84,
85,
86,
87,
88,
89,
90,
91,
93,
95,
97,
99,
102,
103,
105,
109,
110,
111,
113,
114,
126,
138,
141,
143,
158,
161,
164,
167,
170,
173,
177,
178,
179,
180,
181,
183,
187,
192,
193,
198,
210,
216,
220,
221,
223,
225,
229,
230,
231,
232,
233,
235,
238,
241,
245,
251,
256,
260,
263,
264,
269,
270,
273,
278,
279,
285,
286,
292,
293,
295,
301,
303,
306,
309,
315,
316,
318,
319,
320,
323,
325,
327,
329,
331,
333,
334,
336,
338,
340,
341,
343,
345,
346,
356,
357,
362,
364,
365,
368,
369,
372,
374,
375,
377,
378,
382,
383,
386,
387,
388,
391,
392,
395,
396,
397,
398,
401,
402,
405,
407,
413,
419,
428,
429,
431,
435,
439,
453,
456,
458,
461,
462,
468,
471,
472,
473,
474,
475,
476,
485,
486,
488,
490,
493,
497,
498,
499,
501,
503,
506,
510,
511,
512,
514,
516,
519,
523,
524,
525,
527,
528,
530,
534,
535,
537,
539,
545,
547,
548,
550,
551,
553,
554,
555,
557,
558,
560,
561,
562,
563,
564,
565,
566,
568,
569,
570,
573,
574,
575,
576,
577,
585,
586,
587,
588,
589,
591,
594,
595,
597,
598,
599,
602,
604,
606,
607,
609,
610,
612,
615,
624,
644,
656,
657,
660,
661,
663,
669,
670,
672,
678,
680,
682,
685,
692,
695,
696,
698,
704,
705,
709,
710,
712,
720,
721,
723,
725,
726,
728,
730,
731,
733,
735,
738,
742,
743,
746,
747,
755,
760,
772,
774,
777,
780,
781,
782,
783,
784,
785,
786,
787,
788,
789,
790,
791,
792,
793,
794,
795,
796,
797,
798,
799,
800,
801,
805,
811,
816,
818,
823,
835,
837,
848,
849,
875,
876,
885,
890,
892,
898,
903,
909,
910,
911,
913,
914,
917,
920,
921,
928,
930,
935,
936,
944,
945,
946,
947,
948,
949,
952,
953,
959,
964,
966,
967,
968,
970,
971,
974,
975,
980,
986,
1003,
1004,
1014,
1023,
1029,
1032,
1033,
1036,
1037,
1041,
1052,
1053,
1055,
1060,
1061,
1063,
1071,
1072,
1075,
1076,
1084,
1090,
1091,
1092,
1093,
1094,
1095,
1096,
1097,
1098,
1099,
1100,
1101,
1102,
1103,
1104,
1105,
1106,
1107,
1108,
1118,
1121,
1138,
1139,
1141,
1147,
1162,
106,
107,
108,
116,
117,
118,
195,
464,
465,
466,
467,
613,
614,
615,
616,
617,
618,
619,
664,
673,
674,
675,
699,
713,
714,
715,
734,
735,
736,
737,
738,
739,
929,
930,
931,
932,
933,
977,
978,
979,
1015,
1016,
1017,
1018,
1019,
1020,
1021,
1022,
1023,
1024,
1025,
1026,
1027,
1028,
1056,
1057,
1058,
1064,
1065,
1066,
1067,
1085,
1142,
1143,
1144,
237,
238,
239,
240,
241,
242,
243,
244,
245,
246,
247,
248,
259,
305,
306,
307,
308,
309,
310,
311,
312,
313,
409,
415
] |
0CWE-22
| import logging
import pathlib
from s3file.storages import local_dev, storage
from . import views
logger = logging.getLogger("s3file")
class S3FileMiddleware:
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
file_fields = request.POST.getlist("s3file")
for field_name in file_fields:
paths = request.POST.getlist(field_name)
request.FILES.setlist(field_name, list(self.get_files_from_storage(paths)))
if local_dev and request.path == "/__s3_mock__/":
return views.S3MockView.as_view()(request)
return self.get_response(request)
@staticmethod
def get_files_from_storage(paths):
"""Return S3 file where the name does not include the path."""
for path in paths:
path = pathlib.PurePosixPath(path)
try:
location = storage.aws_location
except AttributeError:
location = storage.location
try:
f = storage.open(str(path.relative_to(location)))
f.name = path.name
yield f
except (OSError, ValueError):
logger.exception("File not found: %s", path)
| import logging
import pathlib
from django.core import signing
from django.core.exceptions import PermissionDenied, SuspiciousFileOperation
from django.utils.crypto import constant_time_compare
from . import views
from .forms import S3FileInputMixin
from .storages import local_dev, storage
logger = logging.getLogger("s3file")
class S3FileMiddleware:
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
file_fields = request.POST.getlist("s3file")
for field_name in file_fields:
paths = request.POST.getlist(field_name)
if paths:
try:
signature = request.POST[f"{field_name}-s3f-signature"]
except KeyError:
raise PermissionDenied("No signature provided.")
try:
request.FILES.setlist(
field_name, list(self.get_files_from_storage(paths, signature))
)
except SuspiciousFileOperation as e:
raise PermissionDenied("Illegal file name!") from e
if local_dev and request.path == "/__s3_mock__/":
return views.S3MockView.as_view()(request)
return self.get_response(request)
@staticmethod
def get_files_from_storage(paths, signature):
"""Return S3 file where the name does not include the path."""
try:
location = storage.aws_location
except AttributeError:
location = storage.location
signer = signing.Signer(
salt=f"{S3FileInputMixin.__module__}.{S3FileInputMixin.__name__}"
)
for path in paths:
path = pathlib.PurePosixPath(path)
print(path)
print(signer.signature(path.parent), signature)
if not constant_time_compare(signer.signature(path.parent), signature):
raise PermissionDenied("Illegal signature!")
try:
relative_path = str(path.relative_to(location))
except ValueError as e:
raise SuspiciousFileOperation(
f"Path is not inside the designated upload location: {path}"
) from e
try:
f = storage.open(relative_path)
f.name = path.name
yield f
except (OSError, ValueError):
logger.exception("File not found: %s", path)
| path_disclosure | {
"code": [
"from s3file.storages import local_dev, storage",
" request.FILES.setlist(field_name, list(self.get_files_from_storage(paths)))",
" def get_files_from_storage(paths):",
" location = storage.aws_location",
" except AttributeError:",
" location = storage.location",
" f = storage.open(str(path.relative_to(location)))"
],
"line_no": [
4,
19,
27,
32,
33,
34,
36
]
} | {
"code": [
"from django.core import signing",
"from django.utils.crypto import constant_time_compare",
"from .storages import local_dev, storage",
" if paths:",
" signature = request.POST[f\"{field_name}-s3f-signature\"]",
" except KeyError:",
" try:",
" request.FILES.setlist(",
" field_name, list(self.get_files_from_storage(paths, signature))",
" )",
" except SuspiciousFileOperation as e:",
" raise PermissionDenied(\"Illegal file name!\") from e",
" def get_files_from_storage(paths, signature):",
" try:",
" location = storage.aws_location",
" except AttributeError:",
" location = storage.location",
" signer = signing.Signer(",
" salt=f\"{S3FileInputMixin.__module__}.{S3FileInputMixin.__name__}\"",
" )",
" print(path)",
" print(signer.signature(path.parent), signature)",
" if not constant_time_compare(signer.signature(path.parent), signature):",
" raise PermissionDenied(\"Illegal signature!\")",
" relative_path = str(path.relative_to(location))",
" except ValueError as e:",
" raise SuspiciousFileOperation(",
" f\"Path is not inside the designated upload location: {path}\"",
" ) from e",
" f = storage.open(relative_path)"
],
"line_no": [
4,
6,
10,
24,
26,
27,
29,
30,
31,
32,
33,
34,
42,
44,
45,
46,
47,
48,
49,
50,
53,
54,
55,
56,
58,
59,
60,
61,
62,
65
]
} | import logging
import .pathlib
from s3file.storages import local_dev, storage
from . import views
VAR_0 = logging.getLogger("s3file")
class CLASS_0:
def __init__(self, VAR_1):
self.get_response = VAR_1
def __call__(self, VAR_2):
VAR_4 = VAR_2.POST.getlist("s3file")
for field_name in VAR_4:
VAR_3 = VAR_2.POST.getlist(field_name)
VAR_2.FILES.setlist(field_name, list(self.get_files_from_storage(VAR_3)))
if local_dev and VAR_2.path == "/__s3_mock__/":
return views.S3MockView.as_view()(VAR_2)
return self.get_response(VAR_2)
@staticmethod
def FUNC_0(VAR_3):
for VAR_5 in VAR_3:
VAR_5 = pathlib.PurePosixPath(VAR_5)
try:
VAR_6 = storage.aws_location
except AttributeError:
VAR_6 = storage.location
try:
VAR_7 = storage.open(str(VAR_5.relative_to(VAR_6)))
VAR_7.name = VAR_5.name
yield VAR_7
except (OSError, ValueError):
VAR_0.exception("File not found: %s", VAR_5)
| import logging
import .pathlib
from django.core import signing
from django.core.exceptions import PermissionDenied, SuspiciousFileOperation
from django.utils.crypto import constant_time_compare
from . import views
from .forms import S3FileInputMixin
from .storages import local_dev, storage
VAR_0 = logging.getLogger("s3file")
class CLASS_0:
def __init__(self, VAR_1):
self.get_response = VAR_1
def __call__(self, VAR_2):
VAR_5 = VAR_2.POST.getlist("s3file")
for field_name in VAR_5:
VAR_3 = VAR_2.POST.getlist(field_name)
if VAR_3:
try:
VAR_4 = VAR_2.POST[f"{field_name}-s3f-signature"]
except KeyError:
raise PermissionDenied("No VAR_4 provided.")
try:
VAR_2.FILES.setlist(
field_name, list(self.get_files_from_storage(VAR_3, VAR_4))
)
except SuspiciousFileOperation as e:
raise PermissionDenied("Illegal file name!") from e
if local_dev and VAR_2.path == "/__s3_mock__/":
return views.S3MockView.as_view()(VAR_2)
return self.get_response(VAR_2)
@staticmethod
def FUNC_0(VAR_3, VAR_4):
try:
VAR_7 = storage.aws_location
except AttributeError:
VAR_7 = storage.location
VAR_6 = signing.Signer(
salt=f"{S3FileInputMixin.__module__}.{S3FileInputMixin.__name__}"
)
for VAR_8 in VAR_3:
VAR_8 = pathlib.PurePosixPath(VAR_8)
print(VAR_8)
print(VAR_6.signature(VAR_8.parent), VAR_4)
if not constant_time_compare(VAR_6.signature(VAR_8.parent), VAR_4):
raise PermissionDenied("Illegal VAR_4!")
try:
VAR_9 = str(VAR_8.relative_to(VAR_7))
except ValueError as e:
raise SuspiciousFileOperation(
f"Path is not inside the designated upload VAR_7: {VAR_8}"
) from e
try:
VAR_10 = storage.open(VAR_9)
VAR_10.name = VAR_8.name
yield VAR_10
except (OSError, ValueError):
VAR_0.exception("File not found: %s", VAR_8)
| [
3,
5,
7,
9,
10,
14,
20,
23,
25,
41,
28
] | [
3,
7,
11,
13,
14,
18,
22,
35,
38,
40,
63,
70,
43
] |
0CWE-22
| """``chameleon.tales`` expressions."""
import warnings
from ast import NodeTransformer
from ast import parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
_marker = object()
zope2_exceptions = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def static(obj):
return Static(template("obj", obj=Symbol(obj), mode="eval"))
class BoboAwareZopeTraverse:
traverse_method = 'restrictedTraverse'
__slots__ = ()
@classmethod
def traverse(cls, base, request, path_items):
"""See ``zope.app.pagetemplate.engine``."""
path_items = list(path_items)
path_items.reverse()
while path_items:
name = path_items.pop()
if name == '_':
warnings.warn('Traversing to the name `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
elif name.startswith('_'):
raise NotFound(name)
if ITraversable.providedBy(base):
base = getattr(base, cls.traverse_method)(name)
else:
base = traversePathElement(base, name, path_items,
request=request)
return base
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
if path_items:
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
callable(base):
base = render(base, econtext)
return base
class TrustedBoboAwareZopeTraverse(BoboAwareZopeTraverse):
traverse_method = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
isinstance(base, type):
return base()
return base
class PathExpr(expressions.PathExpr):
exceptions = zope2_exceptions
traverser = Static(template(
"cls()", cls=Symbol(BoboAwareZopeTraverse), mode="eval"
))
class TrustedPathExpr(PathExpr):
traverser = Static(template(
"cls()", cls=Symbol(TrustedBoboAwareZopeTraverse), mode="eval"
))
class NocallExpr(expressions.NocallExpr, PathExpr):
pass
class ExistsExpr(expressions.ExistsExpr):
exceptions = zope2_exceptions
class RestrictionTransform(NodeTransformer):
secured = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def visit_Name(self, node):
value = self.secured.get(node.id)
if value is not None:
return Symbol(value)
return node
class UntrustedPythonExpr(expressions.PythonExpr):
restricted_python_transformer = RestrictingNodeTransformer()
page_templates_expression_transformer = RestrictionTransform()
# Make copy of parent expression builtins
builtins = expressions.PythonExpr.builtins.copy()
# Update builtins with Restricted Python utility builtins
builtins.update({
name: static(builtin) for (name, builtin) in utility_builtins.items()
})
def parse(self, string):
encoded = string.encode('utf-8')
node = parse(encoded, mode='eval')
# Run Node Transformation from RestrictedPython:
self.restricted_python_transformer.visit(node)
# Run PageTemplate.expression RestrictedPython Transform:
self.page_templates_expression_transformer.visit(node)
return node
# Whether an engine is Zope aware does not depend on the class
# but how it is configured - especially, that is uses a Zope aware
# `PathExpr` implementation.
# Nevertheless, we mark the class as "Zope aware" for simplicity
# assuming that users of the class use a proper `PathExpr`
@implementer(IZopeAwareEngine)
class ChameleonEngine(ExpressionEngine):
"""Expression engine for ``chameleon.tales``.
Only partially implemented: its ``compile`` is currently unusable
"""
def compile(self, expression):
raise NotImplementedError()
types = dict(
python=UntrustedPythonExpr,
string=StringExpr,
not_=NotExpr,
exists=ExistsExpr,
path=PathExpr,
provider=expressions.ProviderExpr,
nocall=NocallExpr)
def createChameleonEngine(types=types, untrusted=True, **overrides):
e = ChameleonEngine()
def norm(k):
return k[:-1] if k.endswith("_") else k
e.untrusted = untrusted
ts = e.types
for k, v in types.items():
k = norm(k)
e.registerType(k, v)
for k, v in overrides.items():
k = norm(k)
if k in ts:
del ts[k]
e.registerType(k, v)
return e
def createTrustedChameleonEngine(**overrides):
ovr = dict(python=expressions.PythonExpr, path=TrustedPathExpr)
ovr.update(overrides)
return createChameleonEngine(untrusted=False, **ovr)
_engine = createChameleonEngine()
def getEngine():
return _engine
_trusted_engine = createTrustedChameleonEngine()
def getTrustedEngine():
return _trusted_engine
| """``chameleon.tales`` expressions."""
import warnings
from ast import NodeTransformer
from ast import parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.SecurityManagement import getSecurityManager
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
_marker = object()
zope2_exceptions = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def static(obj):
return Static(template("obj", obj=Symbol(obj), mode="eval"))
class BoboAwareZopeTraverse:
traverse_method = 'restrictedTraverse'
__slots__ = ()
@classmethod
def traverse(cls, base, request, path_items):
"""See ``zope.app.pagetemplate.engine``."""
validate = getSecurityManager().validate
path_items = list(path_items)
path_items.reverse()
while path_items:
name = path_items.pop()
if ITraversable.providedBy(base):
base = getattr(base, cls.traverse_method)(name)
else:
found = traversePathElement(base, name, path_items,
request=request)
# If traverse_method is something other than
# ``restrictedTraverse`` then traversal is assumed to be
# unrestricted. This emulates ``unrestrictedTraverse``
if cls.traverse_method != 'restrictedTraverse':
base = found
continue
# Special backwards compatibility exception for the name ``_``,
# which was often used for translation message factories.
# Allow and continue traversal.
if name == '_':
warnings.warn('Traversing to the name `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
base = found
continue
# All other names starting with ``_`` are disallowed.
# This emulates what restrictedTraverse does.
if name.startswith('_'):
raise NotFound(name)
# traversePathElement doesn't apply any Zope security policy,
# so we validate access explicitly here.
try:
validate(base, base, name, found)
base = found
except Unauthorized:
# Convert Unauthorized to prevent information disclosures
raise NotFound(name)
return base
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
if path_items:
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
callable(base):
base = render(base, econtext)
return base
class TrustedBoboAwareZopeTraverse(BoboAwareZopeTraverse):
traverse_method = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
isinstance(base, type):
return base()
return base
class PathExpr(expressions.PathExpr):
exceptions = zope2_exceptions
traverser = Static(template(
"cls()", cls=Symbol(BoboAwareZopeTraverse), mode="eval"
))
class TrustedPathExpr(PathExpr):
traverser = Static(template(
"cls()", cls=Symbol(TrustedBoboAwareZopeTraverse), mode="eval"
))
class NocallExpr(expressions.NocallExpr, PathExpr):
pass
class ExistsExpr(expressions.ExistsExpr):
exceptions = zope2_exceptions
class RestrictionTransform(NodeTransformer):
secured = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def visit_Name(self, node):
value = self.secured.get(node.id)
if value is not None:
return Symbol(value)
return node
class UntrustedPythonExpr(expressions.PythonExpr):
restricted_python_transformer = RestrictingNodeTransformer()
page_templates_expression_transformer = RestrictionTransform()
# Make copy of parent expression builtins
builtins = expressions.PythonExpr.builtins.copy()
# Update builtins with Restricted Python utility builtins
builtins.update({
name: static(builtin) for (name, builtin) in utility_builtins.items()
})
def parse(self, string):
encoded = string.encode('utf-8')
node = parse(encoded, mode='eval')
# Run Node Transformation from RestrictedPython:
self.restricted_python_transformer.visit(node)
# Run PageTemplate.expression RestrictedPython Transform:
self.page_templates_expression_transformer.visit(node)
return node
# Whether an engine is Zope aware does not depend on the class
# but how it is configured - especially, that is uses a Zope aware
# `PathExpr` implementation.
# Nevertheless, we mark the class as "Zope aware" for simplicity
# assuming that users of the class use a proper `PathExpr`
@implementer(IZopeAwareEngine)
class ChameleonEngine(ExpressionEngine):
"""Expression engine for ``chameleon.tales``.
Only partially implemented: its ``compile`` is currently unusable
"""
def compile(self, expression):
raise NotImplementedError()
types = dict(
python=UntrustedPythonExpr,
string=StringExpr,
not_=NotExpr,
exists=ExistsExpr,
path=PathExpr,
provider=expressions.ProviderExpr,
nocall=NocallExpr)
def createChameleonEngine(types=types, untrusted=True, **overrides):
e = ChameleonEngine()
def norm(k):
return k[:-1] if k.endswith("_") else k
e.untrusted = untrusted
ts = e.types
for k, v in types.items():
k = norm(k)
e.registerType(k, v)
for k, v in overrides.items():
k = norm(k)
if k in ts:
del ts[k]
e.registerType(k, v)
return e
def createTrustedChameleonEngine(**overrides):
ovr = dict(python=expressions.PythonExpr, path=TrustedPathExpr)
ovr.update(overrides)
return createChameleonEngine(untrusted=False, **ovr)
_engine = createChameleonEngine()
def getEngine():
return _engine
_trusted_engine = createTrustedChameleonEngine()
def getTrustedEngine():
return _trusted_engine
| path_disclosure | {
"code": [
" if name == '_':",
" warnings.warn('Traversing to the name `_` is deprecated '",
" 'and will be removed in Zope 6.',",
" DeprecationWarning)",
" elif name.startswith('_'):",
" raise NotFound(name)",
" base = traversePathElement(base, name, path_items,",
" request=request)"
],
"line_no": [
66,
67,
68,
69,
70,
71,
76,
77
]
} | {
"code": [
"from AccessControl.SecurityManagement import getSecurityManager",
" validate = getSecurityManager().validate",
" found = traversePathElement(base, name, path_items,",
" if cls.traverse_method != 'restrictedTraverse':",
" continue",
" if name == '_':",
" warnings.warn('Traversing to the name `_` is deprecated '",
" DeprecationWarning)",
" base = found",
" if name.startswith('_'):",
" raise NotFound(name)",
" try:",
" base = found",
" raise NotFound(name)"
],
"line_no": [
13,
61,
71,
77,
79,
84,
85,
87,
88,
93,
94,
98,
100,
103
]
} |
import warnings
from ast import NodeTransformer
from ast import .parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import .expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import .traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
VAR_0 = object()
VAR_1 = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def FUNC_0(VAR_2):
return Static(template("obj", VAR_2=Symbol(VAR_2), mode="eval"))
class CLASS_0:
VAR_8 = 'restrictedTraverse'
__slots__ = ()
@classmethod
def FUNC_5(VAR_9, VAR_10, VAR_11, VAR_12):
VAR_12 = list(VAR_12)
path_items.reverse()
while VAR_12:
VAR_30 = VAR_12.pop()
if VAR_30 == '_':
warnings.warn('Traversing to the VAR_30 `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
elif VAR_30.startswith('_'):
raise NotFound(VAR_30)
if ITraversable.providedBy(VAR_10):
VAR_10 = getattr(VAR_10, VAR_9.traverse_method)(VAR_30)
else:
VAR_10 = traversePathElement(VAR_10, VAR_30, VAR_12,
VAR_11=request)
return VAR_10
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
if VAR_12:
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
callable(VAR_10):
VAR_10 = render(VAR_10, VAR_13)
return VAR_10
class CLASS_1(CLASS_0):
VAR_8 = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
isinstance(VAR_10, type):
return VAR_10()
return VAR_10
class CLASS_2(expressions.PathExpr):
VAR_15 = VAR_1
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_0), mode="eval"
))
class CLASS_3(CLASS_2):
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_1), mode="eval"
))
class CLASS_4(expressions.NocallExpr, CLASS_2):
pass
class CLASS_5(expressions.ExistsExpr):
VAR_15 = VAR_1
class CLASS_6(NodeTransformer):
VAR_17 = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def FUNC_6(self, VAR_18):
VAR_28 = self.secured.get(VAR_18.id)
if VAR_28 is not None:
return Symbol(VAR_28)
return VAR_18
class CLASS_7(expressions.PythonExpr):
VAR_19 = RestrictingNodeTransformer()
VAR_20 = CLASS_6()
VAR_21 = expressions.PythonExpr.builtins.copy()
builtins.update({
VAR_30: FUNC_0(builtin) for (VAR_30, builtin) in utility_builtins.items()
})
def FUNC_7(self, VAR_22):
VAR_29 = VAR_22.encode('utf-8')
VAR_18 = FUNC_7(VAR_29, mode='eval')
self.restricted_python_transformer.visit(VAR_18)
self.page_templates_expression_transformer.visit(VAR_18)
return VAR_18
@implementer(IZopeAwareEngine)
class CLASS_8(ExpressionEngine):
def FUNC_8(self, VAR_23):
raise NotImplementedError()
VAR_3 = dict(
python=CLASS_7,
VAR_22=StringExpr,
not_=NotExpr,
exists=CLASS_5,
path=CLASS_2,
provider=expressions.ProviderExpr,
nocall=CLASS_4)
def FUNC_1(VAR_3=types, VAR_4=True, **VAR_5):
VAR_24 = CLASS_8()
def FUNC_9(VAR_25):
return VAR_25[:-1] if VAR_25.endswith("_") else VAR_25
VAR_24.untrusted = VAR_4
VAR_26 = VAR_24.types
for VAR_25, v in VAR_3.items():
VAR_25 = FUNC_9(VAR_25)
VAR_24.registerType(VAR_25, v)
for VAR_25, v in VAR_5.items():
VAR_25 = FUNC_9(VAR_25)
if VAR_25 in VAR_26:
del VAR_26[VAR_25]
VAR_24.registerType(VAR_25, v)
return VAR_24
def FUNC_2(**VAR_5):
VAR_27 = dict(python=expressions.PythonExpr, path=CLASS_3)
VAR_27.update(VAR_5)
return FUNC_1(VAR_4=False, **VAR_27)
VAR_6 = FUNC_1()
def FUNC_3():
return VAR_6
VAR_7 = FUNC_2()
def FUNC_4():
return VAR_7
|
import warnings
from ast import NodeTransformer
from ast import .parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.SecurityManagement import getSecurityManager
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import .expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import .traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
VAR_0 = object()
VAR_1 = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def FUNC_0(VAR_2):
return Static(template("obj", VAR_2=Symbol(VAR_2), mode="eval"))
class CLASS_0:
VAR_8 = 'restrictedTraverse'
__slots__ = ()
@classmethod
def FUNC_5(VAR_9, VAR_10, VAR_11, VAR_12):
VAR_28 = getSecurityManager().validate
VAR_12 = list(VAR_12)
path_items.reverse()
while VAR_12:
VAR_31 = VAR_12.pop()
if ITraversable.providedBy(VAR_10):
base = getattr(VAR_10, VAR_9.traverse_method)(VAR_31)
else:
VAR_32 = traversePathElement(VAR_10, VAR_31, VAR_12,
VAR_11=request)
if VAR_9.traverse_method != 'restrictedTraverse':
VAR_10 = VAR_32
continue
if VAR_31 == '_':
warnings.warn('Traversing to the VAR_31 `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
VAR_10 = VAR_32
continue
if VAR_31.startswith('_'):
raise NotFound(VAR_31)
try:
VAR_28(VAR_10, VAR_10, VAR_31, VAR_32)
VAR_10 = VAR_32
except Unauthorized:
raise NotFound(VAR_31)
return VAR_10
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
if VAR_12:
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
callable(VAR_10):
VAR_10 = render(VAR_10, VAR_13)
return VAR_10
class CLASS_1(CLASS_0):
VAR_8 = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
isinstance(VAR_10, type):
return VAR_10()
return VAR_10
class CLASS_2(expressions.PathExpr):
VAR_15 = VAR_1
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_0), mode="eval"
))
class CLASS_3(CLASS_2):
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_1), mode="eval"
))
class CLASS_4(expressions.NocallExpr, CLASS_2):
pass
class CLASS_5(expressions.ExistsExpr):
VAR_15 = VAR_1
class CLASS_6(NodeTransformer):
VAR_17 = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def FUNC_6(self, VAR_18):
VAR_29 = self.secured.get(VAR_18.id)
if VAR_29 is not None:
return Symbol(VAR_29)
return VAR_18
class CLASS_7(expressions.PythonExpr):
VAR_19 = RestrictingNodeTransformer()
VAR_20 = CLASS_6()
VAR_21 = expressions.PythonExpr.builtins.copy()
builtins.update({
VAR_31: FUNC_0(builtin) for (VAR_31, builtin) in utility_builtins.items()
})
def FUNC_7(self, VAR_22):
VAR_30 = VAR_22.encode('utf-8')
VAR_18 = FUNC_7(VAR_30, mode='eval')
self.restricted_python_transformer.visit(VAR_18)
self.page_templates_expression_transformer.visit(VAR_18)
return VAR_18
@implementer(IZopeAwareEngine)
class CLASS_8(ExpressionEngine):
def FUNC_8(self, VAR_23):
raise NotImplementedError()
VAR_3 = dict(
python=CLASS_7,
VAR_22=StringExpr,
not_=NotExpr,
exists=CLASS_5,
path=CLASS_2,
provider=expressions.ProviderExpr,
nocall=CLASS_4)
def FUNC_1(VAR_3=types, VAR_4=True, **VAR_5):
VAR_24 = CLASS_8()
def FUNC_9(VAR_25):
return VAR_25[:-1] if VAR_25.endswith("_") else VAR_25
VAR_24.untrusted = VAR_4
VAR_26 = VAR_24.types
for VAR_25, v in VAR_3.items():
VAR_25 = FUNC_9(VAR_25)
VAR_24.registerType(VAR_25, v)
for VAR_25, v in VAR_5.items():
VAR_25 = FUNC_9(VAR_25)
if VAR_25 in VAR_26:
del VAR_26[VAR_25]
VAR_24.registerType(VAR_25, v)
return VAR_24
def FUNC_2(**VAR_5):
VAR_27 = dict(python=expressions.PythonExpr, path=CLASS_3)
VAR_27.update(VAR_5)
return FUNC_1(VAR_4=False, **VAR_27)
VAR_6 = FUNC_1()
def FUNC_3():
return VAR_6
VAR_7 = FUNC_2()
def FUNC_4():
return VAR_7
| [
2,
6,
12,
28,
31,
32,
34,
45,
46,
49,
50,
53,
55,
59,
62,
65,
72,
78,
80,
83,
86,
89,
93,
95,
96,
99,
101,
104,
106,
109,
113,
115,
116,
119,
123,
124,
129,
130,
133,
134,
137,
138,
147,
152,
154,
155,
159,
160,
162,
163,
167,
171,
172,
174,
175,
177,
179,
180,
181,
182,
183,
184,
185,
189,
194,
195,
204,
205,
208,
211,
223,
224,
229,
230,
232,
233,
236,
237,
239,
240,
243,
1,
188,
189,
190,
191,
58
] | [
2,
6,
12,
29,
32,
33,
35,
46,
47,
50,
51,
54,
56,
60,
64,
67,
73,
74,
75,
76,
80,
81,
82,
83,
90,
91,
92,
95,
96,
97,
102,
104,
106,
109,
112,
115,
119,
121,
122,
125,
127,
130,
132,
135,
139,
141,
142,
145,
149,
150,
155,
156,
159,
160,
163,
164,
173,
178,
180,
181,
185,
186,
188,
189,
193,
197,
198,
200,
201,
203,
205,
206,
207,
208,
209,
210,
211,
215,
220,
221,
230,
231,
234,
237,
249,
250,
255,
256,
258,
259,
262,
263,
265,
266,
269,
1,
214,
215,
216,
217,
59
] |
0CWE-22
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
RESTful Filesystem access using HTTP
------------------------------------
This controller and helper classes exposes parts or all of the server's
filesystem. Means to retrieve and delete files are provided as well as the
ability to list folder contents.
The generated responses are returned as JSON data with appropriate HTTP headers.
Output will be compressed using gzip most of the times.
Example calls using curl
++++++++++++++++++++++++
The following examples assume that the FileController instance is accessible
as '/file' on 'localhost', port 18888 (http://localhost:18888/file).
Fetch list of files and folders in root folder:
curl --noproxy localhost -iv http://localhost:18888/file
Fetch example file 'example.txt'
curl --noproxy localhost -iv http://localhost:18888/file/example.txt
Fetch gzipped example file 'example.txt'
curl --compressed -H "Accept-Encoding: gzip" --noproxy localhost -iv http://localhost:18888/file/example.txt
Delete example file 'example.txt'
curl --noproxy localhost -iv -X DELETE http://localhost:18888/file/example.txt
"""
import os
import json
import glob
import re
import urlparse
import twisted.web.static
from twisted.web import http
import file
MANY_SLASHES_PATTERN = r'[\/]+'
MANY_SLASHES_REGEX = re.compile(MANY_SLASHES_PATTERN)
#: default path from which files will be served
DEFAULT_ROOT_PATH = os.path.abspath(os.path.dirname(__file__))
#: CORS - HTTP headers the client may use
CORS_ALLOWED_CLIENT_HEADERS = [
'Content-Type',
]
#: CORS - HTTP methods the client may use
CORS_ALLOWED_METHODS_DEFAULT = ['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS']
#: CORS - default origin header value
CORS_DEFAULT_ALLOW_ORIGIN = '*'
#: CORS - HTTP headers the server will send as part of OPTIONS response
CORS_DEFAULT = {
'Access-Control-Allow-Origin': CORS_DEFAULT_ALLOW_ORIGIN,
'Access-Control-Allow-Credentials': 'true',
'Access-Control-Max-Age': '86400',
'Access-Control-Allow-Methods': ','.join(CORS_ALLOWED_METHODS_DEFAULT),
'Access-Control-Allow-Headers': ', '.join(CORS_ALLOWED_CLIENT_HEADERS)
}
#: paths where file delete operations shall be allowed
DELETE_WHITELIST = [
'/media',
]
class FileController(twisted.web.resource.Resource):
isLeaf = True
_override_args = (
'resource_prefix', 'root', 'do_delete', 'delete_whitelist')
_resource_prefix = '/file'
_root = os.path.abspath(os.path.dirname(__file__))
_do_delete = False
_delete_whitelist = DELETE_WHITELIST
never_gzip_extensions = ('.ts',)
def __init__(self, *args, **kwargs):
"""
Default Constructor.
Args:
resource_prefix: Prefix value for this controller instance.
Default is :py:data:`FileController._resource_prefix`
root: Root path of files to be served.
Default is the path where the current file is located
do_delete: Try to actually delete files?
Default is False.
delete_whitelist: Folder prefixes where delete operations are
allowed _at all_. Default is :py:data:`DELETE_WHITELIST`
"""
if args:
for key, value in zip(self._override_args, args):
kwargs[key] = value
for arg_name in self._override_args:
if kwargs.get(arg_name) is not None:
attr_name = '_{:s}'.format(arg_name)
setattr(self, attr_name, kwargs.get(arg_name))
self.session = kwargs.get("session")
def _json_response(self, request, data):
"""
Create a JSON representation for *data* and set HTTP headers indicating
that JSON encoded data is returned.
Args:
request (twisted.web.server.Request): HTTP request object
data: response content
Returns:
JSON representation of *data* with appropriate HTTP headers
"""
request.setHeader("content-type", "application/json; charset=utf-8")
return json.dumps(data, indent=2)
def get_response_data_template(self, request):
"""
Generate a response data :class:`dict` containing default values and
some request attribute values for debugging purposes.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
(dict) response template data
"""
file_path = None
if request.path.startswith(self._resource_prefix):
file_path = request.path[len(self._resource_prefix):]
response_data = {
"_request": {
"path": request.path,
"uri": request.uri,
"method": request.method,
"postpath": request.postpath,
"file_path": file_path,
},
"result": False,
}
return response_data
def error_response(self, request, response_code=None, **kwargs):
"""
Create and return an HTTP error response with data as JSON.
Args:
request (twisted.web.server.Request): HTTP request object
response_code: HTTP Status Code (default is 500)
**kwargs: additional key/value pairs
Returns:
JSON encoded data with appropriate HTTP headers
"""
if response_code is None:
response_code = http.INTERNAL_SERVER_ERROR
response_data = self.get_response_data_template(request)
response_data.update(**kwargs)
response_data['me'] = dict()
for arg_name in self._override_args:
attr_name = '_{:s}'.format(arg_name)
response_data['me'][attr_name] = getattr(self, attr_name)
request.setResponseCode(response_code)
return self._json_response(request, response_data)
def _existing_path_or_bust(self, request):
"""
Verify that a filesystem location which is contained in *request.path*
is valid and an existing path.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
path
Raises:
ValueError: If contained path value is invalid.
IOError: If contained path value is not existing.
"""
rq_path = urlparse.unquote(request.path)
if not rq_path.startswith(self._resource_prefix):
raise ValueError("Invalid Request Path {!r}".format(request.path))
file_path = os.path.join(
self._root, rq_path[len(self._resource_prefix) + 1:])
file_path = re.sub(MANY_SLASHES_REGEX, '/', file_path)
if not os.path.exists(file_path):
raise IOError("Not Found {!r}".format(file_path))
return file_path
def render_OPTIONS(self, request):
"""
Render response for an HTTP OPTIONS request.
Example request
curl -iv --noproxy localhost http://localhost:18888/file
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
for key in CORS_DEFAULT:
request.setHeader(key, CORS_DEFAULT[key])
return ''
def render_legacy(self, request):
"""
Render response for an HTTP GET request. In order to maintain
backward compatibility this method emulates the behaviour of the
legacy method implementation.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
return file.FileController().render(request)
def _glob(self, path, pattern='*'):
if path == '/':
glob_me = '/' + pattern
else:
glob_me = '/'.join((path, pattern))
return glob.iglob(glob_me)
def _walk(self, path):
for root, dirs, files in os.walk(path):
for dir_item in dirs:
yield os.path.join(root, dir_item)
for file_item in files:
yield os.path.join(root, file_item)
def render_path_listing(self, request, path):
"""
Generate a file/folder listing of *path*'s contents.
Args:
request (twisted.web.server.Request): HTTP request object
path: folder location
Returns:
HTTP response with headers
"""
response_data = self.get_response_data_template(request)
response_data.update(
{
'result': True,
'dirs': [],
'files': [],
}
)
generator = None
if "pattern" in request.args:
generator = self._glob(path, request.args["pattern"][0])
if "recursive" in request.args:
generator = self._walk(path)
if generator is None:
generator = self._glob(path)
for item in generator:
if os.path.isdir(item):
response_data['dirs'].append(item)
else:
response_data['files'].append(item)
return self._json_response(request, response_data)
def render_file(self, request, path):
"""
Return the contents of file *path*.
Args:
request (twisted.web.server.Request): HTTP request object
path: file path
Returns:
HTTP response with headers
"""
(_, ext) = os.path.splitext(path)
if ext in self.never_gzip_extensions:
# hack: remove gzip from the list of supported encodings
acceptHeaders = request.requestHeaders.getRawHeaders(
'accept-encoding', [])
supported = ','.join(acceptHeaders).split(',')
request.requestHeaders.setRawHeaders(
'accept-encoding', list(set(supported) - {'gzip'}))
result = twisted.web.static.File(
path, defaultType="application/octet-stream")
return result.render(request)
def render_GET(self, request):
"""
HTTP GET request handler returning
* legacy response if the query *file* or *dir* parameter is set
* file contents if *request.path* contains a file path
* directory listing if *request.path* contains a folder path
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
attic_args = {'file', 'dir'}
if len(attic_args & set(request.args.keys())) >= 1:
return self.render_legacy(request)
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
try:
target_path = self._existing_path_or_bust(request)
except ValueError as vexc:
return self.error_response(
request, response_code=http.BAD_REQUEST, message=vexc.message)
except IOError as iexc:
return self.error_response(
request, response_code=http.NOT_FOUND, message=iexc.message)
if os.path.isdir(target_path):
return self.render_path_listing(request, target_path)
else:
return self.render_file(request, target_path)
def render_POST(self, request):
"""
HTTP POST request handler (currently NOT implemented).
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
return self.error_response(request, response_code=http.NOT_IMPLEMENTED)
def render_PUT(self, request):
"""
HTTP PUT request handler (currently NOT implemented).
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
return self.error_response(request, response_code=http.NOT_IMPLEMENTED)
def render_DELETE(self, request):
"""
HTTP DELETE request handler which may try to delete a file if its
path's prefix is in :py:data:`FileController._delete_whitelist` and
:py:data:`FileController._do_delete` is True.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
try:
target_path = self._existing_path_or_bust(request)
except ValueError as vexc:
return self.error_response(
request, response_code=http.BAD_REQUEST, message=vexc.message)
except IOError as iexc:
return self.error_response(
request, response_code=http.NOT_FOUND, message=iexc.message)
if os.path.isdir(target_path):
return self.error_response(
request, response_code=http.NOT_IMPLEMENTED,
message='Will not remove folder {!r}'.format(target_path))
for prefix in self._delete_whitelist:
if not target_path.startswith(os.path.abspath(prefix)):
return self.error_response(request,
response_code=http.FORBIDDEN)
response_data = self.get_response_data_template(request)
try:
response_data['result'] = True
if self._do_delete:
os.unlink(target_path)
message = 'Removed {!r}'.format(target_path)
else:
message = 'WOULD remove {!r}'.format(target_path)
response_data['message'] = message
except Exception as eexc:
response_data['message'] = 'Cannot remove {!r}: {!s}'.format(
target_path, eexc.message)
request.setResponseCode(http.INTERNAL_SERVER_ERROR)
return self._json_response(request, response_data)
if __name__ == '__main__':
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.server import Site, GzipEncoderFactory
from twisted.internet import reactor
# standard factory example
factory_s = Site(FileController(DEFAULT_ROOT_PATH))
# experimental factory
root = Resource()
root.putChild("/", FileController)
root.putChild("/file", FileController)
factory_r = Site(root)
# experimental factory: enable gzip compression
wrapped = EncodingResourceWrapper(
FileController(
root=DEFAULT_ROOT_PATH,
# DANGER, WILL ROBINSON! These values allow deletion of ALL files!
do_delete=True, delete_whitelist=[]
),
[GzipEncoderFactory()])
factory_s_gz = Site(wrapped)
reactor.listenTCP(18888, factory_s_gz)
reactor.run()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
RESTful Filesystem access using HTTP
------------------------------------
This controller and helper classes exposes parts or all of the server's
filesystem. Means to retrieve and delete files are provided as well as the
ability to list folder contents.
The generated responses are returned as JSON data with appropriate HTTP headers.
Output will be compressed using gzip most of the times.
Example calls using curl
++++++++++++++++++++++++
The following examples assume that the FileController instance is accessible
as '/file' on 'localhost', port 18888 (http://localhost:18888/file).
Fetch list of files and folders in root folder:
curl --noproxy localhost -iv http://localhost:18888/file
Fetch example file 'example.txt'
curl --noproxy localhost -iv http://localhost:18888/file/example.txt
Fetch gzipped example file 'example.txt'
curl --compressed -H "Accept-Encoding: gzip" --noproxy localhost -iv http://localhost:18888/file/example.txt
Delete example file 'example.txt'
curl --noproxy localhost -iv -X DELETE http://localhost:18888/file/example.txt
"""
import os
import json
import glob
import re
import urlparse
import twisted.web.static
from twisted.web import http
from utilities import MANY_SLASHES_REGEX
import file
#: default path from which files will be served
DEFAULT_ROOT_PATH = os.path.abspath(os.path.dirname(__file__))
#: CORS - HTTP headers the client may use
CORS_ALLOWED_CLIENT_HEADERS = [
'Content-Type',
]
#: CORS - HTTP methods the client may use
CORS_ALLOWED_METHODS_DEFAULT = ['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS']
#: CORS - default origin header value
CORS_DEFAULT_ALLOW_ORIGIN = '*'
#: CORS - HTTP headers the server will send as part of OPTIONS response
CORS_DEFAULT = {
'Access-Control-Allow-Origin': CORS_DEFAULT_ALLOW_ORIGIN,
'Access-Control-Allow-Credentials': 'true',
'Access-Control-Max-Age': '86400',
'Access-Control-Allow-Methods': ','.join(CORS_ALLOWED_METHODS_DEFAULT),
'Access-Control-Allow-Headers': ', '.join(CORS_ALLOWED_CLIENT_HEADERS)
}
#: paths where file delete operations shall be allowed
DELETE_WHITELIST = [
'/media',
]
class FileController(twisted.web.resource.Resource):
isLeaf = True
_override_args = (
'resource_prefix', 'root', 'do_delete', 'delete_whitelist')
_resource_prefix = '/file'
_root = os.path.abspath(os.path.dirname(__file__))
_do_delete = False
_delete_whitelist = DELETE_WHITELIST
never_gzip_extensions = ('.ts',)
def __init__(self, *args, **kwargs):
"""
Default Constructor.
Args:
resource_prefix: Prefix value for this controller instance.
Default is :py:data:`FileController._resource_prefix`
root: Root path of files to be served.
Default is the path where the current file is located
do_delete: Try to actually delete files?
Default is False.
delete_whitelist: Folder prefixes where delete operations are
allowed _at all_. Default is :py:data:`DELETE_WHITELIST`
"""
if args:
for key, value in zip(self._override_args, args):
kwargs[key] = value
for arg_name in self._override_args:
if kwargs.get(arg_name) is not None:
attr_name = '_{:s}'.format(arg_name)
setattr(self, attr_name, kwargs.get(arg_name))
self.session = kwargs.get("session")
def _json_response(self, request, data):
"""
Create a JSON representation for *data* and set HTTP headers indicating
that JSON encoded data is returned.
Args:
request (twisted.web.server.Request): HTTP request object
data: response content
Returns:
JSON representation of *data* with appropriate HTTP headers
"""
request.setHeader("content-type", "application/json; charset=utf-8")
return json.dumps(data, indent=2)
def get_response_data_template(self, request):
"""
Generate a response data :class:`dict` containing default values and
some request attribute values for debugging purposes.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
(dict) response template data
"""
file_path = None
if request.path.startswith(self._resource_prefix):
file_path = request.path[len(self._resource_prefix):]
response_data = {
"_request": {
"path": request.path,
"uri": request.uri,
"method": request.method,
"postpath": request.postpath,
"file_path": file_path,
},
"result": False,
}
return response_data
def error_response(self, request, response_code=None, **kwargs):
"""
Create and return an HTTP error response with data as JSON.
Args:
request (twisted.web.server.Request): HTTP request object
response_code: HTTP Status Code (default is 500)
**kwargs: additional key/value pairs
Returns:
JSON encoded data with appropriate HTTP headers
"""
if response_code is None:
response_code = http.INTERNAL_SERVER_ERROR
response_data = self.get_response_data_template(request)
response_data.update(**kwargs)
response_data['me'] = dict()
for arg_name in self._override_args:
attr_name = '_{:s}'.format(arg_name)
response_data['me'][attr_name] = getattr(self, attr_name)
request.setResponseCode(response_code)
return self._json_response(request, response_data)
def _existing_path_or_bust(self, request):
"""
Verify that a filesystem location which is contained in *request.path*
is valid and an existing path.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
path
Raises:
ValueError: If contained path value is invalid.
IOError: If contained path value is not existing.
"""
rq_path = urlparse.unquote(request.path)
if not rq_path.startswith(self._resource_prefix):
raise ValueError("Invalid Request Path {!r}".format(request.path))
file_path = os.path.join(
self._root, rq_path[len(self._resource_prefix) + 1:])
file_path = re.sub(MANY_SLASHES_REGEX, '/', file_path)
if not os.path.exists(file_path):
raise IOError("Not Found {!r}".format(file_path))
return file_path
def render_OPTIONS(self, request):
"""
Render response for an HTTP OPTIONS request.
Example request
curl -iv --noproxy localhost http://localhost:18888/file
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
for key in CORS_DEFAULT:
request.setHeader(key, CORS_DEFAULT[key])
return ''
def render_legacy(self, request):
"""
Render response for an HTTP GET request. In order to maintain
backward compatibility this method emulates the behaviour of the
legacy method implementation.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
return file.FileController().render(request)
def _glob(self, path, pattern='*'):
if path == '/':
glob_me = '/' + pattern
else:
glob_me = '/'.join((path, pattern))
return glob.iglob(glob_me)
def _walk(self, path):
for root, dirs, files in os.walk(path):
for dir_item in dirs:
yield os.path.join(root, dir_item)
for file_item in files:
yield os.path.join(root, file_item)
def render_path_listing(self, request, path):
"""
Generate a file/folder listing of *path*'s contents.
Args:
request (twisted.web.server.Request): HTTP request object
path: folder location
Returns:
HTTP response with headers
"""
response_data = self.get_response_data_template(request)
response_data.update(
{
'result': True,
'dirs': [],
'files': [],
}
)
generator = None
if "pattern" in request.args:
generator = self._glob(path, request.args["pattern"][0])
if "recursive" in request.args:
generator = self._walk(path)
if generator is None:
generator = self._glob(path)
for item in generator:
if os.path.isdir(item):
response_data['dirs'].append(item)
else:
response_data['files'].append(item)
return self._json_response(request, response_data)
def render_file(self, request, path):
"""
Return the contents of file *path*.
Args:
request (twisted.web.server.Request): HTTP request object
path: file path
Returns:
HTTP response with headers
"""
(_, ext) = os.path.splitext(path)
if ext in self.never_gzip_extensions:
# hack: remove gzip from the list of supported encodings
acceptHeaders = request.requestHeaders.getRawHeaders(
'accept-encoding', [])
supported = ','.join(acceptHeaders).split(',')
request.requestHeaders.setRawHeaders(
'accept-encoding', list(set(supported) - {'gzip'}))
result = twisted.web.static.File(
path, defaultType="application/octet-stream")
return result.render(request)
def render_GET(self, request):
"""
HTTP GET request handler returning
* legacy response if the query *file* or *dir* parameter is set
* file contents if *request.path* contains a file path
* directory listing if *request.path* contains a folder path
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
attic_args = {'file', 'dir'}
if len(attic_args & set(request.args.keys())) >= 1:
return self.render_legacy(request)
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
try:
target_path = self._existing_path_or_bust(request)
except ValueError as vexc:
return self.error_response(
request, response_code=http.BAD_REQUEST, message=vexc.message)
except IOError as iexc:
return self.error_response(
request, response_code=http.NOT_FOUND, message=iexc.message)
if os.path.isdir(target_path):
return self.render_path_listing(request, target_path)
else:
return self.render_file(request, target_path)
def render_POST(self, request):
"""
HTTP POST request handler (currently NOT implemented).
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
return self.error_response(request, response_code=http.NOT_IMPLEMENTED)
def render_PUT(self, request):
"""
HTTP PUT request handler (currently NOT implemented).
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
return self.error_response(request, response_code=http.NOT_IMPLEMENTED)
def render_DELETE(self, request):
"""
HTTP DELETE request handler which may try to delete a file if its
path's prefix is in :py:data:`FileController._delete_whitelist` and
:py:data:`FileController._do_delete` is True.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
try:
target_path = self._existing_path_or_bust(request)
except ValueError as vexc:
return self.error_response(
request, response_code=http.BAD_REQUEST, message=vexc.message)
except IOError as iexc:
return self.error_response(
request, response_code=http.NOT_FOUND, message=iexc.message)
if os.path.isdir(target_path):
return self.error_response(
request, response_code=http.NOT_IMPLEMENTED,
message='Will not remove folder {!r}'.format(target_path))
for prefix in self._delete_whitelist:
if not target_path.startswith(os.path.abspath(prefix)):
return self.error_response(request,
response_code=http.FORBIDDEN)
response_data = self.get_response_data_template(request)
try:
response_data['result'] = True
if self._do_delete:
os.unlink(target_path)
message = 'Removed {!r}'.format(target_path)
else:
message = 'WOULD remove {!r}'.format(target_path)
response_data['message'] = message
except Exception as eexc:
response_data['message'] = 'Cannot remove {!r}: {!s}'.format(
target_path, eexc.message)
request.setResponseCode(http.INTERNAL_SERVER_ERROR)
return self._json_response(request, response_data)
if __name__ == '__main__':
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.server import Site, GzipEncoderFactory
from twisted.internet import reactor
# standard factory example
factory_s = Site(FileController(DEFAULT_ROOT_PATH))
# experimental factory
root = Resource()
root.putChild("/", FileController)
root.putChild("/file", FileController)
factory_r = Site(root)
# experimental factory: enable gzip compression
wrapped = EncodingResourceWrapper(
FileController(
root=DEFAULT_ROOT_PATH,
# DANGER, WILL ROBINSON! These values allow deletion of ALL files!
do_delete=True, delete_whitelist=[]
),
[GzipEncoderFactory()])
factory_s_gz = Site(wrapped)
reactor.listenTCP(18888, factory_s_gz)
reactor.run()
| path_disclosure | {
"code": [
"MANY_SLASHES_PATTERN = r'[\\/]+'",
"MANY_SLASHES_REGEX = re.compile(MANY_SLASHES_PATTERN)"
],
"line_no": [
48,
49
]
} | {
"code": [
"from utilities import MANY_SLASHES_REGEX"
],
"line_no": [
46
]
} |
import os
import json
import glob
import re
import urlparse
import twisted.web.static
from twisted.web import http
import file
VAR_0 = r'[\/]+'
VAR_1 = re.compile(VAR_0)
VAR_2 = os.path.abspath(os.path.dirname(__file__))
VAR_3 = [
'Content-Type',
]
VAR_4 = ['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS']
VAR_5 = '*'
VAR_6 = {
'Access-Control-Allow-Origin': VAR_5,
'Access-Control-Allow-Credentials': 'true',
'Access-Control-Max-Age': '86400',
'Access-Control-Allow-Methods': ','.join(VAR_4),
'Access-Control-Allow-Headers': ', '.join(VAR_3)
}
VAR_7 = [
'/media',
]
class CLASS_0(twisted.web.resource.Resource):
VAR_8 = True
VAR_9 = (
'resource_prefix', 'root', 'do_delete', 'delete_whitelist')
VAR_10 = '/file'
VAR_11 = os.path.abspath(os.path.dirname(__file__))
VAR_12 = False
VAR_13 = VAR_7
VAR_14 = ('.ts',)
def __init__(self, *VAR_15, **VAR_16):
if VAR_15:
for VAR_40, value in zip(self._override_args, VAR_15):
VAR_16[VAR_40] = value
for arg_name in self._override_args:
if VAR_16.get(arg_name) is not None:
VAR_35 = '_{:s}'.format(arg_name)
setattr(self, VAR_35, VAR_16.get(arg_name))
self.session = VAR_16.get("session")
def FUNC_0(self, VAR_17, VAR_18):
VAR_17.setHeader("content-type", "application/json; charset=utf-8")
return json.dumps(VAR_18, indent=2)
def FUNC_1(self, VAR_17):
VAR_27 = None
if VAR_17.path.startswith(self._resource_prefix):
VAR_27 = VAR_17.path[len(self._resource_prefix):]
VAR_28 = {
"_request": {
"path": VAR_17.path,
"uri": VAR_17.uri,
"method": VAR_17.method,
"postpath": VAR_17.postpath,
"file_path": VAR_27,
},
"result": False,
}
return VAR_28
def FUNC_2(self, VAR_17, VAR_19=None, **VAR_16):
if VAR_19 is None:
VAR_19 = http.INTERNAL_SERVER_ERROR
VAR_28 = self.get_response_data_template(VAR_17)
VAR_28.update(**VAR_16)
VAR_28['me'] = dict()
for arg_name in self._override_args:
VAR_35 = '_{:s}'.format(arg_name)
VAR_28['me'][VAR_35] = getattr(self, VAR_35)
VAR_17.setResponseCode(VAR_19)
return self._json_response(VAR_17, VAR_28)
def FUNC_3(self, VAR_17):
VAR_29 = urlparse.unquote(VAR_17.path)
if not VAR_29.startswith(self._resource_prefix):
raise ValueError("Invalid Request Path {!r}".format(VAR_17.path))
VAR_27 = os.path.join(
self._root, VAR_29[len(self._resource_prefix) + 1:])
VAR_27 = re.sub(VAR_1, '/', VAR_27)
if not os.path.exists(VAR_27):
raise IOError("Not Found {!r}".format(VAR_27))
return VAR_27
def FUNC_4(self, VAR_17):
for VAR_40 in VAR_6:
VAR_17.setHeader(VAR_40, VAR_6[VAR_40])
return ''
def FUNC_5(self, VAR_17):
return file.FileController().render(VAR_17)
def FUNC_6(self, VAR_20, VAR_21='*'):
if VAR_20 == '/':
VAR_36 = '/' + VAR_21
else:
VAR_36 = '/'.join((VAR_20, VAR_21))
return glob.iglob(VAR_36)
def FUNC_7(self, VAR_20):
for VAR_23, dirs, files in os.walk(VAR_20):
for dir_item in dirs:
yield os.path.join(VAR_23, dir_item)
for file_item in files:
yield os.path.join(VAR_23, file_item)
def FUNC_8(self, VAR_17, VAR_20):
VAR_28 = self.get_response_data_template(VAR_17)
VAR_28.update(
{
'result': True,
'dirs': [],
'files': [],
}
)
VAR_30 = None
if "pattern" in VAR_17.args:
VAR_30 = self._glob(VAR_20, VAR_17.args["pattern"][0])
if "recursive" in VAR_17.args:
VAR_30 = self._walk(VAR_20)
if VAR_30 is None:
VAR_30 = self._glob(VAR_20)
for item in VAR_30:
if os.path.isdir(item):
VAR_28['dirs'].append(item)
else:
VAR_28['files'].append(item)
return self._json_response(VAR_17, VAR_28)
def FUNC_9(self, VAR_17, VAR_20):
(VAR_31, VAR_32) = os.path.splitext(VAR_20)
if VAR_32 in self.never_gzip_extensions:
VAR_37 = VAR_17.requestHeaders.getRawHeaders(
'accept-encoding', [])
VAR_38 = ','.join(VAR_37).split(',')
VAR_17.requestHeaders.setRawHeaders(
'accept-encoding', list(set(VAR_38) - {'gzip'}))
VAR_33 = twisted.web.static.File(
VAR_20, defaultType="application/octet-stream")
return VAR_33.render(VAR_17)
def FUNC_10(self, VAR_17):
VAR_34 = {'file', 'dir'}
if len(VAR_34 & set(VAR_17.args.keys())) >= 1:
return self.render_legacy(VAR_17)
VAR_17.setHeader(
'Access-Control-Allow-Origin', VAR_5)
try:
VAR_39 = self._existing_path_or_bust(VAR_17)
except ValueError as vexc:
return self.error_response(
VAR_17, VAR_19=http.BAD_REQUEST, VAR_41=vexc.message)
except IOError as iexc:
return self.error_response(
VAR_17, VAR_19=http.NOT_FOUND, VAR_41=iexc.message)
if os.path.isdir(VAR_39):
return self.render_path_listing(VAR_17, VAR_39)
else:
return self.render_file(VAR_17, VAR_39)
def FUNC_11(self, VAR_17):
VAR_17.setHeader(
'Access-Control-Allow-Origin', VAR_5)
return self.error_response(VAR_17, VAR_19=http.NOT_IMPLEMENTED)
def FUNC_12(self, VAR_17):
VAR_17.setHeader(
'Access-Control-Allow-Origin', VAR_5)
return self.error_response(VAR_17, VAR_19=http.NOT_IMPLEMENTED)
def FUNC_13(self, VAR_17):
VAR_17.setHeader(
'Access-Control-Allow-Origin', VAR_5)
try:
VAR_39 = self._existing_path_or_bust(VAR_17)
except ValueError as vexc:
return self.error_response(
VAR_17, VAR_19=http.BAD_REQUEST, VAR_41=vexc.message)
except IOError as iexc:
return self.error_response(
VAR_17, VAR_19=http.NOT_FOUND, VAR_41=iexc.message)
if os.path.isdir(VAR_39):
return self.error_response(
VAR_17, VAR_19=http.NOT_IMPLEMENTED,
VAR_41='Will not remove folder {!r}'.format(VAR_39))
for prefix in self._delete_whitelist:
if not VAR_39.startswith(os.path.abspath(prefix)):
return self.error_response(VAR_17,
VAR_19=http.FORBIDDEN)
VAR_28 = self.get_response_data_template(VAR_17)
try:
VAR_28['result'] = True
if self._do_delete:
os.unlink(VAR_39)
VAR_41 = 'Removed {!r}'.format(VAR_39)
else:
VAR_41 = 'WOULD remove {!r}'.format(VAR_39)
VAR_28['message'] = VAR_41
except Exception as eexc:
VAR_28['message'] = 'Cannot remove {!r}: {!s}'.format(
VAR_39, eexc.message)
VAR_17.setResponseCode(http.INTERNAL_SERVER_ERROR)
return self._json_response(VAR_17, VAR_28)
if __name__ == '__main__':
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.server import Site, GzipEncoderFactory
from twisted.internet import reactor
VAR_22 = Site(CLASS_0(VAR_2))
VAR_23 = Resource()
VAR_23.putChild("/", CLASS_0)
VAR_23.putChild("/file", CLASS_0)
VAR_24 = Site(VAR_23)
VAR_25 = EncodingResourceWrapper(
CLASS_0(
VAR_23=VAR_2,
do_delete=True, delete_whitelist=[]
),
[GzipEncoderFactory()])
VAR_26 = Site(VAR_25)
reactor.listenTCP(18888, VAR_26)
reactor.run()
|
import os
import json
import glob
import re
import urlparse
import twisted.web.static
from twisted.web import http
from utilities import MANY_SLASHES_REGEX
import file
VAR_0 = os.path.abspath(os.path.dirname(__file__))
VAR_1 = [
'Content-Type',
]
VAR_2 = ['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS']
VAR_3 = '*'
VAR_4 = {
'Access-Control-Allow-Origin': VAR_3,
'Access-Control-Allow-Credentials': 'true',
'Access-Control-Max-Age': '86400',
'Access-Control-Allow-Methods': ','.join(VAR_2),
'Access-Control-Allow-Headers': ', '.join(VAR_1)
}
VAR_5 = [
'/media',
]
class CLASS_0(twisted.web.resource.Resource):
VAR_6 = True
VAR_7 = (
'resource_prefix', 'root', 'do_delete', 'delete_whitelist')
VAR_8 = '/file'
VAR_9 = os.path.abspath(os.path.dirname(__file__))
VAR_10 = False
VAR_11 = VAR_5
VAR_12 = ('.ts',)
def __init__(self, *VAR_13, **VAR_14):
if VAR_13:
for VAR_38, value in zip(self._override_args, VAR_13):
VAR_14[VAR_38] = value
for arg_name in self._override_args:
if VAR_14.get(arg_name) is not None:
VAR_33 = '_{:s}'.format(arg_name)
setattr(self, VAR_33, VAR_14.get(arg_name))
self.session = VAR_14.get("session")
def FUNC_0(self, VAR_15, VAR_16):
VAR_15.setHeader("content-type", "application/json; charset=utf-8")
return json.dumps(VAR_16, indent=2)
def FUNC_1(self, VAR_15):
VAR_25 = None
if VAR_15.path.startswith(self._resource_prefix):
VAR_25 = VAR_15.path[len(self._resource_prefix):]
VAR_26 = {
"_request": {
"path": VAR_15.path,
"uri": VAR_15.uri,
"method": VAR_15.method,
"postpath": VAR_15.postpath,
"file_path": VAR_25,
},
"result": False,
}
return VAR_26
def FUNC_2(self, VAR_15, VAR_17=None, **VAR_14):
if VAR_17 is None:
VAR_17 = http.INTERNAL_SERVER_ERROR
VAR_26 = self.get_response_data_template(VAR_15)
VAR_26.update(**VAR_14)
VAR_26['me'] = dict()
for arg_name in self._override_args:
VAR_33 = '_{:s}'.format(arg_name)
VAR_26['me'][VAR_33] = getattr(self, VAR_33)
VAR_15.setResponseCode(VAR_17)
return self._json_response(VAR_15, VAR_26)
def FUNC_3(self, VAR_15):
VAR_27 = urlparse.unquote(VAR_15.path)
if not VAR_27.startswith(self._resource_prefix):
raise ValueError("Invalid Request Path {!r}".format(VAR_15.path))
VAR_25 = os.path.join(
self._root, VAR_27[len(self._resource_prefix) + 1:])
VAR_25 = re.sub(MANY_SLASHES_REGEX, '/', VAR_25)
if not os.path.exists(VAR_25):
raise IOError("Not Found {!r}".format(VAR_25))
return VAR_25
def FUNC_4(self, VAR_15):
for VAR_38 in VAR_4:
VAR_15.setHeader(VAR_38, VAR_4[VAR_38])
return ''
def FUNC_5(self, VAR_15):
return file.FileController().render(VAR_15)
def FUNC_6(self, VAR_18, VAR_19='*'):
if VAR_18 == '/':
VAR_34 = '/' + VAR_19
else:
VAR_34 = '/'.join((VAR_18, VAR_19))
return glob.iglob(VAR_34)
def FUNC_7(self, VAR_18):
for VAR_21, dirs, files in os.walk(VAR_18):
for dir_item in dirs:
yield os.path.join(VAR_21, dir_item)
for file_item in files:
yield os.path.join(VAR_21, file_item)
def FUNC_8(self, VAR_15, VAR_18):
VAR_26 = self.get_response_data_template(VAR_15)
VAR_26.update(
{
'result': True,
'dirs': [],
'files': [],
}
)
VAR_28 = None
if "pattern" in VAR_15.args:
VAR_28 = self._glob(VAR_18, VAR_15.args["pattern"][0])
if "recursive" in VAR_15.args:
VAR_28 = self._walk(VAR_18)
if VAR_28 is None:
VAR_28 = self._glob(VAR_18)
for item in VAR_28:
if os.path.isdir(item):
VAR_26['dirs'].append(item)
else:
VAR_26['files'].append(item)
return self._json_response(VAR_15, VAR_26)
def FUNC_9(self, VAR_15, VAR_18):
(VAR_29, VAR_30) = os.path.splitext(VAR_18)
if VAR_30 in self.never_gzip_extensions:
VAR_35 = VAR_15.requestHeaders.getRawHeaders(
'accept-encoding', [])
VAR_36 = ','.join(VAR_35).split(',')
VAR_15.requestHeaders.setRawHeaders(
'accept-encoding', list(set(VAR_36) - {'gzip'}))
VAR_31 = twisted.web.static.File(
VAR_18, defaultType="application/octet-stream")
return VAR_31.render(VAR_15)
def FUNC_10(self, VAR_15):
VAR_32 = {'file', 'dir'}
if len(VAR_32 & set(VAR_15.args.keys())) >= 1:
return self.render_legacy(VAR_15)
VAR_15.setHeader(
'Access-Control-Allow-Origin', VAR_3)
try:
VAR_37 = self._existing_path_or_bust(VAR_15)
except ValueError as vexc:
return self.error_response(
VAR_15, VAR_17=http.BAD_REQUEST, VAR_39=vexc.message)
except IOError as iexc:
return self.error_response(
VAR_15, VAR_17=http.NOT_FOUND, VAR_39=iexc.message)
if os.path.isdir(VAR_37):
return self.render_path_listing(VAR_15, VAR_37)
else:
return self.render_file(VAR_15, VAR_37)
def FUNC_11(self, VAR_15):
VAR_15.setHeader(
'Access-Control-Allow-Origin', VAR_3)
return self.error_response(VAR_15, VAR_17=http.NOT_IMPLEMENTED)
def FUNC_12(self, VAR_15):
VAR_15.setHeader(
'Access-Control-Allow-Origin', VAR_3)
return self.error_response(VAR_15, VAR_17=http.NOT_IMPLEMENTED)
def FUNC_13(self, VAR_15):
VAR_15.setHeader(
'Access-Control-Allow-Origin', VAR_3)
try:
VAR_37 = self._existing_path_or_bust(VAR_15)
except ValueError as vexc:
return self.error_response(
VAR_15, VAR_17=http.BAD_REQUEST, VAR_39=vexc.message)
except IOError as iexc:
return self.error_response(
VAR_15, VAR_17=http.NOT_FOUND, VAR_39=iexc.message)
if os.path.isdir(VAR_37):
return self.error_response(
VAR_15, VAR_17=http.NOT_IMPLEMENTED,
VAR_39='Will not remove folder {!r}'.format(VAR_37))
for prefix in self._delete_whitelist:
if not VAR_37.startswith(os.path.abspath(prefix)):
return self.error_response(VAR_15,
VAR_17=http.FORBIDDEN)
VAR_26 = self.get_response_data_template(VAR_15)
try:
VAR_26['result'] = True
if self._do_delete:
os.unlink(VAR_37)
VAR_39 = 'Removed {!r}'.format(VAR_37)
else:
VAR_39 = 'WOULD remove {!r}'.format(VAR_37)
VAR_26['message'] = VAR_39
except Exception as eexc:
VAR_26['message'] = 'Cannot remove {!r}: {!s}'.format(
VAR_37, eexc.message)
VAR_15.setResponseCode(http.INTERNAL_SERVER_ERROR)
return self._json_response(VAR_15, VAR_26)
if __name__ == '__main__':
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.server import Site, GzipEncoderFactory
from twisted.internet import reactor
VAR_20 = Site(CLASS_0(VAR_0))
VAR_21 = Resource()
VAR_21.putChild("/", CLASS_0)
VAR_21.putChild("/file", CLASS_0)
VAR_22 = Site(VAR_21)
VAR_23 = EncodingResourceWrapper(
CLASS_0(
VAR_21=VAR_0,
do_delete=True, delete_whitelist=[]
),
[GzipEncoderFactory()])
VAR_24 = Site(VAR_23)
reactor.listenTCP(18888, VAR_24)
reactor.run()
| [
1,
2,
6,
10,
13,
16,
19,
21,
23,
25,
27,
29,
31,
33,
35,
42,
45,
47,
50,
51,
53,
54,
58,
59,
61,
62,
64,
65,
73,
74,
78,
79,
89,
93,
107,
113,
118,
127,
132,
141,
152,
154,
158,
168,
171,
176,
179,
184,
196,
200,
203,
205,
209,
211,
213,
221,
223,
229,
236,
243,
250,
254,
269,
273,
276,
279,
285,
287,
291,
299,
301,
307,
310,
312,
316,
320,
327,
330,
333,
342,
347,
351,
360,
364,
373,
379,
387,
396,
401,
406,
420,
422,
423,
428,
429,
431,
432,
437,
438,
442,
447,
450,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100,
101,
102,
103,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
129,
130,
131,
132,
133,
134,
135,
136,
137,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
207,
208,
209,
210,
211,
212,
213,
214,
215,
216,
217,
218,
225,
226,
227,
228,
229,
230,
231,
232,
233,
234,
252,
253,
254,
255,
256,
257,
258,
259,
260,
289,
290,
291,
292,
293,
294,
295,
296,
297,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
349,
350,
351,
352,
353,
354,
355,
356,
362,
363,
364,
365,
366,
367,
368,
369,
375,
376,
377,
378,
379,
380,
381,
382,
383,
384
] | [
1,
2,
6,
10,
13,
16,
19,
21,
23,
25,
27,
29,
31,
33,
35,
42,
45,
48,
49,
51,
52,
56,
57,
59,
60,
62,
63,
71,
72,
76,
77,
87,
91,
105,
111,
116,
125,
130,
139,
150,
152,
156,
166,
169,
174,
177,
182,
194,
198,
201,
203,
207,
209,
211,
219,
221,
227,
234,
241,
248,
252,
267,
271,
274,
277,
283,
285,
289,
297,
299,
305,
308,
310,
314,
318,
325,
328,
331,
340,
345,
349,
358,
362,
371,
377,
385,
394,
399,
404,
418,
420,
421,
426,
427,
429,
430,
435,
436,
440,
445,
448,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
89,
90,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100,
101,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
127,
128,
129,
130,
131,
132,
133,
134,
135,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
179,
180,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
205,
206,
207,
208,
209,
210,
211,
212,
213,
214,
215,
216,
223,
224,
225,
226,
227,
228,
229,
230,
231,
232,
250,
251,
252,
253,
254,
255,
256,
257,
258,
287,
288,
289,
290,
291,
292,
293,
294,
295,
312,
313,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
347,
348,
349,
350,
351,
352,
353,
354,
360,
361,
362,
363,
364,
365,
366,
367,
373,
374,
375,
376,
377,
378,
379,
380,
381,
382
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2019 Matrix.org Federation C.I.C
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Dict,
List,
Optional,
Tuple,
Union,
)
from prometheus_client import Counter, Gauge, Histogram
from twisted.internet import defer
from twisted.internet.abstract import isIPAddress
from twisted.python import failure
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import (
AuthError,
Codes,
FederationError,
IncompatibleRoomVersionError,
NotFoundError,
SynapseError,
UnsupportedRoomVersionError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import EventBase
from synapse.federation.federation_base import FederationBase, event_from_pdu_json
from synapse.federation.persistence import TransactionActions
from synapse.federation.units import Edu, Transaction
from synapse.http.endpoint import parse_server_name
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import (
make_deferred_yieldable,
nested_logging_context,
run_in_background,
)
from synapse.logging.opentracing import log_kv, start_active_span_from_edu, trace
from synapse.logging.utils import log_function
from synapse.replication.http.federation import (
ReplicationFederationSendEduRestServlet,
ReplicationGetQueryRestServlet,
)
from synapse.types import JsonDict, get_domain_from_id
from synapse.util import glob_to_regex, json_decoder, unwrapFirstError
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.caches.response_cache import ResponseCache
if TYPE_CHECKING:
from synapse.server import HomeServer
# when processing incoming transactions, we try to handle multiple rooms in
# parallel, up to this limit.
TRANSACTION_CONCURRENCY_LIMIT = 10
logger = logging.getLogger(__name__)
received_pdus_counter = Counter("synapse_federation_server_received_pdus", "")
received_edus_counter = Counter("synapse_federation_server_received_edus", "")
received_queries_counter = Counter(
"synapse_federation_server_received_queries", "", ["type"]
)
pdu_process_time = Histogram(
"synapse_federation_server_pdu_process_time", "Time taken to process an event",
)
last_pdu_age_metric = Gauge(
"synapse_federation_last_received_pdu_age",
"The age (in seconds) of the last PDU successfully received from the given domain",
labelnames=("server_name",),
)
class FederationServer(FederationBase):
def __init__(self, hs):
super().__init__(hs)
self.auth = hs.get_auth()
self.handler = hs.get_federation_handler()
self.state = hs.get_state_handler()
self.device_handler = hs.get_device_handler()
# Ensure the following handlers are loaded since they register callbacks
# with FederationHandlerRegistry.
hs.get_directory_handler()
self._federation_ratelimiter = hs.get_federation_ratelimiter()
self._server_linearizer = Linearizer("fed_server")
self._transaction_linearizer = Linearizer("fed_txn_handler")
# We cache results for transaction with the same ID
self._transaction_resp_cache = ResponseCache(
hs, "fed_txn_handler", timeout_ms=30000
) # type: ResponseCache[Tuple[str, str]]
self.transaction_actions = TransactionActions(self.store)
self.registry = hs.get_federation_registry()
# We cache responses to state queries, as they take a while and often
# come in waves.
self._state_resp_cache = ResponseCache(
hs, "state_resp", timeout_ms=30000
) # type: ResponseCache[Tuple[str, str]]
self._state_ids_resp_cache = ResponseCache(
hs, "state_ids_resp", timeout_ms=30000
) # type: ResponseCache[Tuple[str, str]]
self._federation_metrics_domains = (
hs.get_config().federation.federation_metrics_domains
)
async def on_backfill_request(
self, origin: str, room_id: str, versions: List[str], limit: int
) -> Tuple[int, Dict[str, Any]]:
with (await self._server_linearizer.queue((origin, room_id))):
origin_host, _ = parse_server_name(origin)
await self.check_server_matches_acl(origin_host, room_id)
pdus = await self.handler.on_backfill_request(
origin, room_id, versions, limit
)
res = self._transaction_from_pdus(pdus).get_dict()
return 200, res
async def on_incoming_transaction(
self, origin: str, transaction_data: JsonDict
) -> Tuple[int, Dict[str, Any]]:
# keep this as early as possible to make the calculated origin ts as
# accurate as possible.
request_time = self._clock.time_msec()
transaction = Transaction(**transaction_data)
transaction_id = transaction.transaction_id # type: ignore
if not transaction_id:
raise Exception("Transaction missing transaction_id")
logger.debug("[%s] Got transaction", transaction_id)
# We wrap in a ResponseCache so that we de-duplicate retried
# transactions.
return await self._transaction_resp_cache.wrap(
(origin, transaction_id),
self._on_incoming_transaction_inner,
origin,
transaction,
request_time,
)
async def _on_incoming_transaction_inner(
self, origin: str, transaction: Transaction, request_time: int
) -> Tuple[int, Dict[str, Any]]:
# Use a linearizer to ensure that transactions from a remote are
# processed in order.
with await self._transaction_linearizer.queue(origin):
# We rate limit here *after* we've queued up the incoming requests,
# so that we don't fill up the ratelimiter with blocked requests.
#
# This is important as the ratelimiter allows N concurrent requests
# at a time, and only starts ratelimiting if there are more requests
# than that being processed at a time. If we queued up requests in
# the linearizer/response cache *after* the ratelimiting then those
# queued up requests would count as part of the allowed limit of N
# concurrent requests.
with self._federation_ratelimiter.ratelimit(origin) as d:
await d
result = await self._handle_incoming_transaction(
origin, transaction, request_time
)
return result
async def _handle_incoming_transaction(
self, origin: str, transaction: Transaction, request_time: int
) -> Tuple[int, Dict[str, Any]]:
""" Process an incoming transaction and return the HTTP response
Args:
origin: the server making the request
transaction: incoming transaction
request_time: timestamp that the HTTP request arrived at
Returns:
HTTP response code and body
"""
response = await self.transaction_actions.have_responded(origin, transaction)
if response:
logger.debug(
"[%s] We've already responded to this request",
transaction.transaction_id, # type: ignore
)
return response
logger.debug("[%s] Transaction is new", transaction.transaction_id) # type: ignore
# Reject if PDU count > 50 or EDU count > 100
if len(transaction.pdus) > 50 or ( # type: ignore
hasattr(transaction, "edus") and len(transaction.edus) > 100 # type: ignore
):
logger.info("Transaction PDU or EDU count too large. Returning 400")
response = {}
await self.transaction_actions.set_response(
origin, transaction, 400, response
)
return 400, response
# We process PDUs and EDUs in parallel. This is important as we don't
# want to block things like to device messages from reaching clients
# behind the potentially expensive handling of PDUs.
pdu_results, _ = await make_deferred_yieldable(
defer.gatherResults(
[
run_in_background(
self._handle_pdus_in_txn, origin, transaction, request_time
),
run_in_background(self._handle_edus_in_txn, origin, transaction),
],
consumeErrors=True,
).addErrback(unwrapFirstError)
)
response = {"pdus": pdu_results}
logger.debug("Returning: %s", str(response))
await self.transaction_actions.set_response(origin, transaction, 200, response)
return 200, response
async def _handle_pdus_in_txn(
self, origin: str, transaction: Transaction, request_time: int
) -> Dict[str, dict]:
"""Process the PDUs in a received transaction.
Args:
origin: the server making the request
transaction: incoming transaction
request_time: timestamp that the HTTP request arrived at
Returns:
A map from event ID of a processed PDU to any errors we should
report back to the sending server.
"""
received_pdus_counter.inc(len(transaction.pdus)) # type: ignore
origin_host, _ = parse_server_name(origin)
pdus_by_room = {} # type: Dict[str, List[EventBase]]
newest_pdu_ts = 0
for p in transaction.pdus: # type: ignore
# FIXME (richardv): I don't think this works:
# https://github.com/matrix-org/synapse/issues/8429
if "unsigned" in p:
unsigned = p["unsigned"]
if "age" in unsigned:
p["age"] = unsigned["age"]
if "age" in p:
p["age_ts"] = request_time - int(p["age"])
del p["age"]
# We try and pull out an event ID so that if later checks fail we
# can log something sensible. We don't mandate an event ID here in
# case future event formats get rid of the key.
possible_event_id = p.get("event_id", "<Unknown>")
# Now we get the room ID so that we can check that we know the
# version of the room.
room_id = p.get("room_id")
if not room_id:
logger.info(
"Ignoring PDU as does not have a room_id. Event ID: %s",
possible_event_id,
)
continue
try:
room_version = await self.store.get_room_version(room_id)
except NotFoundError:
logger.info("Ignoring PDU for unknown room_id: %s", room_id)
continue
except UnsupportedRoomVersionError as e:
# this can happen if support for a given room version is withdrawn,
# so that we still get events for said room.
logger.info("Ignoring PDU: %s", e)
continue
event = event_from_pdu_json(p, room_version)
pdus_by_room.setdefault(room_id, []).append(event)
if event.origin_server_ts > newest_pdu_ts:
newest_pdu_ts = event.origin_server_ts
pdu_results = {}
# we can process different rooms in parallel (which is useful if they
# require callouts to other servers to fetch missing events), but
# impose a limit to avoid going too crazy with ram/cpu.
async def process_pdus_for_room(room_id: str):
logger.debug("Processing PDUs for %s", room_id)
try:
await self.check_server_matches_acl(origin_host, room_id)
except AuthError as e:
logger.warning("Ignoring PDUs for room %s from banned server", room_id)
for pdu in pdus_by_room[room_id]:
event_id = pdu.event_id
pdu_results[event_id] = e.error_dict()
return
for pdu in pdus_by_room[room_id]:
event_id = pdu.event_id
with pdu_process_time.time():
with nested_logging_context(event_id):
try:
await self._handle_received_pdu(origin, pdu)
pdu_results[event_id] = {}
except FederationError as e:
logger.warning("Error handling PDU %s: %s", event_id, e)
pdu_results[event_id] = {"error": str(e)}
except Exception as e:
f = failure.Failure()
pdu_results[event_id] = {"error": str(e)}
logger.error(
"Failed to handle PDU %s",
event_id,
exc_info=(f.type, f.value, f.getTracebackObject()),
)
await concurrently_execute(
process_pdus_for_room, pdus_by_room.keys(), TRANSACTION_CONCURRENCY_LIMIT
)
if newest_pdu_ts and origin in self._federation_metrics_domains:
newest_pdu_age = self._clock.time_msec() - newest_pdu_ts
last_pdu_age_metric.labels(server_name=origin).set(newest_pdu_age / 1000)
return pdu_results
async def _handle_edus_in_txn(self, origin: str, transaction: Transaction):
"""Process the EDUs in a received transaction.
"""
async def _process_edu(edu_dict):
received_edus_counter.inc()
edu = Edu(
origin=origin,
destination=self.server_name,
edu_type=edu_dict["edu_type"],
content=edu_dict["content"],
)
await self.registry.on_edu(edu.edu_type, origin, edu.content)
await concurrently_execute(
_process_edu,
getattr(transaction, "edus", []),
TRANSACTION_CONCURRENCY_LIMIT,
)
async def on_room_state_request(
self, origin: str, room_id: str, event_id: str
) -> Tuple[int, Dict[str, Any]]:
origin_host, _ = parse_server_name(origin)
await self.check_server_matches_acl(origin_host, room_id)
in_room = await self.auth.check_host_in_room(room_id, origin)
if not in_room:
raise AuthError(403, "Host not in room.")
# we grab the linearizer to protect ourselves from servers which hammer
# us. In theory we might already have the response to this query
# in the cache so we could return it without waiting for the linearizer
# - but that's non-trivial to get right, and anyway somewhat defeats
# the point of the linearizer.
with (await self._server_linearizer.queue((origin, room_id))):
resp = dict(
await self._state_resp_cache.wrap(
(room_id, event_id),
self._on_context_state_request_compute,
room_id,
event_id,
)
)
room_version = await self.store.get_room_version_id(room_id)
resp["room_version"] = room_version
return 200, resp
async def on_state_ids_request(
self, origin: str, room_id: str, event_id: str
) -> Tuple[int, Dict[str, Any]]:
if not event_id:
raise NotImplementedError("Specify an event")
origin_host, _ = parse_server_name(origin)
await self.check_server_matches_acl(origin_host, room_id)
in_room = await self.auth.check_host_in_room(room_id, origin)
if not in_room:
raise AuthError(403, "Host not in room.")
resp = await self._state_ids_resp_cache.wrap(
(room_id, event_id), self._on_state_ids_request_compute, room_id, event_id,
)
return 200, resp
async def _on_state_ids_request_compute(self, room_id, event_id):
state_ids = await self.handler.get_state_ids_for_pdu(room_id, event_id)
auth_chain_ids = await self.store.get_auth_chain_ids(state_ids)
return {"pdu_ids": state_ids, "auth_chain_ids": auth_chain_ids}
async def _on_context_state_request_compute(
self, room_id: str, event_id: str
) -> Dict[str, list]:
if event_id:
pdus = await self.handler.get_state_for_pdu(room_id, event_id)
else:
pdus = (await self.state.get_current_state(room_id)).values()
auth_chain = await self.store.get_auth_chain([pdu.event_id for pdu in pdus])
return {
"pdus": [pdu.get_pdu_json() for pdu in pdus],
"auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
}
async def on_pdu_request(
self, origin: str, event_id: str
) -> Tuple[int, Union[JsonDict, str]]:
pdu = await self.handler.get_persisted_pdu(origin, event_id)
if pdu:
return 200, self._transaction_from_pdus([pdu]).get_dict()
else:
return 404, ""
async def on_query_request(
self, query_type: str, args: Dict[str, str]
) -> Tuple[int, Dict[str, Any]]:
received_queries_counter.labels(query_type).inc()
resp = await self.registry.on_query(query_type, args)
return 200, resp
async def on_make_join_request(
self, origin: str, room_id: str, user_id: str, supported_versions: List[str]
) -> Dict[str, Any]:
origin_host, _ = parse_server_name(origin)
await self.check_server_matches_acl(origin_host, room_id)
room_version = await self.store.get_room_version_id(room_id)
if room_version not in supported_versions:
logger.warning(
"Room version %s not in %s", room_version, supported_versions
)
raise IncompatibleRoomVersionError(room_version=room_version)
pdu = await self.handler.on_make_join_request(origin, room_id, user_id)
time_now = self._clock.time_msec()
return {"event": pdu.get_pdu_json(time_now), "room_version": room_version}
async def on_invite_request(
self, origin: str, content: JsonDict, room_version_id: str
) -> Dict[str, Any]:
room_version = KNOWN_ROOM_VERSIONS.get(room_version_id)
if not room_version:
raise SynapseError(
400,
"Homeserver does not support this room version",
Codes.UNSUPPORTED_ROOM_VERSION,
)
pdu = event_from_pdu_json(content, room_version)
origin_host, _ = parse_server_name(origin)
await self.check_server_matches_acl(origin_host, pdu.room_id)
pdu = await self._check_sigs_and_hash(room_version, pdu)
ret_pdu = await self.handler.on_invite_request(origin, pdu, room_version)
time_now = self._clock.time_msec()
return {"event": ret_pdu.get_pdu_json(time_now)}
async def on_send_join_request(
self, origin: str, content: JsonDict
) -> Dict[str, Any]:
logger.debug("on_send_join_request: content: %s", content)
assert_params_in_dict(content, ["room_id"])
room_version = await self.store.get_room_version(content["room_id"])
pdu = event_from_pdu_json(content, room_version)
origin_host, _ = parse_server_name(origin)
await self.check_server_matches_acl(origin_host, pdu.room_id)
logger.debug("on_send_join_request: pdu sigs: %s", pdu.signatures)
pdu = await self._check_sigs_and_hash(room_version, pdu)
res_pdus = await self.handler.on_send_join_request(origin, pdu)
time_now = self._clock.time_msec()
return {
"state": [p.get_pdu_json(time_now) for p in res_pdus["state"]],
"auth_chain": [p.get_pdu_json(time_now) for p in res_pdus["auth_chain"]],
}
async def on_make_leave_request(
self, origin: str, room_id: str, user_id: str
) -> Dict[str, Any]:
origin_host, _ = parse_server_name(origin)
await self.check_server_matches_acl(origin_host, room_id)
pdu = await self.handler.on_make_leave_request(origin, room_id, user_id)
room_version = await self.store.get_room_version_id(room_id)
time_now = self._clock.time_msec()
return {"event": pdu.get_pdu_json(time_now), "room_version": room_version}
async def on_send_leave_request(self, origin: str, content: JsonDict) -> dict:
logger.debug("on_send_leave_request: content: %s", content)
assert_params_in_dict(content, ["room_id"])
room_version = await self.store.get_room_version(content["room_id"])
pdu = event_from_pdu_json(content, room_version)
origin_host, _ = parse_server_name(origin)
await self.check_server_matches_acl(origin_host, pdu.room_id)
logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures)
pdu = await self._check_sigs_and_hash(room_version, pdu)
await self.handler.on_send_leave_request(origin, pdu)
return {}
async def on_event_auth(
self, origin: str, room_id: str, event_id: str
) -> Tuple[int, Dict[str, Any]]:
with (await self._server_linearizer.queue((origin, room_id))):
origin_host, _ = parse_server_name(origin)
await self.check_server_matches_acl(origin_host, room_id)
time_now = self._clock.time_msec()
auth_pdus = await self.handler.on_event_auth(event_id)
res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]}
return 200, res
@log_function
async def on_query_client_keys(
self, origin: str, content: Dict[str, str]
) -> Tuple[int, Dict[str, Any]]:
return await self.on_query_request("client_keys", content)
async def on_query_user_devices(
self, origin: str, user_id: str
) -> Tuple[int, Dict[str, Any]]:
keys = await self.device_handler.on_federation_query_user_devices(user_id)
return 200, keys
@trace
async def on_claim_client_keys(
self, origin: str, content: JsonDict
) -> Dict[str, Any]:
query = []
for user_id, device_keys in content.get("one_time_keys", {}).items():
for device_id, algorithm in device_keys.items():
query.append((user_id, device_id, algorithm))
log_kv({"message": "Claiming one time keys.", "user, device pairs": query})
results = await self.store.claim_e2e_one_time_keys(query)
json_result = {} # type: Dict[str, Dict[str, dict]]
for user_id, device_keys in results.items():
for device_id, keys in device_keys.items():
for key_id, json_str in keys.items():
json_result.setdefault(user_id, {})[device_id] = {
key_id: json_decoder.decode(json_str)
}
logger.info(
"Claimed one-time-keys: %s",
",".join(
(
"%s for %s:%s" % (key_id, user_id, device_id)
for user_id, user_keys in json_result.items()
for device_id, device_keys in user_keys.items()
for key_id, _ in device_keys.items()
)
),
)
return {"one_time_keys": json_result}
async def on_get_missing_events(
self,
origin: str,
room_id: str,
earliest_events: List[str],
latest_events: List[str],
limit: int,
) -> Dict[str, list]:
with (await self._server_linearizer.queue((origin, room_id))):
origin_host, _ = parse_server_name(origin)
await self.check_server_matches_acl(origin_host, room_id)
logger.debug(
"on_get_missing_events: earliest_events: %r, latest_events: %r,"
" limit: %d",
earliest_events,
latest_events,
limit,
)
missing_events = await self.handler.on_get_missing_events(
origin, room_id, earliest_events, latest_events, limit
)
if len(missing_events) < 5:
logger.debug(
"Returning %d events: %r", len(missing_events), missing_events
)
else:
logger.debug("Returning %d events", len(missing_events))
time_now = self._clock.time_msec()
return {"events": [ev.get_pdu_json(time_now) for ev in missing_events]}
@log_function
async def on_openid_userinfo(self, token: str) -> Optional[str]:
ts_now_ms = self._clock.time_msec()
return await self.store.get_user_id_for_open_id_token(token, ts_now_ms)
def _transaction_from_pdus(self, pdu_list: List[EventBase]) -> Transaction:
"""Returns a new Transaction containing the given PDUs suitable for
transmission.
"""
time_now = self._clock.time_msec()
pdus = [p.get_pdu_json(time_now) for p in pdu_list]
return Transaction(
origin=self.server_name,
pdus=pdus,
origin_server_ts=int(time_now),
destination=None,
)
async def _handle_received_pdu(self, origin: str, pdu: EventBase) -> None:
""" Process a PDU received in a federation /send/ transaction.
If the event is invalid, then this method throws a FederationError.
(The error will then be logged and sent back to the sender (which
probably won't do anything with it), and other events in the
transaction will be processed as normal).
It is likely that we'll then receive other events which refer to
this rejected_event in their prev_events, etc. When that happens,
we'll attempt to fetch the rejected event again, which will presumably
fail, so those second-generation events will also get rejected.
Eventually, we get to the point where there are more than 10 events
between any new events and the original rejected event. Since we
only try to backfill 10 events deep on received pdu, we then accept the
new event, possibly introducing a discontinuity in the DAG, with new
forward extremities, so normal service is approximately returned,
until we try to backfill across the discontinuity.
Args:
origin: server which sent the pdu
pdu: received pdu
Raises: FederationError if the signatures / hash do not match, or
if the event was unacceptable for any other reason (eg, too large,
too many prev_events, couldn't find the prev_events)
"""
# check that it's actually being sent from a valid destination to
# workaround bug #1753 in 0.18.5 and 0.18.6
if origin != get_domain_from_id(pdu.sender):
# We continue to accept join events from any server; this is
# necessary for the federation join dance to work correctly.
# (When we join over federation, the "helper" server is
# responsible for sending out the join event, rather than the
# origin. See bug #1893. This is also true for some third party
# invites).
if not (
pdu.type == "m.room.member"
and pdu.content
and pdu.content.get("membership", None)
in (Membership.JOIN, Membership.INVITE)
):
logger.info(
"Discarding PDU %s from invalid origin %s", pdu.event_id, origin
)
return
else:
logger.info("Accepting join PDU %s from %s", pdu.event_id, origin)
# We've already checked that we know the room version by this point
room_version = await self.store.get_room_version(pdu.room_id)
# Check signature.
try:
pdu = await self._check_sigs_and_hash(room_version, pdu)
except SynapseError as e:
raise FederationError("ERROR", e.code, e.msg, affected=pdu.event_id)
await self.handler.on_receive_pdu(origin, pdu, sent_to_us_directly=True)
def __str__(self):
return "<ReplicationLayer(%s)>" % self.server_name
async def exchange_third_party_invite(
self, sender_user_id: str, target_user_id: str, room_id: str, signed: Dict
):
ret = await self.handler.exchange_third_party_invite(
sender_user_id, target_user_id, room_id, signed
)
return ret
async def on_exchange_third_party_invite_request(self, event_dict: Dict):
ret = await self.handler.on_exchange_third_party_invite_request(event_dict)
return ret
    async def check_server_matches_acl(self, server_name: str, room_id: str):
        """Check if the given server is allowed by the server ACLs in the room
        Args:
            server_name: name of server, *without any port part*
            room_id: ID of the room to check
        Raises:
            AuthError if the server does not match the ACL
        """
        state_ids = await self.store.get_current_state_ids(room_id)
        acl_event_id = state_ids.get((EventTypes.ServerACL, ""))
        # No m.room.server_acl event in the current state: no restrictions apply.
        if not acl_event_id:
            return
        acl_event = await self.store.get_event(acl_event_id)
        if server_matches_acl_event(server_name, acl_event):
            return
        raise AuthError(code=403, msg="Server is banned from room")
def server_matches_acl_event(server_name: str, acl_event: EventBase) -> bool:
    """Check if the given server is allowed by the ACL event
    Checks run in order: IP-literal ban, then the deny list, then the allow
    list; a server matching neither list is rejected.
    Args:
        server_name: name of server, without any port part
        acl_event: m.room.server_acl event
    Returns:
        True if this server is allowed by the ACLs
    """
    logger.debug("Checking %s against acl %s", server_name, acl_event.content)
    # first of all, check if literal IPs are blocked, and if so, whether the
    # server name is a literal IP
    allow_ip_literals = acl_event.content.get("allow_ip_literals", True)
    if not isinstance(allow_ip_literals, bool):
        logger.warning("Ignoring non-bool allow_ip_literals flag")
        allow_ip_literals = True
    if not allow_ip_literals:
        # check for ipv6 literals. These start with '['.
        if server_name[0] == "[":
            return False
        # check for ipv4 literals. We can just lift the routine from twisted.
        if isIPAddress(server_name):
            return False
    # next, check the deny list
    deny = acl_event.content.get("deny", [])
    if not isinstance(deny, (list, tuple)):
        logger.warning("Ignoring non-list deny ACL %s", deny)
        deny = []
    for e in deny:
        if _acl_entry_matches(server_name, e):
            # logger.info("%s matched deny rule %s", server_name, e)
            return False
    # then the allow list.
    allow = acl_event.content.get("allow", [])
    if not isinstance(allow, (list, tuple)):
        logger.warning("Ignoring non-list allow ACL %s", allow)
        allow = []
    for e in allow:
        if _acl_entry_matches(server_name, e):
            # logger.info("%s matched allow rule %s", server_name, e)
            return True
    # everything else should be rejected.
    # logger.info("%s fell through", server_name)
    return False
def _acl_entry_matches(server_name: str, acl_entry: Any) -> bool:
    """Return True if *server_name* matches the glob pattern *acl_entry*.
    Non-string entries are ignored (logged and treated as no match).
    """
    if not isinstance(acl_entry, str):
        logger.warning(
            "Ignoring non-str ACL entry '%s' (is %s)", acl_entry, type(acl_entry)
        )
        return False
    return bool(glob_to_regex(acl_entry).match(server_name))
class FederationHandlerRegistry:
    """Allows classes to register themselves as handlers for a given EDU or
    query type for incoming federation traffic.
    Unhandled EDUs/queries are routed to another instance (or "master") via
    replication clients when this instance is not the registered handler.
    """
    def __init__(self, hs: "HomeServer"):
        self.config = hs.config
        # NOTE(review): http_client appears unused within this class — confirm
        # against other callers before removing.
        self.http_client = hs.get_simple_http_client()
        self.clock = hs.get_clock()
        self._instance_name = hs.get_instance_name()
        # These are safe to load in monolith mode, but will explode if we try
        # and use them. However we have guards before we use them to ensure that
        # we don't route to ourselves, and in monolith mode that will always be
        # the case.
        self._get_query_client = ReplicationGetQueryRestServlet.make_client(hs)
        self._send_edu = ReplicationFederationSendEduRestServlet.make_client(hs)
        # Registered EDU handlers, keyed by EDU type.
        self.edu_handlers = (
            {}
        ) # type: Dict[str, Callable[[str, dict], Awaitable[None]]]
        # Registered query handlers, keyed by query type.
        self.query_handlers = {} # type: Dict[str, Callable[[dict], Awaitable[None]]]
        # Map from type to instance name that we should route EDU handling to.
        self._edu_type_to_instance = {} # type: Dict[str, str]
    def register_edu_handler(
        self, edu_type: str, handler: Callable[[str, JsonDict], Awaitable[None]]
    ):
        """Sets the handler callable that will be used to handle an incoming
        federation EDU of the given type.
        Args:
            edu_type: The type of the incoming EDU to register handler for
            handler: A callable invoked on incoming EDU
                of the given type. The arguments are the origin server name and
                the EDU contents.
        Raises:
            KeyError: if a handler for this EDU type is already registered.
        """
        if edu_type in self.edu_handlers:
            raise KeyError("Already have an EDU handler for %s" % (edu_type,))
        logger.info("Registering federation EDU handler for %r", edu_type)
        self.edu_handlers[edu_type] = handler
    def register_query_handler(
        self, query_type: str, handler: Callable[[dict], defer.Deferred]
    ):
        """Sets the handler callable that will be used to handle an incoming
        federation query of the given type.
        Args:
            query_type: Category name of the query, which should match
                the string used by make_query.
            handler: Invoked to handle
                incoming queries of this type. The return will be yielded
                on and the result used as the response to the query request.
        Raises:
            KeyError: if a handler for this query type is already registered.
        """
        if query_type in self.query_handlers:
            raise KeyError("Already have a Query handler for %s" % (query_type,))
        logger.info("Registering federation query handler for %r", query_type)
        self.query_handlers[query_type] = handler
    def register_instance_for_edu(self, edu_type: str, instance_name: str):
        """Register that the EDU handler is on a different instance than master.
        """
        self._edu_type_to_instance[edu_type] = instance_name
    async def on_edu(self, edu_type: str, origin: str, content: dict):
        """Dispatch an incoming EDU to a local handler, or route it to the
        instance registered for this EDU type. Handler errors are logged, not
        propagated.
        """
        # Presence EDUs are dropped entirely when presence is disabled.
        if not self.config.use_presence and edu_type == "m.presence":
            return
        # Check if we have a handler on this instance
        handler = self.edu_handlers.get(edu_type)
        if handler:
            with start_active_span_from_edu(content, "handle_edu"):
                try:
                    await handler(origin, content)
                except SynapseError as e:
                    logger.info("Failed to handle edu %r: %r", edu_type, e)
                except Exception:
                    logger.exception("Failed to handle edu %r", edu_type)
            return
        # Check if we can route it somewhere else that isn't us
        route_to = self._edu_type_to_instance.get(edu_type, "master")
        if route_to != self._instance_name:
            try:
                await self._send_edu(
                    instance_name=route_to,
                    edu_type=edu_type,
                    origin=origin,
                    content=content,
                )
            except SynapseError as e:
                logger.info("Failed to handle edu %r: %r", edu_type, e)
            except Exception:
                logger.exception("Failed to handle edu %r", edu_type)
            return
        # Oh well, let's just log and move on.
        logger.warning("No handler registered for EDU type %s", edu_type)
    async def on_query(self, query_type: str, args: dict):
        """Dispatch an incoming federation query to a local handler, or route
        it to master.
        Raises:
            NotFoundError: if no handler exists anywhere for this query type.
        """
        handler = self.query_handlers.get(query_type)
        if handler:
            return await handler(args)
        # Check if we can route it somewhere else that isn't us
        if self._instance_name == "master":
            return await self._get_query_client(query_type=query_type, args=args)
        # Uh oh, no handler! Let's raise an exception so the request returns an
        # error.
        logger.warning("No handler registered for query type %s", query_type)
        raise NotFoundError("No handler for Query type '%s'" % (query_type,))
| # -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2019 Matrix.org Federation C.I.C
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Dict,
List,
Optional,
Tuple,
Union,
)
from prometheus_client import Counter, Gauge, Histogram
from twisted.internet import defer
from twisted.internet.abstract import isIPAddress
from twisted.python import failure
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import (
AuthError,
Codes,
FederationError,
IncompatibleRoomVersionError,
NotFoundError,
SynapseError,
UnsupportedRoomVersionError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import EventBase
from synapse.federation.federation_base import FederationBase, event_from_pdu_json
from synapse.federation.persistence import TransactionActions
from synapse.federation.units import Edu, Transaction
from synapse.http.endpoint import parse_server_name
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import (
make_deferred_yieldable,
nested_logging_context,
run_in_background,
)
from synapse.logging.opentracing import log_kv, start_active_span_from_edu, trace
from synapse.logging.utils import log_function
from synapse.replication.http.federation import (
ReplicationFederationSendEduRestServlet,
ReplicationGetQueryRestServlet,
)
from synapse.types import JsonDict, get_domain_from_id
from synapse.util import glob_to_regex, json_decoder, unwrapFirstError
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.caches.response_cache import ResponseCache
if TYPE_CHECKING:
from synapse.server import HomeServer
# when processing incoming transactions, we try to handle multiple rooms in
# parallel, up to this limit.
TRANSACTION_CONCURRENCY_LIMIT = 10
logger = logging.getLogger(__name__)
# Prometheus metrics for incoming federation traffic.
received_pdus_counter = Counter("synapse_federation_server_received_pdus", "")
received_edus_counter = Counter("synapse_federation_server_received_edus", "")
received_queries_counter = Counter(
    "synapse_federation_server_received_queries", "", ["type"]
)
pdu_process_time = Histogram(
    "synapse_federation_server_pdu_process_time", "Time taken to process an event",
)
# Only updated for origins listed in federation_metrics_domains (see
# _handle_pdus_in_txn).
last_pdu_age_metric = Gauge(
    "synapse_federation_last_received_pdu_age",
    "The age (in seconds) of the last PDU successfully received from the given domain",
    labelnames=("server_name",),
)
class FederationServer(FederationBase):
    def __init__(self, hs):
        """Set up handlers, rate limiting, linearizers and response caches for
        incoming federation requests.
        """
        super().__init__(hs)
        self.auth = hs.get_auth()
        self.handler = hs.get_federation_handler()
        self.state = hs.get_state_handler()
        self.device_handler = hs.get_device_handler()
        # Ensure the following handlers are loaded since they register callbacks
        # with FederationHandlerRegistry.
        hs.get_directory_handler()
        self._federation_ratelimiter = hs.get_federation_ratelimiter()
        # Serialise per-(origin, room) and per-origin-transaction processing.
        self._server_linearizer = Linearizer("fed_server")
        self._transaction_linearizer = Linearizer("fed_txn_handler")
        # We cache results for transaction with the same ID
        self._transaction_resp_cache = ResponseCache(
            hs, "fed_txn_handler", timeout_ms=30000
        ) # type: ResponseCache[Tuple[str, str]]
        self.transaction_actions = TransactionActions(self.store)
        self.registry = hs.get_federation_registry()
        # We cache responses to state queries, as they take a while and often
        # come in waves.
        self._state_resp_cache = ResponseCache(
            hs, "state_resp", timeout_ms=30000
        ) # type: ResponseCache[Tuple[str, str]]
        self._state_ids_resp_cache = ResponseCache(
            hs, "state_ids_resp", timeout_ms=30000
        ) # type: ResponseCache[Tuple[str, str]]
        # Domains for which we record per-origin PDU age metrics.
        self._federation_metrics_domains = (
            hs.get_config().federation.federation_metrics_domains
        )
    async def on_backfill_request(
        self, origin: str, room_id: str, versions: List[str], limit: int
    ) -> Tuple[int, Dict[str, Any]]:
        """Handle a federation backfill request: return up to `limit` events
        preceding the given `versions`, as a transaction dict, after checking
        the origin against the room's server ACL.
        """
        with (await self._server_linearizer.queue((origin, room_id))):
            origin_host, _ = parse_server_name(origin)
            await self.check_server_matches_acl(origin_host, room_id)
            pdus = await self.handler.on_backfill_request(
                origin, room_id, versions, limit
            )
            res = self._transaction_from_pdus(pdus).get_dict()
        return 200, res
    async def on_incoming_transaction(
        self, origin: str, transaction_data: JsonDict
    ) -> Tuple[int, Dict[str, Any]]:
        """Entry point for an incoming /send/ transaction: parse it and hand it
        to the (de-duplicating) response cache.
        Raises:
            Exception: if the transaction has no transaction_id.
        """
        # keep this as early as possible to make the calculated origin ts as
        # accurate as possible.
        request_time = self._clock.time_msec()
        transaction = Transaction(**transaction_data)
        transaction_id = transaction.transaction_id # type: ignore
        if not transaction_id:
            raise Exception("Transaction missing transaction_id")
        logger.debug("[%s] Got transaction", transaction_id)
        # We wrap in a ResponseCache so that we de-duplicate retried
        # transactions.
        return await self._transaction_resp_cache.wrap(
            (origin, transaction_id),
            self._on_incoming_transaction_inner,
            origin,
            transaction,
            request_time,
        )
    async def _on_incoming_transaction_inner(
        self, origin: str, transaction: Transaction, request_time: int
    ) -> Tuple[int, Dict[str, Any]]:
        """Queue and rate-limit an incoming transaction, then process it."""
        # Use a linearizer to ensure that transactions from a remote are
        # processed in order.
        with await self._transaction_linearizer.queue(origin):
            # We rate limit here *after* we've queued up the incoming requests,
            # so that we don't fill up the ratelimiter with blocked requests.
            #
            # This is important as the ratelimiter allows N concurrent requests
            # at a time, and only starts ratelimiting if there are more requests
            # than that being processed at a time. If we queued up requests in
            # the linearizer/response cache *after* the ratelimiting then those
            # queued up requests would count as part of the allowed limit of N
            # concurrent requests.
            with self._federation_ratelimiter.ratelimit(origin) as d:
                await d
                result = await self._handle_incoming_transaction(
                    origin, transaction, request_time
                )
        return result
    async def _handle_incoming_transaction(
        self, origin: str, transaction: Transaction, request_time: int
    ) -> Tuple[int, Dict[str, Any]]:
        """ Process an incoming transaction and return the HTTP response
        Args:
            origin: the server making the request
            transaction: incoming transaction
            request_time: timestamp that the HTTP request arrived at
        Returns:
            HTTP response code and body
        """
        # If we've seen this transaction before, replay the stored response.
        response = await self.transaction_actions.have_responded(origin, transaction)
        if response:
            logger.debug(
                "[%s] We've already responded to this request",
                transaction.transaction_id, # type: ignore
            )
            return response
        logger.debug("[%s] Transaction is new", transaction.transaction_id) # type: ignore
        # Reject if PDU count > 50 or EDU count > 100
        if len(transaction.pdus) > 50 or ( # type: ignore
            hasattr(transaction, "edus") and len(transaction.edus) > 100 # type: ignore
        ):
            logger.info("Transaction PDU or EDU count too large. Returning 400")
            response = {}
            # Record the rejection so retries of the same transaction get the
            # same 400 back.
            await self.transaction_actions.set_response(
                origin, transaction, 400, response
            )
            return 400, response
        # We process PDUs and EDUs in parallel. This is important as we don't
        # want to block things like to device messages from reaching clients
        # behind the potentially expensive handling of PDUs.
        pdu_results, _ = await make_deferred_yieldable(
            defer.gatherResults(
                [
                    run_in_background(
                        self._handle_pdus_in_txn, origin, transaction, request_time
                    ),
                    run_in_background(self._handle_edus_in_txn, origin, transaction),
                ],
                consumeErrors=True,
            ).addErrback(unwrapFirstError)
        )
        response = {"pdus": pdu_results}
        logger.debug("Returning: %s", str(response))
        await self.transaction_actions.set_response(origin, transaction, 200, response)
        return 200, response
    async def _handle_pdus_in_txn(
        self, origin: str, transaction: Transaction, request_time: int
    ) -> Dict[str, dict]:
        """Process the PDUs in a received transaction.
        PDUs are grouped by room and rooms are processed concurrently (up to
        TRANSACTION_CONCURRENCY_LIMIT); PDUs for rooms we don't know, or whose
        room version is unknown/unsupported, are skipped.
        Args:
            origin: the server making the request
            transaction: incoming transaction
            request_time: timestamp that the HTTP request arrived at
        Returns:
            A map from event ID of a processed PDU to any errors we should
            report back to the sending server.
        """
        received_pdus_counter.inc(len(transaction.pdus)) # type: ignore
        origin_host, _ = parse_server_name(origin)
        pdus_by_room = {} # type: Dict[str, List[EventBase]]
        newest_pdu_ts = 0
        for p in transaction.pdus: # type: ignore
            # FIXME (richardv): I don't think this works:
            #  https://github.com/matrix-org/synapse/issues/8429
            if "unsigned" in p:
                unsigned = p["unsigned"]
                if "age" in unsigned:
                    p["age"] = unsigned["age"]
            # Convert the relative "age" into an absolute origin timestamp.
            if "age" in p:
                p["age_ts"] = request_time - int(p["age"])
                del p["age"]
            # We try and pull out an event ID so that if later checks fail we
            # can log something sensible. We don't mandate an event ID here in
            # case future event formats get rid of the key.
            possible_event_id = p.get("event_id", "<Unknown>")
            # Now we get the room ID so that we can check that we know the
            # version of the room.
            room_id = p.get("room_id")
            if not room_id:
                logger.info(
                    "Ignoring PDU as does not have a room_id. Event ID: %s",
                    possible_event_id,
                )
                continue
            try:
                room_version = await self.store.get_room_version(room_id)
            except NotFoundError:
                logger.info("Ignoring PDU for unknown room_id: %s", room_id)
                continue
            except UnsupportedRoomVersionError as e:
                # this can happen if support for a given room version is withdrawn,
                # so that we still get events for said room.
                logger.info("Ignoring PDU: %s", e)
                continue
            event = event_from_pdu_json(p, room_version)
            pdus_by_room.setdefault(room_id, []).append(event)
            # Track the newest origin timestamp for the age metric below.
            if event.origin_server_ts > newest_pdu_ts:
                newest_pdu_ts = event.origin_server_ts
        pdu_results = {}
        # we can process different rooms in parallel (which is useful if they
        # require callouts to other servers to fetch missing events), but
        # impose a limit to avoid going too crazy with ram/cpu.
        async def process_pdus_for_room(room_id: str):
            # Process all PDUs of one room sequentially, recording per-event
            # results/errors in pdu_results.
            logger.debug("Processing PDUs for %s", room_id)
            try:
                await self.check_server_matches_acl(origin_host, room_id)
            except AuthError as e:
                logger.warning("Ignoring PDUs for room %s from banned server", room_id)
                for pdu in pdus_by_room[room_id]:
                    event_id = pdu.event_id
                    pdu_results[event_id] = e.error_dict()
                return
            for pdu in pdus_by_room[room_id]:
                event_id = pdu.event_id
                with pdu_process_time.time():
                    with nested_logging_context(event_id):
                        try:
                            await self._handle_received_pdu(origin, pdu)
                            pdu_results[event_id] = {}
                        except FederationError as e:
                            logger.warning("Error handling PDU %s: %s", event_id, e)
                            pdu_results[event_id] = {"error": str(e)}
                        except Exception as e:
                            f = failure.Failure()
                            pdu_results[event_id] = {"error": str(e)}
                            logger.error(
                                "Failed to handle PDU %s",
                                event_id,
                                exc_info=(f.type, f.value, f.getTracebackObject()),
                            )
        await concurrently_execute(
            process_pdus_for_room, pdus_by_room.keys(), TRANSACTION_CONCURRENCY_LIMIT
        )
        # Only export the age metric for explicitly-configured origins.
        if newest_pdu_ts and origin in self._federation_metrics_domains:
            newest_pdu_age = self._clock.time_msec() - newest_pdu_ts
            last_pdu_age_metric.labels(server_name=origin).set(newest_pdu_age / 1000)
        return pdu_results
    async def _handle_edus_in_txn(self, origin: str, transaction: Transaction):
        """Process the EDUs in a received transaction.
        EDUs are dispatched to the federation registry concurrently (up to
        TRANSACTION_CONCURRENCY_LIMIT).
        """
        async def _process_edu(edu_dict):
            received_edus_counter.inc()
            edu = Edu(
                origin=origin,
                destination=self.server_name,
                edu_type=edu_dict["edu_type"],
                content=edu_dict["content"],
            )
            await self.registry.on_edu(edu.edu_type, origin, edu.content)
        await concurrently_execute(
            _process_edu,
            getattr(transaction, "edus", []),
            TRANSACTION_CONCURRENCY_LIMIT,
        )
    async def on_room_state_request(
        self, origin: str, room_id: str, event_id: str
    ) -> Tuple[int, Dict[str, Any]]:
        """Handle a /state/ request: return the room state at `event_id`.
        The origin must pass the room's ACL and be in the room.
        """
        origin_host, _ = parse_server_name(origin)
        await self.check_server_matches_acl(origin_host, room_id)
        in_room = await self.auth.check_host_in_room(room_id, origin)
        if not in_room:
            raise AuthError(403, "Host not in room.")
        # we grab the linearizer to protect ourselves from servers which hammer
        # us. In theory we might already have the response to this query
        # in the cache so we could return it without waiting for the linearizer
        # - but that's non-trivial to get right, and anyway somewhat defeats
        # the point of the linearizer.
        with (await self._server_linearizer.queue((origin, room_id))):
            resp = dict(
                await self._state_resp_cache.wrap(
                    (room_id, event_id),
                    self._on_context_state_request_compute,
                    room_id,
                    event_id,
                )
            )
        room_version = await self.store.get_room_version_id(room_id)
        resp["room_version"] = room_version
        return 200, resp
    async def on_state_ids_request(
        self, origin: str, room_id: str, event_id: str
    ) -> Tuple[int, Dict[str, Any]]:
        """Handle a /state_ids/ request: return state and auth-chain event IDs
        at `event_id`. The origin must pass the room's ACL and be in the room.
        """
        if not event_id:
            raise NotImplementedError("Specify an event")
        origin_host, _ = parse_server_name(origin)
        await self.check_server_matches_acl(origin_host, room_id)
        in_room = await self.auth.check_host_in_room(room_id, origin)
        if not in_room:
            raise AuthError(403, "Host not in room.")
        # De-duplicate concurrent/retried requests for the same (room, event).
        resp = await self._state_ids_resp_cache.wrap(
            (room_id, event_id), self._on_state_ids_request_compute, room_id, event_id,
        )
        return 200, resp
async def _on_state_ids_request_compute(self, room_id, event_id):
state_ids = await self.handler.get_state_ids_for_pdu(room_id, event_id)
auth_chain_ids = await self.store.get_auth_chain_ids(state_ids)
return {"pdu_ids": state_ids, "auth_chain_ids": auth_chain_ids}
    async def _on_context_state_request_compute(
        self, room_id: str, event_id: str
    ) -> Dict[str, list]:
        """Compute the body of a /state/ response: the state events (at
        `event_id`, or the current state if no event given) plus their auth
        chain, as PDU JSON.
        """
        if event_id:
            pdus = await self.handler.get_state_for_pdu(room_id, event_id)
        else:
            pdus = (await self.state.get_current_state(room_id)).values()
        auth_chain = await self.store.get_auth_chain([pdu.event_id for pdu in pdus])
        return {
            "pdus": [pdu.get_pdu_json() for pdu in pdus],
            "auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
        }
async def on_pdu_request(
self, origin: str, event_id: str
) -> Tuple[int, Union[JsonDict, str]]:
pdu = await self.handler.get_persisted_pdu(origin, event_id)
if pdu:
return 200, self._transaction_from_pdus([pdu]).get_dict()
else:
return 404, ""
    async def on_query_request(
        self, query_type: str, args: Dict[str, str]
    ) -> Tuple[int, Dict[str, Any]]:
        """Dispatch an incoming federation query to the registry, counting it
        per query type.
        """
        received_queries_counter.labels(query_type).inc()
        resp = await self.registry.on_query(query_type, args)
        return 200, resp
    async def on_make_join_request(
        self, origin: str, room_id: str, user_id: str, supported_versions: List[str]
    ) -> Dict[str, Any]:
        """Handle /make_join/: build a join event template for the requesting
        server's user.
        Raises:
            IncompatibleRoomVersionError: if the requester doesn't support the
                room's version.
        """
        origin_host, _ = parse_server_name(origin)
        await self.check_server_matches_acl(origin_host, room_id)
        room_version = await self.store.get_room_version_id(room_id)
        if room_version not in supported_versions:
            logger.warning(
                "Room version %s not in %s", room_version, supported_versions
            )
            raise IncompatibleRoomVersionError(room_version=room_version)
        pdu = await self.handler.on_make_join_request(origin, room_id, user_id)
        time_now = self._clock.time_msec()
        return {"event": pdu.get_pdu_json(time_now), "room_version": room_version}
    async def on_invite_request(
        self, origin: str, content: JsonDict, room_version_id: str
    ) -> Dict[str, Any]:
        """Handle an incoming /invite/ request: validate the event (room
        version, ACL, signatures/hash) and pass it to the handler.
        """
        room_version = KNOWN_ROOM_VERSIONS.get(room_version_id)
        if not room_version:
            raise SynapseError(
                400,
                "Homeserver does not support this room version",
                Codes.UNSUPPORTED_ROOM_VERSION,
            )
        pdu = event_from_pdu_json(content, room_version)
        origin_host, _ = parse_server_name(origin)
        await self.check_server_matches_acl(origin_host, pdu.room_id)
        # May return a redacted copy if content checks fail.
        pdu = await self._check_sigs_and_hash(room_version, pdu)
        ret_pdu = await self.handler.on_invite_request(origin, pdu, room_version)
        time_now = self._clock.time_msec()
        return {"event": ret_pdu.get_pdu_json(time_now)}
    async def on_send_join_request(
        self, origin: str, content: JsonDict
    ) -> Dict[str, Any]:
        """Handle /send_join/: validate the join event (ACL, signatures/hash)
        and return the room state and auth chain to the joining server.
        """
        logger.debug("on_send_join_request: content: %s", content)
        assert_params_in_dict(content, ["room_id"])
        room_version = await self.store.get_room_version(content["room_id"])
        pdu = event_from_pdu_json(content, room_version)
        origin_host, _ = parse_server_name(origin)
        await self.check_server_matches_acl(origin_host, pdu.room_id)
        logger.debug("on_send_join_request: pdu sigs: %s", pdu.signatures)
        pdu = await self._check_sigs_and_hash(room_version, pdu)
        res_pdus = await self.handler.on_send_join_request(origin, pdu)
        time_now = self._clock.time_msec()
        return {
            "state": [p.get_pdu_json(time_now) for p in res_pdus["state"]],
            "auth_chain": [p.get_pdu_json(time_now) for p in res_pdus["auth_chain"]],
        }
    async def on_make_leave_request(
        self, origin: str, room_id: str, user_id: str
    ) -> Dict[str, Any]:
        """Handle /make_leave/: build a leave event template for the
        requesting server's user, after checking the room ACL.
        """
        origin_host, _ = parse_server_name(origin)
        await self.check_server_matches_acl(origin_host, room_id)
        pdu = await self.handler.on_make_leave_request(origin, room_id, user_id)
        room_version = await self.store.get_room_version_id(room_id)
        time_now = self._clock.time_msec()
        return {"event": pdu.get_pdu_json(time_now), "room_version": room_version}
    async def on_send_leave_request(self, origin: str, content: JsonDict) -> dict:
        """Handle /send_leave/: validate the leave event (ACL, signatures/hash)
        and pass it to the handler. Returns an empty body on success.
        """
        logger.debug("on_send_leave_request: content: %s", content)
        assert_params_in_dict(content, ["room_id"])
        room_version = await self.store.get_room_version(content["room_id"])
        pdu = event_from_pdu_json(content, room_version)
        origin_host, _ = parse_server_name(origin)
        await self.check_server_matches_acl(origin_host, pdu.room_id)
        logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures)
        pdu = await self._check_sigs_and_hash(room_version, pdu)
        await self.handler.on_send_leave_request(origin, pdu)
        return {}
    async def on_event_auth(
        self, origin: str, room_id: str, event_id: str
    ) -> Tuple[int, Dict[str, Any]]:
        """Handle /event_auth/: return the auth chain of the given event, after
        checking the room ACL.
        """
        with (await self._server_linearizer.queue((origin, room_id))):
            origin_host, _ = parse_server_name(origin)
            await self.check_server_matches_acl(origin_host, room_id)
            time_now = self._clock.time_msec()
            auth_pdus = await self.handler.on_event_auth(event_id)
            res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]}
        return 200, res
    @log_function
    async def on_query_client_keys(
        self, origin: str, content: Dict[str, str]
    ) -> Tuple[int, Dict[str, Any]]:
        """Handle a client-keys query by delegating to the generic query path."""
        return await self.on_query_request("client_keys", content)
    async def on_query_user_devices(
        self, origin: str, user_id: str
    ) -> Tuple[int, Dict[str, Any]]:
        """Handle a federation query for a local user's devices."""
        keys = await self.device_handler.on_federation_query_user_devices(user_id)
        return 200, keys
    @trace
    async def on_claim_client_keys(
        self, origin: str, content: JsonDict
    ) -> Dict[str, Any]:
        """Claim one-time keys for the (user, device, algorithm) triples listed
        in the request and return them keyed by user then device.
        """
        query = []
        for user_id, device_keys in content.get("one_time_keys", {}).items():
            for device_id, algorithm in device_keys.items():
                query.append((user_id, device_id, algorithm))
        log_kv({"message": "Claiming one time keys.", "user, device pairs": query})
        results = await self.store.claim_e2e_one_time_keys(query)
        json_result = {} # type: Dict[str, Dict[str, dict]]
        for user_id, device_keys in results.items():
            for device_id, keys in device_keys.items():
                for key_id, json_str in keys.items():
                    json_result.setdefault(user_id, {})[device_id] = {
                        key_id: json_decoder.decode(json_str)
                    }
        logger.info(
            "Claimed one-time-keys: %s",
            ",".join(
                (
                    "%s for %s:%s" % (key_id, user_id, device_id)
                    for user_id, user_keys in json_result.items()
                    for device_id, device_keys in user_keys.items()
                    for key_id, _ in device_keys.items()
                )
            ),
        )
        return {"one_time_keys": json_result}
    async def on_get_missing_events(
        self,
        origin: str,
        room_id: str,
        earliest_events: List[str],
        latest_events: List[str],
        limit: int,
    ) -> Dict[str, list]:
        """Handle /get_missing_events/: return up to `limit` events between
        `earliest_events` and `latest_events`, after checking the room ACL.
        """
        with (await self._server_linearizer.queue((origin, room_id))):
            origin_host, _ = parse_server_name(origin)
            await self.check_server_matches_acl(origin_host, room_id)
            logger.debug(
                "on_get_missing_events: earliest_events: %r, latest_events: %r,"
                " limit: %d",
                earliest_events,
                latest_events,
                limit,
            )
            missing_events = await self.handler.on_get_missing_events(
                origin, room_id, earliest_events, latest_events, limit
            )
            # Avoid logging huge event lists in full.
            if len(missing_events) < 5:
                logger.debug(
                    "Returning %d events: %r", len(missing_events), missing_events
                )
            else:
                logger.debug("Returning %d events", len(missing_events))
            time_now = self._clock.time_msec()
        return {"events": [ev.get_pdu_json(time_now) for ev in missing_events]}
    @log_function
    async def on_openid_userinfo(self, token: str) -> Optional[str]:
        """Look up the user ID for an OpenID token; None if unknown/expired."""
        ts_now_ms = self._clock.time_msec()
        return await self.store.get_user_id_for_open_id_token(token, ts_now_ms)
def _transaction_from_pdus(self, pdu_list: List[EventBase]) -> Transaction:
"""Returns a new Transaction containing the given PDUs suitable for
transmission.
"""
time_now = self._clock.time_msec()
pdus = [p.get_pdu_json(time_now) for p in pdu_list]
return Transaction(
origin=self.server_name,
pdus=pdus,
origin_server_ts=int(time_now),
destination=None,
)
    async def _handle_received_pdu(self, origin: str, pdu: EventBase) -> None:
        """ Process a PDU received in a federation /send/ transaction.
        If the event is invalid, then this method throws a FederationError.
        (The error will then be logged and sent back to the sender (which
        probably won't do anything with it), and other events in the
        transaction will be processed as normal).
        It is likely that we'll then receive other events which refer to
        this rejected_event in their prev_events, etc.  When that happens,
        we'll attempt to fetch the rejected event again, which will presumably
        fail, so those second-generation events will also get rejected.
        Eventually, we get to the point where there are more than 10 events
        between any new events and the original rejected event. Since we
        only try to backfill 10 events deep on received pdu, we then accept the
        new event, possibly introducing a discontinuity in the DAG, with new
        forward extremities, so normal service is approximately returned,
        until we try to backfill across the discontinuity.
        Args:
            origin: server which sent the pdu
            pdu: received pdu
        Raises: FederationError if the signatures / hash do not match, or
            if the event was unacceptable for any other reason (eg, too large,
            too many prev_events, couldn't find the prev_events)
        """
        # check that it's actually being sent from a valid destination to
        # workaround bug #1753 in 0.18.5 and 0.18.6
        if origin != get_domain_from_id(pdu.sender):
            # We continue to accept join events from any server; this is
            # necessary for the federation join dance to work correctly.
            # (When we join over federation, the "helper" server is
            # responsible for sending out the join event, rather than the
            # origin. See bug #1893. This is also true for some third party
            # invites).
            if not (
                pdu.type == "m.room.member"
                and pdu.content
                and pdu.content.get("membership", None)
                in (Membership.JOIN, Membership.INVITE)
            ):
                logger.info(
                    "Discarding PDU %s from invalid origin %s", pdu.event_id, origin
                )
                return
            else:
                logger.info("Accepting join PDU %s from %s", pdu.event_id, origin)
        # We've already checked that we know the room version by this point
        room_version = await self.store.get_room_version(pdu.room_id)
        # Check signature.
        try:
            pdu = await self._check_sigs_and_hash(room_version, pdu)
        except SynapseError as e:
            # Report signature/hash failures back to the sender as an error for
            # this specific event.
            raise FederationError("ERROR", e.code, e.msg, affected=pdu.event_id)
        await self.handler.on_receive_pdu(origin, pdu, sent_to_us_directly=True)
def __str__(self):
return "<ReplicationLayer(%s)>" % self.server_name
async def exchange_third_party_invite(
self, sender_user_id: str, target_user_id: str, room_id: str, signed: Dict
):
ret = await self.handler.exchange_third_party_invite(
sender_user_id, target_user_id, room_id, signed
)
return ret
async def on_exchange_third_party_invite_request(self, event_dict: Dict):
ret = await self.handler.on_exchange_third_party_invite_request(event_dict)
return ret
    async def check_server_matches_acl(self, server_name: str, room_id: str):
        """Check if the given server is allowed by the server ACLs in the room
        Args:
            server_name: name of server, *without any port part*
            room_id: ID of the room to check
        Raises:
            AuthError if the server does not match the ACL
        """
        state_ids = await self.store.get_current_state_ids(room_id)
        acl_event_id = state_ids.get((EventTypes.ServerACL, ""))
        # No m.room.server_acl event in the current state: no restrictions apply.
        if not acl_event_id:
            return
        acl_event = await self.store.get_event(acl_event_id)
        if server_matches_acl_event(server_name, acl_event):
            return
        raise AuthError(code=403, msg="Server is banned from room")
def server_matches_acl_event(server_name: str, acl_event: EventBase) -> bool:
    """Check if the given server is allowed by the ACL event
    Checks run in order: IP-literal ban, then the deny list, then the allow
    list; a server matching neither list is rejected.
    Args:
        server_name: name of server, without any port part
        acl_event: m.room.server_acl event
    Returns:
        True if this server is allowed by the ACLs
    """
    logger.debug("Checking %s against acl %s", server_name, acl_event.content)
    # first of all, check if literal IPs are blocked, and if so, whether the
    # server name is a literal IP
    allow_ip_literals = acl_event.content.get("allow_ip_literals", True)
    if not isinstance(allow_ip_literals, bool):
        logger.warning("Ignoring non-bool allow_ip_literals flag")
        allow_ip_literals = True
    if not allow_ip_literals:
        # check for ipv6 literals. These start with '['.
        if server_name[0] == "[":
            return False
        # check for ipv4 literals. We can just lift the routine from twisted.
        if isIPAddress(server_name):
            return False
    # next, check the deny list
    deny = acl_event.content.get("deny", [])
    if not isinstance(deny, (list, tuple)):
        logger.warning("Ignoring non-list deny ACL %s", deny)
        deny = []
    for e in deny:
        if _acl_entry_matches(server_name, e):
            # logger.info("%s matched deny rule %s", server_name, e)
            return False
    # then the allow list.
    allow = acl_event.content.get("allow", [])
    if not isinstance(allow, (list, tuple)):
        logger.warning("Ignoring non-list allow ACL %s", allow)
        allow = []
    for e in allow:
        if _acl_entry_matches(server_name, e):
            # logger.info("%s matched allow rule %s", server_name, e)
            return True
    # everything else should be rejected.
    # logger.info("%s fell through", server_name)
    return False
def _acl_entry_matches(server_name: str, acl_entry: Any) -> bool:
if not isinstance(acl_entry, str):
logger.warning(
"Ignoring non-str ACL entry '%s' (is %s)", acl_entry, type(acl_entry)
)
return False
regex = glob_to_regex(acl_entry)
return bool(regex.match(server_name))
class FederationHandlerRegistry:
"""Allows classes to register themselves as handlers for a given EDU or
query type for incoming federation traffic.
"""
def __init__(self, hs: "HomeServer"):
self.config = hs.config
self.clock = hs.get_clock()
self._instance_name = hs.get_instance_name()
# These are safe to load in monolith mode, but will explode if we try
# and use them. However we have guards before we use them to ensure that
# we don't route to ourselves, and in monolith mode that will always be
# the case.
self._get_query_client = ReplicationGetQueryRestServlet.make_client(hs)
self._send_edu = ReplicationFederationSendEduRestServlet.make_client(hs)
self.edu_handlers = (
{}
) # type: Dict[str, Callable[[str, dict], Awaitable[None]]]
self.query_handlers = {} # type: Dict[str, Callable[[dict], Awaitable[None]]]
# Map from type to instance name that we should route EDU handling to.
self._edu_type_to_instance = {} # type: Dict[str, str]
def register_edu_handler(
self, edu_type: str, handler: Callable[[str, JsonDict], Awaitable[None]]
):
"""Sets the handler callable that will be used to handle an incoming
federation EDU of the given type.
Args:
edu_type: The type of the incoming EDU to register handler for
handler: A callable invoked on incoming EDU
of the given type. The arguments are the origin server name and
the EDU contents.
"""
if edu_type in self.edu_handlers:
raise KeyError("Already have an EDU handler for %s" % (edu_type,))
logger.info("Registering federation EDU handler for %r", edu_type)
self.edu_handlers[edu_type] = handler
def register_query_handler(
self, query_type: str, handler: Callable[[dict], defer.Deferred]
):
"""Sets the handler callable that will be used to handle an incoming
federation query of the given type.
Args:
query_type: Category name of the query, which should match
the string used by make_query.
handler: Invoked to handle
incoming queries of this type. The return will be yielded
on and the result used as the response to the query request.
"""
if query_type in self.query_handlers:
raise KeyError("Already have a Query handler for %s" % (query_type,))
logger.info("Registering federation query handler for %r", query_type)
self.query_handlers[query_type] = handler
def register_instance_for_edu(self, edu_type: str, instance_name: str):
"""Register that the EDU handler is on a different instance than master.
"""
self._edu_type_to_instance[edu_type] = instance_name
async def on_edu(self, edu_type: str, origin: str, content: dict):
if not self.config.use_presence and edu_type == "m.presence":
return
# Check if we have a handler on this instance
handler = self.edu_handlers.get(edu_type)
if handler:
with start_active_span_from_edu(content, "handle_edu"):
try:
await handler(origin, content)
except SynapseError as e:
logger.info("Failed to handle edu %r: %r", edu_type, e)
except Exception:
logger.exception("Failed to handle edu %r", edu_type)
return
# Check if we can route it somewhere else that isn't us
route_to = self._edu_type_to_instance.get(edu_type, "master")
if route_to != self._instance_name:
try:
await self._send_edu(
instance_name=route_to,
edu_type=edu_type,
origin=origin,
content=content,
)
except SynapseError as e:
logger.info("Failed to handle edu %r: %r", edu_type, e)
except Exception:
logger.exception("Failed to handle edu %r", edu_type)
return
# Oh well, let's just log and move on.
logger.warning("No handler registered for EDU type %s", edu_type)
async def on_query(self, query_type: str, args: dict):
handler = self.query_handlers.get(query_type)
if handler:
return await handler(args)
# Check if we can route it somewhere else that isn't us
if self._instance_name == "master":
return await self._get_query_client(query_type=query_type, args=args)
# Uh oh, no handler! Let's raise an exception so the request returns an
# error.
logger.warning("No handler registered for query type %s", query_type)
raise NotFoundError("No handler for Query type '%s'" % (query_type,))
| open_redirect | {
"code": [
" self.http_client = hs.get_simple_http_client()"
],
"line_no": [
848
]
} | {
"code": [],
"line_no": []
} |
import logging
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Dict,
List,
Optional,
Tuple,
Union,
)
from prometheus_client import Counter, Gauge, Histogram
from twisted.internet import defer
from twisted.internet.abstract import isIPAddress
from twisted.python import .failure
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import (
AuthError,
Codes,
FederationError,
IncompatibleRoomVersionError,
NotFoundError,
SynapseError,
UnsupportedRoomVersionError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import EventBase
from synapse.federation.federation_base import FederationBase, event_from_pdu_json
from synapse.federation.persistence import TransactionActions
from synapse.federation.units import Edu, Transaction
from synapse.http.endpoint import .parse_server_name
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import (
make_deferred_yieldable,
nested_logging_context,
run_in_background,
)
from synapse.logging.opentracing import log_kv, start_active_span_from_edu, trace
from synapse.logging.utils import log_function
from synapse.replication.http.federation import (
ReplicationFederationSendEduRestServlet,
ReplicationGetQueryRestServlet,
)
from synapse.types import JsonDict, get_domain_from_id
from synapse.util import glob_to_regex, json_decoder, unwrapFirstError
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.caches.response_cache import ResponseCache
if TYPE_CHECKING:
from synapse.server import HomeServer
VAR_0 = 10
VAR_1 = logging.getLogger(__name__)
VAR_2 = Counter("synapse_federation_server_received_pdus", "")
VAR_3 = Counter("synapse_federation_server_received_edus", "")
VAR_4 = Counter(
"synapse_federation_server_received_queries", "", ["type"]
)
VAR_5 = Histogram(
"synapse_federation_server_pdu_process_time", "Time taken to process an event",
)
VAR_6 = Gauge(
"synapse_federation_last_received_pdu_age",
"The age (in seconds) of the last PDU successfully received from the given domain",
labelnames=("server_name",),
)
class CLASS_0(FederationBase):
def __init__(self, VAR_10):
super().__init__(VAR_10)
self.auth = VAR_10.get_auth()
self.handler = VAR_10.get_federation_handler()
self.state = VAR_10.get_state_handler()
self.device_handler = VAR_10.get_device_handler()
VAR_10.get_directory_handler()
self._federation_ratelimiter = VAR_10.get_federation_ratelimiter()
self._server_linearizer = Linearizer("fed_server")
self._transaction_linearizer = Linearizer("fed_txn_handler")
self._transaction_resp_cache = ResponseCache(
VAR_10, "fed_txn_handler", timeout_ms=30000
) # type: ResponseCache[Tuple[str, str]]
self.transaction_actions = TransactionActions(self.store)
self.registry = VAR_10.get_federation_registry()
self._state_resp_cache = ResponseCache(
VAR_10, "state_resp", timeout_ms=30000
) # type: ResponseCache[Tuple[str, str]]
self._state_ids_resp_cache = ResponseCache(
VAR_10, "state_ids_resp", timeout_ms=30000
) # type: ResponseCache[Tuple[str, str]]
self._federation_metrics_domains = (
VAR_10.get_config().federation.federation_metrics_domains
)
async def FUNC_2(
self, VAR_11: str, VAR_12: str, VAR_13: List[str], VAR_14: int
) -> Tuple[int, Dict[str, Any]]:
with (await self._server_linearizer.queue((VAR_11, VAR_12))):
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_63 = await self.handler.on_backfill_request(
VAR_11, VAR_12, VAR_13, VAR_14
)
VAR_67 = self._transaction_from_pdus(VAR_63).get_dict()
return 200, VAR_67
async def FUNC_3(
self, VAR_11: str, VAR_15: JsonDict
) -> Tuple[int, Dict[str, Any]]:
VAR_17 = self._clock.time_msec()
VAR_16 = Transaction(**VAR_15)
VAR_41 = VAR_16.transaction_id # type: ignore
if not VAR_41:
raise Exception("Transaction missing transaction_id")
VAR_1.debug("[%s] Got transaction", VAR_41)
return await self._transaction_resp_cache.wrap(
(VAR_11, VAR_41),
self._on_incoming_transaction_inner,
VAR_11,
VAR_16,
VAR_17,
)
async def FUNC_4(
self, VAR_11: str, VAR_16: Transaction, VAR_17: int
) -> Tuple[int, Dict[str, Any]]:
with await self._transaction_linearizer.queue(VAR_11):
with self._federation_ratelimiter.ratelimit(VAR_11) as d:
await d
VAR_74 = await self._handle_incoming_transaction(
VAR_11, VAR_16, VAR_17
)
return VAR_74
async def FUNC_5(
self, VAR_11: str, VAR_16: Transaction, VAR_17: int
) -> Tuple[int, Dict[str, Any]]:
VAR_42 = await self.transaction_actions.have_responded(VAR_11, VAR_16)
if VAR_42:
VAR_1.debug(
"[%s] We've already responded to this request",
VAR_16.transaction_id, # type: ignore
)
return VAR_42
VAR_1.debug("[%s] Transaction is new", VAR_16.transaction_id) # type: ignore
if len(VAR_16.pdus) > 50 or ( # type: ignore
hasattr(VAR_16, "edus") and len(VAR_16.edus) > 100 # type: ignore
):
VAR_1.info("Transaction PDU or EDU count too large. Returning 400")
VAR_42 = {}
await self.transaction_actions.set_response(
VAR_11, VAR_16, 400, VAR_42
)
return 400, VAR_42
VAR_43, VAR_44 = await make_deferred_yieldable(
defer.gatherResults(
[
run_in_background(
self._handle_pdus_in_txn, VAR_11, VAR_16, VAR_17
),
run_in_background(self._handle_edus_in_txn, VAR_11, VAR_16),
],
consumeErrors=True,
).addErrback(unwrapFirstError)
)
VAR_42 = {"pdus": VAR_43}
VAR_1.debug("Returning: %s", str(VAR_42))
await self.transaction_actions.set_response(VAR_11, VAR_16, 200, VAR_42)
return 200, VAR_42
async def FUNC_6(
self, VAR_11: str, VAR_16: Transaction, VAR_17: int
) -> Dict[str, dict]:
VAR_2.inc(len(VAR_16.pdus)) # type: ignore
VAR_45, VAR_44 = parse_server_name(VAR_11)
VAR_46 = {} # type: Dict[str, List[EventBase]]
VAR_47 = 0
for VAR_76 in VAR_16.pdus: # type: ignore
if "unsigned" in VAR_76:
VAR_75 = VAR_76["unsigned"]
if "age" in VAR_75:
VAR_76["age"] = VAR_75["age"]
if "age" in VAR_76:
VAR_76["age_ts"] = VAR_17 - int(VAR_76["age"])
del VAR_76["age"]
VAR_68 = VAR_76.get("event_id", "<Unknown>")
VAR_12 = VAR_76.get("room_id")
if not VAR_12:
VAR_1.info(
"Ignoring PDU as does not have a VAR_12. Event ID: %s",
VAR_68,
)
continue
try:
VAR_50 = await self.store.get_room_version(VAR_12)
except NotFoundError:
VAR_1.info("Ignoring PDU for unknown VAR_12: %s", VAR_12)
continue
except UnsupportedRoomVersionError as e:
VAR_1.info("Ignoring PDU: %s", e)
continue
VAR_69 = event_from_pdu_json(VAR_76, VAR_50)
VAR_46.setdefault(VAR_12, []).append(VAR_69)
if VAR_69.origin_server_ts > VAR_47:
VAR_47 = VAR_69.origin_server_ts
VAR_43 = {}
async def FUNC_35(VAR_12: str):
VAR_1.debug("Processing PDUs for %s", VAR_12)
try:
await self.check_server_matches_acl(VAR_45, VAR_12)
except AuthError as e:
VAR_1.warning("Ignoring PDUs for room %s from banned server", VAR_12)
for VAR_29 in VAR_46[VAR_12]:
VAR_18 = VAR_29.event_id
VAR_43[VAR_18] = e.error_dict()
return
for VAR_29 in VAR_46[VAR_12]:
VAR_18 = VAR_29.event_id
with VAR_5.time():
with nested_logging_context(VAR_18):
try:
await self._handle_received_pdu(VAR_11, VAR_29)
VAR_43[VAR_18] = {}
except FederationError as e:
VAR_1.warning("Error handling PDU %s: %s", VAR_18, e)
VAR_43[VAR_18] = {"error": str(e)}
except Exception as e:
VAR_78 = failure.Failure()
VAR_43[VAR_18] = {"error": str(e)}
VAR_1.error(
"Failed to handle PDU %s",
VAR_18,
exc_info=(VAR_78.type, VAR_78.value, VAR_78.getTracebackObject()),
)
await concurrently_execute(
FUNC_35, VAR_46.keys(), VAR_0
)
if VAR_47 and VAR_11 in self._federation_metrics_domains:
VAR_70 = self._clock.time_msec() - VAR_47
VAR_6.labels(VAR_7=VAR_11).set(VAR_70 / 1000)
return VAR_43
async def FUNC_7(self, VAR_11: str, VAR_16: Transaction):
async def FUNC_36(VAR_48):
VAR_3.inc()
VAR_71 = Edu(
VAR_11=origin,
destination=self.server_name,
VAR_38=VAR_48["edu_type"],
VAR_23=VAR_48["content"],
)
await self.registry.on_edu(VAR_71.edu_type, VAR_11, VAR_71.content)
await concurrently_execute(
FUNC_36,
getattr(VAR_16, "edus", []),
VAR_0,
)
async def FUNC_8(
self, VAR_11: str, VAR_12: str, VAR_18: str
) -> Tuple[int, Dict[str, Any]]:
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_49 = await self.auth.check_host_in_room(VAR_12, VAR_11)
if not VAR_49:
raise AuthError(403, "Host not in room.")
with (await self._server_linearizer.queue((VAR_11, VAR_12))):
VAR_51 = dict(
await self._state_resp_cache.wrap(
(VAR_12, VAR_18),
self._on_context_state_request_compute,
VAR_12,
VAR_18,
)
)
VAR_50 = await self.store.get_room_version_id(VAR_12)
VAR_51["room_version"] = VAR_50
return 200, VAR_51
async def FUNC_9(
self, VAR_11: str, VAR_12: str, VAR_18: str
) -> Tuple[int, Dict[str, Any]]:
if not VAR_18:
raise NotImplementedError("Specify an event")
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_49 = await self.auth.check_host_in_room(VAR_12, VAR_11)
if not VAR_49:
raise AuthError(403, "Host not in room.")
VAR_51 = await self._state_ids_resp_cache.wrap(
(VAR_12, VAR_18), self._on_state_ids_request_compute, VAR_12, VAR_18,
)
return 200, VAR_51
async def FUNC_10(self, VAR_12, VAR_18):
VAR_52 = await self.handler.get_state_ids_for_pdu(VAR_12, VAR_18)
VAR_53 = await self.store.get_auth_chain_ids(VAR_52)
return {"pdu_ids": VAR_52, "auth_chain_ids": VAR_53}
async def FUNC_11(
self, VAR_12: str, VAR_18: str
) -> Dict[str, list]:
if VAR_18:
VAR_63 = await self.handler.get_state_for_pdu(VAR_12, VAR_18)
else:
VAR_63 = (await self.state.get_current_state(VAR_12)).values()
VAR_54 = await self.store.get_auth_chain([VAR_29.event_id for VAR_29 in VAR_63])
return {
"pdus": [VAR_29.get_pdu_json() for VAR_29 in VAR_63],
"auth_chain": [VAR_29.get_pdu_json() for VAR_29 in VAR_54],
}
async def FUNC_12(
self, VAR_11: str, VAR_18: str
) -> Tuple[int, Union[JsonDict, str]]:
VAR_29 = await self.handler.get_persisted_pdu(VAR_11, VAR_18)
if VAR_29:
return 200, self._transaction_from_pdus([VAR_29]).get_dict()
else:
return 404, ""
async def FUNC_13(
self, VAR_19: str, VAR_20: Dict[str, str]
) -> Tuple[int, Dict[str, Any]]:
VAR_4.labels(VAR_19).inc()
VAR_51 = await self.registry.on_query(VAR_19, VAR_20)
return 200, VAR_51
async def FUNC_14(
self, VAR_11: str, VAR_12: str, VAR_21: str, VAR_22: List[str]
) -> Dict[str, Any]:
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_50 = await self.store.get_room_version_id(VAR_12)
if VAR_50 not in VAR_22:
VAR_1.warning(
"Room version %s not in %s", VAR_50, VAR_22
)
raise IncompatibleRoomVersionError(VAR_50=room_version)
VAR_29 = await self.handler.on_make_join_request(VAR_11, VAR_12, VAR_21)
VAR_55 = self._clock.time_msec()
return {"event": VAR_29.get_pdu_json(VAR_55), "room_version": VAR_50}
async def FUNC_15(
self, VAR_11: str, VAR_23: JsonDict, VAR_24: str
) -> Dict[str, Any]:
VAR_50 = KNOWN_ROOM_VERSIONS.get(VAR_24)
if not VAR_50:
raise SynapseError(
400,
"Homeserver does not support this room version",
Codes.UNSUPPORTED_ROOM_VERSION,
)
VAR_29 = event_from_pdu_json(VAR_23, VAR_50)
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_29.room_id)
VAR_29 = await self._check_sigs_and_hash(VAR_50, VAR_29)
VAR_56 = await self.handler.on_invite_request(VAR_11, VAR_29, VAR_50)
VAR_55 = self._clock.time_msec()
return {"event": VAR_56.get_pdu_json(VAR_55)}
async def FUNC_16(
self, VAR_11: str, VAR_23: JsonDict
) -> Dict[str, Any]:
VAR_1.debug("on_send_join_request: VAR_23: %s", VAR_23)
assert_params_in_dict(VAR_23, ["room_id"])
VAR_50 = await self.store.get_room_version(VAR_23["room_id"])
VAR_29 = event_from_pdu_json(VAR_23, VAR_50)
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_29.room_id)
VAR_1.debug("on_send_join_request: VAR_29 sigs: %s", VAR_29.signatures)
VAR_29 = await self._check_sigs_and_hash(VAR_50, VAR_29)
VAR_57 = await self.handler.on_send_join_request(VAR_11, VAR_29)
VAR_55 = self._clock.time_msec()
return {
"state": [VAR_76.get_pdu_json(VAR_55) for VAR_76 in VAR_57["state"]],
"auth_chain": [VAR_76.get_pdu_json(VAR_55) for VAR_76 in VAR_57["auth_chain"]],
}
async def FUNC_17(
self, VAR_11: str, VAR_12: str, VAR_21: str
) -> Dict[str, Any]:
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_29 = await self.handler.on_make_leave_request(VAR_11, VAR_12, VAR_21)
VAR_50 = await self.store.get_room_version_id(VAR_12)
VAR_55 = self._clock.time_msec()
return {"event": VAR_29.get_pdu_json(VAR_55), "room_version": VAR_50}
async def FUNC_18(self, VAR_11: str, VAR_23: JsonDict) -> dict:
VAR_1.debug("on_send_leave_request: VAR_23: %s", VAR_23)
assert_params_in_dict(VAR_23, ["room_id"])
VAR_50 = await self.store.get_room_version(VAR_23["room_id"])
VAR_29 = event_from_pdu_json(VAR_23, VAR_50)
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_29.room_id)
VAR_1.debug("on_send_leave_request: VAR_29 sigs: %s", VAR_29.signatures)
VAR_29 = await self._check_sigs_and_hash(VAR_50, VAR_29)
await self.handler.on_send_leave_request(VAR_11, VAR_29)
return {}
async def FUNC_19(
self, VAR_11: str, VAR_12: str, VAR_18: str
) -> Tuple[int, Dict[str, Any]]:
with (await self._server_linearizer.queue((VAR_11, VAR_12))):
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_55 = self._clock.time_msec()
VAR_72 = await self.handler.on_event_auth(VAR_18)
VAR_67 = {"auth_chain": [a.get_pdu_json(VAR_55) for a in VAR_72]}
return 200, VAR_67
@log_function
async def FUNC_20(
self, VAR_11: str, VAR_23: Dict[str, str]
) -> Tuple[int, Dict[str, Any]]:
return await self.on_query_request("client_keys", VAR_23)
async def FUNC_21(
self, VAR_11: str, VAR_21: str
) -> Tuple[int, Dict[str, Any]]:
VAR_58 = await self.device_handler.on_federation_query_user_devices(VAR_21)
return 200, VAR_58
@trace
async def FUNC_22(
self, VAR_11: str, VAR_23: JsonDict
) -> Dict[str, Any]:
VAR_59 = []
for VAR_21, device_keys in VAR_23.get("one_time_keys", {}).items():
for VAR_77, algorithm in device_keys.items():
VAR_59.append((VAR_21, VAR_77, algorithm))
log_kv({"message": "Claiming one time VAR_58.", "user, device pairs": VAR_59})
VAR_60 = await self.store.claim_e2e_one_time_keys(VAR_59)
VAR_61 = {} # type: Dict[str, Dict[str, dict]]
for VAR_21, device_keys in VAR_60.items():
for VAR_77, VAR_58 in device_keys.items():
for key_id, json_str in VAR_58.items():
VAR_61.setdefault(VAR_21, {})[VAR_77] = {
key_id: json_decoder.decode(json_str)
}
VAR_1.info(
"Claimed one-time-VAR_58: %s",
",".join(
(
"%s for %s:%s" % (key_id, VAR_21, VAR_77)
for VAR_21, user_keys in VAR_61.items()
for VAR_77, device_keys in user_keys.items()
for key_id, VAR_44 in device_keys.items()
)
),
)
return {"one_time_keys": VAR_61}
async def FUNC_23(
self,
VAR_11: str,
VAR_12: str,
VAR_25: List[str],
VAR_26: List[str],
VAR_14: int,
) -> Dict[str, list]:
with (await self._server_linearizer.queue((VAR_11, VAR_12))):
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_1.debug(
"on_get_missing_events: VAR_25: %r, VAR_26: %r,"
" VAR_14: %d",
VAR_25,
VAR_26,
VAR_14,
)
VAR_73 = await self.handler.on_get_missing_events(
VAR_11, VAR_12, VAR_25, VAR_26, VAR_14
)
if len(VAR_73) < 5:
VAR_1.debug(
"Returning %d events: %r", len(VAR_73), missing_events
)
else:
VAR_1.debug("Returning %d events", len(VAR_73))
VAR_55 = self._clock.time_msec()
return {"events": [ev.get_pdu_json(VAR_55) for ev in VAR_73]}
@log_function
async def FUNC_24(self, VAR_27: str) -> Optional[str]:
VAR_62 = self._clock.time_msec()
return await self.store.get_user_id_for_open_id_token(VAR_27, VAR_62)
def FUNC_25(self, VAR_28: List[EventBase]) -> Transaction:
VAR_55 = self._clock.time_msec()
VAR_63 = [VAR_76.get_pdu_json(VAR_55) for VAR_76 in VAR_28]
return Transaction(
VAR_11=self.server_name,
VAR_63=pdus,
origin_server_ts=int(VAR_55),
destination=None,
)
async def FUNC_26(self, VAR_11: str, VAR_29: EventBase) -> None:
if VAR_11 != get_domain_from_id(VAR_29.sender):
if not (
VAR_29.type == "m.room.member"
and VAR_29.content
and VAR_29.content.get("membership", None)
in (Membership.JOIN, Membership.INVITE)
):
VAR_1.info(
"Discarding PDU %s from invalid VAR_11 %s", VAR_29.event_id, VAR_11
)
return
else:
VAR_1.info("Accepting join PDU %s from %s", VAR_29.event_id, VAR_11)
VAR_50 = await self.store.get_room_version(VAR_29.room_id)
try:
VAR_29 = await self._check_sigs_and_hash(VAR_50, VAR_29)
except SynapseError as e:
raise FederationError("ERROR", e.code, e.msg, affected=VAR_29.event_id)
await self.handler.on_receive_pdu(VAR_11, VAR_29, sent_to_us_directly=True)
def __str__(self):
return "<ReplicationLayer(%s)>" % self.server_name
async def FUNC_27(
self, VAR_30: str, VAR_31: str, VAR_12: str, VAR_32: Dict
):
VAR_64 = await self.handler.exchange_third_party_invite(
VAR_30, VAR_31, VAR_12, VAR_32
)
return VAR_64
async def FUNC_28(self, VAR_33: Dict):
VAR_64 = await self.handler.on_exchange_third_party_invite_request(VAR_33)
return VAR_64
async def FUNC_29(self, VAR_7: str, VAR_12: str):
VAR_52 = await self.store.get_current_state_ids(VAR_12)
VAR_65 = VAR_52.get((EventTypes.ServerACL, ""))
if not VAR_65:
return
VAR_8 = await self.store.get_event(VAR_65)
if FUNC_0(VAR_7, VAR_8):
return
raise AuthError(code=403, msg="Server is banned from room")
def FUNC_0(VAR_7: str, VAR_8: EventBase) -> bool:
VAR_1.debug("Checking %s against acl %s", VAR_7, VAR_8.content)
VAR_34 = VAR_8.content.get("allow_ip_literals", True)
if not isinstance(VAR_34, bool):
VAR_1.warning("Ignoring non-bool VAR_34 flag")
VAR_34 = True
if not VAR_34:
if VAR_7[0] == "[":
return False
if isIPAddress(VAR_7):
return False
VAR_35 = VAR_8.content.get("deny", [])
if not isinstance(VAR_35, (list, tuple)):
VAR_1.warning("Ignoring non-list VAR_35 ACL %s", VAR_35)
VAR_35 = []
for e in VAR_35:
if FUNC_1(VAR_7, e):
return False
VAR_36 = VAR_8.content.get("allow", [])
if not isinstance(VAR_36, (list, tuple)):
VAR_1.warning("Ignoring non-list VAR_36 ACL %s", VAR_36)
VAR_36 = []
for e in VAR_36:
if FUNC_1(VAR_7, e):
return True
return False
def FUNC_1(VAR_7: str, VAR_9: Any) -> bool:
if not isinstance(VAR_9, str):
VAR_1.warning(
"Ignoring non-str ACL entry '%s' (is %s)", VAR_9, type(VAR_9)
)
return False
VAR_37 = glob_to_regex(VAR_9)
return bool(VAR_37.match(VAR_7))
class CLASS_1:
def __init__(self, VAR_10: "HomeServer"):
self.config = VAR_10.config
self.http_client = VAR_10.get_simple_http_client()
self.clock = VAR_10.get_clock()
self._instance_name = VAR_10.get_instance_name()
self._get_query_client = ReplicationGetQueryRestServlet.make_client(VAR_10)
self._send_edu = ReplicationFederationSendEduRestServlet.make_client(VAR_10)
self.edu_handlers = (
{}
) # type: Dict[str, Callable[[str, dict], Awaitable[None]]]
self.query_handlers = {} # type: Dict[str, Callable[[dict], Awaitable[None]]]
self._edu_type_to_instance = {} # type: Dict[str, str]
def FUNC_30(
self, VAR_38: str, VAR_39: Callable[[str, JsonDict], Awaitable[None]]
):
if VAR_38 in self.edu_handlers:
raise KeyError("Already have an EDU VAR_39 for %s" % (VAR_38,))
VAR_1.info("Registering federation EDU VAR_39 for %r", VAR_38)
self.edu_handlers[VAR_38] = VAR_39
def FUNC_31(
self, VAR_19: str, VAR_39: Callable[[dict], defer.Deferred]
):
if VAR_19 in self.query_handlers:
raise KeyError("Already have a Query VAR_39 for %s" % (VAR_19,))
VAR_1.info("Registering federation VAR_59 VAR_39 for %r", VAR_19)
self.query_handlers[VAR_19] = VAR_39
def FUNC_32(self, VAR_38: str, VAR_40: str):
self._edu_type_to_instance[VAR_38] = VAR_40
async def FUNC_33(self, VAR_38: str, VAR_11: str, VAR_23: dict):
if not self.config.use_presence and VAR_38 == "m.presence":
return
VAR_39 = self.edu_handlers.get(VAR_38)
if VAR_39:
with start_active_span_from_edu(VAR_23, "handle_edu"):
try:
await VAR_39(VAR_11, VAR_23)
except SynapseError as e:
VAR_1.info("Failed to handle VAR_71 %r: %r", VAR_38, e)
except Exception:
VAR_1.exception("Failed to handle VAR_71 %r", VAR_38)
return
VAR_66 = self._edu_type_to_instance.get(VAR_38, "master")
if VAR_66 != self._instance_name:
try:
await self._send_edu(
VAR_40=VAR_66,
VAR_38=edu_type,
VAR_11=origin,
VAR_23=content,
)
except SynapseError as e:
VAR_1.info("Failed to handle VAR_71 %r: %r", VAR_38, e)
except Exception:
VAR_1.exception("Failed to handle VAR_71 %r", VAR_38)
return
VAR_1.warning("No VAR_39 registered for EDU type %s", VAR_38)
async def FUNC_34(self, VAR_19: str, VAR_20: dict):
VAR_39 = self.query_handlers.get(VAR_19)
if VAR_39:
return await VAR_39(VAR_20)
if self._instance_name == "master":
return await self._get_query_client(VAR_19=query_type, VAR_20=args)
VAR_1.warning("No VAR_39 registered for VAR_59 type %s", VAR_19)
raise NotFoundError("No VAR_39 for Query type '%s'" % (VAR_19,))
|
import logging
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Dict,
List,
Optional,
Tuple,
Union,
)
from prometheus_client import Counter, Gauge, Histogram
from twisted.internet import defer
from twisted.internet.abstract import isIPAddress
from twisted.python import .failure
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import (
AuthError,
Codes,
FederationError,
IncompatibleRoomVersionError,
NotFoundError,
SynapseError,
UnsupportedRoomVersionError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import EventBase
from synapse.federation.federation_base import FederationBase, event_from_pdu_json
from synapse.federation.persistence import TransactionActions
from synapse.federation.units import Edu, Transaction
from synapse.http.endpoint import .parse_server_name
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import (
make_deferred_yieldable,
nested_logging_context,
run_in_background,
)
from synapse.logging.opentracing import log_kv, start_active_span_from_edu, trace
from synapse.logging.utils import log_function
from synapse.replication.http.federation import (
ReplicationFederationSendEduRestServlet,
ReplicationGetQueryRestServlet,
)
from synapse.types import JsonDict, get_domain_from_id
from synapse.util import glob_to_regex, json_decoder, unwrapFirstError
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.caches.response_cache import ResponseCache
if TYPE_CHECKING:
from synapse.server import HomeServer
VAR_0 = 10
VAR_1 = logging.getLogger(__name__)
VAR_2 = Counter("synapse_federation_server_received_pdus", "")
VAR_3 = Counter("synapse_federation_server_received_edus", "")
VAR_4 = Counter(
"synapse_federation_server_received_queries", "", ["type"]
)
VAR_5 = Histogram(
"synapse_federation_server_pdu_process_time", "Time taken to process an event",
)
VAR_6 = Gauge(
"synapse_federation_last_received_pdu_age",
"The age (in seconds) of the last PDU successfully received from the given domain",
labelnames=("server_name",),
)
class CLASS_0(FederationBase):
def __init__(self, VAR_10):
super().__init__(VAR_10)
self.auth = VAR_10.get_auth()
self.handler = VAR_10.get_federation_handler()
self.state = VAR_10.get_state_handler()
self.device_handler = VAR_10.get_device_handler()
VAR_10.get_directory_handler()
self._federation_ratelimiter = VAR_10.get_federation_ratelimiter()
self._server_linearizer = Linearizer("fed_server")
self._transaction_linearizer = Linearizer("fed_txn_handler")
self._transaction_resp_cache = ResponseCache(
VAR_10, "fed_txn_handler", timeout_ms=30000
) # type: ResponseCache[Tuple[str, str]]
self.transaction_actions = TransactionActions(self.store)
self.registry = VAR_10.get_federation_registry()
self._state_resp_cache = ResponseCache(
VAR_10, "state_resp", timeout_ms=30000
) # type: ResponseCache[Tuple[str, str]]
self._state_ids_resp_cache = ResponseCache(
VAR_10, "state_ids_resp", timeout_ms=30000
) # type: ResponseCache[Tuple[str, str]]
self._federation_metrics_domains = (
VAR_10.get_config().federation.federation_metrics_domains
)
async def FUNC_2(
self, VAR_11: str, VAR_12: str, VAR_13: List[str], VAR_14: int
) -> Tuple[int, Dict[str, Any]]:
with (await self._server_linearizer.queue((VAR_11, VAR_12))):
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_63 = await self.handler.on_backfill_request(
VAR_11, VAR_12, VAR_13, VAR_14
)
VAR_67 = self._transaction_from_pdus(VAR_63).get_dict()
return 200, VAR_67
async def FUNC_3(
self, VAR_11: str, VAR_15: JsonDict
) -> Tuple[int, Dict[str, Any]]:
VAR_17 = self._clock.time_msec()
VAR_16 = Transaction(**VAR_15)
VAR_41 = VAR_16.transaction_id # type: ignore
if not VAR_41:
raise Exception("Transaction missing transaction_id")
VAR_1.debug("[%s] Got transaction", VAR_41)
return await self._transaction_resp_cache.wrap(
(VAR_11, VAR_41),
self._on_incoming_transaction_inner,
VAR_11,
VAR_16,
VAR_17,
)
async def FUNC_4(
self, VAR_11: str, VAR_16: Transaction, VAR_17: int
) -> Tuple[int, Dict[str, Any]]:
with await self._transaction_linearizer.queue(VAR_11):
with self._federation_ratelimiter.ratelimit(VAR_11) as d:
await d
VAR_74 = await self._handle_incoming_transaction(
VAR_11, VAR_16, VAR_17
)
return VAR_74
async def FUNC_5(
self, VAR_11: str, VAR_16: Transaction, VAR_17: int
) -> Tuple[int, Dict[str, Any]]:
VAR_42 = await self.transaction_actions.have_responded(VAR_11, VAR_16)
if VAR_42:
VAR_1.debug(
"[%s] We've already responded to this request",
VAR_16.transaction_id, # type: ignore
)
return VAR_42
VAR_1.debug("[%s] Transaction is new", VAR_16.transaction_id) # type: ignore
if len(VAR_16.pdus) > 50 or ( # type: ignore
hasattr(VAR_16, "edus") and len(VAR_16.edus) > 100 # type: ignore
):
VAR_1.info("Transaction PDU or EDU count too large. Returning 400")
VAR_42 = {}
await self.transaction_actions.set_response(
VAR_11, VAR_16, 400, VAR_42
)
return 400, VAR_42
VAR_43, VAR_44 = await make_deferred_yieldable(
defer.gatherResults(
[
run_in_background(
self._handle_pdus_in_txn, VAR_11, VAR_16, VAR_17
),
run_in_background(self._handle_edus_in_txn, VAR_11, VAR_16),
],
consumeErrors=True,
).addErrback(unwrapFirstError)
)
VAR_42 = {"pdus": VAR_43}
VAR_1.debug("Returning: %s", str(VAR_42))
await self.transaction_actions.set_response(VAR_11, VAR_16, 200, VAR_42)
return 200, VAR_42
async def FUNC_6(
self, VAR_11: str, VAR_16: Transaction, VAR_17: int
) -> Dict[str, dict]:
VAR_2.inc(len(VAR_16.pdus)) # type: ignore
VAR_45, VAR_44 = parse_server_name(VAR_11)
VAR_46 = {} # type: Dict[str, List[EventBase]]
VAR_47 = 0
for VAR_76 in VAR_16.pdus: # type: ignore
if "unsigned" in VAR_76:
VAR_75 = VAR_76["unsigned"]
if "age" in VAR_75:
VAR_76["age"] = VAR_75["age"]
if "age" in VAR_76:
VAR_76["age_ts"] = VAR_17 - int(VAR_76["age"])
del VAR_76["age"]
VAR_68 = VAR_76.get("event_id", "<Unknown>")
VAR_12 = VAR_76.get("room_id")
if not VAR_12:
VAR_1.info(
"Ignoring PDU as does not have a VAR_12. Event ID: %s",
VAR_68,
)
continue
try:
VAR_50 = await self.store.get_room_version(VAR_12)
except NotFoundError:
VAR_1.info("Ignoring PDU for unknown VAR_12: %s", VAR_12)
continue
except UnsupportedRoomVersionError as e:
VAR_1.info("Ignoring PDU: %s", e)
continue
VAR_69 = event_from_pdu_json(VAR_76, VAR_50)
VAR_46.setdefault(VAR_12, []).append(VAR_69)
if VAR_69.origin_server_ts > VAR_47:
VAR_47 = VAR_69.origin_server_ts
VAR_43 = {}
async def FUNC_35(VAR_12: str):
VAR_1.debug("Processing PDUs for %s", VAR_12)
try:
await self.check_server_matches_acl(VAR_45, VAR_12)
except AuthError as e:
VAR_1.warning("Ignoring PDUs for room %s from banned server", VAR_12)
for VAR_29 in VAR_46[VAR_12]:
VAR_18 = VAR_29.event_id
VAR_43[VAR_18] = e.error_dict()
return
for VAR_29 in VAR_46[VAR_12]:
VAR_18 = VAR_29.event_id
with VAR_5.time():
with nested_logging_context(VAR_18):
try:
await self._handle_received_pdu(VAR_11, VAR_29)
VAR_43[VAR_18] = {}
except FederationError as e:
VAR_1.warning("Error handling PDU %s: %s", VAR_18, e)
VAR_43[VAR_18] = {"error": str(e)}
except Exception as e:
VAR_78 = failure.Failure()
VAR_43[VAR_18] = {"error": str(e)}
VAR_1.error(
"Failed to handle PDU %s",
VAR_18,
exc_info=(VAR_78.type, VAR_78.value, VAR_78.getTracebackObject()),
)
await concurrently_execute(
FUNC_35, VAR_46.keys(), VAR_0
)
if VAR_47 and VAR_11 in self._federation_metrics_domains:
VAR_70 = self._clock.time_msec() - VAR_47
VAR_6.labels(VAR_7=VAR_11).set(VAR_70 / 1000)
return VAR_43
async def FUNC_7(self, VAR_11: str, VAR_16: Transaction):
async def FUNC_36(VAR_48):
VAR_3.inc()
VAR_71 = Edu(
VAR_11=origin,
destination=self.server_name,
VAR_38=VAR_48["edu_type"],
VAR_23=VAR_48["content"],
)
await self.registry.on_edu(VAR_71.edu_type, VAR_11, VAR_71.content)
await concurrently_execute(
FUNC_36,
getattr(VAR_16, "edus", []),
VAR_0,
)
async def FUNC_8(
self, VAR_11: str, VAR_12: str, VAR_18: str
) -> Tuple[int, Dict[str, Any]]:
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_49 = await self.auth.check_host_in_room(VAR_12, VAR_11)
if not VAR_49:
raise AuthError(403, "Host not in room.")
with (await self._server_linearizer.queue((VAR_11, VAR_12))):
VAR_51 = dict(
await self._state_resp_cache.wrap(
(VAR_12, VAR_18),
self._on_context_state_request_compute,
VAR_12,
VAR_18,
)
)
VAR_50 = await self.store.get_room_version_id(VAR_12)
VAR_51["room_version"] = VAR_50
return 200, VAR_51
async def FUNC_9(
self, VAR_11: str, VAR_12: str, VAR_18: str
) -> Tuple[int, Dict[str, Any]]:
if not VAR_18:
raise NotImplementedError("Specify an event")
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_49 = await self.auth.check_host_in_room(VAR_12, VAR_11)
if not VAR_49:
raise AuthError(403, "Host not in room.")
VAR_51 = await self._state_ids_resp_cache.wrap(
(VAR_12, VAR_18), self._on_state_ids_request_compute, VAR_12, VAR_18,
)
return 200, VAR_51
async def FUNC_10(self, VAR_12, VAR_18):
VAR_52 = await self.handler.get_state_ids_for_pdu(VAR_12, VAR_18)
VAR_53 = await self.store.get_auth_chain_ids(VAR_52)
return {"pdu_ids": VAR_52, "auth_chain_ids": VAR_53}
async def FUNC_11(
self, VAR_12: str, VAR_18: str
) -> Dict[str, list]:
if VAR_18:
VAR_63 = await self.handler.get_state_for_pdu(VAR_12, VAR_18)
else:
VAR_63 = (await self.state.get_current_state(VAR_12)).values()
VAR_54 = await self.store.get_auth_chain([VAR_29.event_id for VAR_29 in VAR_63])
return {
"pdus": [VAR_29.get_pdu_json() for VAR_29 in VAR_63],
"auth_chain": [VAR_29.get_pdu_json() for VAR_29 in VAR_54],
}
async def FUNC_12(
self, VAR_11: str, VAR_18: str
) -> Tuple[int, Union[JsonDict, str]]:
VAR_29 = await self.handler.get_persisted_pdu(VAR_11, VAR_18)
if VAR_29:
return 200, self._transaction_from_pdus([VAR_29]).get_dict()
else:
return 404, ""
async def FUNC_13(
self, VAR_19: str, VAR_20: Dict[str, str]
) -> Tuple[int, Dict[str, Any]]:
VAR_4.labels(VAR_19).inc()
VAR_51 = await self.registry.on_query(VAR_19, VAR_20)
return 200, VAR_51
async def FUNC_14(
self, VAR_11: str, VAR_12: str, VAR_21: str, VAR_22: List[str]
) -> Dict[str, Any]:
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_50 = await self.store.get_room_version_id(VAR_12)
if VAR_50 not in VAR_22:
VAR_1.warning(
"Room version %s not in %s", VAR_50, VAR_22
)
raise IncompatibleRoomVersionError(VAR_50=room_version)
VAR_29 = await self.handler.on_make_join_request(VAR_11, VAR_12, VAR_21)
VAR_55 = self._clock.time_msec()
return {"event": VAR_29.get_pdu_json(VAR_55), "room_version": VAR_50}
async def FUNC_15(
self, VAR_11: str, VAR_23: JsonDict, VAR_24: str
) -> Dict[str, Any]:
VAR_50 = KNOWN_ROOM_VERSIONS.get(VAR_24)
if not VAR_50:
raise SynapseError(
400,
"Homeserver does not support this room version",
Codes.UNSUPPORTED_ROOM_VERSION,
)
VAR_29 = event_from_pdu_json(VAR_23, VAR_50)
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_29.room_id)
VAR_29 = await self._check_sigs_and_hash(VAR_50, VAR_29)
VAR_56 = await self.handler.on_invite_request(VAR_11, VAR_29, VAR_50)
VAR_55 = self._clock.time_msec()
return {"event": VAR_56.get_pdu_json(VAR_55)}
async def FUNC_16(
self, VAR_11: str, VAR_23: JsonDict
) -> Dict[str, Any]:
VAR_1.debug("on_send_join_request: VAR_23: %s", VAR_23)
assert_params_in_dict(VAR_23, ["room_id"])
VAR_50 = await self.store.get_room_version(VAR_23["room_id"])
VAR_29 = event_from_pdu_json(VAR_23, VAR_50)
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_29.room_id)
VAR_1.debug("on_send_join_request: VAR_29 sigs: %s", VAR_29.signatures)
VAR_29 = await self._check_sigs_and_hash(VAR_50, VAR_29)
VAR_57 = await self.handler.on_send_join_request(VAR_11, VAR_29)
VAR_55 = self._clock.time_msec()
return {
"state": [VAR_76.get_pdu_json(VAR_55) for VAR_76 in VAR_57["state"]],
"auth_chain": [VAR_76.get_pdu_json(VAR_55) for VAR_76 in VAR_57["auth_chain"]],
}
async def FUNC_17(
self, VAR_11: str, VAR_12: str, VAR_21: str
) -> Dict[str, Any]:
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_29 = await self.handler.on_make_leave_request(VAR_11, VAR_12, VAR_21)
VAR_50 = await self.store.get_room_version_id(VAR_12)
VAR_55 = self._clock.time_msec()
return {"event": VAR_29.get_pdu_json(VAR_55), "room_version": VAR_50}
async def FUNC_18(self, VAR_11: str, VAR_23: JsonDict) -> dict:
VAR_1.debug("on_send_leave_request: VAR_23: %s", VAR_23)
assert_params_in_dict(VAR_23, ["room_id"])
VAR_50 = await self.store.get_room_version(VAR_23["room_id"])
VAR_29 = event_from_pdu_json(VAR_23, VAR_50)
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_29.room_id)
VAR_1.debug("on_send_leave_request: VAR_29 sigs: %s", VAR_29.signatures)
VAR_29 = await self._check_sigs_and_hash(VAR_50, VAR_29)
await self.handler.on_send_leave_request(VAR_11, VAR_29)
return {}
async def FUNC_19(
self, VAR_11: str, VAR_12: str, VAR_18: str
) -> Tuple[int, Dict[str, Any]]:
with (await self._server_linearizer.queue((VAR_11, VAR_12))):
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_55 = self._clock.time_msec()
VAR_72 = await self.handler.on_event_auth(VAR_18)
VAR_67 = {"auth_chain": [a.get_pdu_json(VAR_55) for a in VAR_72]}
return 200, VAR_67
@log_function
async def FUNC_20(
self, VAR_11: str, VAR_23: Dict[str, str]
) -> Tuple[int, Dict[str, Any]]:
return await self.on_query_request("client_keys", VAR_23)
async def FUNC_21(
self, VAR_11: str, VAR_21: str
) -> Tuple[int, Dict[str, Any]]:
VAR_58 = await self.device_handler.on_federation_query_user_devices(VAR_21)
return 200, VAR_58
@trace
async def FUNC_22(
self, VAR_11: str, VAR_23: JsonDict
) -> Dict[str, Any]:
VAR_59 = []
for VAR_21, device_keys in VAR_23.get("one_time_keys", {}).items():
for VAR_77, algorithm in device_keys.items():
VAR_59.append((VAR_21, VAR_77, algorithm))
log_kv({"message": "Claiming one time VAR_58.", "user, device pairs": VAR_59})
VAR_60 = await self.store.claim_e2e_one_time_keys(VAR_59)
VAR_61 = {} # type: Dict[str, Dict[str, dict]]
for VAR_21, device_keys in VAR_60.items():
for VAR_77, VAR_58 in device_keys.items():
for key_id, json_str in VAR_58.items():
VAR_61.setdefault(VAR_21, {})[VAR_77] = {
key_id: json_decoder.decode(json_str)
}
VAR_1.info(
"Claimed one-time-VAR_58: %s",
",".join(
(
"%s for %s:%s" % (key_id, VAR_21, VAR_77)
for VAR_21, user_keys in VAR_61.items()
for VAR_77, device_keys in user_keys.items()
for key_id, VAR_44 in device_keys.items()
)
),
)
return {"one_time_keys": VAR_61}
async def FUNC_23(
self,
VAR_11: str,
VAR_12: str,
VAR_25: List[str],
VAR_26: List[str],
VAR_14: int,
) -> Dict[str, list]:
with (await self._server_linearizer.queue((VAR_11, VAR_12))):
VAR_45, VAR_44 = parse_server_name(VAR_11)
await self.check_server_matches_acl(VAR_45, VAR_12)
VAR_1.debug(
"on_get_missing_events: VAR_25: %r, VAR_26: %r,"
" VAR_14: %d",
VAR_25,
VAR_26,
VAR_14,
)
VAR_73 = await self.handler.on_get_missing_events(
VAR_11, VAR_12, VAR_25, VAR_26, VAR_14
)
if len(VAR_73) < 5:
VAR_1.debug(
"Returning %d events: %r", len(VAR_73), missing_events
)
else:
VAR_1.debug("Returning %d events", len(VAR_73))
VAR_55 = self._clock.time_msec()
return {"events": [ev.get_pdu_json(VAR_55) for ev in VAR_73]}
@log_function
async def FUNC_24(self, VAR_27: str) -> Optional[str]:
VAR_62 = self._clock.time_msec()
return await self.store.get_user_id_for_open_id_token(VAR_27, VAR_62)
def FUNC_25(self, VAR_28: List[EventBase]) -> Transaction:
VAR_55 = self._clock.time_msec()
VAR_63 = [VAR_76.get_pdu_json(VAR_55) for VAR_76 in VAR_28]
return Transaction(
VAR_11=self.server_name,
VAR_63=pdus,
origin_server_ts=int(VAR_55),
destination=None,
)
async def FUNC_26(self, VAR_11: str, VAR_29: EventBase) -> None:
if VAR_11 != get_domain_from_id(VAR_29.sender):
if not (
VAR_29.type == "m.room.member"
and VAR_29.content
and VAR_29.content.get("membership", None)
in (Membership.JOIN, Membership.INVITE)
):
VAR_1.info(
"Discarding PDU %s from invalid VAR_11 %s", VAR_29.event_id, VAR_11
)
return
else:
VAR_1.info("Accepting join PDU %s from %s", VAR_29.event_id, VAR_11)
VAR_50 = await self.store.get_room_version(VAR_29.room_id)
try:
VAR_29 = await self._check_sigs_and_hash(VAR_50, VAR_29)
except SynapseError as e:
raise FederationError("ERROR", e.code, e.msg, affected=VAR_29.event_id)
await self.handler.on_receive_pdu(VAR_11, VAR_29, sent_to_us_directly=True)
def __str__(self):
return "<ReplicationLayer(%s)>" % self.server_name
async def FUNC_27(
self, VAR_30: str, VAR_31: str, VAR_12: str, VAR_32: Dict
):
VAR_64 = await self.handler.exchange_third_party_invite(
VAR_30, VAR_31, VAR_12, VAR_32
)
return VAR_64
async def FUNC_28(self, VAR_33: Dict):
VAR_64 = await self.handler.on_exchange_third_party_invite_request(VAR_33)
return VAR_64
async def FUNC_29(self, VAR_7: str, VAR_12: str):
VAR_52 = await self.store.get_current_state_ids(VAR_12)
VAR_65 = VAR_52.get((EventTypes.ServerACL, ""))
if not VAR_65:
return
VAR_8 = await self.store.get_event(VAR_65)
if FUNC_0(VAR_7, VAR_8):
return
raise AuthError(code=403, msg="Server is banned from room")
def FUNC_0(VAR_7: str, VAR_8: EventBase) -> bool:
VAR_1.debug("Checking %s against acl %s", VAR_7, VAR_8.content)
VAR_34 = VAR_8.content.get("allow_ip_literals", True)
if not isinstance(VAR_34, bool):
VAR_1.warning("Ignoring non-bool VAR_34 flag")
VAR_34 = True
if not VAR_34:
if VAR_7[0] == "[":
return False
if isIPAddress(VAR_7):
return False
VAR_35 = VAR_8.content.get("deny", [])
if not isinstance(VAR_35, (list, tuple)):
VAR_1.warning("Ignoring non-list VAR_35 ACL %s", VAR_35)
VAR_35 = []
for e in VAR_35:
if FUNC_1(VAR_7, e):
return False
VAR_36 = VAR_8.content.get("allow", [])
if not isinstance(VAR_36, (list, tuple)):
VAR_1.warning("Ignoring non-list VAR_36 ACL %s", VAR_36)
VAR_36 = []
for e in VAR_36:
if FUNC_1(VAR_7, e):
return True
return False
def FUNC_1(VAR_7: str, VAR_9: Any) -> bool:
if not isinstance(VAR_9, str):
VAR_1.warning(
"Ignoring non-str ACL entry '%s' (is %s)", VAR_9, type(VAR_9)
)
return False
VAR_37 = glob_to_regex(VAR_9)
return bool(VAR_37.match(VAR_7))
class CLASS_1:
def __init__(self, VAR_10: "HomeServer"):
self.config = VAR_10.config
self.clock = VAR_10.get_clock()
self._instance_name = VAR_10.get_instance_name()
self._get_query_client = ReplicationGetQueryRestServlet.make_client(VAR_10)
self._send_edu = ReplicationFederationSendEduRestServlet.make_client(VAR_10)
self.edu_handlers = (
{}
) # type: Dict[str, Callable[[str, dict], Awaitable[None]]]
self.query_handlers = {} # type: Dict[str, Callable[[dict], Awaitable[None]]]
self._edu_type_to_instance = {} # type: Dict[str, str]
def FUNC_30(
self, VAR_38: str, VAR_39: Callable[[str, JsonDict], Awaitable[None]]
):
if VAR_38 in self.edu_handlers:
raise KeyError("Already have an EDU VAR_39 for %s" % (VAR_38,))
VAR_1.info("Registering federation EDU VAR_39 for %r", VAR_38)
self.edu_handlers[VAR_38] = VAR_39
def FUNC_31(
self, VAR_19: str, VAR_39: Callable[[dict], defer.Deferred]
):
if VAR_19 in self.query_handlers:
raise KeyError("Already have a Query VAR_39 for %s" % (VAR_19,))
VAR_1.info("Registering federation VAR_59 VAR_39 for %r", VAR_19)
self.query_handlers[VAR_19] = VAR_39
def FUNC_32(self, VAR_38: str, VAR_40: str):
self._edu_type_to_instance[VAR_38] = VAR_40
async def FUNC_33(self, VAR_38: str, VAR_11: str, VAR_23: dict):
if not self.config.use_presence and VAR_38 == "m.presence":
return
VAR_39 = self.edu_handlers.get(VAR_38)
if VAR_39:
with start_active_span_from_edu(VAR_23, "handle_edu"):
try:
await VAR_39(VAR_11, VAR_23)
except SynapseError as e:
VAR_1.info("Failed to handle VAR_71 %r: %r", VAR_38, e)
except Exception:
VAR_1.exception("Failed to handle VAR_71 %r", VAR_38)
return
VAR_66 = self._edu_type_to_instance.get(VAR_38, "master")
if VAR_66 != self._instance_name:
try:
await self._send_edu(
VAR_40=VAR_66,
VAR_38=edu_type,
VAR_11=origin,
VAR_23=content,
)
except SynapseError as e:
VAR_1.info("Failed to handle VAR_71 %r: %r", VAR_38, e)
except Exception:
VAR_1.exception("Failed to handle VAR_71 %r", VAR_38)
return
VAR_1.warning("No VAR_39 registered for EDU type %s", VAR_38)
async def FUNC_34(self, VAR_19: str, VAR_20: dict):
VAR_39 = self.query_handlers.get(VAR_19)
if VAR_39:
return await VAR_39(VAR_20)
if self._instance_name == "master":
return await self._get_query_client(VAR_19=query_type, VAR_20=args)
VAR_1.warning("No VAR_39 registered for VAR_59 type %s", VAR_19)
raise NotFoundError("No VAR_39 for Query type '%s'" % (VAR_19,))
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
29,
31,
35,
68,
71,
72,
73,
75,
77,
79,
81,
85,
89,
90,
96,
97,
101,
105,
107,
108,
109,
111,
113,
116,
117,
121,
123,
125,
126,
127,
134,
138,
145,
149,
151,
153,
157,
158,
160,
163,
166,
168,
169,
170,
178,
182,
183,
185,
186,
187,
188,
189,
190,
191,
192,
193,
196,
200,
202,
207,
212,
217,
224,
226,
227,
231,
233,
239,
240,
241,
242,
254,
256,
258,
261,
266,
271,
276,
278,
280,
282,
284,
286,
287,
295,
296,
297,
298,
300,
301,
302,
310,
317,
318,
321,
324,
327,
329,
330,
331,
332,
333,
344,
363,
367,
371,
373,
377,
380,
388,
394,
400,
404,
405,
406,
407,
408,
409,
419,
422,
424,
430,
433,
437,
441,
443,
448,
456,
458,
463,
468,
473,
480,
486,
493,
497,
508,
516,
521,
525,
528,
530,
532,
539,
546,
548,
551,
554,
558,
561,
563,
565,
568,
575,
580,
586,
592,
601,
604,
612,
624,
626,
638,
646,
650,
657,
659,
661,
666,
679,
682,
687,
692,
699,
703,
708,
709,
711,
712,
713,
714,
715,
716,
729,
730,
732,
733,
738,
740,
743,
751,
755,
758,
762,
768,
771,
775,
777,
778,
781,
785,
790,
791,
792,
798,
801,
802,
805,
806,
813,
815,
816,
823,
825,
826,
827,
829,
830,
839,
840,
845,
851,
852,
853,
854,
855,
858,
863,
864,
866,
872,
881,
883,
885,
891,
901,
903,
905,
910,
914,
915,
926,
927,
942,
943,
945,
950,
951,
954,
955,
956,
959,
780,
781,
782,
783,
784,
785,
786,
787,
788,
842,
843,
844,
206,
207,
208,
209,
210,
211,
212,
213,
214,
215,
265,
266,
267,
268,
269,
270,
271,
272,
273,
274,
275,
375,
376,
668,
669,
670,
681,
682,
683,
684,
685,
686,
687,
688,
689,
690,
691,
692,
693,
694,
695,
696,
697,
698,
699,
700,
701,
702,
703,
704,
705,
706,
707,
757,
758,
759,
760,
761,
762,
763,
764,
765,
870,
871,
872,
873,
874,
875,
876,
877,
878,
889,
890,
891,
892,
893,
894,
895,
896,
897,
898,
907,
908
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
29,
31,
35,
68,
71,
72,
73,
75,
77,
79,
81,
85,
89,
90,
96,
97,
101,
105,
107,
108,
109,
111,
113,
116,
117,
121,
123,
125,
126,
127,
134,
138,
145,
149,
151,
153,
157,
158,
160,
163,
166,
168,
169,
170,
178,
182,
183,
185,
186,
187,
188,
189,
190,
191,
192,
193,
196,
200,
202,
207,
212,
217,
224,
226,
227,
231,
233,
239,
240,
241,
242,
254,
256,
258,
261,
266,
271,
276,
278,
280,
282,
284,
286,
287,
295,
296,
297,
298,
300,
301,
302,
310,
317,
318,
321,
324,
327,
329,
330,
331,
332,
333,
344,
363,
367,
371,
373,
377,
380,
388,
394,
400,
404,
405,
406,
407,
408,
409,
419,
422,
424,
430,
433,
437,
441,
443,
448,
456,
458,
463,
468,
473,
480,
486,
493,
497,
508,
516,
521,
525,
528,
530,
532,
539,
546,
548,
551,
554,
558,
561,
563,
565,
568,
575,
580,
586,
592,
601,
604,
612,
624,
626,
638,
646,
650,
657,
659,
661,
666,
679,
682,
687,
692,
699,
703,
708,
709,
711,
712,
713,
714,
715,
716,
729,
730,
732,
733,
738,
740,
743,
751,
755,
758,
762,
768,
771,
775,
777,
778,
781,
785,
790,
791,
792,
798,
801,
802,
805,
806,
813,
815,
816,
823,
825,
826,
827,
829,
830,
839,
840,
845,
850,
851,
852,
853,
854,
857,
862,
863,
865,
871,
880,
882,
884,
890,
900,
902,
904,
909,
913,
914,
925,
926,
941,
942,
944,
949,
950,
953,
954,
955,
958,
780,
781,
782,
783,
784,
785,
786,
787,
788,
842,
843,
844,
206,
207,
208,
209,
210,
211,
212,
213,
214,
215,
265,
266,
267,
268,
269,
270,
271,
272,
273,
274,
275,
375,
376,
668,
669,
670,
681,
682,
683,
684,
685,
686,
687,
688,
689,
690,
691,
692,
693,
694,
695,
696,
697,
698,
699,
700,
701,
702,
703,
704,
705,
706,
707,
757,
758,
759,
760,
761,
762,
763,
764,
765,
869,
870,
871,
872,
873,
874,
875,
876,
877,
888,
889,
890,
891,
892,
893,
894,
895,
896,
897,
906,
907
] |
2CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib
from typing import Any, Dict, Optional
from synapse.api.constants import Membership
from synapse.api.errors import Codes, HttpResponseException, SynapseError
from synapse.api.urls import (
FEDERATION_UNSTABLE_PREFIX,
FEDERATION_V1_PREFIX,
FEDERATION_V2_PREFIX,
)
from synapse.logging.utils import log_function
logger = logging.getLogger(__name__)
class TransportLayerClient:
"""Sends federation HTTP requests to other servers"""
def __init__(self, hs):
self.server_name = hs.hostname
self.client = hs.get_http_client()
@log_function
def get_room_state_ids(self, destination, room_id, event_id):
""" Requests all state for a given room from the given server at the
given event. Returns the state's event_id's
Args:
destination (str): The host name of the remote homeserver we want
to get the state from.
context (str): The name of the context we want the state of
event_id (str): The event we want the context at.
Returns:
Awaitable: Results in a dict received from the remote homeserver.
"""
logger.debug("get_room_state_ids dest=%s, room=%s", destination, room_id)
path = _create_v1_path("/state_ids/%s", room_id)
return self.client.get_json(
destination,
path=path,
args={"event_id": event_id},
try_trailing_slash_on_400=True,
)
@log_function
def get_event(self, destination, event_id, timeout=None):
""" Requests the pdu with give id and origin from the given server.
Args:
destination (str): The host name of the remote homeserver we want
to get the state from.
event_id (str): The id of the event being requested.
timeout (int): How long to try (in ms) the destination for before
giving up. None indicates no timeout.
Returns:
Awaitable: Results in a dict received from the remote homeserver.
"""
logger.debug("get_pdu dest=%s, event_id=%s", destination, event_id)
path = _create_v1_path("/event/%s", event_id)
return self.client.get_json(
destination, path=path, timeout=timeout, try_trailing_slash_on_400=True
)
@log_function
def backfill(self, destination, room_id, event_tuples, limit):
""" Requests `limit` previous PDUs in a given context before list of
PDUs.
Args:
dest (str)
room_id (str)
event_tuples (list)
limit (int)
Returns:
Awaitable: Results in a dict received from the remote homeserver.
"""
logger.debug(
"backfill dest=%s, room_id=%s, event_tuples=%r, limit=%s",
destination,
room_id,
event_tuples,
str(limit),
)
if not event_tuples:
# TODO: raise?
return
path = _create_v1_path("/backfill/%s", room_id)
args = {"v": event_tuples, "limit": [str(limit)]}
return self.client.get_json(
destination, path=path, args=args, try_trailing_slash_on_400=True
)
@log_function
async def send_transaction(self, transaction, json_data_callback=None):
""" Sends the given Transaction to its destination
Args:
transaction (Transaction)
Returns:
Succeeds when we get a 2xx HTTP response. The result
will be the decoded JSON body.
Fails with ``HTTPRequestException`` if we get an HTTP response
code >= 300.
Fails with ``NotRetryingDestination`` if we are not yet ready
to retry this server.
Fails with ``FederationDeniedError`` if this destination
is not on our federation whitelist
"""
logger.debug(
"send_data dest=%s, txid=%s",
transaction.destination,
transaction.transaction_id,
)
if transaction.destination == self.server_name:
raise RuntimeError("Transport layer cannot send to itself!")
# FIXME: This is only used by the tests. The actual json sent is
# generated by the json_data_callback.
json_data = transaction.get_dict()
path = _create_v1_path("/send/%s", transaction.transaction_id)
response = await self.client.put_json(
transaction.destination,
path=path,
data=json_data,
json_data_callback=json_data_callback,
long_retries=True,
backoff_on_404=True, # If we get a 404 the other side has gone
try_trailing_slash_on_400=True,
)
return response
@log_function
async def make_query(
self, destination, query_type, args, retry_on_dns_fail, ignore_backoff=False
):
path = _create_v1_path("/query/%s", query_type)
content = await self.client.get_json(
destination=destination,
path=path,
args=args,
retry_on_dns_fail=retry_on_dns_fail,
timeout=10000,
ignore_backoff=ignore_backoff,
)
return content
@log_function
async def make_membership_event(
self, destination, room_id, user_id, membership, params
):
"""Asks a remote server to build and sign us a membership event
Note that this does not append any events to any graphs.
Args:
destination (str): address of remote homeserver
room_id (str): room to join/leave
user_id (str): user to be joined/left
membership (str): one of join/leave
params (dict[str, str|Iterable[str]]): Query parameters to include in the
request.
Returns:
Succeeds when we get a 2xx HTTP response. The result
will be the decoded JSON body (ie, the new event).
Fails with ``HTTPRequestException`` if we get an HTTP response
code >= 300.
Fails with ``NotRetryingDestination`` if we are not yet ready
to retry this server.
Fails with ``FederationDeniedError`` if the remote destination
is not in our federation whitelist
"""
valid_memberships = {Membership.JOIN, Membership.LEAVE}
if membership not in valid_memberships:
raise RuntimeError(
"make_membership_event called with membership='%s', must be one of %s"
% (membership, ",".join(valid_memberships))
)
path = _create_v1_path("/make_%s/%s/%s", membership, room_id, user_id)
ignore_backoff = False
retry_on_dns_fail = False
if membership == Membership.LEAVE:
# we particularly want to do our best to send leave events. The
# problem is that if it fails, we won't retry it later, so if the
# remote server was just having a momentary blip, the room will be
# out of sync.
ignore_backoff = True
retry_on_dns_fail = True
content = await self.client.get_json(
destination=destination,
path=path,
args=params,
retry_on_dns_fail=retry_on_dns_fail,
timeout=20000,
ignore_backoff=ignore_backoff,
)
return content
@log_function
async def send_join_v1(self, destination, room_id, event_id, content):
path = _create_v1_path("/send_join/%s/%s", room_id, event_id)
response = await self.client.put_json(
destination=destination, path=path, data=content
)
return response
@log_function
async def send_join_v2(self, destination, room_id, event_id, content):
path = _create_v2_path("/send_join/%s/%s", room_id, event_id)
response = await self.client.put_json(
destination=destination, path=path, data=content
)
return response
@log_function
async def send_leave_v1(self, destination, room_id, event_id, content):
path = _create_v1_path("/send_leave/%s/%s", room_id, event_id)
response = await self.client.put_json(
destination=destination,
path=path,
data=content,
# we want to do our best to send this through. The problem is
# that if it fails, we won't retry it later, so if the remote
# server was just having a momentary blip, the room will be out of
# sync.
ignore_backoff=True,
)
return response
@log_function
async def send_leave_v2(self, destination, room_id, event_id, content):
path = _create_v2_path("/send_leave/%s/%s", room_id, event_id)
response = await self.client.put_json(
destination=destination,
path=path,
data=content,
# we want to do our best to send this through. The problem is
# that if it fails, we won't retry it later, so if the remote
# server was just having a momentary blip, the room will be out of
# sync.
ignore_backoff=True,
)
return response
@log_function
async def send_invite_v1(self, destination, room_id, event_id, content):
path = _create_v1_path("/invite/%s/%s", room_id, event_id)
response = await self.client.put_json(
destination=destination, path=path, data=content, ignore_backoff=True
)
return response
@log_function
async def send_invite_v2(self, destination, room_id, event_id, content):
path = _create_v2_path("/invite/%s/%s", room_id, event_id)
response = await self.client.put_json(
destination=destination, path=path, data=content, ignore_backoff=True
)
return response
@log_function
async def get_public_rooms(
self,
remote_server: str,
limit: Optional[int] = None,
since_token: Optional[str] = None,
search_filter: Optional[Dict] = None,
include_all_networks: bool = False,
third_party_instance_id: Optional[str] = None,
):
"""Get the list of public rooms from a remote homeserver
See synapse.federation.federation_client.FederationClient.get_public_rooms for
more information.
"""
if search_filter:
# this uses MSC2197 (Search Filtering over Federation)
path = _create_v1_path("/publicRooms")
data = {
"include_all_networks": "true" if include_all_networks else "false"
} # type: Dict[str, Any]
if third_party_instance_id:
data["third_party_instance_id"] = third_party_instance_id
if limit:
data["limit"] = str(limit)
if since_token:
data["since"] = since_token
data["filter"] = search_filter
try:
response = await self.client.post_json(
destination=remote_server, path=path, data=data, ignore_backoff=True
)
except HttpResponseException as e:
if e.code == 403:
raise SynapseError(
403,
"You are not allowed to view the public rooms list of %s"
% (remote_server,),
errcode=Codes.FORBIDDEN,
)
raise
else:
path = _create_v1_path("/publicRooms")
args = {
"include_all_networks": "true" if include_all_networks else "false"
} # type: Dict[str, Any]
if third_party_instance_id:
args["third_party_instance_id"] = (third_party_instance_id,)
if limit:
args["limit"] = [str(limit)]
if since_token:
args["since"] = [since_token]
try:
response = await self.client.get_json(
destination=remote_server, path=path, args=args, ignore_backoff=True
)
except HttpResponseException as e:
if e.code == 403:
raise SynapseError(
403,
"You are not allowed to view the public rooms list of %s"
% (remote_server,),
errcode=Codes.FORBIDDEN,
)
raise
return response
@log_function
async def exchange_third_party_invite(self, destination, room_id, event_dict):
path = _create_v1_path("/exchange_third_party_invite/%s", room_id)
response = await self.client.put_json(
destination=destination, path=path, data=event_dict
)
return response
@log_function
async def get_event_auth(self, destination, room_id, event_id):
path = _create_v1_path("/event_auth/%s/%s", room_id, event_id)
content = await self.client.get_json(destination=destination, path=path)
return content
@log_function
async def query_client_keys(self, destination, query_content, timeout):
"""Query the device keys for a list of user ids hosted on a remote
server.
Request:
{
"device_keys": {
"<user_id>": ["<device_id>"]
}
}
Response:
{
"device_keys": {
"<user_id>": {
"<device_id>": {...}
}
},
"master_key": {
"<user_id>": {...}
}
},
"self_signing_key": {
"<user_id>": {...}
}
}
Args:
destination(str): The server to query.
query_content(dict): The user ids to query.
Returns:
A dict containing device and cross-signing keys.
"""
path = _create_v1_path("/user/keys/query")
content = await self.client.post_json(
destination=destination, path=path, data=query_content, timeout=timeout
)
return content
@log_function
async def query_user_devices(self, destination, user_id, timeout):
"""Query the devices for a user id hosted on a remote server.
Response:
{
"stream_id": "...",
"devices": [ { ... } ],
"master_key": {
"user_id": "<user_id>",
"usage": [...],
"keys": {...},
"signatures": {
"<user_id>": {...}
}
},
"self_signing_key": {
"user_id": "<user_id>",
"usage": [...],
"keys": {...},
"signatures": {
"<user_id>": {...}
}
}
}
Args:
destination(str): The server to query.
query_content(dict): The user ids to query.
Returns:
A dict containing device and cross-signing keys.
"""
path = _create_v1_path("/user/devices/%s", user_id)
content = await self.client.get_json(
destination=destination, path=path, timeout=timeout
)
return content
@log_function
async def claim_client_keys(self, destination, query_content, timeout):
"""Claim one-time keys for a list of devices hosted on a remote server.
Request:
{
"one_time_keys": {
"<user_id>": {
"<device_id>": "<algorithm>"
}
}
}
Response:
{
"device_keys": {
"<user_id>": {
"<device_id>": {
"<algorithm>:<key_id>": "<key_base64>"
}
}
}
}
Args:
destination(str): The server to query.
query_content(dict): The user ids to query.
Returns:
A dict containing the one-time keys.
"""
path = _create_v1_path("/user/keys/claim")
content = await self.client.post_json(
destination=destination, path=path, data=query_content, timeout=timeout
)
return content
@log_function
async def get_missing_events(
self,
destination,
room_id,
earliest_events,
latest_events,
limit,
min_depth,
timeout,
):
path = _create_v1_path("/get_missing_events/%s", room_id)
content = await self.client.post_json(
destination=destination,
path=path,
data={
"limit": int(limit),
"min_depth": int(min_depth),
"earliest_events": earliest_events,
"latest_events": latest_events,
},
timeout=timeout,
)
return content
@log_function
def get_group_profile(self, destination, group_id, requester_user_id):
"""Get a group profile
"""
path = _create_v1_path("/groups/%s/profile", group_id)
return self.client.get_json(
destination=destination,
path=path,
args={"requester_user_id": requester_user_id},
ignore_backoff=True,
)
@log_function
def update_group_profile(self, destination, group_id, requester_user_id, content):
"""Update a remote group profile
Args:
destination (str)
group_id (str)
requester_user_id (str)
content (dict): The new profile of the group
"""
path = _create_v1_path("/groups/%s/profile", group_id)
return self.client.post_json(
destination=destination,
path=path,
args={"requester_user_id": requester_user_id},
data=content,
ignore_backoff=True,
)
@log_function
def get_group_summary(self, destination, group_id, requester_user_id):
"""Get a group summary
"""
path = _create_v1_path("/groups/%s/summary", group_id)
return self.client.get_json(
destination=destination,
path=path,
args={"requester_user_id": requester_user_id},
ignore_backoff=True,
)
@log_function
def get_rooms_in_group(self, destination, group_id, requester_user_id):
"""Get all rooms in a group
"""
path = _create_v1_path("/groups/%s/rooms", group_id)
return self.client.get_json(
destination=destination,
path=path,
args={"requester_user_id": requester_user_id},
ignore_backoff=True,
)
    def add_room_to_group(
        self, destination, group_id, requester_user_id, room_id, content
    ):
        """Add a room to a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            room_id (str)
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/room/%s", group_id, room_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    def update_room_in_group(
        self, destination, group_id, requester_user_id, room_id, config_key, content
    ):
        """Update room in group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            room_id (str)
            config_key (str): The config key to set, interpolated into the path.
            content (dict): Request body.
        """
        path = _create_v1_path(
            "/groups/%s/room/%s/config/%s", group_id, room_id, config_key
        )

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    def remove_room_from_group(self, destination, group_id, requester_user_id, room_id):
        """Remove a room from a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            room_id (str)
        """
        path = _create_v1_path("/groups/%s/room/%s", group_id, room_id)

        return self.client.delete_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )
    @log_function
    def get_users_in_group(self, destination, group_id, requester_user_id):
        """Get users in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
        """
        path = _create_v1_path("/groups/%s/users", group_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def get_invited_users_in_group(self, destination, group_id, requester_user_id):
        """Get users that have been invited to a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
        """
        path = _create_v1_path("/groups/%s/invited_users", group_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def accept_group_invite(self, destination, group_id, user_id, content):
        """Accept a group invite.

        Args:
            destination (str)
            group_id (str)
            user_id (str)
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/users/%s/accept_invite", group_id, user_id)

        return self.client.post_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )

    @log_function
    def join_group(self, destination, group_id, user_id, content):
        """Attempts to join a group.

        Args:
            destination (str)
            group_id (str)
            user_id (str)
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/users/%s/join", group_id, user_id)

        return self.client.post_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )
    @log_function
    def invite_to_group(
        self, destination, group_id, user_id, requester_user_id, content
    ):
        """Invite a user to a group.

        Args:
            destination (str)
            group_id (str)
            user_id (str): The user being invited.
            requester_user_id (str): The user doing the inviting.
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/users/%s/invite", group_id, user_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def invite_to_group_notification(self, destination, group_id, user_id, content):
        """Sent by group server to inform a user's server that they have been
        invited.
        """
        # Note the "local" path segment: this hits the invited user's server.
        path = _create_v1_path("/groups/local/%s/users/%s/invite", group_id, user_id)

        return self.client.post_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )

    @log_function
    def remove_user_from_group(
        self, destination, group_id, requester_user_id, user_id, content
    ):
        """Remove a user from a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str): The user requesting the removal.
            user_id (str): The user being removed.
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/users/%s/remove", group_id, user_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def remove_user_from_group_notification(
        self, destination, group_id, user_id, content
    ):
        """Sent by group server to inform a user's server that they have been
        kicked from the group.
        """
        # Note the "local" path segment: this hits the removed user's server.
        path = _create_v1_path("/groups/local/%s/users/%s/remove", group_id, user_id)

        return self.client.post_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )

    @log_function
    def renew_group_attestation(self, destination, group_id, user_id, content):
        """Sent by either a group server or a user's server to periodically update
        the attestations.
        """
        path = _create_v1_path("/groups/%s/renew_attestation/%s", group_id, user_id)

        return self.client.post_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )
    @log_function
    def update_group_summary_room(
        self, destination, group_id, user_id, room_id, category_id, content
    ):
        """Update a room entry in a group summary.

        Args:
            destination (str)
            group_id (str)
            user_id (str): The requesting user.
            room_id (str)
            category_id (str|None): If falsy, the room goes in the
                uncategorised part of the summary.
            content (dict): Request body.
        """
        if category_id:
            path = _create_v1_path(
                "/groups/%s/summary/categories/%s/rooms/%s",
                group_id,
                category_id,
                room_id,
            )
        else:
            path = _create_v1_path("/groups/%s/summary/rooms/%s", group_id, room_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def delete_group_summary_room(
        self, destination, group_id, user_id, room_id, category_id
    ):
        """Delete a room entry in a group summary.

        Args:
            destination (str)
            group_id (str)
            user_id (str): The requesting user.
            room_id (str)
            category_id (str|None): If falsy, targets the uncategorised
                part of the summary.
        """
        if category_id:
            path = _create_v1_path(
                "/groups/%s/summary/categories/%s/rooms/%s",
                group_id,
                category_id,
                room_id,
            )
        else:
            path = _create_v1_path("/groups/%s/summary/rooms/%s", group_id, room_id)

        return self.client.delete_json(
            destination=destination,
            path=path,
            args={"requester_user_id": user_id},
            ignore_backoff=True,
        )
    @log_function
    def get_group_categories(self, destination, group_id, requester_user_id):
        """Get all categories in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
        """
        path = _create_v1_path("/groups/%s/categories", group_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def get_group_category(self, destination, group_id, requester_user_id, category_id):
        """Get category info in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            category_id (str)
        """
        path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def update_group_category(
        self, destination, group_id, requester_user_id, category_id, content
    ):
        """Update a category in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            category_id (str)
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def delete_group_category(
        self, destination, group_id, requester_user_id, category_id
    ):
        """Delete a category in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            category_id (str)
        """
        path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id)

        return self.client.delete_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )
    @log_function
    def get_group_roles(self, destination, group_id, requester_user_id):
        """Get all roles in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
        """
        path = _create_v1_path("/groups/%s/roles", group_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def get_group_role(self, destination, group_id, requester_user_id, role_id):
        """Get a roles info.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            role_id (str)
        """
        path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def update_group_role(
        self, destination, group_id, requester_user_id, role_id, content
    ):
        """Update a role in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            role_id (str)
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def delete_group_role(self, destination, group_id, requester_user_id, role_id):
        """Delete a role in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            role_id (str)
        """
        path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id)

        return self.client.delete_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )
    @log_function
    def update_group_summary_user(
        self, destination, group_id, requester_user_id, user_id, role_id, content
    ):
        """Update a users entry in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            user_id (str)
            role_id (str|None): If falsy, the user goes in the roleless
                part of the summary.
            content (dict): Request body.
        """
        if role_id:
            path = _create_v1_path(
                "/groups/%s/summary/roles/%s/users/%s", group_id, role_id, user_id
            )
        else:
            path = _create_v1_path("/groups/%s/summary/users/%s", group_id, user_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def set_group_join_policy(self, destination, group_id, requester_user_id, content):
        """Sets the join policy for a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/settings/m.join_policy", group_id)

        return self.client.put_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def delete_group_summary_user(
        self, destination, group_id, requester_user_id, user_id, role_id
    ):
        """Delete a users entry in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            user_id (str)
            role_id (str|None): If falsy, targets the roleless part of
                the summary.
        """
        if role_id:
            path = _create_v1_path(
                "/groups/%s/summary/roles/%s/users/%s", group_id, role_id, user_id
            )
        else:
            path = _create_v1_path("/groups/%s/summary/users/%s", group_id, user_id)

        return self.client.delete_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )
    def bulk_get_publicised_groups(self, destination, user_ids):
        """Get the groups a list of users are publicising.

        Args:
            destination (str)
            user_ids (list[str])
        """
        path = _create_v1_path("/get_groups_publicised")

        content = {"user_ids": user_ids}

        return self.client.post_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )

    def get_room_complexity(self, destination, room_id):
        """Fetch the complexity of the given room from the remote server.

        Args:
            destination (str): The remote server
            room_id (str): The room ID to ask about.
        """
        # This endpoint is unstable (MSC-level), hence the unstable prefix.
        path = _create_path(FEDERATION_UNSTABLE_PREFIX, "/rooms/%s/complexity", room_id)

        return self.client.get_json(destination=destination, path=path)
def _create_path(federation_prefix, path, *args):
"""
Ensures that all args are url encoded.
"""
return federation_prefix + path % tuple(urllib.parse.quote(arg, "") for arg in args)
def _create_v1_path(path, *args):
    """Build a path against the V1 federation API from a path template.

    All substituted args are URL-encoded.

    Example:
        _create_v1_path("/event/%s", event_id)

    Args:
        path (str): String template for the path
        args ([str]): Args to insert into path. Each arg will be url encoded

    Returns:
        str
    """
    full_args = (FEDERATION_V1_PREFIX, path) + args
    return _create_path(*full_args)
def _create_v2_path(path, *args):
    """Build a path against the V2 federation API from a path template.

    All substituted args are URL-encoded.

    Example:
        _create_v2_path("/event/%s", event_id)

    Args:
        path (str): String template for the path
        args ([str]): Args to insert into path. Each arg will be url encoded

    Returns:
        str
    """
    full_args = (FEDERATION_V2_PREFIX, path) + args
    return _create_path(*full_args)
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib
from typing import Any, Dict, Optional
from synapse.api.constants import Membership
from synapse.api.errors import Codes, HttpResponseException, SynapseError
from synapse.api.urls import (
FEDERATION_UNSTABLE_PREFIX,
FEDERATION_V1_PREFIX,
FEDERATION_V2_PREFIX,
)
from synapse.logging.utils import log_function
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
class TransportLayerClient:
    """Sends federation HTTP requests to other servers"""

    def __init__(self, hs):
        # Our own server name; used to refuse sending a transaction
        # to ourselves (see send_transaction).
        self.server_name = hs.hostname
        # The federation-capable HTTP client that all requests go through.
        self.client = hs.get_federation_http_client()
    @log_function
    def get_room_state_ids(self, destination, room_id, event_id):
        """ Requests all state for a given room from the given server at the
        given event. Returns the state's event_id's

        Args:
            destination (str): The host name of the remote homeserver we want
                to get the state from.
            room_id (str): The room we want the state of.
            event_id (str): The event we want the context at.

        Returns:
            Awaitable: Results in a dict received from the remote homeserver.
        """
        logger.debug("get_room_state_ids dest=%s, room=%s", destination, room_id)

        path = _create_v1_path("/state_ids/%s", room_id)
        return self.client.get_json(
            destination,
            path=path,
            args={"event_id": event_id},
            try_trailing_slash_on_400=True,
        )
    @log_function
    def get_event(self, destination, event_id, timeout=None):
        """ Requests the pdu with the given id and origin from the given server.

        Args:
            destination (str): The host name of the remote homeserver we want
                to get the state from.
            event_id (str): The id of the event being requested.
            timeout (int): How long to try (in ms) the destination for before
                giving up. None indicates no timeout.

        Returns:
            Awaitable: Results in a dict received from the remote homeserver.
        """
        logger.debug("get_pdu dest=%s, event_id=%s", destination, event_id)

        path = _create_v1_path("/event/%s", event_id)
        return self.client.get_json(
            destination, path=path, timeout=timeout, try_trailing_slash_on_400=True
        )
    @log_function
    def backfill(self, destination, room_id, event_tuples, limit):
        """ Requests `limit` previous PDUs in a given context before list of
        PDUs.

        Args:
            destination (str)
            room_id (str)
            event_tuples (list): Event ids to backfill from.
            limit (int): Maximum number of PDUs to return.

        Returns:
            Awaitable: Results in a dict received from the remote homeserver.
        """
        logger.debug(
            "backfill dest=%s, room_id=%s, event_tuples=%r, limit=%s",
            destination,
            room_id,
            event_tuples,
            str(limit),
        )

        if not event_tuples:
            # TODO: raise?
            return

        path = _create_v1_path("/backfill/%s", room_id)

        # Query-arg values must be sequences; "v" carries the backfill points.
        args = {"v": event_tuples, "limit": [str(limit)]}

        return self.client.get_json(
            destination, path=path, args=args, try_trailing_slash_on_400=True
        )
    @log_function
    async def send_transaction(self, transaction, json_data_callback=None):
        """ Sends the given Transaction to its destination

        Args:
            transaction (Transaction)
            json_data_callback (callable|None): If given, used by the HTTP
                client to (re)generate the JSON body of the request.

        Returns:
            Succeeds when we get a 2xx HTTP response. The result
            will be the decoded JSON body.

            Fails with ``HTTPRequestException`` if we get an HTTP response
            code >= 300.

            Fails with ``NotRetryingDestination`` if we are not yet ready
            to retry this server.

            Fails with ``FederationDeniedError`` if this destination
            is not on our federation whitelist

        Raises:
            RuntimeError: if the transaction is addressed to ourselves.
        """
        logger.debug(
            "send_data dest=%s, txid=%s",
            transaction.destination,
            transaction.transaction_id,
        )

        if transaction.destination == self.server_name:
            raise RuntimeError("Transport layer cannot send to itself!")

        # FIXME: This is only used by the tests. The actual json sent is
        # generated by the json_data_callback.
        json_data = transaction.get_dict()

        path = _create_v1_path("/send/%s", transaction.transaction_id)

        response = await self.client.put_json(
            transaction.destination,
            path=path,
            data=json_data,
            json_data_callback=json_data_callback,
            long_retries=True,
            backoff_on_404=True,  # If we get a 404 the other side has gone
            try_trailing_slash_on_400=True,
        )

        return response
    @log_function
    async def make_query(
        self, destination, query_type, args, retry_on_dns_fail, ignore_backoff=False
    ):
        """Sends a federation query to the given server.

        Args:
            destination (str): The remote server to query.
            query_type (str): Appended to the query path, e.g. "profile".
            args (dict): Query-string parameters.
            retry_on_dns_fail (bool): Whether to retry the request on DNS
                failure.
            ignore_backoff (bool): True to ignore the historical backoff data
                and try the request anyway.

        Returns:
            dict: The decoded JSON response from the remote homeserver.
        """
        path = _create_v1_path("/query/%s", query_type)

        content = await self.client.get_json(
            destination=destination,
            path=path,
            args=args,
            retry_on_dns_fail=retry_on_dns_fail,
            timeout=10000,
            ignore_backoff=ignore_backoff,
        )

        return content
    @log_function
    async def make_membership_event(
        self, destination, room_id, user_id, membership, params
    ):
        """Asks a remote server to build and sign us a membership event

        Note that this does not append any events to any graphs.

        Args:
            destination (str): address of remote homeserver
            room_id (str): room to join/leave
            user_id (str): user to be joined/left
            membership (str): one of join/leave
            params (dict[str, str|Iterable[str]]): Query parameters to include in the
                request.

        Returns:
            Succeeds when we get a 2xx HTTP response. The result
            will be the decoded JSON body (ie, the new event).

            Fails with ``HTTPRequestException`` if we get an HTTP response
            code >= 300.

            Fails with ``NotRetryingDestination`` if we are not yet ready
            to retry this server.

            Fails with ``FederationDeniedError`` if the remote destination
            is not in our federation whitelist

        Raises:
            RuntimeError: if ``membership`` is not join or leave.
        """
        # Only join and leave have make_* federation endpoints.
        valid_memberships = {Membership.JOIN, Membership.LEAVE}
        if membership not in valid_memberships:
            raise RuntimeError(
                "make_membership_event called with membership='%s', must be one of %s"
                % (membership, ",".join(valid_memberships))
            )
        path = _create_v1_path("/make_%s/%s/%s", membership, room_id, user_id)

        ignore_backoff = False
        retry_on_dns_fail = False

        if membership == Membership.LEAVE:
            # we particularly want to do our best to send leave events. The
            # problem is that if it fails, we won't retry it later, so if the
            # remote server was just having a momentary blip, the room will be
            # out of sync.
            ignore_backoff = True
            retry_on_dns_fail = True

        content = await self.client.get_json(
            destination=destination,
            path=path,
            args=params,
            retry_on_dns_fail=retry_on_dns_fail,
            timeout=20000,
            ignore_backoff=ignore_backoff,
        )

        return content
    @log_function
    async def send_join_v1(self, destination, room_id, event_id, content):
        """Send a join event to a remote homeserver (v1 federation API)."""
        path = _create_v1_path("/send_join/%s/%s", room_id, event_id)

        response = await self.client.put_json(
            destination=destination, path=path, data=content
        )

        return response

    @log_function
    async def send_join_v2(self, destination, room_id, event_id, content):
        """Send a join event to a remote homeserver (v2 federation API)."""
        path = _create_v2_path("/send_join/%s/%s", room_id, event_id)

        response = await self.client.put_json(
            destination=destination, path=path, data=content
        )

        return response

    @log_function
    async def send_leave_v1(self, destination, room_id, event_id, content):
        """Send a leave event to a remote homeserver (v1 federation API)."""
        path = _create_v1_path("/send_leave/%s/%s", room_id, event_id)

        response = await self.client.put_json(
            destination=destination,
            path=path,
            data=content,
            # we want to do our best to send this through. The problem is
            # that if it fails, we won't retry it later, so if the remote
            # server was just having a momentary blip, the room will be out of
            # sync.
            ignore_backoff=True,
        )

        return response

    @log_function
    async def send_leave_v2(self, destination, room_id, event_id, content):
        """Send a leave event to a remote homeserver (v2 federation API)."""
        path = _create_v2_path("/send_leave/%s/%s", room_id, event_id)

        response = await self.client.put_json(
            destination=destination,
            path=path,
            data=content,
            # we want to do our best to send this through. The problem is
            # that if it fails, we won't retry it later, so if the remote
            # server was just having a momentary blip, the room will be out of
            # sync.
            ignore_backoff=True,
        )

        return response

    @log_function
    async def send_invite_v1(self, destination, room_id, event_id, content):
        """Send an invite event to a remote homeserver (v1 federation API)."""
        path = _create_v1_path("/invite/%s/%s", room_id, event_id)

        response = await self.client.put_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )

        return response

    @log_function
    async def send_invite_v2(self, destination, room_id, event_id, content):
        """Send an invite event to a remote homeserver (v2 federation API)."""
        path = _create_v2_path("/invite/%s/%s", room_id, event_id)

        response = await self.client.put_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )

        return response
@log_function
async def get_public_rooms(
self,
remote_server: str,
limit: Optional[int] = None,
since_token: Optional[str] = None,
search_filter: Optional[Dict] = None,
include_all_networks: bool = False,
third_party_instance_id: Optional[str] = None,
):
"""Get the list of public rooms from a remote homeserver
See synapse.federation.federation_client.FederationClient.get_public_rooms for
more information.
"""
if search_filter:
# this uses MSC2197 (Search Filtering over Federation)
path = _create_v1_path("/publicRooms")
data = {
"include_all_networks": "true" if include_all_networks else "false"
} # type: Dict[str, Any]
if third_party_instance_id:
data["third_party_instance_id"] = third_party_instance_id
if limit:
data["limit"] = str(limit)
if since_token:
data["since"] = since_token
data["filter"] = search_filter
try:
response = await self.client.post_json(
destination=remote_server, path=path, data=data, ignore_backoff=True
)
except HttpResponseException as e:
if e.code == 403:
raise SynapseError(
403,
"You are not allowed to view the public rooms list of %s"
% (remote_server,),
errcode=Codes.FORBIDDEN,
)
raise
else:
path = _create_v1_path("/publicRooms")
args = {
"include_all_networks": "true" if include_all_networks else "false"
} # type: Dict[str, Any]
if third_party_instance_id:
args["third_party_instance_id"] = (third_party_instance_id,)
if limit:
args["limit"] = [str(limit)]
if since_token:
args["since"] = [since_token]
try:
response = await self.client.get_json(
destination=remote_server, path=path, args=args, ignore_backoff=True
)
except HttpResponseException as e:
if e.code == 403:
raise SynapseError(
403,
"You are not allowed to view the public rooms list of %s"
% (remote_server,),
errcode=Codes.FORBIDDEN,
)
raise
return response
    @log_function
    async def exchange_third_party_invite(self, destination, room_id, event_dict):
        """Ask the remote server to exchange a third-party invite in the room
        for the membership event described by ``event_dict``.
        """
        path = _create_v1_path("/exchange_third_party_invite/%s", room_id)

        response = await self.client.put_json(
            destination=destination, path=path, data=event_dict
        )

        return response

    @log_function
    async def get_event_auth(self, destination, room_id, event_id):
        """Fetch the auth chain for the given event from the remote server."""
        path = _create_v1_path("/event_auth/%s/%s", room_id, event_id)

        content = await self.client.get_json(destination=destination, path=path)

        return content
    @log_function
    async def query_client_keys(self, destination, query_content, timeout):
        """Query the device keys for a list of user ids hosted on a remote
        server.

        Request:
            {
              "device_keys": {
                "<user_id>": ["<device_id>"]
              }
            }

        Response:
            {
              "device_keys": {
                "<user_id>": {
                  "<device_id>": {...}
                }
              },
              "master_key": {
                "<user_id>": {...}
              },
              "self_signing_key": {
                "<user_id>": {...}
              }
            }

        Args:
            destination(str): The server to query.
            query_content(dict): The user ids to query.
        Returns:
            A dict containing device and cross-signing keys.
        """
        path = _create_v1_path("/user/keys/query")

        content = await self.client.post_json(
            destination=destination, path=path, data=query_content, timeout=timeout
        )
        return content
    @log_function
    async def query_user_devices(self, destination, user_id, timeout):
        """Query the devices for a user id hosted on a remote server.

        Response:
            {
              "stream_id": "...",
              "devices": [ { ... } ],
              "master_key": {
                "user_id": "<user_id>",
                "usage": [...],
                "keys": {...},
                "signatures": {
                  "<user_id>": {...}
                }
              },
              "self_signing_key": {
                "user_id": "<user_id>",
                "usage": [...],
                "keys": {...},
                "signatures": {
                  "<user_id>": {...}
                }
              }
            }

        Args:
            destination(str): The server to query.
            user_id(str): The user id to query.
            timeout(int): Request timeout passed to the HTTP client.
        Returns:
            A dict containing device and cross-signing keys.
        """
        path = _create_v1_path("/user/devices/%s", user_id)

        content = await self.client.get_json(
            destination=destination, path=path, timeout=timeout
        )
        return content
    @log_function
    async def claim_client_keys(self, destination, query_content, timeout):
        """Claim one-time keys for a list of devices hosted on a remote server.

        Request:
            {
              "one_time_keys": {
                "<user_id>": {
                  "<device_id>": "<algorithm>"
                }
              }
            }

        Response:
            {
              "device_keys": {
                "<user_id>": {
                  "<device_id>": {
                    "<algorithm>:<key_id>": "<key_base64>"
                  }
                }
              }
            }

        Args:
            destination(str): The server to query.
            query_content(dict): The user ids to query.
        Returns:
            A dict containing the one-time keys.
        """
        path = _create_v1_path("/user/keys/claim")

        content = await self.client.post_json(
            destination=destination, path=path, data=query_content, timeout=timeout
        )
        return content
    @log_function
    async def get_missing_events(
        self,
        destination,
        room_id,
        earliest_events,
        latest_events,
        limit,
        min_depth,
        timeout,
    ):
        """Ask the remote server for events we are missing in the given room.

        Args:
            destination (str)
            room_id (str)
            earliest_events (list): Event ids we already have.
            latest_events (list): Event ids to work backwards from.
            limit (int): Maximum number of events to return.
            min_depth (int): Minimum depth of events to return.
            timeout (int): Request timeout passed to the HTTP client.

        Returns:
            The decoded JSON response from the remote homeserver.
        """
        path = _create_v1_path("/get_missing_events/%s", room_id)

        content = await self.client.post_json(
            destination=destination,
            path=path,
            data={
                "limit": int(limit),
                "min_depth": int(min_depth),
                "earliest_events": earliest_events,
                "latest_events": latest_events,
            },
            timeout=timeout,
        )
        return content
    @log_function
    def get_group_profile(self, destination, group_id, requester_user_id):
        """Get a group profile.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
        """
        path = _create_v1_path("/groups/%s/profile", group_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def update_group_profile(self, destination, group_id, requester_user_id, content):
        """Update a remote group profile.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            content (dict): The new profile of the group
        """
        path = _create_v1_path("/groups/%s/profile", group_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def get_group_summary(self, destination, group_id, requester_user_id):
        """Get a group summary.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
        """
        path = _create_v1_path("/groups/%s/summary", group_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def get_rooms_in_group(self, destination, group_id, requester_user_id):
        """Get all rooms in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
        """
        path = _create_v1_path("/groups/%s/rooms", group_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )
    def add_room_to_group(
        self, destination, group_id, requester_user_id, room_id, content
    ):
        """Add a room to a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            room_id (str)
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/room/%s", group_id, room_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    def update_room_in_group(
        self, destination, group_id, requester_user_id, room_id, config_key, content
    ):
        """Update room in group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            room_id (str)
            config_key (str): The config key to set, interpolated into the path.
            content (dict): Request body.
        """
        path = _create_v1_path(
            "/groups/%s/room/%s/config/%s", group_id, room_id, config_key
        )

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    def remove_room_from_group(self, destination, group_id, requester_user_id, room_id):
        """Remove a room from a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            room_id (str)
        """
        path = _create_v1_path("/groups/%s/room/%s", group_id, room_id)

        return self.client.delete_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )
    @log_function
    def get_users_in_group(self, destination, group_id, requester_user_id):
        """Get users in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
        """
        path = _create_v1_path("/groups/%s/users", group_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def get_invited_users_in_group(self, destination, group_id, requester_user_id):
        """Get users that have been invited to a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
        """
        path = _create_v1_path("/groups/%s/invited_users", group_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def accept_group_invite(self, destination, group_id, user_id, content):
        """Accept a group invite.

        Args:
            destination (str)
            group_id (str)
            user_id (str)
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/users/%s/accept_invite", group_id, user_id)

        return self.client.post_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )

    @log_function
    def join_group(self, destination, group_id, user_id, content):
        """Attempts to join a group.

        Args:
            destination (str)
            group_id (str)
            user_id (str)
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/users/%s/join", group_id, user_id)

        return self.client.post_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )
    @log_function
    def invite_to_group(
        self, destination, group_id, user_id, requester_user_id, content
    ):
        """Invite a user to a group.

        Args:
            destination (str)
            group_id (str)
            user_id (str): The user being invited.
            requester_user_id (str): The user doing the inviting.
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/users/%s/invite", group_id, user_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def invite_to_group_notification(self, destination, group_id, user_id, content):
        """Sent by group server to inform a user's server that they have been
        invited.
        """
        # Note the "local" path segment: this hits the invited user's server.
        path = _create_v1_path("/groups/local/%s/users/%s/invite", group_id, user_id)

        return self.client.post_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )

    @log_function
    def remove_user_from_group(
        self, destination, group_id, requester_user_id, user_id, content
    ):
        """Remove a user from a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str): The user requesting the removal.
            user_id (str): The user being removed.
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/users/%s/remove", group_id, user_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def remove_user_from_group_notification(
        self, destination, group_id, user_id, content
    ):
        """Sent by group server to inform a user's server that they have been
        kicked from the group.
        """
        # Note the "local" path segment: this hits the removed user's server.
        path = _create_v1_path("/groups/local/%s/users/%s/remove", group_id, user_id)

        return self.client.post_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )

    @log_function
    def renew_group_attestation(self, destination, group_id, user_id, content):
        """Sent by either a group server or a user's server to periodically update
        the attestations.
        """
        path = _create_v1_path("/groups/%s/renew_attestation/%s", group_id, user_id)

        return self.client.post_json(
            destination=destination, path=path, data=content, ignore_backoff=True
        )
    @log_function
    def update_group_summary_room(
        self, destination, group_id, user_id, room_id, category_id, content
    ):
        """Update a room entry in a group summary.

        Args:
            destination (str)
            group_id (str)
            user_id (str): The requesting user.
            room_id (str)
            category_id (str|None): If falsy, the room goes in the
                uncategorised part of the summary.
            content (dict): Request body.
        """
        if category_id:
            path = _create_v1_path(
                "/groups/%s/summary/categories/%s/rooms/%s",
                group_id,
                category_id,
                room_id,
            )
        else:
            path = _create_v1_path("/groups/%s/summary/rooms/%s", group_id, room_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def delete_group_summary_room(
        self, destination, group_id, user_id, room_id, category_id
    ):
        """Delete a room entry in a group summary.

        Args:
            destination (str)
            group_id (str)
            user_id (str): The requesting user.
            room_id (str)
            category_id (str|None): If falsy, targets the uncategorised
                part of the summary.
        """
        if category_id:
            path = _create_v1_path(
                "/groups/%s/summary/categories/%s/rooms/%s",
                group_id,
                category_id,
                room_id,
            )
        else:
            path = _create_v1_path("/groups/%s/summary/rooms/%s", group_id, room_id)

        return self.client.delete_json(
            destination=destination,
            path=path,
            args={"requester_user_id": user_id},
            ignore_backoff=True,
        )
    @log_function
    def get_group_categories(self, destination, group_id, requester_user_id):
        """Get all categories in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
        """
        path = _create_v1_path("/groups/%s/categories", group_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def get_group_category(self, destination, group_id, requester_user_id, category_id):
        """Get category info in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            category_id (str)
        """
        path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def update_group_category(
        self, destination, group_id, requester_user_id, category_id, content
    ):
        """Update a category in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            category_id (str)
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def delete_group_category(
        self, destination, group_id, requester_user_id, category_id
    ):
        """Delete a category in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            category_id (str)
        """
        path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id)

        return self.client.delete_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )
    @log_function
    def get_group_roles(self, destination, group_id, requester_user_id):
        """Get all roles in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
        """
        path = _create_v1_path("/groups/%s/roles", group_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def get_group_role(self, destination, group_id, requester_user_id, role_id):
        """Get a roles info.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            role_id (str)
        """
        path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id)

        return self.client.get_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )

    @log_function
    def update_group_role(
        self, destination, group_id, requester_user_id, role_id, content
    ):
        """Update a role in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            role_id (str)
            content (dict): Request body.
        """
        path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id)

        return self.client.post_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            data=content,
            ignore_backoff=True,
        )

    @log_function
    def delete_group_role(self, destination, group_id, requester_user_id, role_id):
        """Delete a role in a group.

        Args:
            destination (str)
            group_id (str)
            requester_user_id (str)
            role_id (str)
        """
        path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id)

        return self.client.delete_json(
            destination=destination,
            path=path,
            args={"requester_user_id": requester_user_id},
            ignore_backoff=True,
        )
@log_function
def update_group_summary_user(
self, destination, group_id, requester_user_id, user_id, role_id, content
):
"""Update a users entry in a group
"""
if role_id:
path = _create_v1_path(
"/groups/%s/summary/roles/%s/users/%s", group_id, role_id, user_id
)
else:
path = _create_v1_path("/groups/%s/summary/users/%s", group_id, user_id)
return self.client.post_json(
destination=destination,
path=path,
args={"requester_user_id": requester_user_id},
data=content,
ignore_backoff=True,
)
@log_function
def set_group_join_policy(self, destination, group_id, requester_user_id, content):
"""Sets the join policy for a group
"""
path = _create_v1_path("/groups/%s/settings/m.join_policy", group_id)
return self.client.put_json(
destination=destination,
path=path,
args={"requester_user_id": requester_user_id},
data=content,
ignore_backoff=True,
)
@log_function
def delete_group_summary_user(
self, destination, group_id, requester_user_id, user_id, role_id
):
"""Delete a users entry in a group
"""
if role_id:
path = _create_v1_path(
"/groups/%s/summary/roles/%s/users/%s", group_id, role_id, user_id
)
else:
path = _create_v1_path("/groups/%s/summary/users/%s", group_id, user_id)
return self.client.delete_json(
destination=destination,
path=path,
args={"requester_user_id": requester_user_id},
ignore_backoff=True,
)
def bulk_get_publicised_groups(self, destination, user_ids):
"""Get the groups a list of users are publicising
"""
path = _create_v1_path("/get_groups_publicised")
content = {"user_ids": user_ids}
return self.client.post_json(
destination=destination, path=path, data=content, ignore_backoff=True
)
def get_room_complexity(self, destination, room_id):
"""
Args:
destination (str): The remote server
room_id (str): The room ID to ask about.
"""
path = _create_path(FEDERATION_UNSTABLE_PREFIX, "/rooms/%s/complexity", room_id)
return self.client.get_json(destination=destination, path=path)
def _create_path(federation_prefix, path, *args):
"""
Ensures that all args are url encoded.
"""
return federation_prefix + path % tuple(urllib.parse.quote(arg, "") for arg in args)
def _create_v1_path(path, *args):
    """Build a path under the V1 federation API prefix.

    Each arg is URL-encoded before being substituted into the template.

    Example:

        _create_v1_path("/event/%s", event_id)

    Args:
        path (str): String template for the path
        args: ([str]): Args to insert into path. Each arg will be url encoded

    Returns:
        str
    """
    return _create_path(FEDERATION_V1_PREFIX, path, *args)
def _create_v2_path(path, *args):
    """Build a path under the V2 federation API prefix.

    Each arg is URL-encoded before being substituted into the template.

    Example:

        _create_v2_path("/event/%s", event_id)

    Args:
        path (str): String template for the path
        args: ([str]): Args to insert into path. Each arg will be url encoded

    Returns:
        str
    """
    return _create_path(FEDERATION_V2_PREFIX, path, *args)
| open_redirect | {
"code": [
" self.client = hs.get_http_client()"
],
"line_no": [
38
]
} | {
"code": [
" self.client = hs.get_federation_http_client()"
],
"line_no": [
38
]
} |
import logging
import urllib
from typing import Any, Dict, Optional
from synapse.api.constants import Membership
from synapse.api.errors import Codes, HttpResponseException, SynapseError
from synapse.api.urls import (
FEDERATION_UNSTABLE_PREFIX,
FEDERATION_V1_PREFIX,
FEDERATION_V2_PREFIX,
)
from synapse.logging.utils import log_function
VAR_0 = logging.getLogger(__name__)
class CLASS_0:
def __init__(self, VAR_4):
self.server_name = VAR_4.hostname
self.client = VAR_4.get_http_client()
@log_function
def FUNC_3(self, VAR_5, VAR_6, VAR_7):
VAR_0.debug("get_room_state_ids dest=%s, room=%s", VAR_5, VAR_6)
VAR_2 = FUNC_1("/state_ids/%s", VAR_6)
return self.client.get_json(
VAR_5,
VAR_2=path,
VAR_3={"event_id": VAR_7},
try_trailing_slash_on_400=True,
)
@log_function
def FUNC_4(self, VAR_5, VAR_7, VAR_8=None):
VAR_0.debug("get_pdu dest=%s, VAR_7=%s", VAR_5, VAR_7)
VAR_2 = FUNC_1("/event/%s", VAR_7)
return self.client.get_json(
VAR_5, VAR_2=path, VAR_8=timeout, try_trailing_slash_on_400=True
)
@log_function
def FUNC_5(self, VAR_5, VAR_6, VAR_9, VAR_10):
VAR_0.debug(
"backfill dest=%s, VAR_6=%s, VAR_9=%r, VAR_10=%s",
VAR_5,
VAR_6,
VAR_9,
str(VAR_10),
)
if not VAR_9:
return
VAR_2 = FUNC_1("/FUNC_5/%s", VAR_6)
VAR_3 = {"v": VAR_9, "limit": [str(VAR_10)]}
return self.client.get_json(
VAR_5, VAR_2=path, VAR_3=args, try_trailing_slash_on_400=True
)
@log_function
async def FUNC_6(self, VAR_11, VAR_12=None):
VAR_0.debug(
"send_data dest=%s, txid=%s",
VAR_11.destination,
VAR_11.transaction_id,
)
if VAR_11.destination == self.server_name:
raise RuntimeError("Transport layer cannot send to itself!")
VAR_36 = VAR_11.get_dict()
VAR_2 = FUNC_1("/send/%s", VAR_11.transaction_id)
VAR_37 = await self.client.put_json(
VAR_11.destination,
VAR_2=path,
VAR_39=VAR_36,
VAR_12=json_data_callback,
long_retries=True,
backoff_on_404=True, # If we get a 404 the other side has gone
try_trailing_slash_on_400=True,
)
return VAR_37
@log_function
async def FUNC_7(
self, VAR_5, VAR_13, VAR_3, VAR_14, VAR_15=False
):
VAR_2 = FUNC_1("/query/%s", VAR_13)
VAR_19 = await self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3=args,
VAR_14=retry_on_dns_fail,
VAR_8=10000,
VAR_15=ignore_backoff,
)
return VAR_19
@log_function
async def FUNC_8(
self, VAR_5, VAR_6, VAR_16, VAR_17, VAR_18
):
VAR_38 = {Membership.JOIN, Membership.LEAVE}
if VAR_17 not in VAR_38:
raise RuntimeError(
"make_membership_event called with VAR_17='%s', must be one of %s"
% (VAR_17, ",".join(VAR_38))
)
VAR_2 = FUNC_1("/make_%s/%s/%s", VAR_17, VAR_6, VAR_16)
VAR_15 = False
VAR_14 = False
if VAR_17 == Membership.LEAVE:
VAR_15 = True
VAR_14 = True
VAR_19 = await self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3=VAR_18,
VAR_14=retry_on_dns_fail,
VAR_8=20000,
VAR_15=ignore_backoff,
)
return VAR_19
@log_function
async def FUNC_9(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_1("/send_join/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19
)
return VAR_37
@log_function
async def FUNC_10(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_2("/send_join/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19
)
return VAR_37
@log_function
async def FUNC_11(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_1("/send_leave/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination,
VAR_2=path,
VAR_39=VAR_19,
VAR_15=True,
)
return VAR_37
@log_function
async def FUNC_12(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_2("/send_leave/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination,
VAR_2=path,
VAR_39=VAR_19,
VAR_15=True,
)
return VAR_37
@log_function
async def FUNC_13(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_1("/invite/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
return VAR_37
@log_function
async def FUNC_14(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_2("/invite/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
return VAR_37
@log_function
async def FUNC_15(
self,
VAR_20: str,
VAR_10: Optional[int] = None,
VAR_21: Optional[str] = None,
VAR_22: Optional[Dict] = None,
VAR_23: bool = False,
VAR_24: Optional[str] = None,
):
if VAR_22:
VAR_2 = FUNC_1("/publicRooms")
VAR_39 = {
"include_all_networks": "true" if VAR_23 else "false"
} # type: Dict[str, Any]
if VAR_24:
VAR_39["third_party_instance_id"] = VAR_24
if VAR_10:
VAR_39["limit"] = str(VAR_10)
if VAR_21:
VAR_39["since"] = VAR_21
VAR_39["filter"] = VAR_22
try:
VAR_37 = await self.client.post_json(
VAR_5=VAR_20, VAR_2=path, VAR_39=data, VAR_15=True
)
except HttpResponseException as e:
if e.code == 403:
raise SynapseError(
403,
"You are not allowed to view the public rooms list of %s"
% (VAR_20,),
errcode=Codes.FORBIDDEN,
)
raise
else:
VAR_2 = FUNC_1("/publicRooms")
VAR_3 = {
"include_all_networks": "true" if VAR_23 else "false"
} # type: Dict[str, Any]
if VAR_24:
VAR_3["third_party_instance_id"] = (VAR_24,)
if VAR_10:
VAR_3["limit"] = [str(VAR_10)]
if VAR_21:
VAR_3["since"] = [VAR_21]
try:
VAR_37 = await self.client.get_json(
VAR_5=VAR_20, VAR_2=path, VAR_3=args, VAR_15=True
)
except HttpResponseException as e:
if e.code == 403:
raise SynapseError(
403,
"You are not allowed to view the public rooms list of %s"
% (VAR_20,),
errcode=Codes.FORBIDDEN,
)
raise
return VAR_37
@log_function
async def FUNC_16(self, VAR_5, VAR_6, VAR_25):
VAR_2 = FUNC_1("/FUNC_16/%s", VAR_6)
VAR_37 = await self.client.put_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_25
)
return VAR_37
@log_function
async def FUNC_17(self, VAR_5, VAR_6, VAR_7):
VAR_2 = FUNC_1("/event_auth/%s/%s", VAR_6, VAR_7)
VAR_19 = await self.client.get_json(VAR_5=destination, VAR_2=path)
return VAR_19
@log_function
async def FUNC_18(self, VAR_5, VAR_26, VAR_8):
VAR_2 = FUNC_1("/user/keys/query")
VAR_19 = await self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_26, VAR_8=timeout
)
return VAR_19
@log_function
async def FUNC_19(self, VAR_5, VAR_16, VAR_8):
VAR_2 = FUNC_1("/user/devices/%s", VAR_16)
VAR_19 = await self.client.get_json(
VAR_5=destination, VAR_2=path, VAR_8=timeout
)
return VAR_19
@log_function
async def FUNC_20(self, VAR_5, VAR_26, VAR_8):
VAR_2 = FUNC_1("/user/keys/claim")
VAR_19 = await self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_26, VAR_8=timeout
)
return VAR_19
@log_function
async def FUNC_21(
self,
VAR_5,
VAR_6,
VAR_27,
VAR_28,
VAR_10,
VAR_29,
VAR_8,
):
VAR_2 = FUNC_1("/FUNC_21/%s", VAR_6)
VAR_19 = await self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_39={
"limit": int(VAR_10),
"min_depth": int(VAR_29),
"earliest_events": VAR_27,
"latest_events": VAR_28,
},
VAR_8=timeout,
)
return VAR_19
@log_function
def FUNC_22(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/profile", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_23(self, VAR_5, VAR_30, VAR_31, VAR_19):
VAR_2 = FUNC_1("/groups/%s/profile", VAR_30)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_24(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/summary", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_25(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/rooms", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
def FUNC_26(
self, VAR_5, VAR_30, VAR_31, VAR_6, VAR_19
):
VAR_2 = FUNC_1("/groups/%s/room/%s", VAR_30, VAR_6)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
def FUNC_27(
self, VAR_5, VAR_30, VAR_31, VAR_6, VAR_32, VAR_19
):
VAR_2 = FUNC_1(
"/groups/%s/room/%s/config/%s", VAR_30, VAR_6, VAR_32
)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
def FUNC_28(self, VAR_5, VAR_30, VAR_31, VAR_6):
VAR_2 = FUNC_1("/groups/%s/room/%s", VAR_30, VAR_6)
return self.client.delete_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_29(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/users", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_30(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/invited_users", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_31(self, VAR_5, VAR_30, VAR_16, VAR_19):
VAR_2 = FUNC_1("/groups/%s/users/%s/accept_invite", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
@log_function
def FUNC_32(self, VAR_5, VAR_30, VAR_16, VAR_19):
VAR_2 = FUNC_1("/groups/%s/users/%s/join", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
@log_function
def FUNC_33(
self, VAR_5, VAR_30, VAR_16, VAR_31, VAR_19
):
VAR_2 = FUNC_1("/groups/%s/users/%s/invite", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_34(self, VAR_5, VAR_30, VAR_16, VAR_19):
VAR_2 = FUNC_1("/groups/local/%s/users/%s/invite", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
@log_function
def FUNC_35(
self, VAR_5, VAR_30, VAR_31, VAR_16, VAR_19
):
VAR_2 = FUNC_1("/groups/%s/users/%s/remove", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_36(
self, VAR_5, VAR_30, VAR_16, VAR_19
):
VAR_2 = FUNC_1("/groups/local/%s/users/%s/remove", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
@log_function
def FUNC_37(self, VAR_5, VAR_30, VAR_16, VAR_19):
VAR_2 = FUNC_1("/groups/%s/renew_attestation/%s", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
@log_function
def FUNC_38(
self, VAR_5, VAR_30, VAR_16, VAR_6, VAR_33, VAR_19
):
if VAR_33:
VAR_2 = FUNC_1(
"/groups/%s/summary/categories/%s/rooms/%s",
VAR_30,
VAR_33,
VAR_6,
)
else:
VAR_2 = FUNC_1("/groups/%s/summary/rooms/%s", VAR_30, VAR_6)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_16},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_39(
self, VAR_5, VAR_30, VAR_16, VAR_6, VAR_33
):
if VAR_33:
VAR_2 = FUNC_1(
"/groups/%s/summary/categories/%s/rooms/%s",
VAR_30,
VAR_33,
VAR_6,
)
else:
VAR_2 = FUNC_1("/groups/%s/summary/rooms/%s", VAR_30, VAR_6)
return self.client.delete_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_16},
VAR_15=True,
)
@log_function
def FUNC_40(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/categories", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_41(self, VAR_5, VAR_30, VAR_31, VAR_33):
VAR_2 = FUNC_1("/groups/%s/categories/%s", VAR_30, VAR_33)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_42(
self, VAR_5, VAR_30, VAR_31, VAR_33, VAR_19
):
VAR_2 = FUNC_1("/groups/%s/categories/%s", VAR_30, VAR_33)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_43(
self, VAR_5, VAR_30, VAR_31, VAR_33
):
VAR_2 = FUNC_1("/groups/%s/categories/%s", VAR_30, VAR_33)
return self.client.delete_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_44(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/roles", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_45(self, VAR_5, VAR_30, VAR_31, VAR_34):
VAR_2 = FUNC_1("/groups/%s/roles/%s", VAR_30, VAR_34)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_46(
self, VAR_5, VAR_30, VAR_31, VAR_34, VAR_19
):
VAR_2 = FUNC_1("/groups/%s/roles/%s", VAR_30, VAR_34)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_47(self, VAR_5, VAR_30, VAR_31, VAR_34):
VAR_2 = FUNC_1("/groups/%s/roles/%s", VAR_30, VAR_34)
return self.client.delete_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_48(
self, VAR_5, VAR_30, VAR_31, VAR_16, VAR_34, VAR_19
):
if VAR_34:
VAR_2 = FUNC_1(
"/groups/%s/summary/roles/%s/users/%s", VAR_30, VAR_34, VAR_16
)
else:
VAR_2 = FUNC_1("/groups/%s/summary/users/%s", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_49(self, VAR_5, VAR_30, VAR_31, VAR_19):
VAR_2 = FUNC_1("/groups/%s/settings/m.join_policy", VAR_30)
return self.client.put_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_50(
self, VAR_5, VAR_30, VAR_31, VAR_16, VAR_34
):
if VAR_34:
VAR_2 = FUNC_1(
"/groups/%s/summary/roles/%s/users/%s", VAR_30, VAR_34, VAR_16
)
else:
VAR_2 = FUNC_1("/groups/%s/summary/users/%s", VAR_30, VAR_16)
return self.client.delete_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
def FUNC_51(self, VAR_5, VAR_35):
VAR_2 = FUNC_1("/get_groups_publicised")
VAR_19 = {"user_ids": VAR_35}
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
def FUNC_52(self, VAR_5, VAR_6):
VAR_2 = FUNC_0(FEDERATION_UNSTABLE_PREFIX, "/rooms/%s/complexity", VAR_6)
return self.client.get_json(VAR_5=destination, VAR_2=path)
def FUNC_0(VAR_1, VAR_2, *VAR_3):
return VAR_1 + VAR_2 % tuple(urllib.parse.quote(arg, "") for arg in VAR_3)
def FUNC_1(VAR_2, *VAR_3):
return FUNC_0(FEDERATION_V1_PREFIX, VAR_2, *VAR_3)
def FUNC_2(VAR_2, *VAR_3):
return FUNC_0(FEDERATION_V2_PREFIX, VAR_2, *VAR_3)
|
import logging
import urllib
from typing import Any, Dict, Optional
from synapse.api.constants import Membership
from synapse.api.errors import Codes, HttpResponseException, SynapseError
from synapse.api.urls import (
FEDERATION_UNSTABLE_PREFIX,
FEDERATION_V1_PREFIX,
FEDERATION_V2_PREFIX,
)
from synapse.logging.utils import log_function
VAR_0 = logging.getLogger(__name__)
class CLASS_0:
def __init__(self, VAR_4):
self.server_name = VAR_4.hostname
self.client = VAR_4.get_federation_http_client()
@log_function
def FUNC_3(self, VAR_5, VAR_6, VAR_7):
VAR_0.debug("get_room_state_ids dest=%s, room=%s", VAR_5, VAR_6)
VAR_2 = FUNC_1("/state_ids/%s", VAR_6)
return self.client.get_json(
VAR_5,
VAR_2=path,
VAR_3={"event_id": VAR_7},
try_trailing_slash_on_400=True,
)
@log_function
def FUNC_4(self, VAR_5, VAR_7, VAR_8=None):
VAR_0.debug("get_pdu dest=%s, VAR_7=%s", VAR_5, VAR_7)
VAR_2 = FUNC_1("/event/%s", VAR_7)
return self.client.get_json(
VAR_5, VAR_2=path, VAR_8=timeout, try_trailing_slash_on_400=True
)
@log_function
def FUNC_5(self, VAR_5, VAR_6, VAR_9, VAR_10):
VAR_0.debug(
"backfill dest=%s, VAR_6=%s, VAR_9=%r, VAR_10=%s",
VAR_5,
VAR_6,
VAR_9,
str(VAR_10),
)
if not VAR_9:
return
VAR_2 = FUNC_1("/FUNC_5/%s", VAR_6)
VAR_3 = {"v": VAR_9, "limit": [str(VAR_10)]}
return self.client.get_json(
VAR_5, VAR_2=path, VAR_3=args, try_trailing_slash_on_400=True
)
@log_function
async def FUNC_6(self, VAR_11, VAR_12=None):
VAR_0.debug(
"send_data dest=%s, txid=%s",
VAR_11.destination,
VAR_11.transaction_id,
)
if VAR_11.destination == self.server_name:
raise RuntimeError("Transport layer cannot send to itself!")
VAR_36 = VAR_11.get_dict()
VAR_2 = FUNC_1("/send/%s", VAR_11.transaction_id)
VAR_37 = await self.client.put_json(
VAR_11.destination,
VAR_2=path,
VAR_39=VAR_36,
VAR_12=json_data_callback,
long_retries=True,
backoff_on_404=True, # If we get a 404 the other side has gone
try_trailing_slash_on_400=True,
)
return VAR_37
@log_function
async def FUNC_7(
self, VAR_5, VAR_13, VAR_3, VAR_14, VAR_15=False
):
VAR_2 = FUNC_1("/query/%s", VAR_13)
VAR_19 = await self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3=args,
VAR_14=retry_on_dns_fail,
VAR_8=10000,
VAR_15=ignore_backoff,
)
return VAR_19
@log_function
async def FUNC_8(
self, VAR_5, VAR_6, VAR_16, VAR_17, VAR_18
):
VAR_38 = {Membership.JOIN, Membership.LEAVE}
if VAR_17 not in VAR_38:
raise RuntimeError(
"make_membership_event called with VAR_17='%s', must be one of %s"
% (VAR_17, ",".join(VAR_38))
)
VAR_2 = FUNC_1("/make_%s/%s/%s", VAR_17, VAR_6, VAR_16)
VAR_15 = False
VAR_14 = False
if VAR_17 == Membership.LEAVE:
VAR_15 = True
VAR_14 = True
VAR_19 = await self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3=VAR_18,
VAR_14=retry_on_dns_fail,
VAR_8=20000,
VAR_15=ignore_backoff,
)
return VAR_19
@log_function
async def FUNC_9(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_1("/send_join/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19
)
return VAR_37
@log_function
async def FUNC_10(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_2("/send_join/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19
)
return VAR_37
@log_function
async def FUNC_11(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_1("/send_leave/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination,
VAR_2=path,
VAR_39=VAR_19,
VAR_15=True,
)
return VAR_37
@log_function
async def FUNC_12(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_2("/send_leave/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination,
VAR_2=path,
VAR_39=VAR_19,
VAR_15=True,
)
return VAR_37
@log_function
async def FUNC_13(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_1("/invite/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
return VAR_37
@log_function
async def FUNC_14(self, VAR_5, VAR_6, VAR_7, VAR_19):
VAR_2 = FUNC_2("/invite/%s/%s", VAR_6, VAR_7)
VAR_37 = await self.client.put_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
return VAR_37
@log_function
async def FUNC_15(
self,
VAR_20: str,
VAR_10: Optional[int] = None,
VAR_21: Optional[str] = None,
VAR_22: Optional[Dict] = None,
VAR_23: bool = False,
VAR_24: Optional[str] = None,
):
if VAR_22:
VAR_2 = FUNC_1("/publicRooms")
VAR_39 = {
"include_all_networks": "true" if VAR_23 else "false"
} # type: Dict[str, Any]
if VAR_24:
VAR_39["third_party_instance_id"] = VAR_24
if VAR_10:
VAR_39["limit"] = str(VAR_10)
if VAR_21:
VAR_39["since"] = VAR_21
VAR_39["filter"] = VAR_22
try:
VAR_37 = await self.client.post_json(
VAR_5=VAR_20, VAR_2=path, VAR_39=data, VAR_15=True
)
except HttpResponseException as e:
if e.code == 403:
raise SynapseError(
403,
"You are not allowed to view the public rooms list of %s"
% (VAR_20,),
errcode=Codes.FORBIDDEN,
)
raise
else:
VAR_2 = FUNC_1("/publicRooms")
VAR_3 = {
"include_all_networks": "true" if VAR_23 else "false"
} # type: Dict[str, Any]
if VAR_24:
VAR_3["third_party_instance_id"] = (VAR_24,)
if VAR_10:
VAR_3["limit"] = [str(VAR_10)]
if VAR_21:
VAR_3["since"] = [VAR_21]
try:
VAR_37 = await self.client.get_json(
VAR_5=VAR_20, VAR_2=path, VAR_3=args, VAR_15=True
)
except HttpResponseException as e:
if e.code == 403:
raise SynapseError(
403,
"You are not allowed to view the public rooms list of %s"
% (VAR_20,),
errcode=Codes.FORBIDDEN,
)
raise
return VAR_37
@log_function
async def FUNC_16(self, VAR_5, VAR_6, VAR_25):
VAR_2 = FUNC_1("/FUNC_16/%s", VAR_6)
VAR_37 = await self.client.put_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_25
)
return VAR_37
@log_function
async def FUNC_17(self, VAR_5, VAR_6, VAR_7):
VAR_2 = FUNC_1("/event_auth/%s/%s", VAR_6, VAR_7)
VAR_19 = await self.client.get_json(VAR_5=destination, VAR_2=path)
return VAR_19
@log_function
async def FUNC_18(self, VAR_5, VAR_26, VAR_8):
VAR_2 = FUNC_1("/user/keys/query")
VAR_19 = await self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_26, VAR_8=timeout
)
return VAR_19
@log_function
async def FUNC_19(self, VAR_5, VAR_16, VAR_8):
VAR_2 = FUNC_1("/user/devices/%s", VAR_16)
VAR_19 = await self.client.get_json(
VAR_5=destination, VAR_2=path, VAR_8=timeout
)
return VAR_19
@log_function
async def FUNC_20(self, VAR_5, VAR_26, VAR_8):
VAR_2 = FUNC_1("/user/keys/claim")
VAR_19 = await self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_26, VAR_8=timeout
)
return VAR_19
@log_function
async def FUNC_21(
self,
VAR_5,
VAR_6,
VAR_27,
VAR_28,
VAR_10,
VAR_29,
VAR_8,
):
VAR_2 = FUNC_1("/FUNC_21/%s", VAR_6)
VAR_19 = await self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_39={
"limit": int(VAR_10),
"min_depth": int(VAR_29),
"earliest_events": VAR_27,
"latest_events": VAR_28,
},
VAR_8=timeout,
)
return VAR_19
@log_function
def FUNC_22(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/profile", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_23(self, VAR_5, VAR_30, VAR_31, VAR_19):
VAR_2 = FUNC_1("/groups/%s/profile", VAR_30)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_24(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/summary", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_25(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/rooms", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
def FUNC_26(
self, VAR_5, VAR_30, VAR_31, VAR_6, VAR_19
):
VAR_2 = FUNC_1("/groups/%s/room/%s", VAR_30, VAR_6)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
def FUNC_27(
self, VAR_5, VAR_30, VAR_31, VAR_6, VAR_32, VAR_19
):
VAR_2 = FUNC_1(
"/groups/%s/room/%s/config/%s", VAR_30, VAR_6, VAR_32
)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
def FUNC_28(self, VAR_5, VAR_30, VAR_31, VAR_6):
VAR_2 = FUNC_1("/groups/%s/room/%s", VAR_30, VAR_6)
return self.client.delete_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_29(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/users", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_30(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/invited_users", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_31(self, VAR_5, VAR_30, VAR_16, VAR_19):
VAR_2 = FUNC_1("/groups/%s/users/%s/accept_invite", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
@log_function
def FUNC_32(self, VAR_5, VAR_30, VAR_16, VAR_19):
VAR_2 = FUNC_1("/groups/%s/users/%s/join", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
@log_function
def FUNC_33(
self, VAR_5, VAR_30, VAR_16, VAR_31, VAR_19
):
VAR_2 = FUNC_1("/groups/%s/users/%s/invite", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_34(self, VAR_5, VAR_30, VAR_16, VAR_19):
VAR_2 = FUNC_1("/groups/local/%s/users/%s/invite", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
@log_function
def FUNC_35(
self, VAR_5, VAR_30, VAR_31, VAR_16, VAR_19
):
VAR_2 = FUNC_1("/groups/%s/users/%s/remove", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_36(
self, VAR_5, VAR_30, VAR_16, VAR_19
):
VAR_2 = FUNC_1("/groups/local/%s/users/%s/remove", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
@log_function
def FUNC_37(self, VAR_5, VAR_30, VAR_16, VAR_19):
VAR_2 = FUNC_1("/groups/%s/renew_attestation/%s", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
@log_function
def FUNC_38(
self, VAR_5, VAR_30, VAR_16, VAR_6, VAR_33, VAR_19
):
if VAR_33:
VAR_2 = FUNC_1(
"/groups/%s/summary/categories/%s/rooms/%s",
VAR_30,
VAR_33,
VAR_6,
)
else:
VAR_2 = FUNC_1("/groups/%s/summary/rooms/%s", VAR_30, VAR_6)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_16},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_39(
self, VAR_5, VAR_30, VAR_16, VAR_6, VAR_33
):
if VAR_33:
VAR_2 = FUNC_1(
"/groups/%s/summary/categories/%s/rooms/%s",
VAR_30,
VAR_33,
VAR_6,
)
else:
VAR_2 = FUNC_1("/groups/%s/summary/rooms/%s", VAR_30, VAR_6)
return self.client.delete_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_16},
VAR_15=True,
)
@log_function
def FUNC_40(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/categories", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_41(self, VAR_5, VAR_30, VAR_31, VAR_33):
VAR_2 = FUNC_1("/groups/%s/categories/%s", VAR_30, VAR_33)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_42(
self, VAR_5, VAR_30, VAR_31, VAR_33, VAR_19
):
VAR_2 = FUNC_1("/groups/%s/categories/%s", VAR_30, VAR_33)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_43(
self, VAR_5, VAR_30, VAR_31, VAR_33
):
VAR_2 = FUNC_1("/groups/%s/categories/%s", VAR_30, VAR_33)
return self.client.delete_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_44(self, VAR_5, VAR_30, VAR_31):
VAR_2 = FUNC_1("/groups/%s/roles", VAR_30)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_45(self, VAR_5, VAR_30, VAR_31, VAR_34):
VAR_2 = FUNC_1("/groups/%s/roles/%s", VAR_30, VAR_34)
return self.client.get_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_46(
self, VAR_5, VAR_30, VAR_31, VAR_34, VAR_19
):
VAR_2 = FUNC_1("/groups/%s/roles/%s", VAR_30, VAR_34)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_47(self, VAR_5, VAR_30, VAR_31, VAR_34):
VAR_2 = FUNC_1("/groups/%s/roles/%s", VAR_30, VAR_34)
return self.client.delete_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
@log_function
def FUNC_48(
self, VAR_5, VAR_30, VAR_31, VAR_16, VAR_34, VAR_19
):
if VAR_34:
VAR_2 = FUNC_1(
"/groups/%s/summary/roles/%s/users/%s", VAR_30, VAR_34, VAR_16
)
else:
VAR_2 = FUNC_1("/groups/%s/summary/users/%s", VAR_30, VAR_16)
return self.client.post_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_49(self, VAR_5, VAR_30, VAR_31, VAR_19):
VAR_2 = FUNC_1("/groups/%s/settings/m.join_policy", VAR_30)
return self.client.put_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_39=VAR_19,
VAR_15=True,
)
@log_function
def FUNC_50(
self, VAR_5, VAR_30, VAR_31, VAR_16, VAR_34
):
if VAR_34:
VAR_2 = FUNC_1(
"/groups/%s/summary/roles/%s/users/%s", VAR_30, VAR_34, VAR_16
)
else:
VAR_2 = FUNC_1("/groups/%s/summary/users/%s", VAR_30, VAR_16)
return self.client.delete_json(
VAR_5=destination,
VAR_2=path,
VAR_3={"requester_user_id": VAR_31},
VAR_15=True,
)
def FUNC_51(self, VAR_5, VAR_35):
VAR_2 = FUNC_1("/get_groups_publicised")
VAR_19 = {"user_ids": VAR_35}
return self.client.post_json(
VAR_5=destination, VAR_2=path, VAR_39=VAR_19, VAR_15=True
)
def FUNC_52(self, VAR_5, VAR_6):
VAR_2 = FUNC_0(FEDERATION_UNSTABLE_PREFIX, "/rooms/%s/complexity", VAR_6)
return self.client.get_json(VAR_5=destination, VAR_2=path)
def FUNC_0(VAR_1, VAR_2, *VAR_3):
return VAR_1 + VAR_2 % tuple(urllib.parse.quote(arg, "") for arg in VAR_3)
def FUNC_1(VAR_2, *VAR_3):
return FUNC_0(FEDERATION_V1_PREFIX, VAR_2, *VAR_3)
def FUNC_2(VAR_2, *VAR_3):
return FUNC_0(FEDERATION_V2_PREFIX, VAR_2, *VAR_3)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
20,
29,
31,
32,
35,
39,
44,
50,
55,
63,
67,
74,
79,
84,
89,
95,
106,
108,
110,
112,
114,
118,
122,
125,
129,
132,
135,
144,
147,
148,
149,
151,
153,
163,
165,
171,
180,
182,
188,
190,
198,
202,
205,
208,
219,
222,
224,
225,
226,
227,
230,
239,
241,
245,
249,
251,
255,
259,
261,
265,
270,
271,
272,
273,
276,
278,
282,
287,
288,
289,
290,
293,
295,
299,
303,
305,
309,
313,
315,
327,
332,
334,
344,
346,
362,
372,
386,
388,
392,
396,
398,
402,
404,
406,
411,
418,
434,
442,
447,
451,
473,
481,
486,
490,
499,
510,
517,
519,
524,
537,
549,
551,
557,
564,
568,
576,
584,
590,
597,
603,
610,
617,
625,
634,
642,
647,
654,
660,
667,
673,
680,
686,
690,
696,
700,
708,
716,
722,
724,
728,
736,
744,
752,
754,
758,
764,
766,
770,
786,
794,
810,
817,
823,
830,
836,
843,
851,
859,
867,
874,
880,
887,
893,
900,
908,
916,
922,
929,
942,
950,
956,
964,
977,
984,
988,
990,
992,
996,
1004,
1006,
1007,
1013,
1014,
1018,
1020,
1022,
1026,
1031,
1032,
1036,
1038,
1040,
1044,
1049,
34,
1009,
1010,
1011,
1016,
1017,
1018,
1019,
1020,
1021,
1022,
1023,
1024,
1025,
1026,
1027,
1028,
1029,
1034,
1035,
1036,
1037,
1038,
1039,
1040,
1041,
1042,
1043,
1044,
1045,
1046,
1047,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
66,
67,
68,
69,
70,
71,
72,
73,
74,
75,
76,
77,
87,
88,
89,
90,
91,
92,
93,
94,
95,
96,
97,
98,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
187,
188,
189,
190,
191,
192,
193,
194,
195,
196,
197,
198,
199,
200,
201,
202,
203,
204,
205,
206,
207,
208,
209,
210,
211,
326,
327,
328,
329,
330,
409,
410,
411,
412,
413,
414,
415,
416,
417,
418,
419,
420,
421,
422,
423,
424,
425,
426,
427,
428,
429,
430,
431,
432,
433,
434,
435,
436,
437,
438,
439,
440,
450,
451,
452,
453,
454,
455,
456,
457,
458,
459,
460,
461,
462,
463,
464,
465,
466,
467,
468,
469,
470,
471,
472,
473,
474,
475,
476,
477,
478,
479,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
499,
500,
501,
502,
503,
504,
505,
506,
507,
508,
509,
510,
511,
512,
513,
514,
515,
516,
554,
555,
567,
568,
569,
570,
571,
572,
573,
574,
587,
588,
600,
601,
614,
615,
629,
630,
644,
645,
657,
658,
670,
671,
683,
684,
693,
694,
705,
706,
719,
720,
721,
733,
734,
749,
750,
751,
761,
762,
763,
775,
776,
799,
800,
820,
821,
833,
834,
848,
849,
864,
865,
877,
878,
890,
891,
905,
906,
919,
920,
934,
935,
953,
954,
969,
970,
986,
987,
998,
999,
1000,
1001,
1002
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
20,
29,
31,
32,
35,
39,
44,
50,
55,
63,
67,
74,
79,
84,
89,
95,
106,
108,
110,
112,
114,
118,
122,
125,
129,
132,
135,
144,
147,
148,
149,
151,
153,
163,
165,
171,
180,
182,
188,
190,
198,
202,
205,
208,
219,
222,
224,
225,
226,
227,
230,
239,
241,
245,
249,
251,
255,
259,
261,
265,
270,
271,
272,
273,
276,
278,
282,
287,
288,
289,
290,
293,
295,
299,
303,
305,
309,
313,
315,
327,
332,
334,
344,
346,
362,
372,
386,
388,
392,
396,
398,
402,
404,
406,
411,
418,
434,
442,
447,
451,
473,
481,
486,
490,
499,
510,
517,
519,
524,
537,
549,
551,
557,
564,
568,
576,
584,
590,
597,
603,
610,
617,
625,
634,
642,
647,
654,
660,
667,
673,
680,
686,
690,
696,
700,
708,
716,
722,
724,
728,
736,
744,
752,
754,
758,
764,
766,
770,
786,
794,
810,
817,
823,
830,
836,
843,
851,
859,
867,
874,
880,
887,
893,
900,
908,
916,
922,
929,
942,
950,
956,
964,
977,
984,
988,
990,
992,
996,
1004,
1006,
1007,
1013,
1014,
1018,
1020,
1022,
1026,
1031,
1032,
1036,
1038,
1040,
1044,
1049,
34,
1009,
1010,
1011,
1016,
1017,
1018,
1019,
1020,
1021,
1022,
1023,
1024,
1025,
1026,
1027,
1028,
1029,
1034,
1035,
1036,
1037,
1038,
1039,
1040,
1041,
1042,
1043,
1044,
1045,
1046,
1047,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
66,
67,
68,
69,
70,
71,
72,
73,
74,
75,
76,
77,
87,
88,
89,
90,
91,
92,
93,
94,
95,
96,
97,
98,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
187,
188,
189,
190,
191,
192,
193,
194,
195,
196,
197,
198,
199,
200,
201,
202,
203,
204,
205,
206,
207,
208,
209,
210,
211,
326,
327,
328,
329,
330,
409,
410,
411,
412,
413,
414,
415,
416,
417,
418,
419,
420,
421,
422,
423,
424,
425,
426,
427,
428,
429,
430,
431,
432,
433,
434,
435,
436,
437,
438,
439,
440,
450,
451,
452,
453,
454,
455,
456,
457,
458,
459,
460,
461,
462,
463,
464,
465,
466,
467,
468,
469,
470,
471,
472,
473,
474,
475,
476,
477,
478,
479,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
499,
500,
501,
502,
503,
504,
505,
506,
507,
508,
509,
510,
511,
512,
513,
514,
515,
516,
554,
555,
567,
568,
569,
570,
571,
572,
573,
574,
587,
588,
600,
601,
614,
615,
629,
630,
644,
645,
657,
658,
670,
671,
683,
684,
693,
694,
705,
706,
719,
720,
721,
733,
734,
749,
750,
751,
761,
762,
763,
775,
776,
799,
800,
820,
821,
833,
834,
848,
849,
864,
865,
877,
878,
890,
891,
905,
906,
919,
920,
934,
935,
953,
954,
969,
970,
986,
987,
998,
999,
1000,
1001,
1002
] |
1CWE-79
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2019 tribe29 GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
from html import escape as html_escape
import re
from typing import Union
from six import ensure_str
from cmk.gui.utils.html import HTML
#.
# .--Escaper-------------------------------------------------------------.
# | _____ |
# | | ____|___ ___ __ _ _ __ ___ _ __ |
# | | _| / __|/ __/ _` | '_ \ / _ \ '__| |
# | | |___\__ \ (_| (_| | |_) | __/ | |
# | |_____|___/\___\__,_| .__/ \___|_| |
# | |_| |
# +----------------------------------------------------------------------+
# | |
# '----------------------------------------------------------------------
# TODO: Figure out if this should actually be HTMLTagValue or HTMLContent or...
# All the HTML-related types are slightly chaotic...
EscapableEntity = Union[None, int, HTML, str]
_UNESCAPER_TEXT = re.compile(
r'<(/?)(h1|h2|b|tt|i|u|br(?: /)?|nobr(?: /)?|pre|a|sup|p|li|ul|ol)>')
_QUOTE = re.compile(r"(?:"|')")
_A_HREF = re.compile(r'<a href=((?:"|').*?(?:"|'))>')
# TODO: Cleanup the accepted types!
def escape_attribute(value: EscapableEntity) -> str:
"""Escape HTML attributes.
For example: replace '"' with '"', '<' with '<'.
This code is slow. Works on str and unicode without changing
the type. Also works on things that can be converted with '%s'.
Args:
value:
Examples:
>>> escape_attribute("Hello this is <b>dog</b>!")
'Hello this is <b>dog</b>!'
>>> escape_attribute("Hello this is <foo>dog</foo>!")
'Hello this is <foo>dog</foo>!'
Returns:
"""
attr_type = type(value)
if value is None:
return u''
if attr_type == int:
return str(value)
if isinstance(value, HTML):
return value.__html__() # This is HTML code which must not be escaped
if isinstance(attr_type, str):
return html_escape(value, quote=True)
if isinstance(attr_type, bytes): # TODO: Not in the signature!
return html_escape(ensure_str(value), quote=True)
# TODO: What is this case for? Exception?
return html_escape(u"%s" % value, quote=True) # TODO: Not in the signature!
def unescape_attributes(value: str) -> str:
# In python3 use html.unescape
return ensure_str(value #
.replace("&", "&") #
.replace(""", "\"") #
.replace("<", "<") #
.replace(">", ">"))
def escape_text(text: EscapableEntity) -> str:
"""Escape HTML text
We only strip some tags and allow some simple tags
such as <h1>, <b> or <i> to be part of the string.
This is useful for messages where we want to keep formatting
options. (Formerly known as 'permissive_attrencode')
Args:
text:
Examples:
>>> escape_text("Hello this is dog!")
'Hello this is dog!'
This is lame.
>>> escape_text("Hello this <a href=\"\">is dog</a>!")
'Hello this <a href=>is dog</a>!'
Returns:
"""
if isinstance(text, HTML):
return text.__html__()
text = escape_attribute(text)
text = _UNESCAPER_TEXT.sub(r'<\1\2>', text)
for a_href in _A_HREF.finditer(text):
text = text.replace(a_href.group(0), u"<a href=%s>" % _QUOTE.sub(u"\"", a_href.group(1)))
return text.replace(u"&nbsp;", u" ")
def strip_scripts(ht: str) -> str:
"""Strip script tags from text.
This function does not handle all the possible edge cases. Beware.
Args:
ht: A text with possible html in it.
Examples:
>>> strip_scripts('')
''
>>> strip_scripts('foo <script>baz</script> bar')
'foo bar'
Edge cases.
>>> strip_scripts('foo <scr<script></script>ipt>alert()</SCRIPT> bar')
'foo bar'
Returns:
A text without html in it.
"""
prev = None
while prev != ht:
prev = ht
x = ht.lower().find('<script')
if x == -1:
break
y = ht.lower().find('</script')
if y == -1:
break
ht = ht[0:x] + ht[y + 9:]
return ht
def strip_tags(ht: EscapableEntity) -> str:
"""Strip all HTML tags from a text.
Args:
ht: A text with possible HTML tags in it.
Examples:
>>> strip_tags("<b>foobar</b> blah")
'foobar blah'
Edge cases.
>>> strip_tags("<p<b<>re>foobar</</b>b> blah")
're>foobarb> blah'
Returns:
A string without working HTML tags.
"""
if isinstance(ht, HTML):
ht = ht.__html__()
if not isinstance(ht, str):
return u"%s" % ht
ht = ensure_str(ht)
while True:
x = ht.find('<')
if x == -1:
break
y = ht.find('>', x)
if y == -1:
break
ht = ht[0:x] + ht[y + 1:]
return ht.replace(" ", " ")
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2019 tribe29 GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
from html import escape as html_escape
import re
from typing import Union
from six import ensure_str
from cmk.gui.utils.html import HTML
#.
# .--Escaper-------------------------------------------------------------.
# | _____ |
# | | ____|___ ___ __ _ _ __ ___ _ __ |
# | | _| / __|/ __/ _` | '_ \ / _ \ '__| |
# | | |___\__ \ (_| (_| | |_) | __/ | |
# | |_____|___/\___\__,_| .__/ \___|_| |
# | |_| |
# +----------------------------------------------------------------------+
# | |
# '----------------------------------------------------------------------
# TODO: Figure out if this should actually be HTMLTagValue or HTMLContent or...
# All the HTML-related types are slightly chaotic...
EscapableEntity = Union[None, int, HTML, str]
_UNESCAPER_TEXT = re.compile(
r'<(/?)(h1|h2|b|tt|i|u|br(?: /)?|nobr(?: /)?|pre|a|sup|p|li|ul|ol)>')
_A_HREF = re.compile(
r'<a href=(?:(?:"|')(.*?)(?:"|'))(?: target=(?:(?:"|')(.*?)(?:"|')))?>'
)
# TODO: Cleanup the accepted types!
def escape_attribute(value: EscapableEntity) -> str:
"""Escape HTML attributes.
For example: replace '"' with '"', '<' with '<'.
This code is slow. Works on str and unicode without changing
the type. Also works on things that can be converted with '%s'.
Args:
value:
Examples:
>>> escape_attribute("Hello this is <b>dog</b>!")
'Hello this is <b>dog</b>!'
>>> escape_attribute("Hello this is <foo>dog</foo>!")
'Hello this is <foo>dog</foo>!'
Returns:
"""
attr_type = type(value)
if value is None:
return u''
if attr_type == int:
return str(value)
if isinstance(value, HTML):
return value.__html__() # This is HTML code which must not be escaped
if isinstance(attr_type, str):
return html_escape(value, quote=True)
if isinstance(attr_type, bytes): # TODO: Not in the signature!
return html_escape(ensure_str(value), quote=True)
# TODO: What is this case for? Exception?
return html_escape(u"%s" % value, quote=True) # TODO: Not in the signature!
def unescape_attributes(value: str) -> str:
# In python3 use html.unescape
return ensure_str(value #
.replace("&", "&") #
.replace(""", "\"") #
.replace("<", "<") #
.replace(">", ">"))
def escape_text(text: EscapableEntity) -> str:
"""Escape HTML text
We only strip some tags and allow some simple tags
such as <h1>, <b> or <i> to be part of the string.
This is useful for messages where we want to keep formatting
options. (Formerly known as 'permissive_attrencode')
Args:
text:
Examples:
>>> escape_text("Hello this is dog!")
'Hello this is dog!'
This is lame.
>>> escape_text("Hello this <a href=\"\">is dog</a>!")
'Hello this <a href=>is dog</a>!'
Returns:
"""
if isinstance(text, HTML):
return text.__html__()
text = escape_attribute(text)
text = _UNESCAPER_TEXT.sub(r'<\1\2>', text)
for a_href in _A_HREF.finditer(text):
href = a_href.group(1)
target = a_href.group(2)
if target:
unescaped_tag = "<a href=\"%s\" target=\"%s\">" % (href, target)
else:
unescaped_tag = "<a href=\"%s\">" % href
text = text.replace(a_href.group(0), unescaped_tag)
return text.replace("&nbsp;", u" ")
def strip_scripts(ht: str) -> str:
"""Strip script tags from text.
This function does not handle all the possible edge cases. Beware.
Args:
ht: A text with possible html in it.
Examples:
>>> strip_scripts('')
''
>>> strip_scripts('foo <script>baz</script> bar')
'foo bar'
Edge cases.
>>> strip_scripts('foo <scr<script></script>ipt>alert()</SCRIPT> bar')
'foo bar'
Returns:
A text without html in it.
"""
prev = None
while prev != ht:
prev = ht
x = ht.lower().find('<script')
if x == -1:
break
y = ht.lower().find('</script')
if y == -1:
break
ht = ht[0:x] + ht[y + 9:]
return ht
def strip_tags(ht: EscapableEntity) -> str:
"""Strip all HTML tags from a text.
Args:
ht: A text with possible HTML tags in it.
Examples:
>>> strip_tags("<b>foobar</b> blah")
'foobar blah'
Edge cases.
>>> strip_tags("<p<b<>re>foobar</</b>b> blah")
're>foobarb> blah'
Returns:
A string without working HTML tags.
"""
if isinstance(ht, HTML):
ht = ht.__html__()
if not isinstance(ht, str):
return u"%s" % ht
ht = ensure_str(ht)
while True:
x = ht.find('<')
if x == -1:
break
y = ht.find('>', x)
if y == -1:
break
ht = ht[0:x] + ht[y + 1:]
return ht.replace(" ", " ")
| xss | {
"code": [
" text = text.replace(a_href.group(0), u\"<a href=%s>\" % _QUOTE.sub(u\"\\\"\", a_href.group(1)))",
" return text.replace(u\"&nbsp;\", u\" \")"
],
"line_no": [
114,
115
]
} | {
"code": [
"_A_HREF = re.compile(",
" href = a_href.group(1)",
" if target:",
" else:",
" text = text.replace(a_href.group(0), unescaped_tag)"
],
"line_no": [
33,
115,
118,
120,
123
]
} |
from html import escape as html_escape
import re
from typing import Union
from six import ensure_str
from cmk.gui.utils.html import HTML
VAR_0 = Union[None, int, HTML, str]
VAR_1 = re.compile(
r'<(/?)(h1|h2|b|tt|i|u|br(?: /)?|nobr(?: /)?|pre|a|sup|p|li|ul|ol)>')
VAR_2 = re.compile(r"(?:"|')")
VAR_3 = re.compile(r'<a href=((?:"|').*?(?:"|'))>')
def FUNC_0(VAR_4: VAR_0) -> str:
VAR_7 = type(VAR_4)
if VAR_4 is None:
return u''
if VAR_7 == int:
return str(VAR_4)
if isinstance(VAR_4, HTML):
return VAR_4.__html__() # This is HTML code which must not be escaped
if isinstance(VAR_7, str):
return html_escape(VAR_4, quote=True)
if isinstance(VAR_7, bytes): # TODO: Not in the signature!
return html_escape(ensure_str(VAR_4), quote=True)
return html_escape(u"%s" % VAR_4, quote=True) # TODO: Not in the signature!
def FUNC_1(VAR_4: str) -> str:
return ensure_str(VAR_4 #
.replace("&", "&") #
.replace(""", "\"") #
.replace("<", "<") #
.replace(">", ">"))
def FUNC_2(VAR_5: VAR_0) -> str:
if isinstance(VAR_5, HTML):
return VAR_5.__html__()
VAR_5 = FUNC_0(VAR_5)
VAR_5 = VAR_1.sub(r'<\1\2>', VAR_5)
for a_href in VAR_3.finditer(VAR_5):
VAR_5 = VAR_5.replace(a_href.group(0), u"<a href=%s>" % VAR_2.sub(u"\"", a_href.group(1)))
return VAR_5.replace(u"&nbsp;", u" ")
def FUNC_3(VAR_6: str) -> str:
VAR_8 = None
while VAR_8 != VAR_6:
VAR_8 = VAR_6
VAR_9 = VAR_6.lower().find('<script')
if VAR_9 == -1:
break
VAR_10 = VAR_6.lower().find('</script')
if VAR_10 == -1:
break
VAR_6 = ht[0:VAR_9] + VAR_6[VAR_10 + 9:]
return VAR_6
def FUNC_4(VAR_6: VAR_0) -> str:
if isinstance(VAR_6, HTML):
VAR_6 = VAR_6.__html__()
if not isinstance(VAR_6, str):
return u"%s" % VAR_6
ht = ensure_str(VAR_6)
while True:
VAR_9 = VAR_6.find('<')
if VAR_9 == -1:
break
VAR_10 = VAR_6.find('>', VAR_9)
if VAR_10 == -1:
break
VAR_6 = ht[0:VAR_9] + VAR_6[VAR_10 + 1:]
return VAR_6.replace(" ", " ")
|
from html import escape as html_escape
import re
from typing import Union
from six import ensure_str
from cmk.gui.utils.html import HTML
VAR_0 = Union[None, int, HTML, str]
VAR_1 = re.compile(
r'<(/?)(h1|h2|b|tt|i|u|br(?: /)?|nobr(?: /)?|pre|a|sup|p|li|ul|ol)>')
VAR_2 = re.compile(
r'<a VAR_8=(?:(?:"|')(.*?)(?:"|'))(?: VAR_9=(?:(?:"|')(.*?)(?:"|')))?>'
)
def FUNC_0(VAR_3: VAR_0) -> str:
VAR_6 = type(VAR_3)
if VAR_3 is None:
return u''
if VAR_6 == int:
return str(VAR_3)
if isinstance(VAR_3, HTML):
return VAR_3.__html__() # This is HTML code which must not be escaped
if isinstance(VAR_6, str):
return html_escape(VAR_3, quote=True)
if isinstance(VAR_6, bytes): # TODO: Not in the signature!
return html_escape(ensure_str(VAR_3), quote=True)
return html_escape(u"%s" % VAR_3, quote=True) # TODO: Not in the signature!
def FUNC_1(VAR_3: str) -> str:
return ensure_str(VAR_3 #
.replace("&", "&") #
.replace(""", "\"") #
.replace("<", "<") #
.replace(">", ">"))
def FUNC_2(VAR_4: VAR_0) -> str:
if isinstance(VAR_4, HTML):
return VAR_4.__html__()
VAR_4 = FUNC_0(VAR_4)
VAR_4 = VAR_1.sub(r'<\1\2>', VAR_4)
for a_href in VAR_2.finditer(VAR_4):
VAR_8 = a_href.group(1)
VAR_9 = a_href.group(2)
if VAR_9:
VAR_12 = "<a VAR_8=\"%s\" VAR_9=\"%s\">" % (VAR_8, VAR_9)
else:
VAR_12 = "<a VAR_8=\"%s\">" % VAR_8
VAR_4 = VAR_4.replace(a_href.group(0), VAR_12)
return VAR_4.replace("&nbsp;", u" ")
def FUNC_3(VAR_5: str) -> str:
VAR_7 = None
while VAR_7 != VAR_5:
VAR_7 = VAR_5
VAR_10 = VAR_5.lower().find('<script')
if VAR_10 == -1:
break
VAR_11 = VAR_5.lower().find('</script')
if VAR_11 == -1:
break
VAR_5 = ht[0:VAR_10] + VAR_5[VAR_11 + 9:]
return VAR_5
def FUNC_4(VAR_5: VAR_0) -> str:
if isinstance(VAR_5, HTML):
VAR_5 = VAR_5.__html__()
if not isinstance(VAR_5, str):
return u"%s" % VAR_5
ht = ensure_str(VAR_5)
while True:
VAR_10 = VAR_5.find('<')
if VAR_10 == -1:
break
VAR_11 = VAR_5.find('>', VAR_10)
if VAR_11 == -1:
break
VAR_5 = ht[0:VAR_10] + VAR_5[VAR_11 + 1:]
return VAR_5.replace(" ", " ")
| [
1,
2,
3,
4,
5,
6,
10,
12,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
30,
35,
36,
37,
40,
44,
47,
49,
52,
55,
56,
58,
71,
73,
74,
76,
82,
83,
86,
91,
94,
96,
99,
101,
104,
106,
110,
116,
117,
120,
122,
125,
129,
132,
134,
137,
140,
152,
154,
155,
158,
161,
165,
167,
170,
173,
177,
180,
182,
192,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
85,
86,
87,
88,
89,
90,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100,
101,
102,
103,
104,
105,
106,
107,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
157,
158,
159,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174
] | [
1,
2,
3,
4,
5,
6,
10,
12,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
30,
36,
37,
38,
41,
45,
48,
50,
53,
56,
57,
59,
72,
74,
75,
77,
83,
84,
87,
92,
95,
97,
100,
102,
105,
107,
111,
117,
122,
125,
126,
129,
131,
134,
138,
141,
143,
146,
149,
161,
163,
164,
167,
170,
174,
176,
179,
182,
186,
189,
191,
201,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
86,
87,
88,
89,
90,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100,
101,
102,
103,
104,
105,
106,
107,
108,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
181,
182,
183
] |
2CWE-601
| # -*- coding: utf-8 -*-
EXPERIMENTAL_STUFF = True
MAXNFILES = 1000
if EXPERIMENTAL_STUFF:
if is_mobile:
response.view = response.view.replace('default/', 'default.mobile/')
response.menu = []
import re
from gluon.admin import *
from gluon.fileutils import abspath, read_file, write_file
from gluon.utils import web2py_uuid
from gluon.tools import Config
from gluon.compileapp import find_exposed_functions
from glob import glob
from gluon._compat import iteritems, PY2, pickle, xrange, urlopen, to_bytes, StringIO, to_native, reload
import gluon.rewrite
import shutil
import platform
try:
import git
if git.__version__ < '0.3.1':
raise ImportError("Your version of git is %s. Upgrade to 0.3.1 or better." % git.__version__)
have_git = True
except ImportError as e:
have_git = False
GIT_MISSING = 'Requires gitpython module, but not installed or incompatible version: %s' % e
from gluon.languages import (read_possible_languages, read_dict, write_dict,
read_plural_dict, write_plural_dict)
if DEMO_MODE and request.function in ['change_password', 'pack',
'pack_custom', 'pack_plugin', 'upgrade_web2py', 'uninstall',
'cleanup', 'compile_app', 'remove_compiled_app', 'delete',
'delete_plugin', 'create_file', 'upload_file', 'update_languages',
'reload_routes', 'git_push', 'git_pull', 'install_plugin']:
session.flash = T('disabled in demo mode')
redirect(URL('site'))
if is_gae and request.function in ('edit', 'edit_language',
'edit_plurals', 'update_languages', 'create_file', 'install_plugin'):
session.flash = T('disabled in GAE mode')
redirect(URL('site'))
if not is_manager() and request.function in ['change_password', 'upgrade_web2py']:
session.flash = T('disabled in multi user mode')
redirect(URL('site'))
if FILTER_APPS and request.args(0) and not request.args(0) in FILTER_APPS:
session.flash = T('disabled in demo mode')
redirect(URL('site'))
if not session.token:
session.token = web2py_uuid()
def count_lines(data):
return len([line for line in data.split('\n') if line.strip() and not line.startswith('#')])
def log_progress(app, mode='EDIT', filename=None, progress=0):
progress_file = os.path.join(apath(app, r=request), 'progress.log')
now = str(request.now)[:19]
if not os.path.exists(progress_file):
safe_open(progress_file, 'w').write('[%s] START\n' % now)
if filename:
safe_open(progress_file, 'a').write(
'[%s] %s %s: %s\n' % (now, mode, filename, progress))
def safe_open(a, b):
if (DEMO_MODE or is_gae) and ('w' in b or 'a' in b):
class tmp:
def write(self, data):
pass
def close(self):
pass
return tmp()
if PY2 or 'b' in b:
return open(a, b)
else:
return open(a, b, encoding="utf8")
def safe_read(a, b='r'):
safe_file = safe_open(a, b)
try:
return safe_file.read()
finally:
safe_file.close()
def safe_write(a, value, b='w'):
safe_file = safe_open(a, b)
try:
safe_file.write(value)
finally:
safe_file.close()
def get_app(name=None):
app = name or request.args(0)
if (app and os.path.exists(apath(app, r=request)) and
(not MULTI_USER_MODE or is_manager() or
db(db.app.name == app)(db.app.owner == auth.user.id).count())):
return app
session.flash = T('App does not exist or you are not authorized')
redirect(URL('site'))
def index():
""" Index handler """
send = request.vars.send
if DEMO_MODE:
session.authorized = True
session.last_time = t0
if not send:
send = URL('site')
if session.authorized:
redirect(send)
elif failed_login_count() >= allowed_number_of_attempts:
time.sleep(2 ** allowed_number_of_attempts)
raise HTTP(403)
elif request.vars.password:
if verify_password(request.vars.password[:1024]):
session.authorized = True
login_record(True)
if CHECK_VERSION:
session.check_version = True
else:
session.check_version = False
session.last_time = t0
if isinstance(send, list): # ## why does this happen?
send = str(send[0])
redirect(send)
else:
times_denied = login_record(False)
if times_denied >= allowed_number_of_attempts:
response.flash = \
T('admin disabled because too many invalid login attempts')
elif times_denied == allowed_number_of_attempts - 1:
response.flash = \
T('You have one more login attempt before you are locked out')
else:
response.flash = T('invalid password.')
return dict(send=send)
def check_version():
""" Checks if web2py is up to date """
session.forget()
session._unlock(response)
new_version, version = check_new_version(request.env.web2py_version,
WEB2PY_VERSION_URL)
if new_version in (-1, -2):
return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)
elif not new_version:
return A(T('web2py is up to date'), _href=WEB2PY_URL)
elif platform.system().lower() in ('windows', 'win32', 'win64') and os.path.exists("web2py.exe"):
return SPAN('You should upgrade to %s' % version.split('(')[0])
else:
return sp_button(URL('upgrade_web2py'), T('upgrade now to %s') % version.split('(')[0])
def logout():
""" Logout handler """
session.authorized = None
if MULTI_USER_MODE:
redirect(URL('user/logout'))
redirect(URL('index'))
def change_password():
if session.pam_user:
session.flash = T(
'PAM authenticated user, cannot change password here')
redirect(URL('site'))
form = SQLFORM.factory(Field('current_admin_password', 'password'),
Field('new_admin_password',
'password', requires=IS_STRONG()),
Field('new_admin_password_again', 'password'),
_class="span4 well")
if form.accepts(request.vars):
if not verify_password(request.vars.current_admin_password):
form.errors.current_admin_password = T('invalid password')
elif form.vars.new_admin_password != form.vars.new_admin_password_again:
form.errors.new_admin_password_again = T('no match')
else:
path = abspath('parameters_%s.py' % request.env.server_port)
safe_write(path, 'password="%s"' % CRYPT()(
request.vars.new_admin_password)[0])
session.flash = T('password changed')
redirect(URL('site'))
return dict(form=form)
def site():
""" Site handler """
myversion = request.env.web2py_version
# Shortcut to make the elif statements more legible
file_or_appurl = 'file' in request.vars or 'appurl' in request.vars
class IS_VALID_APPNAME(object):
def __call__(self, value):
if not re.compile('^\w+$').match(value):
return (value, T('Invalid application name'))
if not request.vars.overwrite and \
os.path.exists(os.path.join(apath(r=request), value)):
return (value, T('Application exists already'))
return (value, None)
is_appname = IS_VALID_APPNAME()
form_create = SQLFORM.factory(Field('name', requires=is_appname),
table_name='appcreate')
form_update = SQLFORM.factory(Field('name', requires=is_appname),
Field('file', 'upload', uploadfield=False),
Field('url'),
Field('overwrite', 'boolean'),
table_name='appupdate')
form_create.process()
form_update.process()
if DEMO_MODE:
pass
elif form_create.accepted:
# create a new application
appname = cleanpath(form_create.vars.name)
created, error = app_create(appname, request, info=True)
if created:
if MULTI_USER_MODE:
db.app.insert(name=appname, owner=auth.user.id)
log_progress(appname)
session.flash = T('new application "%s" created', appname)
gluon.rewrite.load()
redirect(URL('design', args=appname))
else:
session.flash = \
DIV(T('unable to create application "%s"', appname),
PRE(error))
redirect(URL(r=request))
elif form_update.accepted:
if (form_update.vars.url or '').endswith('.git'):
if not have_git:
session.flash = GIT_MISSING
redirect(URL(r=request))
target = os.path.join(apath(r=request), form_update.vars.name)
try:
new_repo = git.Repo.clone_from(form_update.vars.url, target)
session.flash = T('new application "%s" imported',
form_update.vars.name)
gluon.rewrite.load()
except git.GitCommandError as err:
session.flash = T('Invalid git repository specified.')
redirect(URL(r=request))
elif form_update.vars.url:
# fetch an application via URL or file upload
try:
f = urlopen(form_update.vars.url)
if f.code == 404:
raise Exception("404 file not found")
except Exception as e:
session.flash = \
DIV(T('Unable to download app because:'), PRE(repr(e)))
redirect(URL(r=request))
fname = form_update.vars.url
elif form_update.accepted and form_update.vars.file:
fname = request.vars.file.filename
f = request.vars.file.file
else:
session.flash = 'No file uploaded and no URL specified'
redirect(URL(r=request))
if f:
appname = cleanpath(form_update.vars.name)
installed = app_install(appname, f,
request, fname,
overwrite=form_update.vars.overwrite)
if f and installed:
msg = 'application %(appname)s installed with md5sum: %(digest)s'
if MULTI_USER_MODE:
db.app.insert(name=appname, owner=auth.user.id)
log_progress(appname)
session.flash = T(msg, dict(appname=appname,
digest=md5_hash(installed)))
gluon.rewrite.load()
else:
msg = 'unable to install application "%(appname)s"'
session.flash = T(msg, dict(appname=form_update.vars.name))
redirect(URL(r=request))
regex = re.compile('^\w+$')
if is_manager():
apps = [a for a in os.listdir(apath(r=request)) if regex.match(a) and
a != '__pycache__']
else:
apps = [a.name for a in db(db.app.owner == auth.user_id).select()]
if FILTER_APPS:
apps = [a for a in apps if a in FILTER_APPS]
apps = sorted(apps, key=lambda a: a.upper())
myplatform = platform.python_version()
return dict(app=None, apps=apps, myversion=myversion, myplatform=myplatform,
form_create=form_create, form_update=form_update)
def report_progress(app):
import datetime
progress_file = os.path.join(apath(app, r=request), 'progress.log')
regex = re.compile('\[(.*?)\][^\:]+\:\s+(\-?\d+)')
if not os.path.exists(progress_file):
return []
matches = regex.findall(open(progress_file, 'r').read())
events, counter = [], 0
for m in matches:
if not m:
continue
days = -(request.now - datetime.datetime.strptime(m[0],
'%Y-%m-%d %H:%M:%S')).days
counter += int(m[1])
events.append([days, counter])
return events
def pack():
app = get_app()
try:
if len(request.args) == 1:
fname = 'web2py.app.%s.w2p' % app
filename = app_pack(app, request, raise_ex=True)
else:
fname = 'web2py.app.%s.compiled.w2p' % app
filename = app_pack_compiled(app, request, raise_ex=True)
except Exception as e:
pferror = e
filename = None
if filename:
response.headers['Content-Type'] = 'application/w2p'
disposition = 'attachment; filename=%s' % fname
response.headers['Content-Disposition'] = disposition
return safe_read(filename, 'rb')
else:
session.flash = T('internal error: %s', pferror)
redirect(URL('site'))
def pack_plugin():
app = get_app()
if len(request.args) == 2:
fname = 'web2py.plugin.%s.w2p' % request.args[1]
filename = plugin_pack(app, request.args[1], request)
if filename:
response.headers['Content-Type'] = 'application/w2p'
disposition = 'attachment; filename=%s' % fname
response.headers['Content-Disposition'] = disposition
return safe_read(filename, 'rb')
else:
session.flash = T('internal error')
redirect(URL('plugin', args=request.args))
def pack_exe(app, base, filenames=None):
import urllib
import zipfile
# Download latest web2py_win and open it with zipfile
download_url = 'http://www.web2py.com/examples/static/web2py_win.zip'
out = StringIO()
out.write(urlopen(download_url).read())
web2py_win = zipfile.ZipFile(out, mode='a')
# Write routes.py with the application as default
routes = u'# -*- coding: utf-8 -*-\nrouters = dict(BASE=dict(default_application="%s"))' % app
web2py_win.writestr('web2py/routes.py', routes.encode('utf-8'))
# Copy the application into the zipfile
common_root = os.path.dirname(base)
for filename in filenames:
fname = os.path.join(base, filename)
arcname = os.path.join('web2py/applications', app, filename)
web2py_win.write(fname, arcname)
web2py_win.close()
response.headers['Content-Type'] = 'application/zip'
response.headers['Content-Disposition'] = 'attachment; filename=web2py.app.%s.zip' % app
out.seek(0)
return response.stream(out)
def pack_custom():
    """Pack a user-selected subset of the current app's files.

    Walks the app tree (skipping hidden/backup artifacts), and on POST
    packs only the submitted file names — restricted to the enumerated
    set, which prevents path traversal via crafted POST values — either
    as a .w2p archive or, with `doexe`, a Windows bundle via pack_exe.
    """
    app = get_app()
    base = apath(app, r=request)

    def ignore(fs):
        # drop hidden files (#...), editor backups (~) and .bak copies
        return [f for f in fs if not (
            f[:1] in '#' or f.endswith('~') or f.endswith('.bak'))]
    files = {}
    for (r, d, f) in os.walk(base):
        files[r] = {'folders': ignore(d), 'files': ignore(f)}
    if request.post_vars.file:
        # Security: only accept paths actually enumerated above.
        valid_set = set(os.path.relpath(os.path.join(r, f), base) for r in files for f in files[r]['files'])
        files = request.post_vars.file
        files = [files] if not isinstance(files, list) else files
        files = [file for file in files if file in valid_set]
        if request.post_vars.doexe is None:
            fname = 'web2py.app.%s.w2p' % app
            # Capture the error outside the except block: in Python 3 the
            # `as e` name is deleted when the block exits, so referencing
            # `e` in the flash message below raised NameError.
            pack_error = None
            try:
                filename = app_pack(app, request, raise_ex=True, filenames=files)
            except Exception as e:
                pack_error = e
                filename = None
            if filename:
                response.headers['Content-Type'] = 'application/w2p'
                disposition = 'attachment; filename=%s' % fname
                response.headers['Content-Disposition'] = disposition
                return safe_read(filename, 'rb')
            else:
                session.flash = T('internal error: %s', pack_error)
                redirect(URL(args=request.args))
        else:
            return pack_exe(app, base, files)
    return locals()
def upgrade_web2py():
    """Upgrade the running web2py installation after a confirmation dialog."""
    dialog = FORM.confirm(T('Upgrade'),
                          {T('Cancel'): URL('site')})
    if dialog.accepted:
        # upgrade() returns (success_flag, error_message)
        (success, error) = upgrade(request)
        if success:
            session.flash = T('web2py upgraded; please restart it')
        else:
            session.flash = T('unable to upgrade because "%s"', error)
        redirect(URL('site'))
    return dict(dialog=dialog)
def uninstall():
    """Uninstall an application after an explicit confirmation dialog.

    In multi-user mode only a manager or the app's owner may uninstall.
    A backup pack is attempted first; if packing fails the uninstall is
    aborted with an error flash.
    """
    app = get_app()
    dialog = FORM.confirm(T('Uninstall'),
                          {T('Cancel'): URL('site')})
    dialog['_id'] = 'confirm_form'
    dialog['_class'] = 'well'
    for component in dialog.components:
        component['_class'] = 'btn'
    if dialog.accepted:
        if MULTI_USER_MODE:
            # permission check: managers may delete any app row, owners their own
            if is_manager() and db(db.app.name == app).delete():
                pass
            elif db(db.app.name == app)(db.app.owner == auth.user.id).delete():
                pass
            else:
                session.flash = T('no permission to uninstall "%s"', app)
                redirect(URL('site'))
        try:
            # pack a backup copy before removing anything
            filename = app_pack(app, request, raise_ex=True)
        except Exception:
            # was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
            # (and web2py's HTTP control-flow exceptions) are not swallowed
            session.flash = T('unable to uninstall "%s"', app)
        else:
            if app_uninstall(app, request):
                session.flash = T('application "%s" uninstalled', app)
            else:
                session.flash = T('unable to uninstall "%s"', app)
        redirect(URL('site'))
    return dict(app=app, dialog=dialog)
def cleanup():
    """Clear cache, errors and sessions of the current app, then go to site."""
    app = get_app()
    if app_cleanup(app, request):
        session.flash = T('cache, errors and sessions cleaned')
    else:
        session.flash = T("some files could not be removed")
    redirect(URL('site'))
def compile_app():
    """Bytecode-compile the current app and flash the outcome.

    app_compile returns falsy on full success, a list of failed views when
    compilation succeeded partially, or an error blob otherwise.
    """
    app = get_app()
    c = app_compile(app, request,
                    skip_failed_views=(request.args(1) == 'skip_failed_views'))
    if not c:
        session.flash = T('application compiled')
    elif isinstance(c, list):
        # compiled, but some views were skipped — warn against "Pack compiled"
        session.flash = DIV(*[T('application compiled'), BR(), BR(),
                              T('WARNING: The following views could not be compiled:'), BR()] +
                            [CAT(BR(), view) for view in c] +
                            [BR(), BR(), T('DO NOT use the "Pack compiled" feature.')])
    else:
        session.flash = DIV(T('Cannot compile: there are errors in your app:'),
                            CODE(c))
    redirect(URL('site'))
def remove_compiled_app():
    """Delete the bytecode-compiled files of the current application."""
    application = get_app()
    remove_compiled_application(apath(application, r=request))
    session.flash = T('compiled application removed')
    redirect(URL('site'))
def delete():
    """Delete a single file of an app after a confirmation dialog.

    The target is request.args joined into a path relative to the
    applications folder; the deleted line count is logged as negative
    progress.
    """
    app = get_app()
    filename = '/'.join(request.args)
    sender = request.vars.sender
    if isinstance(sender, list):  # ## fix a problem with Vista
        sender = sender[0]
    dialog = FORM.confirm(T('Delete'),
                          {T('Cancel'): URL(sender, anchor=request.vars.id)})
    if dialog.accepted:
        try:
            full_path = apath(filename, r=request)
            # read via a context manager so the handle is closed even if
            # counting fails (the original leaked the open file object)
            with open(full_path, 'r') as source:
                lineno = count_lines(source.read())
            os.unlink(full_path)
            log_progress(app, 'DELETE', filename, progress=-lineno)
            session.flash = T('file "%(filename)s" deleted',
                              dict(filename=filename))
        except Exception:
            session.flash = T('unable to delete file "%(filename)s"',
                              dict(filename=filename))
        redirect(URL(sender, anchor=request.vars.id2))
    return dict(dialog=dialog, filename=filename)
def enable():
    """Toggle the DISABLED marker file of an application.

    Returns the SPAN used by the site page to re-render the toggle button.
    """
    # reject requests whose URL was not signed with the session hmac key
    if not URL.verify(request, hmac_key=session.hmac_key): raise HTTP(401)
    app = get_app()
    filename = os.path.join(apath(app, r=request), 'DISABLED')
    if is_gae:
        # Google App Engine: filesystem is read-only, toggling not possible
        return SPAN(T('Not supported'), _style='color:yellow')
    elif os.path.exists(filename):
        # marker present -> app currently disabled; remove it to re-enable
        os.unlink(filename)
        return SPAN(T('Disable'), _style='color:green')
    else:
        # marker absent -> disable the app by writing the marker file
        if PY2:
            safe_open(filename, 'wb').write('disabled: True\ntime-disabled: %s' % request.now)
        else:
            str_ = 'disabled: True\ntime-disabled: %s' % request.now
            safe_open(filename, 'wb').write(str_.encode('utf-8'))
        return SPAN(T('Enable'), _style='color:red')
def peek():
    """ Visualize object code """
    app = get_app(request.vars.app)
    # NOTE(review): filename is built straight from request.args — relies on
    # apath/abspath (and the routing layer) to reject '..' traversal; confirm.
    filename = '/'.join(request.args)
    if request.vars.app:
        path = abspath(filename)
    else:
        # apath resolves relative to the applications folder
        path = apath(filename, r=request)
    try:
        data = safe_read(path).replace('\r', '')
    except IOError:
        session.flash = T('file does not exist')
        redirect(URL('site'))
    # extension drives syntax highlighting in the view
    extension = filename[filename.rfind('.') + 1:].lower()
    return dict(app=app,
                filename=filename,
                data=data,
                extension=extension)
def test():
    """ Execute controller tests """
    app = get_app()
    if len(request.args) > 1:
        # an explicit controller-name pattern was requested
        file = request.args[1]
    else:
        # default: match every python controller
        file = '.*\.py'
    controllers = listdir(
        apath('%s/controllers/' % app, r=request), file + '$')
    return dict(app=app, controllers=controllers)
def keepalive():
    """No-op endpoint polled by the browser to keep the session alive."""
    return ''
def search():
    """Ajax endpoint: list app files whose contents contain the keywords."""
    keywords = request.vars.keywords or ''
    app = get_app()

    def match(filename, keywords):
        # plain substring search over the raw file contents
        filename = os.path.join(apath(app, r=request), filename)
        if keywords in read_file(filename, 'r'):
            return True
        return False
    path = apath(request.args[0], r=request)
    # candidate files: python and html one level down, html two levels down
    files1 = glob(os.path.join(path, '*/*.py'))
    files2 = glob(os.path.join(path, '*/*.html'))
    files3 = glob(os.path.join(path, '*/*/*.html'))
    files = [x[len(path) + 1:].replace(
        '\\', '/') for x in files1 + files2 + files3 if match(x, keywords)]
    return response.json(dict(files=files, message=T.M('Searching: **%s** %%{file}', len(files))))
def edit():
    """File edit handler.

    Renders the editor scaffold on a plain GET; via ajax it saves edits
    (with .bak backups and on-disk-change conflict detection), handles
    reverts, persists editor preferences, syntax-checks python files and
    reloads edited modules.
    """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    app_path = apath(app, r=request)
    # built-in defaults, overridden by the [editor] section of settings.cfg
    preferences = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true', 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false', 'linenumbers': 'true', 'highlightline': 'true'}
    config = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor', default_values={})
    preferences.update(config.read())
    if not(request.ajax) and not(is_mobile):
        # return the scaffolding, the rest will be through ajax requests
        response.title = T('Editing %s') % app
        return response.render('default/edit.html', dict(app=app, editor_settings=preferences))
    # show settings tab and save prefernces
    if 'settings' in request.vars:
        if request.post_vars:  # save new preferences
            if PY2:
                post_vars = request.post_vars.items()
            else:
                post_vars = list(request.post_vars.items())
            # Since unchecked checkbox are not serialized, we must set them as false by hand to store the correct preference in the settings
            post_vars += [(opt, 'false') for opt in preferences if opt not in request.post_vars]
            if config.save(post_vars):
                response.headers["web2py-component-flash"] = T('Preferences saved correctly')
            else:
                response.headers["web2py-component-flash"] = T('Preferences saved on session only')
            response.headers["web2py-component-command"] = "update_editor(%s);$('a[href=#editor_settings] button.close').click();" % response.json(config.read())
            return
        else:
            details = {'realfilename': 'settings', 'filename': 'settings', 'id': 'editor_settings', 'force': False}
            details['plain_html'] = response.render('default/editor_settings.html', {'editor_settings': preferences})
            return response.json(details)
    """ File edit handler """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    filename = '/'.join(request.args)
    realfilename = request.args[-1]
    if request.vars.app:
        path = abspath(filename)
    else:
        path = apath(filename, r=request)
    # Try to discover the file type
    if filename[-3:] == '.py':
        filetype = 'python'
    elif filename[-5:] == '.html':
        filetype = 'html'
    elif filename[-5:] == '.load':
        filetype = 'html'
    elif filename[-4:] == '.css':
        filetype = 'css'
    elif filename[-3:] == '.js':
        filetype = 'javascript'
    else:
        filetype = 'html'
    # ## check if file is not there
    if ('revert' in request.vars) and os.path.exists(path + '.bak'):
        # revert: swap the current file with its .bak backup
        try:
            data = safe_read(path + '.bak')
            data1 = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        safe_write(path, data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
        safe_write(path + '.bak', data1)
        response.flash = T('file "%s" of %s restored', (filename, saved_on))
    else:
        try:
            data = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        lineno_old = count_lines(data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
        if request.vars.file_hash and request.vars.file_hash != file_hash:
            # concurrent edit detected: park new content in path.1 and resolve
            session.flash = T('file changed on disk')
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path + '.1', data)
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('file changed on disk')),
                                      'redirect': URL('resolve',
                                                      args=request.args)})
            else:
                redirect(URL('resolve', args=request.args))
        elif request.vars.data:
            # normal save: keep previous content in .bak, log line delta
            safe_write(path + '.bak', data)
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path, data)
            lineno_new = count_lines(data)
            log_progress(
                app, 'EDIT', filename, progress=lineno_new - lineno_old)
            file_hash = md5_hash(data)
            saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
            response.flash = T('file saved on %s', saved_on)
    data_or_revert = (request.vars.data or request.vars.revert)
    # Check compile errors
    highlight = None
    if filetype == 'python' and request.vars.data:
        import _ast
        try:
            code = request.vars.data.rstrip().replace('\r\n', '\n') + '\n'
            # AST-only compile: syntax check without executing anything
            compile(code, path, "exec", _ast.PyCF_ONLY_AST)
        except Exception as e:
            # offset calculation is only used for textarea (start/stop)
            start = sum([len(line) + 1 for l, line
                         in enumerate(request.vars.data.split("\n"))
                         if l < e.lineno - 1])
            if e.text and e.offset:
                offset = e.offset - (len(e.text) - len(
                    e.text.splitlines()[-1]))
            else:
                offset = 0
            highlight = {'start': start, 'end': start +
                         offset + 1, 'lineno': e.lineno, 'offset': offset}
            try:
                ex_name = e.__class__.__name__
            except:
                ex_name = 'unknown exception!'
            response.flash = DIV(T('failed to compile file because:'), BR(),
                                 B(ex_name), ' ' + T('at line %s', e.lineno),
                                 offset and ' ' +
                                 T('at char %s', offset) or '',
                                 PRE(repr(e)))
    if data_or_revert and request.args[1] == 'modules':
        # Lets try to reload the modules
        try:
            mopath = '.'.join(request.args[2:])[:-3]
            exec('import applications.%s.modules.%s' % (
                request.args[0], mopath))
            reload(sys.modules['applications.%s.modules.%s'
                               % (request.args[0], mopath)])
        except Exception as e:
            response.flash = DIV(
                T('failed to reload module because:'), PRE(repr(e)))
    # cross-links between a view and its controller (and vice versa)
    edit_controller = None
    editviewlinks = None
    view_link = None
    if filetype == 'html' and len(request.args) >= 3:
        cfilename = os.path.join(request.args[0], 'controllers',
                                 request.args[2] + '.py')
        if os.path.exists(apath(cfilename, r=request)):
            edit_controller = URL('edit', args=[cfilename.replace(os.sep, "/")])
            view = request.args[3].replace('.html', '')
            view_link = URL(request.args[0], request.args[2], view)
    elif filetype == 'python' and request.args[1] == 'controllers':
        # it's a controller file.
        # Create links to all of the associated view files.
        app = get_app()
        viewname = os.path.splitext(request.args[2])[0]
        viewpath = os.path.join(app, 'views', viewname)
        aviewpath = apath(viewpath, r=request)
        viewlist = []
        if os.path.exists(aviewpath):
            if os.path.isdir(aviewpath):
                viewlist = glob(os.path.join(aviewpath, '*.html'))
        elif os.path.exists(aviewpath + '.html'):
            viewlist.append(aviewpath + '.html')
        if len(viewlist):
            editviewlinks = []
            for v in sorted(viewlist):
                vf = os.path.split(v)[-1]
                vargs = "/".join([viewpath.replace(os.sep, "/"), vf])
                editviewlinks.append(A(vf.split(".")[0],
                                       _class="editor_filelink",
                                       _href=URL('edit', args=[vargs])))
    if len(request.args) > 2 and request.args[1] == 'controllers':
        controller = (request.args[2])[:-3]
        try:
            functions = find_exposed_functions(data)
            functions = functions and sorted(functions) or []
        except SyntaxError as err:
            functions = ['SyntaxError:Line:%d' % err.lineno]
    else:
        (controller, functions) = (None, None)
    if 'from_ajax' in request.vars:
        return response.json({'file_hash': file_hash, 'saved_on': saved_on, 'functions': functions, 'controller': controller, 'application': request.args[0], 'highlight': highlight})
    else:
        file_details = dict(app=request.args[0],
                            lineno=request.vars.lineno or 1,
                            editor_settings=preferences,
                            filename=filename,
                            realfilename=realfilename,
                            filetype=filetype,
                            data=data,
                            edit_controller=edit_controller,
                            file_hash=file_hash,
                            saved_on=saved_on,
                            controller=controller,
                            functions=functions,
                            view_link=view_link,
                            editviewlinks=editviewlinks,
                            id=IS_SLUG()(filename)[0],
                            force=True if (request.vars.restore or
                                           request.vars.revert) else False)
        plain_html = response.render('default/edit_js.html', file_details)
        file_details['plain_html'] = plain_html
        if is_mobile:
            return response.render('default.mobile/edit.html',
                                   file_details, editor_settings=preferences)
        else:
            return response.json(file_details)
def todolist():
    """ Returns all TODO of the requested app
    """
    app = request.vars.app or ''
    app_path = apath('%(app)s' % {'app': app}, r=request)
    dirs = ['models', 'controllers', 'modules', 'private']

    def listfiles(app, dir, regexp='.*\.py$'):
        # non-backup python files of app/dir, slash-normalized
        files = sorted(listdir(apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request), regexp))
        files = [x.replace(os.path.sep, '/') for x in files if not x.endswith('.bak')]
        return files
    # matches '# todo <text>' comments, case-insensitive
    pattern = '#\s*(todo)+\s+(.*)'
    regex = re.compile(pattern, re.IGNORECASE)
    output = []
    for d in dirs:
        for f in listfiles(app, d):
            matches = []
            filename = apath(os.path.join(app, d, f), r=request)
            with safe_open(filename, 'r') as f_s:
                src = f_s.read()
            for m in regex.finditer(src):
                start = m.start()
                # 1-based line number of the match
                lineno = src.count('\n', 0, start) + 1
                matches.append({'text': m.group(0), 'lineno': lineno})
            if len(matches) != 0:
                output.append({'filename': f, 'matches': matches, 'dir': d})
    return {'todo': output, 'app': app}
def editor_sessions():
    """Save/list named editor sessions (sets of open files) in settings.cfg."""
    config = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor_sessions', default_values={})
    preferences = config.read()
    if request.vars.session_name and request.vars.files:
        # store the file list as a comma-joined string under the session name
        session_name = request.vars.session_name
        files = request.vars.files
        preferences.update({session_name: ','.join(files)})
        if config.save(preferences.items()):
            response.headers["web2py-component-flash"] = T('Session saved correctly')
        else:
            response.headers["web2py-component-flash"] = T('Session saved on session only')
    return response.render('default/editor_sessions.html', {'editor_sessions': preferences})
def resolve():
    """Merge helper: diff a file against its '.1' conflict copy and let the
    user pick which changed lines to keep; on POST writes the merged file.
    """
    filename = '/'.join(request.args)
    # ## check if file is not there
    path = apath(filename, r=request)
    a = safe_read(path).split('\n')
    try:
        b = safe_read(path + '.1').split('\n')
    except IOError:
        session.flash = 'Other file, no longer there'
        redirect(URL('edit', args=request.args))
    d = difflib.ndiff(a, b)

    def leading(line):
        """ """
        # TODO: we really need to comment this
        # Converts leading whitespace of a diff line for HTML display.
        z = ''
        for (k, c) in enumerate(line):
            if c == ' ':
                z += ' '
            elif c == ' \t':
                # NOTE(review): c is a single character, so this comparison to
                # a two-char string can never be true — looks like mangled
                # '&nbsp;' handling; confirm against upstream.
                z += ' '
            elif k == 0 and c == '?':
                pass
            else:
                break
        return XML(z)

    def getclass(item):
        """ Determine item class """
        # map the ndiff prefix to a CSS class
        operators = {' ': 'normal', '+': 'plus', '-': 'minus'}
        return operators[item[0]]
    if request.vars:
        # merge: keep unchanged lines plus every checked 'line<i>' checkbox
        c = '\n'.join([item[2:].rstrip() for (i, item) in enumerate(d) if item[0]
                       == ' ' or 'line%i' % i in request.vars])
        safe_write(path, c)
        session.flash = 'files merged'
        redirect(URL('edit', args=request.args))
    else:
        # Making the short circuit compatible with <= python2.4
        gen_data = lambda index, item: not item[:1] in ['+', '-'] and "" \
            or INPUT(_type='checkbox',
                     _name='line%i' % index,
                     value=item[0] == '+')
        diff = TABLE(*[TR(TD(gen_data(i, item)),
                          TD(item[0]),
                          TD(leading(item[2:]),
                             TT(item[2:].rstrip())),
                          _class=getclass(item))
                       for (i, item) in enumerate(d) if item[0] != '?'])
        return dict(diff=diff, filename=filename)
def edit_language():
    """ Edit language file """
    app = get_app()
    filename = '/'.join(request.args)
    response.title = request.args[-1]
    strings = read_dict(apath(filename, r=request))
    if '__corrupted__' in strings:
        # unparsable dict file: show the error instead of a form
        form = SPAN(strings['__corrupted__'], _class='error')
        return dict(filename=filename, form=form)
    keys = sorted(strings.keys(), key=lambda x: to_native(x).lower())
    rows = []
    rows.append(H2(T('Original/Translation')))
    for key in keys:
        name = md5_hash(key)
        s = strings[key]
        # keys may carry a '<context>\x01<text>' prefix; show context as a tag
        (prefix, sep, key) = key.partition('\x01')
        if sep:
            prefix = SPAN(prefix + ': ', _class='tm_ftag')
            k = key
        else:
            (k, prefix) = (prefix, '')
        _class = 'untranslated' if k == s else 'translated'
        # short strings get a one-line input, long ones a textarea
        if len(s) <= 40:
            elem = INPUT(_type='text', _name=name, value=s,
                         _size=70, _class=_class)
        else:
            elem = TEXTAREA(_name=name, value=s, _cols=70,
                            _rows=5, _class=_class)
        # Making the short circuit compatible with <= python2.4
        k = (s != k) and k or B(k)
        new_row = DIV(LABEL(prefix, k, _style="font-weight:normal;"),
                      CAT(elem, '\n', TAG.BUTTON(
                          T('delete'),
                          _onclick='return delkey("%s")' % name,
                          _class='btn')), _id=name, _class='span6 well well-small')
        rows.append(DIV(new_row, _class="row-fluid"))
    rows.append(DIV(INPUT(_type='submit', _value=T('update'), _class="btn btn-primary"), _class='controls'))
    form = FORM(*rows)
    if form.accepts(request.vars, keepvalues=True):
        strs = dict()
        for key in keys:
            name = md5_hash(key)
            # chr(127) marks an entry the user deleted in the UI
            if form.vars[name] == chr(127):
                continue
            strs[key] = form.vars[name]
        write_dict(apath(filename, r=request), strs)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(r=request, args=request.args))
    return dict(app=request.args[0], filename=filename, form=form)
def edit_plurals():
    """Edit a plural-forms dictionary file of the current app.

    Renders one row per word with (nplurals - 1) plural inputs; on submit
    entries marked with chr(127) are dropped and the dict is rewritten.
    """
    app = get_app()
    filename = '/'.join(request.args)
    plurals = read_plural_dict(
        apath(filename, r=request))  # plural forms dictionary
    nplurals = int(request.vars.nplurals) - 1  # plural forms quantity
    xnplurals = range(nplurals)  # was xrange(): Python 2 only
    if '__corrupted__' in plurals:
        # show error message and exit
        form = SPAN(plurals['__corrupted__'], _class='error')
        return dict(filename=filename, form=form)
    # The original used sorted(keys, cmp_fn) with unicode(): cmp comparators
    # and the unicode builtin are gone in Python 3, so this crashed there.
    # Sort case-insensitively with a key, exactly as edit_language does.
    keys = sorted(plurals.keys(), key=lambda x: to_native(x).lower())
    tab_rows = []
    for key in keys:
        name = md5_hash(key)
        forms = plurals[key]
        if len(forms) < nplurals:
            # pad missing plural forms so every row renders nplurals inputs
            forms.extend(None for i in range(nplurals - len(forms)))
        tab_col1 = DIV(CAT(LABEL(T("Singular Form")), B(key,
                                                        _class='fake-input')))
        tab_inputs = [SPAN(LABEL(T("Plural Form #%s", n + 1)), INPUT(_type='text', _name=name + '_' + str(n), value=forms[n], _size=20), _class='span6') for n in xnplurals]
        tab_col2 = DIV(CAT(*tab_inputs))
        tab_col3 = DIV(CAT(LABEL(XML(' ')), TAG.BUTTON(T('delete'), _onclick='return delkey("%s")' % name, _class='btn'), _class='span6'))
        tab_row = DIV(DIV(tab_col1, '\n', tab_col2, '\n', tab_col3, _class='well well-small'), _id=name, _class='row-fluid tab_row')
        tab_rows.append(tab_row)
    tab_rows.append(DIV(TAG['button'](T('update'), _type='submit',
                                      _class='btn btn-primary'),
                        _class='controls'))
    tab_container = DIV(*tab_rows, **dict(_class="row-fluid"))
    form = FORM(tab_container)
    if form.accepts(request.vars, keepvalues=True):
        new_plurals = dict()
        for key in keys:
            name = md5_hash(key)
            # chr(127) in the first plural slot marks a deleted entry
            if form.vars[name + '_0'] == chr(127):
                continue
            new_plurals[key] = [form.vars[name + '_' + str(n)]
                                for n in xnplurals]
        write_plural_dict(apath(filename, r=request), new_plurals)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(r=request, args=request.args, vars=dict(
            nplurals=request.vars.nplurals)))
    return dict(app=request.args[0], filename=filename, form=form)
def about():
    """Render the app's ABOUT and LICENSE files plus its progress report."""
    app = get_app()
    # both files ship with every scaffolded application
    about_text = safe_read(apath('%s/ABOUT' % app, r=request))
    license_text = safe_read(apath('%s/LICENSE' % app, r=request))
    return dict(app=app, about=MARKMIN(about_text),
                license=MARKMIN(license_text),
                progress=report_progress(app))
def design():
    """ Application design handler """
    # Collects the app's models/controllers/views/modules/private/static
    # files (plugins split out), defined tables, exposed functions, view
    # inheritance, languages and crontab for the design page.
    app = get_app()
    if not response.flash and app == request.application:
        msg = T('ATTENTION: you cannot edit the running application!')
        response.flash = msg
    # CSRF protection: every mutating request must carry the session token
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    if request.vars.pluginfile is not None and not isinstance(request.vars.pluginfile, str):
        # an actual uploaded file object: install it as a plugin
        filename = os.path.basename(request.vars.pluginfile.filename)
        if plugin_install(app, request.vars.pluginfile.file,
                          request, filename):
            session.flash = T('new plugin installed')
            redirect(URL('design', args=app))
        else:
            session.flash = \
                T('unable to install plugin "%s"', filename)
            redirect(URL(r=request, args=app))
    elif isinstance(request.vars.pluginfile, str):
        session.flash = T('plugin not specified')
        redirect(URL(r=request, args=app))
    # If we have only pyc files it means that
    # we cannot design
    if os.path.exists(apath('%s/compiled' % app, r=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # Get all models
    models = listdir(apath('%s/models/' % app, r=request), '.*\.py$')
    models = [x.replace('\\', '/') for x in models]
    defines = {}
    for m in models:
        data = safe_read(apath('%s/models/%s' % (app, m), r=request))
        defines[m] = re.findall(REGEX_DEFINE_TABLE, data, re.MULTILINE)
        defines[m].sort()
    # Get all controllers
    controllers = sorted(
        listdir(apath('%s/controllers/' % app, r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        try:
            items = find_exposed_functions(data)
            functions[c] = items and sorted(items) or []
        except SyntaxError as err:
            functions[c] = ['SyntaxError:Line:%d' % err.lineno]
    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+(\.\w+)+$'))
    views = [x.replace('\\', '/') for x in views if not x.endswith('.bak')]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = re.findall(REGEX_EXTEND, data, re.MULTILINE)
        if items:
            extend[c] = items[0][1]
        items = re.findall(REGEX_INCLUDE, data)
        include[c] = [i[1] for i in items]
    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    modules = modules = [x.replace('\\', '/') for x in modules]
    modules.sort()
    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()
    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()
    # Get all languages
    langpath = os.path.join(apath(app, r=request), 'languages')
    languages = dict([(lang, info) for lang, info
                      in iteritems(read_possible_languages(langpath))
                      if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files
    # Get crontab
    cronfolder = apath('%s/cron' % app, r=request)
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not is_gae:
        # make sure the cron folder and a stub crontab exist
        if not os.path.exists(cronfolder):
            os.mkdir(cronfolder)
        if not os.path.exists(crontab):
            safe_write(crontab, '#crontab')
    plugins = []

    def filter_plugins(items, plugins):
        # split plugin_* entries out of items, collecting plugin names as a
        # side effect into the shared `plugins` list
        plugins += [item[7:].split('/')[0].split(
            '.')[0] for item in items if item.startswith('plugin_')]
        plugins[:] = list(set(plugins))
        plugins.sort()
        return [item for item in items if not item.startswith('plugin_')]
    return dict(app=app,
                models=filter_plugins(models, plugins),
                defines=defines,
                controllers=filter_plugins(controllers, plugins),
                functions=functions,
                views=filter_plugins(views, plugins),
                modules=filter_plugins(modules, plugins),
                extend=extend,
                include=include,
                privates=filter_plugins(privates, plugins),
                statics=filter_plugins(statics, plugins),
                languages=languages,
                crontab=crontab,
                plugins=plugins)
def delete_plugin():
    """ Object delete handler """
    # Deletes every file/folder of a plugin after a confirmation dialog.
    app = request.args(0)
    plugin = request.args(1)
    plugin_name = 'plugin_' + plugin
    dialog = FORM.confirm(
        T('Delete'),
        {T('Cancel'): URL('design', args=app)})
    if dialog.accepted:
        try:
            # remove any entry named plugin_<name>[.*] in each standard folder
            for folder in ['models', 'views', 'controllers', 'static', 'modules', 'private']:
                path = os.path.join(apath(app, r=request), folder)
                for item in os.listdir(path):
                    if item.rsplit('.', 1)[0] == plugin_name:
                        filename = os.path.join(path, item)
                        if os.path.isdir(filename):
                            shutil.rmtree(filename)
                        else:
                            os.unlink(filename)
            session.flash = T('plugin "%(plugin)s" deleted',
                              dict(plugin=plugin))
        except Exception:
            session.flash = T('unable to delete file plugin "%(plugin)s"',
                              dict(plugin=plugin))
        redirect(URL('design', args=request.args(0), anchor=request.vars.id2))
    return dict(dialog=dialog, plugin=plugin)
def plugin():
    """ Application design handler """
    # Like design(), but restricted to the files of a single plugin
    # (request.args(1)); plugin files are selected by a name-prefix regex.
    app = get_app()
    plugin = request.args(1)
    if not response.flash and app == request.application:
        msg = T('ATTENTION: you cannot edit the running application!')
        response.flash = msg
    # If we have only pyc files it means that
    # we cannot design
    if os.path.exists(apath('%s/compiled' % app, r=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # Get all models
    models = listdir(apath('%s/models/' % app, r=request), '.*\.py$')
    models = [x.replace('\\', '/') for x in models]
    defines = {}
    for m in models:
        data = safe_read(apath('%s/models/%s' % (app, m), r=request))
        defines[m] = re.findall(REGEX_DEFINE_TABLE, data, re.MULTILINE)
        defines[m].sort()
    # Get all controllers
    controllers = sorted(
        listdir(apath('%s/controllers/' % app, r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        try:
            items = find_exposed_functions(data)
            functions[c] = items and sorted(items) or []
        except SyntaxError as err:
            functions[c] = ['SyntaxError:Line:%d' % err.lineno]
    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+\.\w+$'))
    views = [x.replace('\\', '/') for x in views]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = re.findall(REGEX_EXTEND, data, re.MULTILINE)
        if items:
            extend[c] = items[0][1]
        items = re.findall(REGEX_INCLUDE, data)
        include[c] = [i[1] for i in items]
    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    modules = modules = [x.replace('\\', '/') for x in modules]
    modules.sort()
    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()
    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()
    # Get all languages
    languages = sorted([lang + '.py' for lang, info in
                        iteritems(T.get_possible_languages_info())
                        if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files
    # Get crontab
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not os.path.exists(crontab):
        safe_write(crontab, '#crontab')

    def filter_plugins(items):
        # keep only entries that belong to this plugin
        regex = re.compile('^plugin_' + plugin + '(/.*|\..*)?$')
        return [item for item in items if item and regex.match(item)]
    return dict(app=app,
                models=filter_plugins(models),
                defines=defines,
                controllers=filter_plugins(controllers),
                functions=functions,
                views=filter_plugins(views),
                modules=filter_plugins(modules),
                extend=extend,
                include=include,
                privates=filter_plugins(privates),
                statics=filter_plugins(statics),
                languages=languages,
                crontab=crontab)
def create_file():
    """ Create files handler """
    # Creates a new file (model/controller/view/module/language/...) with
    # content appropriate for its destination folder; never overwrites.
    # CSRF protection: mutating requests must carry the session token.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        anchor = '#' + request.vars.id if request.vars.id else ''
        if request.vars.app:
            app = get_app(request.vars.app)
            path = abspath(request.vars.location)
        else:
            if request.vars.dir:
                request.vars.location += request.vars.dir + '/'
            app = get_app(name=request.vars.location.split('/')[0])
            path = apath(request.vars.location, r=request)
        # sanitize the requested name: anything outside [\w./-] becomes '_'
        filename = re.sub('[^\w./-]+', '_', request.vars.filename)
        if path[-7:] == '/rules/':
            # Handle plural rules files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            lang = re.match('^plural_rules-(.*)\.py$', filename).group(1)
            langinfo = read_possible_languages(apath(app, r=request))[lang]
            text = dedent("""
                   #!/usr/bin/env python
                   # -*- coding: utf-8 -*-
                   # Plural-Forms for %(lang)s (%(langname)s)
                   nplurals=2  # for example, English language has 2 forms:
                               # 1 singular and 1 plural
                   # Determine plural_id for number *n* as sequence of positive
                   # integers: 0,1,...
                   # NOTE! For singular form ALWAYS return plural_id = 0
                   get_plural_id = lambda n: int(n != 1)
                   # Construct and return plural form of *word* using
                   # *plural_id* (which ALWAYS>0). This function will be executed
                   # for words (or phrases) not found in plural_dict dictionary.
                   # By default this function simply returns word in singular:
                   construct_plural_form = lambda word, plural_id: word
                   """)[1:] % dict(lang=langinfo[0], langname=langinfo[1])
        elif path[-11:] == '/languages/':
            # Handle language files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            path = os.path.join(apath(app, r=request), 'languages', filename)
            if not os.path.exists(path):
                safe_write(path, '')
            # create language xx[-yy].py file:
            findT(apath(app, r=request), filename[:-3])
            session.flash = T('language file "%(filename)s" created/updated',
                              dict(filename=filename))
            redirect(request.vars.sender + anchor)
        elif path[-8:] == '/models/':
            # Handle python models
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n'
        elif path[-13:] == '/controllers/':
            # Handle python controllers
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n# %s\ndef index(): return dict(message="hello from %s")'
            text = text % (T('try something like'), filename)
        elif path[-7:] == '/views/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle template (html) views
            if filename.find('.') < 0:
                filename += '.html'
            extension = filename.split('.')[-1].lower()
            if len(filename) == 5:
                raise SyntaxError
            msg = T(
                'This is the %(filename)s template', dict(filename=filename))
            if extension == 'html':
                text = dedent("""
                       {{extend 'layout.html'}}
                       <h1>%s</h1>
                       {{=BEAUTIFY(response._vars)}}""" % msg)[1:]
            else:
                # non-html view: copy the generic template if one exists
                generic = os.path.join(path, 'generic.' + extension)
                if os.path.exists(generic):
                    text = read_file(generic)
                else:
                    text = ''
        elif path[-9:] == '/modules/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle python module files
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = dedent("""
                   #!/usr/bin/env python
                   # -*- coding: utf-8 -*-
                   from gluon import *\n""")[1:]
        elif (path[-8:] == '/static/') or (path[-9:] == '/private/'):
            if (request.vars.plugin and
                    not filename.startswith('plugin_%s/' % request.vars.plugin)):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            text = ''
        else:
            redirect(request.vars.sender + anchor)
        full_filename = os.path.join(path, filename)
        dirpath = os.path.dirname(full_filename)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        if os.path.exists(full_filename):
            # never overwrite an existing file
            raise SyntaxError
        safe_write(full_filename, text)
        log_progress(app, 'CREATE', filename)
        if request.vars.dir:
            result = T('file "%(filename)s" created',
                       dict(filename=full_filename[len(path):]))
        else:
            session.flash = T('file "%(filename)s" created',
                              dict(filename=full_filename[len(path):]))
        vars = {}
        if request.vars.id:
            vars['id'] = request.vars.id
        if request.vars.app:
            vars['app'] = request.vars.app
        redirect(URL('edit',
                     args=[os.path.join(request.vars.location, filename)], vars=vars))
    except Exception as e:
        # HTTP is web2py's redirect control-flow exception; anything else
        # is a genuine failure worth flashing
        if not isinstance(e, HTTP):
            session.flash = T('cannot create file')
    if request.vars.dir:
        # ajax caller: refresh the files menu and open the new file
        response.flash = result
        response.headers['web2py-component-content'] = 'append'
        response.headers['web2py-component-command'] = "%s %s %s" % (
            "$.web2py.invalidate('#files_menu');",
            "load_file('%s');" % URL('edit', args=[app, request.vars.dir, filename]),
            "$.web2py.enableElement($('#form form').find($.web2py.formInputClickSelector));")
        return ''
    else:
        redirect(request.vars.sender + anchor)
def listfiles(app, dir, regexp=r'.*\.py$'):
    """Return the sorted, slash-normalized file names of app/dir matching regexp.

    Editor backup files (*.bak) are excluded.  The default pattern matches
    Python sources only.  (Raw string: '\.' is an invalid escape sequence
    in a plain literal and warns on modern Python.)
    """
    files = sorted(
        listdir(apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request), regexp))
    # normalize Windows path separators and drop backup files
    files = [x.replace('\\', '/') for x in files if not x.endswith('.bak')]
    return files
def editfile(path, file, vars=None, app=None):
    """Build an A() link that opens *file* in the admin editor.

    vars: optional query variables; the presence of an 'app' key switches
    the URL argument layout.  The original used a shared mutable default
    ({}); a None sentinel preserves behavior without the aliasing hazard.
    """
    vars = {} if vars is None else vars
    args = (path, file) if 'app' in vars else (app, path, file)
    url = URL('edit', args=args, vars=vars)
    return A(file, _class='editor_filelink', _href=url, _style='word-wrap: nowrap;')
def files_menu():
    """Build the collapsible file-browser menu for the selected app."""
    app = request.vars.app or 'welcome'
    # per-folder filename filters (raw strings: the patterns contain regex escapes)
    dirs = [{'name': 'models', 'reg': r'.*\.py$'},
            {'name': 'controllers', 'reg': r'.*\.py$'},
            {'name': 'views', 'reg': r'[\w/\-]+(\.\w+)+$'},
            {'name': 'modules', 'reg': r'.*\.py$'},
            {'name': 'static', 'reg': r'[^\.#].*'},
            {'name': 'private', 'reg': r'.*\.py$'}]
    result_files = []
    for dir in dirs:
        # one collapsible section per folder; ids encode the file path with
        # dots replaced so they are valid DOM ids
        result_files.append(TAG[''](LI(dir['name'], _class="nav-header component", _onclick="collapse('" + dir['name'] + "_files');"),
                                    LI(UL(*[LI(editfile(dir['name'], f, dict(id=dir['name'] + f.replace('.', '__')), app), _style="overflow:hidden", _id=dir['name'] + "__" + f.replace('.', '__'))
                                            for f in listfiles(app, dir['name'], regexp=dir['reg'])],
                                          _class="nav nav-list small-font"),
                                       _id=dir['name'] + '_files', _style="display: none;")))
    return dict(result_files=result_files)
def upload_file():
    """ File uploading handler """
    # CSRF guard: the posted token must match the session token
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        filename = None
        app = get_app(name=request.vars.location.split('/')[0])
        path = apath(request.vars.location, r=request)
        if request.vars.filename:
            # sanitize the user-supplied name (raw string: regex escapes)
            filename = re.sub(r'[^\w\./]+', '_', request.vars.filename)
        else:
            filename = os.path.split(request.vars.file.filename)[-1]
        # force the extension expected by the destination folder
        if path[-8:] == '/models/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-9:] == '/modules/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-13:] == '/controllers/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-7:] == '/views/' and not filename[-5:] == '.html':
            filename += '.html'
        if path[-11:] == '/languages/' and not filename[-3:] == '.py':
            filename += '.py'
        filename = os.path.join(path, filename)
        dirpath = os.path.dirname(filename)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        data = request.vars.file.file.read()
        lineno = count_lines(data)
        safe_write(filename, data, 'wb')
        log_progress(app, 'UPLOAD', filename, lineno)
        session.flash = T('file "%(filename)s" uploaded',
                          dict(filename=filename[len(path):]))
    except Exception:
        # best-effort flash; filename may still be None if apath failed early
        if filename:
            d = dict(filename=filename[len(path):])
        else:
            d = dict(filename='unknown')
        session.flash = T('cannot upload file "%(filename)s"', d)
    redirect(request.vars.sender)
def errors():
    """Render/manage application error tickets.

    Four modes selected by request.args(1):
      new    - file-based tickets, grouped by traceback hash
      dbnew  - DB-stored tickets, grouped by traceback hash
      dbold  - DB-stored tickets, flat chronological list
      other  - file-based tickets, flat list sorted by mtime
    """
    import operator
    import os
    import hashlib
    app = get_app()
    if is_gae:
        method = 'dbold' if ('old' in
                             (request.args(1) or '')) else 'dbnew'
    else:
        method = request.args(1) or 'new'
    db_ready = {}
    db_ready['status'] = get_ticket_storage(app)
    db_ready['errmessage'] = T(
        "No ticket_storage.txt found under /private folder")
    db_ready['errlink'] = "http://web2py.com/books/default/chapter/29/13#Collecting-tickets"
    if method == 'new':
        errors_path = apath('%s/errors' % app, r=request)
        # tickets checked for deletion arrive as delete_<hash> form vars
        delete_hashes = []
        for item in request.vars:
            if item[:7] == 'delete_':
                delete_hashes.append(item[7:])
        hash2error = dict()
        for fn in listdir(errors_path, r'^[a-fA-F0-9.\-]+$'):
            fullpath = os.path.join(errors_path, fn)
            if not os.path.isfile(fullpath):
                continue
            try:
                fullpath_file = safe_open(fullpath, 'rb')
                try:
                    # NOTE: pickle of locally generated tickets only
                    error = pickle.load(fullpath_file)
                finally:
                    fullpath_file.close()
            except IOError:
                continue
            except EOFError:
                continue
            # group identical tracebacks under one md5 digest
            hash = hashlib.md5(to_bytes(error['traceback'])).hexdigest()
            if hash in delete_hashes:
                os.unlink(fullpath)
            else:
                try:
                    hash2error[hash]['count'] += 1
                except KeyError:
                    error_lines = error['traceback'].split("\n")
                    last_line = error_lines[-2] if len(error_lines) > 1 else 'unknown'
                    error_causer = os.path.split(error['layer'])[1]
                    hash2error[hash] = dict(count=1, pickel=error,
                                            causer=error_causer,
                                            last_line=last_line,
                                            hash=hash, ticket=fn)
        decorated = [(x['count'], x) for x in hash2error.values()]
        decorated.sort(key=operator.itemgetter(0), reverse=True)
        return dict(errors=[x[1] for x in decorated], app=app, method=method, db_ready=db_ready)
    elif method == 'dbnew':
        errors_path = apath('%s/errors' % app, r=request)
        tk_db, tk_table = get_ticket_storage(app)
        delete_hashes = []
        for item in request.vars:
            if item[:7] == 'delete_':
                delete_hashes.append(item[7:])
        hash2error = dict()
        for fn in tk_db(tk_table.id > 0).select():
            try:
                error = pickle.loads(fn.ticket_data)
                # to_bytes: md5 requires bytes on Python 3 (mirrors the
                # 'new' branch above; plain str raised TypeError here)
                hash = hashlib.md5(to_bytes(error['traceback'])).hexdigest()
                if hash in delete_hashes:
                    tk_db(tk_table.id == fn.id).delete()
                    tk_db.commit()
                else:
                    try:
                        hash2error[hash]['count'] += 1
                    except KeyError:
                        error_lines = error['traceback'].split("\n")
                        last_line = error_lines[-2]
                        error_causer = os.path.split(error['layer'])[1]
                        hash2error[hash] = dict(count=1,
                                                pickel=error, causer=error_causer,
                                                last_line=last_line, hash=hash,
                                                ticket=fn.ticket_id)
            except AttributeError as e:
                # unreadable/legacy row: drop it
                tk_db(tk_table.id == fn.id).delete()
                tk_db.commit()
        decorated = [(x['count'], x) for x in hash2error.values()]
        decorated.sort(key=operator.itemgetter(0), reverse=True)
        return dict(errors=[x[1] for x in decorated], app=app,
                    method=method, db_ready=db_ready)
    elif method == 'dbold':
        tk_db, tk_table = get_ticket_storage(app)
        for item in request.vars:
            if item[:7] == 'delete_':
                tk_db(tk_table.ticket_id == item[7:]).delete()
                tk_db.commit()
        tickets_ = tk_db(tk_table.id > 0).select(tk_table.ticket_id,
                                                 tk_table.created_datetime,
                                                 orderby=~tk_table.created_datetime)
        tickets = [row.ticket_id for row in tickets_]
        times = dict([(row.ticket_id, row.created_datetime) for
                      row in tickets_])
        return dict(app=app, tickets=tickets, method=method,
                    times=times, db_ready=db_ready)
    else:
        for item in request.vars:
            # delete_all rows doesn't contain any ticket
            # Remove anything else as requested
            if item[:7] == 'delete_' and (not item == "delete_all}"):
                os.unlink(apath('%s/errors/%s' % (app, item[7:]), r=request))
        func = lambda p: os.stat(apath('%s/errors/%s' %
                                       (app, p), r=request)).st_mtime
        tickets = sorted(
            listdir(apath('%s/errors/' % app, r=request), r'^\w.*'),
            key=func,
            reverse=True)
        return dict(app=app, tickets=tickets, method=method, db_ready=db_ready)
def get_ticket_storage(app):
    """Return (db, table) for DB-backed tickets, or False if file-based.

    The connection string is read from private/ticket_storage.txt; on GAE
    the Datastore is used as a fallback when that file is absent.
    """
    private_folder = apath('%s/private' % app, r=request)
    ticket_file = os.path.join(private_folder, 'ticket_storage.txt')
    if os.path.exists(ticket_file):
        db_string = safe_read(ticket_file)
        db_string = db_string.strip().replace('\r', '').replace('\n', '')
    elif is_gae:
        # use Datastore as fallback if there is no ticket_file
        db_string = "google:datastore"
    else:
        return False
    tickets_table = 'web2py_ticket'
    tablename = tickets_table + '_' + app
    db_path = apath('%s/databases' % app, r=request)
    ticketsdb = DAL(db_string, folder=db_path, auto_import=True)
    if not ticketsdb.get(tablename):
        # define on first use; the define_table return value is not needed
        ticketsdb.define_table(
            tablename,
            Field('ticket_id', length=100),
            Field('ticket_data', 'text'),
            Field('created_datetime', 'datetime'),
        )
    return ticketsdb, ticketsdb.get(tablename)
def make_link(path):
    """ Create a link from a path """
    # normalize Windows separators before testing the path
    tryFile = path.replace('\\', '/')
    if os.path.isabs(tryFile) and os.path.isfile(tryFile):
        (folder, filename) = os.path.split(tryFile)
        (base, ext) = os.path.splitext(filename)
        app = get_app()
        # only files the admin editor can open become hyperlinks
        editable = {'controllers': '.py', 'models': '.py', 'views': '.html'}
        for key in editable.keys():
            # the file must live directly under <app>/<key> and carry the
            # matching extension
            check_extension = folder.endswith("%s/%s" % (app, key))
            if ext.lower() == editable[key] and check_extension:
                return to_native(A('"' + tryFile + '"',
                                   _href=URL(r=request,
                                             f='edit/%s/%s/%s' % (app, key, filename))).xml())
    # not an editable project file: caller keeps the plain text
    return ''
def make_links(traceback):
    """Replace quoted file paths inside *traceback* with editor links."""
    segments = traceback.split('"')
    out = segments[0] if segments else ''
    idx = 1
    while idx < len(segments):
        candidate = segments[idx]
        anchor = make_link(candidate)
        # keep the original opening quote when the segment is not linkable
        out += ('"' + candidate) if anchor == '' else anchor
        if idx + 1 < len(segments):
            out += segments[idx + 1]
        # consume the quoted segment and the text following it
        idx += 2
    return out
class TRACEBACK(object):
    """ Generate the traceback """
    def __init__(self, text):
        """ TRACEBACK constructor """
        # syntax-highlight the traceback and hyperlink editable file paths
        self.s = make_links(CODE(text).xml())
    def xml(self):
        """ Returns the xml """
        return self.s
def ticket():
    """ Ticket handler """
    # expects request.args == [app, ticket_id]
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    app = get_app()
    myversion = request.env.web2py_version
    ticket = request.args[1]
    # load the stored error snapshot for this ticket
    e = RestrictedError()
    e.load(request, app, ticket)
    return dict(app=app,
                ticket=ticket,
                output=e.output,
                traceback=(e.traceback and TRACEBACK(e.traceback)),
                snapshot=e.snapshot,
                code=e.code,
                layer=e.layer,
                myversion=myversion)
def ticketdb():
    """ Ticket handler """
    # same as ticket() but loads the snapshot from the tickets database
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    app = get_app()
    myversion = request.env.web2py_version
    ticket = request.args[1]
    e = RestrictedError()
    # point RestrictedError.load at the DB-backed storage
    request.tickets_db = get_ticket_storage(app)[0]
    e.load(request, app, ticket)
    # reuse the file-based ticket view
    response.view = 'default/ticket.html'
    return dict(app=app,
                ticket=ticket,
                output=e.output,
                traceback=(e.traceback and TRACEBACK(e.traceback)),
                snapshot=e.snapshot,
                code=e.code,
                layer=e.layer,
                myversion=myversion)
def error():
    """Deliberately raise an exception so a sample ticket is generated."""
    raise RuntimeError('admin ticket generator at your service')
def update_languages():
    """ Update available languages """
    app = get_app()
    # rescan the app sources and refresh its language files
    update_all_languages(apath(app, r=request))
    session.flash = T('Language files (static strings) updated')
    redirect(URL('design', args=app, anchor='languages'))
def user():
    """Auth controller for multi-user mode (login/register/etc.)."""
    if MULTI_USER_MODE:
        if not db(db.auth_user).count():
            # the very first registered user is activated immediately
            auth.settings.registration_requires_approval = False
        return dict(form=auth())
    else:
        return dict(form=T("Disabled"))
def reload_routes():
    """ Reload routes.py """
    gluon.rewrite.load()
    redirect(URL('site'))
def manage_students():
    """Grid to manage registered students (manager only)."""
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    # expose registration_key so the manager can approve/block users
    db.auth_user.registration_key.writable = True
    grid = SQLFORM.grid(db.auth_user)
    return locals()
def bulk_register():
    """Batch-create student accounts from a newline-separated email list."""
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    form = SQLFORM.factory(Field('emails', 'text'))
    if form.process().accepted:
        emails = [x.strip() for x in form.vars.emails.split('\n') if x.strip()]
        n = 0
        for email in emails:
            # skip addresses that already have an account
            if not db.auth_user(email=email):
                n += db.auth_user.insert(email=email) and 1 or 0
        session.flash = T('%s students registered', n)
        redirect(URL('site'))
    return locals()
# Begin experimental stuff; needs fixes:
# 1) should run in its own process - cannot os.chdir
# 2) should not prompt user at console
# 3) should give option to force commit and not require manual merge
def git_pull():
    """ Git Pull handler """
    app = get_app()
    if not have_git:
        session.flash = GIT_MISSING
        redirect(URL('site'))
    dialog = FORM.confirm(T('Pull'),
                          {T('Cancel'): URL('site')})
    if dialog.accepted:
        try:
            # the app folder is expected to be a git working tree
            repo = git.Repo(os.path.join(apath(r=request), app))
            origin = repo.remotes.origin
            origin.fetch()
            origin.pull()
            session.flash = T("Application updated via git pull")
            redirect(URL('site'))
        except git.CheckoutError:
            session.flash = T("Pull failed, certain files could not be checked out. Check logs for details.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))
        except git.GitCommandError:
            session.flash = T(
                "Pull failed, git exited abnormally. See logs for details.")
            redirect(URL('site'))
        except AssertionError:
            # gitpython raises AssertionError on some merge conflicts
            session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))
    elif 'cancel' in request.vars:
        redirect(URL('site'))
    return dict(app=app, dialog=dialog)
def git_push():
    """ Git Push handler """
    app = get_app()
    if not have_git:
        session.flash = GIT_MISSING
        redirect(URL('site'))
    form = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY()))
    form.element('input[type=submit]')['_value'] = T('Push')
    form.add_button(T('Cancel'), URL('site'))
    form.process()
    if form.accepted:
        try:
            repo = git.Repo(os.path.join(apath(r=request), app))
            index = repo.index
            # stage every file of the application
            index.add([apath(r=request) + app + '/*'])
            # the commit object itself is not needed (was an unused local)
            index.commit(form.vars.changelog)
            origin = repo.remotes.origin
            origin.push()
            session.flash = T(
                "Git repo updated with latest application changes.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.")
            redirect(URL('site'))
    return dict(app=app, form=form)
def plugins():
    """List installable plugins from web2pyslices (cached in the session)."""
    app = request.args(0)
    from gluon.serializers import loads_json
    if not session.plugins:
        try:
            rawlist = urlopen("http://www.web2pyslices.com/" +
                              "public/api.json/action/list/content/Package?package" +
                              "_type=plugin&search_index=false").read()
            session.plugins = loads_json(rawlist)
        except Exception:
            # narrowed from a bare except: degrade gracefully on
            # network/parse failure instead of masking SystemExit etc.
            response.flash = T('Unable to download the list of plugins')
            session.plugins = []
    return dict(plugins=session.plugins["results"], app=request.args(0))
def install_plugin():
    """Download and install a web2py plugin (.w2p) from a URL."""
    app = request.args(0)
    source = request.vars.source
    plugin = request.vars.plugin
    if not (source and app):
        raise HTTP(500, T("Invalid request"))
    # make sure no XSS attacks in source
    if not source.lower().split('://')[0] in ('http','https'):
        raise HTTP(500, T("Invalid request"))
    form = SQLFORM.factory()
    result = None
    if form.process().accepted:
        # get w2p plugin
        if "web2py.plugin." in source:
            # derive the canonical plugin filename from the URL itself
            filename = "web2py.plugin.%s.w2p" % \
                source.split("web2py.plugin.")[-1].split(".w2p")[0]
        else:
            filename = "web2py.plugin.%s.w2p" % cleanpath(plugin)
        if plugin_install(app, urlopen(source),
                          request, filename):
            session.flash = T('New plugin installed: %s', filename)
        else:
            session.flash = \
                T('unable to install plugin "%s"', filename)
        redirect(URL(f="plugins", args=[app, ]))
    return dict(form=form, app=app, plugin=plugin, source=source)
# -*- coding: utf-8 -*-
# Feature flag for the experimental admin features below
EXPERIMENTAL_STUFF = True
# maximum number of files listed per folder
MAXNFILES = 1000
if EXPERIMENTAL_STUFF:
    if is_mobile:
        # serve the mobile variants of the default views
        response.view = response.view.replace('default/', 'default.mobile/')
        response.menu = []
import re
from gluon.admin import *
from gluon.fileutils import abspath, read_file, write_file
from gluon.utils import web2py_uuid
from gluon.tools import Config, prevent_open_redirect
from gluon.compileapp import find_exposed_functions
from glob import glob
from gluon._compat import iteritems, PY2, pickle, xrange, urlopen, to_bytes, StringIO, to_native, reload
import gluon.rewrite
import shutil
import platform
try:
import git
if git.__version__ < '0.3.1':
raise ImportError("Your version of git is %s. Upgrade to 0.3.1 or better." % git.__version__)
have_git = True
except ImportError as e:
have_git = False
GIT_MISSING = 'Requires gitpython module, but not installed or incompatible version: %s' % e
from gluon.languages import (read_possible_languages, read_dict, write_dict,
read_plural_dict, write_plural_dict)
# Gate mutating actions in the restricted runtime modes.
if DEMO_MODE and request.function in ['change_password', 'pack',
        'pack_custom', 'pack_plugin', 'upgrade_web2py', 'uninstall',
        'cleanup', 'compile_app', 'remove_compiled_app', 'delete',
        'delete_plugin', 'create_file', 'upload_file', 'update_languages',
        'reload_routes', 'git_push', 'git_pull', 'install_plugin']:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))
# file editing is not possible on Google App Engine
if is_gae and request.function in ('edit', 'edit_language',
        'edit_plurals', 'update_languages', 'create_file', 'install_plugin'):
    session.flash = T('disabled in GAE mode')
    redirect(URL('site'))
# only the manager may change the admin password or upgrade web2py
if not is_manager() and request.function in ['change_password', 'upgrade_web2py']:
    session.flash = T('disabled in multi user mode')
    redirect(URL('site'))
if FILTER_APPS and request.args(0) and not request.args(0) in FILTER_APPS:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))
# per-session CSRF token used by the file-modifying handlers
if not session.token:
    session.token = web2py_uuid()
def count_lines(data):
    """Count non-blank lines of *data* that do not start with '#' at column 0."""
    return sum(1 for raw in data.split('\n')
               if raw.strip() and not raw.startswith('#'))
def log_progress(app, mode='EDIT', filename=None, progress=0):
    """Append a timestamped entry to the app's progress.log.

    progress is the signed line-count delta for the operation.  File
    handles are closed explicitly (the original leaked them); a `with`
    block is avoided because safe_open's demo-mode stub only provides
    write()/close().
    """
    progress_file = os.path.join(apath(app, r=request), 'progress.log')
    now = str(request.now)[:19]
    if not os.path.exists(progress_file):
        log = safe_open(progress_file, 'w')
        try:
            log.write('[%s] START\n' % now)
        finally:
            log.close()
    if filename:
        log = safe_open(progress_file, 'a')
        try:
            log.write('[%s] %s %s: %s\n' % (now, mode, filename, progress))
        finally:
            log.close()
def safe_open(a, b):
    """Open file *a* with mode *b*; writes become no-ops in demo/GAE mode.

    Returns a dummy writer object (write/close only) instead of a real
    handle when running in DEMO_MODE or on GAE and the mode would modify
    the file.
    """
    if (DEMO_MODE or is_gae) and ('w' in b or 'a' in b):
        class tmp:
            # minimal stand-in mimicking the file API used by callers
            def write(self, data):
                pass
            def close(self):
                pass
        return tmp()
    # binary modes must not pass an encoding; PY2 open() lacks the kwarg
    if PY2 or 'b' in b:
        return open(a, b)
    else:
        return open(a, b, encoding="utf8")
def safe_read(a, b='r'):
    """Read and return the full contents of file *a*, opened with mode *b*."""
    handle = safe_open(a, b)
    try:
        content = handle.read()
    finally:
        handle.close()
    return content
def safe_write(a, value, b='w'):
    """Write *value* to file *a*, opened with mode *b*; always closes it."""
    handle = safe_open(a, b)
    try:
        handle.write(value)
    finally:
        handle.close()
def get_app(name=None):
    """Return the requested app name if it exists and the user may access it.

    Falls back to request.args(0) when *name* is not given; on failure it
    flashes a message and redirects to the site page (never returns).
    """
    app = name or request.args(0)
    if (app and os.path.exists(apath(app, r=request)) and
        (not MULTI_USER_MODE or is_manager() or
         db(db.app.name == app)(db.app.owner == auth.user.id).count())):
        return app
    session.flash = T('App does not exist or you are not authorized')
    redirect(URL('site'))
def index():
    """ Index handler """
    # `send` is the post-login destination, sanitized against open redirects
    send = prevent_open_redirect(request.vars.send)
    if DEMO_MODE:
        session.authorized = True
        session.last_time = t0
    if not send:
        send = URL('site')
    if session.authorized:
        redirect(send)
    elif failed_login_count() >= allowed_number_of_attempts:
        # exponential lockout after repeated failed logins
        time.sleep(2 ** allowed_number_of_attempts)
        raise HTTP(403)
    elif request.vars.password:
        # cap password length to bound hashing cost
        if verify_password(request.vars.password[:1024]):
            session.authorized = True
            login_record(True)
            if CHECK_VERSION:
                session.check_version = True
            else:
                session.check_version = False
            session.last_time = t0
            if isinstance(send, list):  # ## why does this happen?
                send = str(send[0])
            redirect(send)
        else:
            times_denied = login_record(False)
            if times_denied >= allowed_number_of_attempts:
                response.flash = \
                    T('admin disabled because too many invalid login attempts')
            elif times_denied == allowed_number_of_attempts - 1:
                response.flash = \
                    T('You have one more login attempt before you are locked out')
            else:
                response.flash = T('invalid password.')
    return dict(send=send)
def check_version():
    """ Checks if web2py is up to date """
    session.forget()
    session._unlock(response)
    new_version, version = check_new_version(request.env.web2py_version,
                                             WEB2PY_VERSION_URL)
    # -1/-2 signal a failed version lookup
    if new_version in (-1, -2):
        return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)
    elif not new_version:
        return A(T('web2py is up to date'), _href=WEB2PY_URL)
    elif platform.system().lower() in ('windows', 'win32', 'win64') and os.path.exists("web2py.exe"):
        # a frozen Windows binary cannot self-upgrade
        return SPAN('You should upgrade to %s' % version.split('(')[0])
    else:
        return sp_button(URL('upgrade_web2py'), T('upgrade now to %s') % version.split('(')[0])
def logout():
    """ Logout handler """
    session.authorized = None
    # in multi-user mode also log out of the auth subsystem
    if MULTI_USER_MODE:
        redirect(URL('user/logout'))
    redirect(URL('index'))
def change_password():
    """Admin password change form (not available for PAM users)."""
    if session.pam_user:
        session.flash = T(
            'PAM authenticated user, cannot change password here')
        redirect(URL('site'))
    form = SQLFORM.factory(Field('current_admin_password', 'password'),
                           Field('new_admin_password',
                                 'password', requires=IS_STRONG()),
                           Field('new_admin_password_again', 'password'),
                           _class="span4 well")
    if form.accepts(request.vars):
        if not verify_password(request.vars.current_admin_password):
            form.errors.current_admin_password = T('invalid password')
        elif form.vars.new_admin_password != form.vars.new_admin_password_again:
            form.errors.new_admin_password_again = T('no match')
        else:
            # the admin password is stored as a CRYPT hash in
            # parameters_<port>.py
            path = abspath('parameters_%s.py' % request.env.server_port)
            safe_write(path, 'password="%s"' % CRYPT()(
                request.vars.new_admin_password)[0])
            session.flash = T('password changed')
            redirect(URL('site'))
    return dict(form=form)
def site():
    """ Site handler """
    # Lists installed applications and processes the create/upload forms.
    myversion = request.env.web2py_version
    # Shortcut to make the elif statements more legible
    file_or_appurl = 'file' in request.vars or 'appurl' in request.vars

    class IS_VALID_APPNAME(object):
        # validator: name must be \w+ and must not collide with an existing
        # app unless 'overwrite' was requested
        def __call__(self, value):
            if not re.compile('^\w+$').match(value):
                return (value, T('Invalid application name'))
            if not request.vars.overwrite and \
                    os.path.exists(os.path.join(apath(r=request), value)):
                return (value, T('Application exists already'))
            return (value, None)
    is_appname = IS_VALID_APPNAME()
    form_create = SQLFORM.factory(Field('name', requires=is_appname),
                                  table_name='appcreate')
    form_update = SQLFORM.factory(Field('name', requires=is_appname),
                                  Field('file', 'upload', uploadfield=False),
                                  Field('url'),
                                  Field('overwrite', 'boolean'),
                                  table_name='appupdate')
    form_create.process()
    form_update.process()
    if DEMO_MODE:
        pass
    elif form_create.accepted:
        # create a new application
        appname = cleanpath(form_create.vars.name)
        created, error = app_create(appname, request, info=True)
        if created:
            if MULTI_USER_MODE:
                db.app.insert(name=appname, owner=auth.user.id)
            log_progress(appname)
            session.flash = T('new application "%s" created', appname)
            gluon.rewrite.load()
            redirect(URL('design', args=appname))
        else:
            session.flash = \
                DIV(T('unable to create application "%s"', appname),
                    PRE(error))
        redirect(URL(r=request))
    elif form_update.accepted:
        # three install sources: git clone, URL download, file upload
        if (form_update.vars.url or '').endswith('.git'):
            if not have_git:
                session.flash = GIT_MISSING
                redirect(URL(r=request))
            target = os.path.join(apath(r=request), form_update.vars.name)
            try:
                new_repo = git.Repo.clone_from(form_update.vars.url, target)
                session.flash = T('new application "%s" imported',
                                  form_update.vars.name)
                gluon.rewrite.load()
            except git.GitCommandError as err:
                session.flash = T('Invalid git repository specified.')
            redirect(URL(r=request))
        elif form_update.vars.url:
            # fetch an application via URL or file upload
            try:
                f = urlopen(form_update.vars.url)
                if f.code == 404:
                    raise Exception("404 file not found")
            except Exception as e:
                session.flash = \
                    DIV(T('Unable to download app because:'), PRE(repr(e)))
                redirect(URL(r=request))
            fname = form_update.vars.url
        elif form_update.accepted and form_update.vars.file:
            fname = request.vars.file.filename
            f = request.vars.file.file
        else:
            session.flash = 'No file uploaded and no URL specified'
            redirect(URL(r=request))
        if f:
            appname = cleanpath(form_update.vars.name)
            installed = app_install(appname, f,
                                    request, fname,
                                    overwrite=form_update.vars.overwrite)
        if f and installed:
            msg = 'application %(appname)s installed with md5sum: %(digest)s'
            if MULTI_USER_MODE:
                db.app.insert(name=appname, owner=auth.user.id)
            log_progress(appname)
            session.flash = T(msg, dict(appname=appname,
                                        digest=md5_hash(installed)))
            gluon.rewrite.load()
        else:
            msg = 'unable to install application "%(appname)s"'
            session.flash = T(msg, dict(appname=form_update.vars.name))
        redirect(URL(r=request))
    regex = re.compile('^\w+$')
    if is_manager():
        apps = [a for a in os.listdir(apath(r=request)) if regex.match(a) and
                a != '__pycache__']
    else:
        # non-managers only see the apps they own
        apps = [a.name for a in db(db.app.owner == auth.user_id).select()]
    if FILTER_APPS:
        apps = [a for a in apps if a in FILTER_APPS]
    apps = sorted(apps, key=lambda a: a.upper())
    myplatform = platform.python_version()
    return dict(app=None, apps=apps, myversion=myversion, myplatform=myplatform,
                form_create=form_create, form_update=form_update)
def report_progress(app):
    """Parse progress.log into cumulative [days_ago, line_count] events."""
    import datetime
    progress_file = os.path.join(apath(app, r=request), 'progress.log')
    # entries look like "[YYYY-mm-dd HH:MM:SS] MODE file: delta"
    regex = re.compile(r'\[(.*?)\][^\:]+\:\s+(\-?\d+)')
    if not os.path.exists(progress_file):
        return []
    # close the handle deterministically (original leaked a bare open())
    with open(progress_file, 'r') as log:
        matches = regex.findall(log.read())
    events, counter = [], 0
    for m in matches:
        if not m:
            continue
        # days ago (negative of the elapsed days)
        days = -(request.now - datetime.datetime.strptime(m[0],
                                                          '%Y-%m-%d %H:%M:%S')).days
        counter += int(m[1])
        events.append([days, counter])
    return events
def pack():
    """Pack an application as .w2p (compiled when an extra arg is given)."""
    app = get_app()
    try:
        if len(request.args) == 1:
            fname = 'web2py.app.%s.w2p' % app
            filename = app_pack(app, request, raise_ex=True)
        else:
            # any additional arg requests the compiled variant
            fname = 'web2py.app.%s.compiled.w2p' % app
            filename = app_pack_compiled(app, request, raise_ex=True)
    except Exception as e:
        # keep the error for the flash message below (py3: `e` is unbound
        # after the except block)
        pferror = e
        filename = None
    if filename:
        response.headers['Content-Type'] = 'application/w2p'
        disposition = 'attachment; filename=%s' % fname
        response.headers['Content-Disposition'] = disposition
        return safe_read(filename, 'rb')
    else:
        session.flash = T('internal error: %s', pferror)
        redirect(URL('site'))
def pack_plugin():
    """Pack a single plugin of an app as .w2p and stream it."""
    app = get_app()
    if len(request.args) == 2:
        fname = 'web2py.plugin.%s.w2p' % request.args[1]
        filename = plugin_pack(app, request.args[1], request)
    # NOTE(review): fname/filename are unbound when len(request.args) != 2;
    # this relies on the route always supplying two args — verify callers
    if filename:
        response.headers['Content-Type'] = 'application/w2p'
        disposition = 'attachment; filename=%s' % fname
        response.headers['Content-Disposition'] = disposition
        return safe_read(filename, 'rb')
    else:
        session.flash = T('internal error')
        redirect(URL('plugin', args=request.args))
def pack_exe(app, base, filenames=None):
    """Bundle *app* into a ready-to-run web2py Windows zip and stream it.

    base: absolute path of the app folder; filenames: app-relative paths
    to include.  (Removed an unused `import urllib` and the unused
    `common_root` local.)
    """
    import zipfile
    # Download latest web2py_win and open it with zipfile
    download_url = 'http://www.web2py.com/examples/static/web2py_win.zip'
    out = StringIO()
    out.write(urlopen(download_url).read())
    web2py_win = zipfile.ZipFile(out, mode='a')
    # Write routes.py with the application as default
    routes = u'# -*- coding: utf-8 -*-\nrouters = dict(BASE=dict(default_application="%s"))' % app
    web2py_win.writestr('web2py/routes.py', routes.encode('utf-8'))
    # Copy the application into the zipfile
    for filename in filenames:
        fname = os.path.join(base, filename)
        arcname = os.path.join('web2py/applications', app, filename)
        web2py_win.write(fname, arcname)
    web2py_win.close()
    response.headers['Content-Type'] = 'application/zip'
    response.headers['Content-Disposition'] = 'attachment; filename=web2py.app.%s.zip' % app
    out.seek(0)
    return response.stream(out)
def pack_custom():
    """Pack user-selected files of an app as .w2p (or a Windows .exe bundle)."""
    app = get_app()
    base = apath(app, r=request)

    def ignore(fs):
        # skip hidden (#...), editor backup (~) and .bak entries
        return [f for f in fs if not (
            f[:1] in '#' or f.endswith('~') or f.endswith('.bak'))]
    files = {}
    for (r, d, f) in os.walk(base):
        files[r] = {'folders': ignore(d), 'files': ignore(f)}
    if request.post_vars.file:
        # only accept paths that actually exist under the app folder
        valid_set = set(os.path.relpath(os.path.join(r, f), base) for r in files for f in files[r]['files'])
        files = request.post_vars.file
        files = [files] if not isinstance(files, list) else files
        files = [file for file in files if file in valid_set]
        if request.post_vars.doexe is None:
            fname = 'web2py.app.%s.w2p' % app
            try:
                filename = app_pack(app, request, raise_ex=True, filenames=files)
            except Exception as e:
                # bind outside the handler: in Python 3 `e` is unbound once
                # the except block exits, so the original flash raised
                # NameError instead of reporting the failure
                pferror = e
                filename = None
            if filename:
                response.headers['Content-Type'] = 'application/w2p'
                disposition = 'attachment; filename=%s' % fname
                response.headers['Content-Disposition'] = disposition
                return safe_read(filename, 'rb')
            else:
                session.flash = T('internal error: %s', pferror)
                redirect(URL(args=request.args))
        else:
            return pack_exe(app, base, files)
    return locals()
def upgrade_web2py():
    """Upgrade the web2py framework itself after confirmation."""
    dialog = FORM.confirm(T('Upgrade'),
                          {T('Cancel'): URL('site')})
    if dialog.accepted:
        (success, error) = upgrade(request)
        if success:
            # the new code only takes effect after a restart
            session.flash = T('web2py upgraded; please restart it')
        else:
            session.flash = T('unable to upgrade because "%s"', error)
        redirect(URL('site'))
    return dict(dialog=dialog)
def uninstall():
    """Uninstall an application after confirmation (packs a backup first)."""
    app = get_app()
    dialog = FORM.confirm(T('Uninstall'),
                          {T('Cancel'): URL('site')})
    dialog['_id'] = 'confirm_form'
    dialog['_class'] = 'well'
    for component in dialog.components:
        component['_class'] = 'btn'
    if dialog.accepted:
        if MULTI_USER_MODE:
            # managers may remove any app; owners only their own
            if is_manager() and db(db.app.name == app).delete():
                pass
            elif db(db.app.name == app)(db.app.owner == auth.user.id).delete():
                pass
            else:
                session.flash = T('no permission to uninstall "%s"', app)
                redirect(URL('site'))
        try:
            # pack a backup copy before removing anything
            filename = app_pack(app, request, raise_ex=True)
        except Exception:
            # narrowed from a bare except: don't swallow SystemExit et al.
            session.flash = T('unable to uninstall "%s"', app)
        else:
            if app_uninstall(app, request):
                session.flash = T('application "%s" uninstalled', app)
            else:
                session.flash = T('unable to uninstall "%s"', app)
        redirect(URL('site'))
    return dict(app=app, dialog=dialog)
def cleanup():
    """Remove an app's cache, error tickets and sessions."""
    app = get_app()
    clean = app_cleanup(app, request)
    if not clean:
        session.flash = T("some files could not be removed")
    else:
        session.flash = T('cache, errors and sessions cleaned')
    redirect(URL('site'))
def compile_app():
    """Byte-compile an application, optionally skipping broken views."""
    app = get_app()
    c = app_compile(app, request,
                    skip_failed_views=(request.args(1) == 'skip_failed_views'))
    # c is falsy on success, a list of skipped views, or an error string
    if not c:
        session.flash = T('application compiled')
    elif isinstance(c, list):
        session.flash = DIV(*[T('application compiled'), BR(), BR(),
                            T('WARNING: The following views could not be compiled:'), BR()] +
                            [CAT(BR(), view) for view in c] +
                            [BR(), BR(), T('DO NOT use the "Pack compiled" feature.')])
    else:
        session.flash = DIV(T('Cannot compile: there are errors in your app:'),
                            CODE(c))
    redirect(URL('site'))
def remove_compiled_app():
    """ Remove the compiled application """
    app = get_app()
    remove_compiled_application(apath(app, r=request))
    session.flash = T('compiled application removed')
    redirect(URL('site'))
def delete():
    """ Object delete handler """
    app = get_app()
    filename = '/'.join(request.args)
    sender = request.vars.sender
    if isinstance(sender, list):  # ## fix a problem with Vista
        sender = sender[0]
    dialog = FORM.confirm(T('Delete'),
                          {T('Cancel'): URL(sender, anchor=request.vars.id)})
    if dialog.accepted:
        try:
            full_path = apath(filename, r=request)
            # read via a context manager so the handle is not leaked
            with open(full_path, 'r') as source:
                lineno = count_lines(source.read())
            os.unlink(full_path)
            # negative progress: lines removed from the app
            log_progress(app, 'DELETE', filename, progress=-lineno)
            session.flash = T('file "%(filename)s" deleted',
                              dict(filename=filename))
        except Exception:
            session.flash = T('unable to delete file "%(filename)s"',
                              dict(filename=filename))
        redirect(URL(sender, anchor=request.vars.id2))
    return dict(dialog=dialog, filename=filename)
def enable():
    """Toggle an application's DISABLED marker file (enable/disable app)."""
    # signed-URL check prevents CSRF on this state-changing GET
    if not URL.verify(request, hmac_key=session.hmac_key): raise HTTP(401)
    app = get_app()
    filename = os.path.join(apath(app, r=request), 'DISABLED')
    if is_gae:
        return SPAN(T('Not supported'), _style='color:yellow')
    elif os.path.exists(filename):
        # marker exists -> app was disabled; remove it to re-enable
        os.unlink(filename)
        return SPAN(T('Disable'), _style='color:green')
    else:
        if PY2:
            safe_open(filename, 'wb').write('disabled: True\ntime-disabled: %s' % request.now)
        else:
            # py3: binary mode needs encoded bytes
            str_ = 'disabled: True\ntime-disabled: %s' % request.now
            safe_open(filename, 'wb').write(str_.encode('utf-8'))
        return SPAN(T('Enable'), _style='color:red')
def peek():
    """ Visualize object code """
    app = get_app(request.vars.app)
    filename = '/'.join(request.args)
    if request.vars.app:
        # absolute path mode (app passed explicitly)
        path = abspath(filename)
    else:
        path = apath(filename, r=request)
    try:
        data = safe_read(path).replace('\r', '')
    except IOError:
        session.flash = T('file does not exist')
        redirect(URL('site'))
    # the extension drives syntax highlighting in the view
    extension = filename[filename.rfind('.') + 1:].lower()
    return dict(app=app,
                filename=filename,
                data=data,
                extension=extension)
def test():
    """ Execute controller tests """
    app = get_app()
    if len(request.args) > 1:
        file = request.args[1]
    else:
        # default: all python controllers (raw string: regex escape)
        file = r'.*\.py'
    controllers = listdir(
        apath('%s/controllers/' % app, r=request), file + '$')
    return dict(app=app, controllers=controllers)
def keepalive():
    """Session keep-alive ping; responds with an empty body."""
    return ''
def search():
    """Ajax substring search across an app's .py and .html sources."""
    keywords = request.vars.keywords or ''
    app = get_app()

    def match(filename, keywords):
        # plain substring match against the file's full contents
        filename = os.path.join(apath(app, r=request), filename)
        if keywords in read_file(filename, 'r'):
            return True
        return False
    path = apath(request.args[0], r=request)
    files1 = glob(os.path.join(path, '*/*.py'))
    files2 = glob(os.path.join(path, '*/*.html'))
    files3 = glob(os.path.join(path, '*/*/*.html'))
    # strip the app prefix and normalize separators for the client
    files = [x[len(path) + 1:].replace(
        '\\', '/') for x in files1 + files2 + files3 if match(x, keywords)]
    return response.json(dict(files=files, message=T.M('Searching: **%s** %%{file}', len(files))))
def edit():
    """File edit handler.

    Serves the in-browser source editor.  Handles, in order: the editor
    preferences ("settings") tab, full-page scaffolding for non-ajax hits,
    revert-from-backup, save with hash-based conflict detection (redirecting
    to resolve() on conflict), python syntax checking, optional module
    reloading, and finally renders html (desktop/mobile) or json (ajax).
    """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    app_path = apath(app, r=request)
    # Built-in defaults, overridden by the [editor] section of settings.cfg.
    preferences = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true', 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false', 'linenumbers': 'true', 'highlightline': 'true'}
    config = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor', default_values={})
    preferences.update(config.read())
    if not(request.ajax) and not(is_mobile):
        # return the scaffolding, the rest will be through ajax requests
        response.title = T('Editing %s') % app
        return response.render('default/edit.html', dict(app=app, editor_settings=preferences))
    # show settings tab and save prefernces
    if 'settings' in request.vars:
        if request.post_vars: # save new preferences
            if PY2:
                post_vars = request.post_vars.items()
            else:
                post_vars = list(request.post_vars.items())
            # Since unchecked checkbox are not serialized, we must set them as false by hand to store the correct preference in the settings
            post_vars += [(opt, 'false') for opt in preferences if opt not in request.post_vars]
            if config.save(post_vars):
                response.headers["web2py-component-flash"] = T('Preferences saved correctly')
            else:
                response.headers["web2py-component-flash"] = T('Preferences saved on session only')
            response.headers["web2py-component-command"] = "update_editor(%s);$('a[href=#editor_settings] button.close').click();" % response.json(config.read())
            return
        else:
            details = {'realfilename': 'settings', 'filename': 'settings', 'id': 'editor_settings', 'force': False}
            details['plain_html'] = response.render('default/editor_settings.html', {'editor_settings': preferences})
            return response.json(details)
    """ File edit handler """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    filename = '/'.join(request.args)
    realfilename = request.args[-1]
    if request.vars.app:
        path = abspath(filename)
    else:
        path = apath(filename, r=request)
    # Try to discover the file type
    if filename[-3:] == '.py':
        filetype = 'python'
    elif filename[-5:] == '.html':
        filetype = 'html'
    elif filename[-5:] == '.load':
        filetype = 'html'
    elif filename[-4:] == '.css':
        filetype = 'css'
    elif filename[-3:] == '.js':
        filetype = 'javascript'
    else:
        filetype = 'html'
    # ## check if file is not there
    if ('revert' in request.vars) and os.path.exists(path + '.bak'):
        # Revert: swap the file with its .bak copy (the current content is
        # preserved as the new backup).
        try:
            data = safe_read(path + '.bak')
            data1 = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        safe_write(path, data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
        safe_write(path + '.bak', data1)
        response.flash = T('file "%s" of %s restored', (filename, saved_on))
    else:
        try:
            data = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        lineno_old = count_lines(data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
        if request.vars.file_hash and request.vars.file_hash != file_hash:
            # Conflict: the file changed on disk since the editor loaded it.
            # Stash the posted content as <file>.1 and send to resolve().
            session.flash = T('file changed on disk')
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path + '.1', data)
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('file changed on disk')),
                                      'redirect': URL('resolve',
                                                      args=request.args)})
            else:
                redirect(URL('resolve', args=request.args))
        elif request.vars.data:
            # Normal save: back up the old content, then write the new one.
            safe_write(path + '.bak', data)
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path, data)
            lineno_new = count_lines(data)
            log_progress(
                app, 'EDIT', filename, progress=lineno_new - lineno_old)
            file_hash = md5_hash(data)
            saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
            response.flash = T('file saved on %s', saved_on)
    data_or_revert = (request.vars.data or request.vars.revert)
    # Check compile errors
    highlight = None
    if filetype == 'python' and request.vars.data:
        import _ast
        try:
            # Parse only (PyCF_ONLY_AST): syntax check without executing.
            code = request.vars.data.rstrip().replace('\r\n', '\n') + '\n'
            compile(code, path, "exec", _ast.PyCF_ONLY_AST)
        except Exception as e:
            # offset calculation is only used for textarea (start/stop)
            start = sum([len(line) + 1 for l, line
                         in enumerate(request.vars.data.split("\n"))
                         if l < e.lineno - 1])
            if e.text and e.offset:
                offset = e.offset - (len(e.text) - len(
                    e.text.splitlines()[-1]))
            else:
                offset = 0
            highlight = {'start': start, 'end': start +
                         offset + 1, 'lineno': e.lineno, 'offset': offset}
            try:
                ex_name = e.__class__.__name__
            except:
                ex_name = 'unknown exception!'
            response.flash = DIV(T('failed to compile file because:'), BR(),
                                 B(ex_name), ' ' + T('at line %s', e.lineno),
                                 offset and ' ' +
                                 T('at char %s', offset) or '',
                                 PRE(repr(e)))
    if data_or_revert and request.args[1] == 'modules':
        # Lets try to reload the modules
        try:
            mopath = '.'.join(request.args[2:])[:-3]
            exec('import applications.%s.modules.%s' % (
                request.args[0], mopath))
            reload(sys.modules['applications.%s.modules.%s'
                               % (request.args[0], mopath)])
        except Exception as e:
            response.flash = DIV(
                T('failed to reload module because:'), PRE(repr(e)))
    edit_controller = None
    editviewlinks = None
    view_link = None
    if filetype == 'html' and len(request.args) >= 3:
        # Editing a view: link back to its controller and live page.
        cfilename = os.path.join(request.args[0], 'controllers',
                                 request.args[2] + '.py')
        if os.path.exists(apath(cfilename, r=request)):
            edit_controller = URL('edit', args=[cfilename.replace(os.sep, "/")])
            view = request.args[3].replace('.html', '')
            view_link = URL(request.args[0], request.args[2], view)
    elif filetype == 'python' and request.args[1] == 'controllers':
        # it's a controller file.
        # Create links to all of the associated view files.
        app = get_app()
        viewname = os.path.splitext(request.args[2])[0]
        viewpath = os.path.join(app, 'views', viewname)
        aviewpath = apath(viewpath, r=request)
        viewlist = []
        if os.path.exists(aviewpath):
            if os.path.isdir(aviewpath):
                viewlist = glob(os.path.join(aviewpath, '*.html'))
        elif os.path.exists(aviewpath + '.html'):
            viewlist.append(aviewpath + '.html')
        if len(viewlist):
            editviewlinks = []
            for v in sorted(viewlist):
                vf = os.path.split(v)[-1]
                vargs = "/".join([viewpath.replace(os.sep, "/"), vf])
                editviewlinks.append(A(vf.split(".")[0],
                                       _class="editor_filelink",
                                       _href=URL('edit', args=[vargs])))
    if len(request.args) > 2 and request.args[1] == 'controllers':
        # Expose the controller name and its public functions to the view.
        controller = (request.args[2])[:-3]
        try:
            functions = find_exposed_functions(data)
            functions = functions and sorted(functions) or []
        except SyntaxError as err:
            functions = ['SyntaxError:Line:%d' % err.lineno]
    else:
        (controller, functions) = (None, None)
    if 'from_ajax' in request.vars:
        return response.json({'file_hash': file_hash, 'saved_on': saved_on, 'functions': functions, 'controller': controller, 'application': request.args[0], 'highlight': highlight})
    else:
        file_details = dict(app=request.args[0],
                            lineno=request.vars.lineno or 1,
                            editor_settings=preferences,
                            filename=filename,
                            realfilename=realfilename,
                            filetype=filetype,
                            data=data,
                            edit_controller=edit_controller,
                            file_hash=file_hash,
                            saved_on=saved_on,
                            controller=controller,
                            functions=functions,
                            view_link=view_link,
                            editviewlinks=editviewlinks,
                            id=IS_SLUG()(filename)[0],
                            force=True if (request.vars.restore or
                                           request.vars.revert) else False)
        plain_html = response.render('default/edit_js.html', file_details)
        file_details['plain_html'] = plain_html
        if is_mobile:
            return response.render('default.mobile/edit.html',
                                   file_details, editor_settings=preferences)
        else:
            return response.json(file_details)
def todolist():
    """ Returns all TODO of the requested app

    Scans models/controllers/modules/private for '# TODO ...' comments and
    returns {'todo': [{'filename', 'matches', 'dir'}, ...], 'app': app},
    where each match carries the comment text and its 1-based line number.
    """
    app = request.vars.app or ''
    app_path = apath('%(app)s' % {'app': app}, r=request)
    dirs = ['models', 'controllers', 'modules', 'private']
    def listfiles(app, dir, regexp='.*\.py$'):
        # Slash-normalized, sorted python files of app/dir, skipping backups.
        files = sorted(listdir(apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request), regexp))
        files = [x.replace(os.path.sep, '/') for x in files if not x.endswith('.bak')]
        return files
    pattern = '#\s*(todo)+\s+(.*)'
    regex = re.compile(pattern, re.IGNORECASE)
    output = []
    for d in dirs:
        for f in listfiles(app, d):
            matches = []
            filename = apath(os.path.join(app, d, f), r=request)
            with safe_open(filename, 'r') as f_s:
                src = f_s.read()
                for m in regex.finditer(src):
                    start = m.start()
                    # Line number = newlines before the match start, plus one.
                    lineno = src.count('\n', 0, start) + 1
                    matches.append({'text': m.group(0), 'lineno': lineno})
            if len(matches) != 0:
                output.append({'filename': f, 'matches': matches, 'dir': d})
    return {'todo': output, 'app': app}
def editor_sessions():
    """List saved editor sessions; store a new one when name+files posted."""
    cfg = Config(os.path.join(request.folder, 'settings.cfg'),
                 section='editor_sessions', default_values={})
    prefs = cfg.read()
    name, files = request.vars.session_name, request.vars.files
    if name and files:
        # A session is stored as a comma-joined list of file names.
        prefs[name] = ','.join(files)
        flash = (T('Session saved correctly') if cfg.save(prefs.items())
                 else T('Session saved on session only'))
        response.headers["web2py-component-flash"] = flash
    return response.render('default/editor_sessions.html', {'editor_sessions': prefs})
def resolve():
    """Merge UI for a file and its conflicting '.1' copy.

    Renders an ndiff of <file> vs <file>.1 with a checkbox on every +/- line;
    on submit, keeps all common lines plus the checked ones, writes the
    result back to <file>, and returns to the editor.
    """
    filename = '/'.join(request.args)
    # ## check if file is not there
    path = apath(filename, r=request)
    a = safe_read(path).split('\n')
    try:
        b = safe_read(path + '.1').split('\n')
    except IOError:
        session.flash = 'Other file, no longer there'
        redirect(URL('edit', args=request.args))
    d = difflib.ndiff(a, b)
    def leading(line):
        """Render the leading blanks of a diff line (skip a leading '?')."""
        # TODO: we really need to comment this
        z = ''
        for (k, c) in enumerate(line):
            if c == ' ':
                z += ' '
            elif c == ' \t':
                # NOTE(review): c is a single character, so it can never equal
                # the two-char string ' \t'; this branch looks unreachable
                # (perhaps '\t' was intended) — confirm upstream.
                z += ' '
            elif k == 0 and c == '?':
                pass
            else:
                break
        return XML(z)
    def getclass(item):
        """ Determine item class """
        # ndiff prefixes: ' ' common, '+' only in .1, '-' only in original.
        operators = {' ': 'normal', '+': 'plus', '-': 'minus'}
        return operators[item[0]]
    if request.vars:
        # Keep common lines and any +/- line whose checkbox 'line<i>' was sent.
        c = '\n'.join([item[2:].rstrip() for (i, item) in enumerate(d) if item[0]
                       == ' ' or 'line%i' % i in request.vars])
        safe_write(path, c)
        session.flash = 'files merged'
        redirect(URL('edit', args=request.args))
    else:
        # Making the short circuit compatible with <= python2.4
        gen_data = lambda index, item: not item[:1] in ['+', '-'] and "" \
            or INPUT(_type='checkbox',
                     _name='line%i' % index,
                     value=item[0] == '+')
        diff = TABLE(*[TR(TD(gen_data(i, item)),
                          TD(item[0]),
                          TD(leading(item[2:]),
                             TT(item[2:].rstrip())),
                          _class=getclass(item))
                       for (i, item) in enumerate(d) if item[0] != '?'])
    return dict(diff=diff, filename=filename)
def edit_language():
    """ Edit language file

    Renders one input/textarea per translation key and, on submit, writes
    the updated dictionary back.  Keys whose value is chr(127) are dropped
    (that sentinel marks a key deleted via the UI's delete button).
    """
    app = get_app()
    filename = '/'.join(request.args)
    response.title = request.args[-1]
    strings = read_dict(apath(filename, r=request))
    if '__corrupted__' in strings:
        # Unparsable language file: show the error instead of a form.
        form = SPAN(strings['__corrupted__'], _class='error')
        return dict(filename=filename, form=form)
    keys = sorted(strings.keys(), key=lambda x: to_native(x).lower())
    rows = []
    rows.append(H2(T('Original/Translation')))
    for key in keys:
        name = md5_hash(key)
        s = strings[key]
        # Keys may carry a '<prefix>\x01<key>' namespace marker.
        (prefix, sep, key) = key.partition('\x01')
        if sep:
            prefix = SPAN(prefix + ': ', _class='tm_ftag')
            k = key
        else:
            (k, prefix) = (prefix, '')
        _class = 'untranslated' if k == s else 'translated'
        # Short strings get a one-line input, long ones a textarea.
        if len(s) <= 40:
            elem = INPUT(_type='text', _name=name, value=s,
                         _size=70, _class=_class)
        else:
            elem = TEXTAREA(_name=name, value=s, _cols=70,
                            _rows=5, _class=_class)
        # Making the short circuit compatible with <= python2.4
        k = (s != k) and k or B(k)
        new_row = DIV(LABEL(prefix, k, _style="font-weight:normal;"),
                      CAT(elem, '\n', TAG.BUTTON(
                          T('delete'),
                          _onclick='return delkey("%s")' % name,
                          _class='btn')), _id=name, _class='span6 well well-small')
        rows.append(DIV(new_row, _class="row-fluid"))
    rows.append(DIV(INPUT(_type='submit', _value=T('update'), _class="btn btn-primary"), _class='controls'))
    form = FORM(*rows)
    if form.accepts(request.vars, keepvalues=True):
        strs = dict()
        for key in keys:
            name = md5_hash(key)
            if form.vars[name] == chr(127):
                continue
            strs[key] = form.vars[name]
        write_dict(apath(filename, r=request), strs)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(r=request, args=request.args))
    return dict(app=request.args[0], filename=filename, form=form)
def edit_plurals():
    """ Edit plurals file

    Renders one input per plural form for every singular key and writes the
    updated dictionary back on submit.  Entries whose first form is chr(127)
    are dropped (that sentinel marks a key deleted via the UI).
    """
    app = get_app()
    filename = '/'.join(request.args)
    plurals = read_plural_dict(
        apath(filename, r=request)) # plural forms dictionary
    nplurals = int(request.vars.nplurals) - 1 # plural forms quantity
    xnplurals = xrange(nplurals)
    if '__corrupted__' in plurals:
        # show error message and exit
        form = SPAN(plurals['__corrupted__'], _class='error')
        return dict(filename=filename, form=form)
    # Case-insensitive sort.  The previous sorted(keys, lambda x, y: cmp(...))
    # form passed a cmp function positionally, which is Python-2-only (raises
    # TypeError on Python 3); use a key function as edit_language() does.
    keys = sorted(plurals.keys(), key=lambda x: to_native(x).lower())
    tab_rows = []
    for key in keys:
        name = md5_hash(key)
        forms = plurals[key]
        if len(forms) < nplurals:
            # Pad so that every key renders exactly nplurals inputs.
            forms.extend(None for i in xrange(nplurals - len(forms)))
        tab_col1 = DIV(CAT(LABEL(T("Singular Form")), B(key,
                       _class='fake-input')))
        tab_inputs = [SPAN(LABEL(T("Plural Form #%s", n + 1)), INPUT(_type='text', _name=name + '_' + str(n), value=forms[n], _size=20), _class='span6') for n in xnplurals]
        tab_col2 = DIV(CAT(*tab_inputs))
        tab_col3 = DIV(CAT(LABEL(XML(' ')), TAG.BUTTON(T('delete'), _onclick='return delkey("%s")' % name, _class='btn'), _class='span6'))
        tab_row = DIV(DIV(tab_col1, '\n', tab_col2, '\n', tab_col3, _class='well well-small'), _id=name, _class='row-fluid tab_row')
        tab_rows.append(tab_row)
    tab_rows.append(DIV(TAG['button'](T('update'), _type='submit',
                                      _class='btn btn-primary'),
                        _class='controls'))
    tab_container = DIV(*tab_rows, **dict(_class="row-fluid"))
    form = FORM(tab_container)
    if form.accepts(request.vars, keepvalues=True):
        new_plurals = dict()
        for key in keys:
            name = md5_hash(key)
            if form.vars[name + '_0'] == chr(127):
                continue
            new_plurals[key] = [form.vars[name + '_' + str(n)]
                                for n in xnplurals]
        write_plural_dict(apath(filename, r=request), new_plurals)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(r=request, args=request.args, vars=dict(
            nplurals=request.vars.nplurals)))
    return dict(app=request.args[0], filename=filename, form=form)
def about():
    """Render the app's ABOUT and LICENSE files as MARKMIN, plus progress."""
    app = get_app()
    # Both files are expected at the application root.
    about_text = safe_read(apath('%s/ABOUT' % app, r=request))
    license_text = safe_read(apath('%s/LICENSE' % app, r=request))
    return dict(app=app,
                about=MARKMIN(about_text),
                license=MARKMIN(license_text),
                progress=report_progress(app))
def design():
    """ Application design handler

    Main per-app overview page: optionally installs an uploaded plugin, then
    collects models (with defined tables), controllers (with exposed
    functions), views (with extend/include info), modules, private and
    static files, languages and the crontab, filtering plugin files into a
    separate list for the view.
    """
    app = get_app()
    if not response.flash and app == request.application:
        msg = T('ATTENTION: you cannot edit the running application!')
        response.flash = msg
    # CSRF-style check: any posted vars must carry the session token.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    # A non-str pluginfile is an uploaded file object; a str means the
    # form was submitted with no file selected.
    if request.vars.pluginfile is not None and not isinstance(request.vars.pluginfile, str):
        filename = os.path.basename(request.vars.pluginfile.filename)
        if plugin_install(app, request.vars.pluginfile.file,
                          request, filename):
            session.flash = T('new plugin installed')
            redirect(URL('design', args=app))
        else:
            session.flash = \
                T('unable to install plugin "%s"', filename)
            redirect(URL(r=request, args=app))
    elif isinstance(request.vars.pluginfile, str):
        session.flash = T('plugin not specified')
        redirect(URL(r=request, args=app))
    # If we have only pyc files it means that
    # we cannot design
    if os.path.exists(apath('%s/compiled' % app, r=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # Get all models
    models = listdir(apath('%s/models/' % app, r=request), '.*\.py$')
    models = [x.replace('\\', '/') for x in models]
    defines = {}
    for m in models:
        data = safe_read(apath('%s/models/%s' % (app, m), r=request))
        defines[m] = re.findall(REGEX_DEFINE_TABLE, data, re.MULTILINE)
        defines[m].sort()
    # Get all controllers
    controllers = sorted(
        listdir(apath('%s/controllers/' % app, r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        try:
            items = find_exposed_functions(data)
            functions[c] = items and sorted(items) or []
        except SyntaxError as err:
            functions[c] = ['SyntaxError:Line:%d' % err.lineno]
    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+(\.\w+)+$'))
    views = [x.replace('\\', '/') for x in views if not x.endswith('.bak')]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = re.findall(REGEX_EXTEND, data, re.MULTILINE)
        if items:
            extend[c] = items[0][1]
        items = re.findall(REGEX_INCLUDE, data)
        include[c] = [i[1] for i in items]
    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    modules = modules = [x.replace('\\', '/') for x in modules]
    modules.sort()
    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()
    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()
    # Get all languages
    langpath = os.path.join(apath(app, r=request), 'languages')
    languages = dict([(lang, info) for lang, info
                      in iteritems(read_possible_languages(langpath))
                      if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files
    # Get crontab
    cronfolder = apath('%s/cron' % app, r=request)
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not is_gae:
        # Make sure the cron folder and a stub crontab exist (not on GAE:
        # read-only filesystem).
        if not os.path.exists(cronfolder):
            os.mkdir(cronfolder)
        if not os.path.exists(crontab):
            safe_write(crontab, '#crontab')
    plugins = []
    def filter_plugins(items, plugins):
        # Split 'plugin_*' entries out of items, accumulating plugin names.
        plugins += [item[7:].split('/')[0].split(
            '.')[0] for item in items if item.startswith('plugin_')]
        plugins[:] = list(set(plugins))
        plugins.sort()
        return [item for item in items if not item.startswith('plugin_')]
    return dict(app=app,
                models=filter_plugins(models, plugins),
                defines=defines,
                controllers=filter_plugins(controllers, plugins),
                functions=functions,
                views=filter_plugins(views, plugins),
                modules=filter_plugins(modules, plugins),
                extend=extend,
                include=include,
                privates=filter_plugins(privates, plugins),
                statics=filter_plugins(statics, plugins),
                languages=languages,
                crontab=crontab,
                plugins=plugins)
def delete_plugin():
    """ Object delete handler

    After a confirmation dialog, removes every 'plugin_<name>' file or
    directory from the standard application folders, then redirects back
    to the design page.
    """
    app = request.args(0)
    plugin = request.args(1)
    plugin_name = 'plugin_' + plugin
    dialog = FORM.confirm(
        T('Delete'),
        {T('Cancel'): URL('design', args=app)})
    if dialog.accepted:
        try:
            for folder in ['models', 'views', 'controllers', 'static', 'modules', 'private']:
                path = os.path.join(apath(app, r=request), folder)
                for item in os.listdir(path):
                    # Match both 'plugin_x.py' style files and 'plugin_x' dirs.
                    if item.rsplit('.', 1)[0] == plugin_name:
                        filename = os.path.join(path, item)
                        if os.path.isdir(filename):
                            shutil.rmtree(filename)
                        else:
                            os.unlink(filename)
            session.flash = T('plugin "%(plugin)s" deleted',
                              dict(plugin=plugin))
        except Exception:
            session.flash = T('unable to delete file plugin "%(plugin)s"',
                              dict(plugin=plugin))
        redirect(URL('design', args=request.args(0), anchor=request.vars.id2))
    return dict(dialog=dialog, plugin=plugin)
def plugin():
    """ Application design handler

    Like design(), but scoped to a single plugin: collects the same file
    inventories and then keeps only the entries belonging to
    'plugin_<name>' via filter_plugins().
    """
    app = get_app()
    plugin = request.args(1)
    if not response.flash and app == request.application:
        msg = T('ATTENTION: you cannot edit the running application!')
        response.flash = msg
    # If we have only pyc files it means that
    # we cannot design
    if os.path.exists(apath('%s/compiled' % app, r=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # Get all models
    models = listdir(apath('%s/models/' % app, r=request), '.*\.py$')
    models = [x.replace('\\', '/') for x in models]
    defines = {}
    for m in models:
        data = safe_read(apath('%s/models/%s' % (app, m), r=request))
        defines[m] = re.findall(REGEX_DEFINE_TABLE, data, re.MULTILINE)
        defines[m].sort()
    # Get all controllers
    controllers = sorted(
        listdir(apath('%s/controllers/' % app, r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        try:
            items = find_exposed_functions(data)
            functions[c] = items and sorted(items) or []
        except SyntaxError as err:
            functions[c] = ['SyntaxError:Line:%d' % err.lineno]
    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+\.\w+$'))
    views = [x.replace('\\', '/') for x in views]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = re.findall(REGEX_EXTEND, data, re.MULTILINE)
        if items:
            extend[c] = items[0][1]
        items = re.findall(REGEX_INCLUDE, data)
        include[c] = [i[1] for i in items]
    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    modules = modules = [x.replace('\\', '/') for x in modules]
    modules.sort()
    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()
    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()
    # Get all languages
    languages = sorted([lang + '.py' for lang, info in
                        iteritems(T.get_possible_languages_info())
                        if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files
    # Get crontab
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not os.path.exists(crontab):
        safe_write(crontab, '#crontab')
    def filter_plugins(items):
        # Keep only entries that belong to this plugin (file or directory).
        regex = re.compile('^plugin_' + plugin + '(/.*|\..*)?$')
        return [item for item in items if item and regex.match(item)]
    return dict(app=app,
                models=filter_plugins(models),
                defines=defines,
                controllers=filter_plugins(controllers),
                functions=functions,
                views=filter_plugins(views),
                modules=filter_plugins(modules),
                extend=extend,
                include=include,
                privates=filter_plugins(privates),
                statics=filter_plugins(statics),
                languages=languages,
                crontab=crontab)
def create_file():
    """ Create files handler

    Creates a new file under the requested app folder, choosing a template
    and normalized extension based on the target directory (rules, languages,
    models, controllers, views, modules, static/private).  Refuses to
    overwrite an existing file (via SyntaxError caught below) and redirects
    or returns ajax commands on success.
    """
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        anchor = '#' + request.vars.id if request.vars.id else ''
        if request.vars.app:
            app = get_app(request.vars.app)
            path = abspath(request.vars.location)
        else:
            if request.vars.dir:
                request.vars.location += request.vars.dir + '/'
            app = get_app(name=request.vars.location.split('/')[0])
            path = apath(request.vars.location, r=request)
        # Sanitize: anything outside [\w./-] becomes '_'.
        filename = re.sub('[^\w./-]+', '_', request.vars.filename)
        if path[-7:] == '/rules/':
            # Handle plural rules files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            lang = re.match('^plural_rules-(.*)\.py$', filename).group(1)
            langinfo = read_possible_languages(apath(app, r=request))[lang]
            text = dedent("""
                   #!/usr/bin/env python
                   # -*- coding: utf-8 -*-
                   # Plural-Forms for %(lang)s (%(langname)s)
                   nplurals=2  # for example, English language has 2 forms:
                               # 1 singular and 1 plural
                   # Determine plural_id for number *n* as sequence of positive
                   # integers: 0,1,...
                   # NOTE! For singular form ALWAYS return plural_id = 0
                   get_plural_id = lambda n: int(n != 1)
                   # Construct and return plural form of *word* using
                   # *plural_id* (which ALWAYS>0). This function will be executed
                   # for words (or phrases) not found in plural_dict dictionary.
                   # By default this function simply returns word in singular:
                   construct_plural_form = lambda word, plural_id: word
                   """)[1:] % dict(lang=langinfo[0], langname=langinfo[1])
        elif path[-11:] == '/languages/':
            # Handle language files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            path = os.path.join(apath(app, r=request), 'languages', filename)
            if not os.path.exists(path):
                safe_write(path, '')
            # create language xx[-yy].py file:
            findT(apath(app, r=request), filename[:-3])
            session.flash = T('language file "%(filename)s" created/updated',
                              dict(filename=filename))
            redirect(request.vars.sender + anchor)
        elif path[-8:] == '/models/':
            # Handle python models
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n'
        elif path[-13:] == '/controllers/':
            # Handle python controllers
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n# %s\ndef index(): return dict(message="hello from %s")'
            text = text % (T('try something like'), filename)
        elif path[-7:] == '/views/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle template (html) views
            if filename.find('.') < 0:
                filename += '.html'
            extension = filename.split('.')[-1].lower()
            if len(filename) == 5:
                raise SyntaxError
            msg = T(
                'This is the %(filename)s template', dict(filename=filename))
            if extension == 'html':
                text = dedent("""
                   {{extend 'layout.html'}}
                   <h1>%s</h1>
                   {{=BEAUTIFY(response._vars)}}""" % msg)[1:]
            else:
                generic = os.path.join(path, 'generic.' + extension)
                if os.path.exists(generic):
                    text = read_file(generic)
                else:
                    text = ''
        elif path[-9:] == '/modules/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle python module files
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = dedent("""
                   #!/usr/bin/env python
                   # -*- coding: utf-8 -*-
                   from gluon import *\n""")[1:]
        elif (path[-8:] == '/static/') or (path[-9:] == '/private/'):
            if (request.vars.plugin and
                    not filename.startswith('plugin_%s/' % request.vars.plugin)):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            text = ''
        else:
            redirect(request.vars.sender + anchor)
        full_filename = os.path.join(path, filename)
        dirpath = os.path.dirname(full_filename)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        if os.path.exists(full_filename):
            # Never overwrite an existing file; handled as 'cannot create'.
            raise SyntaxError
        safe_write(full_filename, text)
        log_progress(app, 'CREATE', filename)
        if request.vars.dir:
            result = T('file "%(filename)s" created',
                       dict(filename=full_filename[len(path):]))
        else:
            session.flash = T('file "%(filename)s" created',
                              dict(filename=full_filename[len(path):]))
        vars = {}
        if request.vars.id:
            vars['id'] = request.vars.id
        if request.vars.app:
            vars['app'] = request.vars.app
        redirect(URL('edit',
                     args=[os.path.join(request.vars.location, filename)], vars=vars))
    except Exception as e:
        if not isinstance(e, HTTP):
            session.flash = T('cannot create file')
    if request.vars.dir:
        # Ajax flow: refresh the file menu and open the new file in place.
        response.flash = result
        response.headers['web2py-component-content'] = 'append'
        response.headers['web2py-component-command'] = "%s %s %s" % (
            "$.web2py.invalidate('#files_menu');",
            "load_file('%s');" % URL('edit', args=[app, request.vars.dir, filename]),
            "$.web2py.enableElement($('#form form').find($.web2py.formInputClickSelector));")
        return ''
    else:
        redirect(request.vars.sender + anchor)
def listfiles(app, dir, regexp='.*\.py$'):
    """List files of app/dir matching regexp: sorted, slash-normalized,
    with '.bak' backups excluded."""
    folder = apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request)
    return [f.replace('\\', '/')
            for f in sorted(listdir(folder, regexp))
            if not f.endswith('.bak')]
def editfile(path, file, vars=None, app=None):
    """Build an editor link (A helper) for *file* under *path*.

    vars: optional URL query vars; when it contains 'app', the application
    prefix is assumed to already be part of *path*, otherwise *app* is
    prepended to the URL args.
    """
    # 'vars={}' was a shared mutable default argument; use a None sentinel.
    vars = {} if vars is None else vars
    args = (path, file) if 'app' in vars else (app, path, file)
    url = URL('edit', args=args, vars=vars)
    return A(file, _class='editor_filelink', _href=url, _style='word-wrap: nowrap;')
def files_menu():
    """Build the collapsible per-directory file menu for the editor sidebar.

    One section per standard folder, each a header LI plus a hidden UL of
    edit links; element ids replace '.' with '__' to stay selector-safe.
    """
    app = request.vars.app or 'welcome'
    dirs = [{'name': 'models', 'reg': '.*\.py$'},
            {'name': 'controllers', 'reg': '.*\.py$'},
            {'name': 'views', 'reg': '[\w/\-]+(\.\w+)+$'},
            {'name': 'modules', 'reg': '.*\.py$'},
            {'name': 'static', 'reg': '[^\.#].*'},
            {'name': 'private', 'reg': '.*\.py$'}]
    result_files = []
    for dir in dirs:
        result_files.append(TAG[''](LI(dir['name'], _class="nav-header component", _onclick="collapse('" + dir['name'] + "_files');"),
                                    LI(UL(*[LI(editfile(dir['name'], f, dict(id=dir['name'] + f.replace('.', '__')), app), _style="overflow:hidden", _id=dir['name'] + "__" + f.replace('.', '__'))
                                            for f in listfiles(app, dir['name'], regexp=dir['reg'])],
                                          _class="nav nav-list small-font"),
                                       _id=dir['name'] + '_files', _style="display: none;")))
    return dict(result_files=result_files)
def upload_file():
    """ File uploading handler

    Saves an uploaded file into the requested app folder, sanitizing the
    name and forcing the conventional extension for the target directory
    (.py for models/modules/controllers/languages, .html for views).
    """
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        filename = None
        app = get_app(name=request.vars.location.split('/')[0])
        path = apath(request.vars.location, r=request)
        if request.vars.filename:
            # Explicit name given: sanitize it.
            filename = re.sub('[^\w\./]+', '_', request.vars.filename)
        else:
            # Fall back to the uploaded file's own basename.
            filename = os.path.split(request.vars.file.filename)[-1]
        if path[-8:] == '/models/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-9:] == '/modules/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-13:] == '/controllers/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-7:] == '/views/' and not filename[-5:] == '.html':
            filename += '.html'
        if path[-11:] == '/languages/' and not filename[-3:] == '.py':
            filename += '.py'
        filename = os.path.join(path, filename)
        dirpath = os.path.dirname(filename)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        data = request.vars.file.file.read()
        lineno = count_lines(data)
        safe_write(filename, data, 'wb')
        log_progress(app, 'UPLOAD', filename, lineno)
        session.flash = T('file "%(filename)s" uploaded',
                          dict(filename=filename[len(path):]))
    except Exception:
        if filename:
            d = dict(filename=filename[len(path):])
        else:
            d = dict(filename='unknown')
        session.flash = T('cannot upload file "%(filename)s"', d)
    redirect(request.vars.sender)
def errors():
    """ Error handler

    Lists application error tickets with four backends selected by
    request.args(1): 'new' (file tickets grouped by traceback hash),
    'dbnew' (DB tickets grouped the same way), 'dbold' (flat DB list)
    and the legacy flat file list.  'delete_<hash|id>' request vars
    remove the corresponding tickets.
    """
    import operator
    import os
    import hashlib
    app = get_app()
    if is_gae:
        method = 'dbold' if ('old' in
                             (request.args(1) or '')) else 'dbnew'
    else:
        method = request.args(1) or 'new'
    db_ready = {}
    db_ready['status'] = get_ticket_storage(app)
    db_ready['errmessage'] = T(
        "No ticket_storage.txt found under /private folder")
    db_ready['errlink'] = "http://web2py.com/books/default/chapter/29/13#Collecting-tickets"
    if method == 'new':
        errors_path = apath('%s/errors' % app, r=request)
        delete_hashes = []
        for item in request.vars:
            if item[:7] == 'delete_':
                delete_hashes.append(item[7:])
        hash2error = dict()
        for fn in listdir(errors_path, '^[a-fA-F0-9.\-]+$'):
            fullpath = os.path.join(errors_path, fn)
            if not os.path.isfile(fullpath):
                continue
            try:
                fullpath_file = safe_open(fullpath, 'rb')
                try:
                    error = pickle.load(fullpath_file)
                finally:
                    fullpath_file.close()
            except IOError:
                continue
            except EOFError:
                continue
            # Group identical failures by the md5 of their traceback.
            hash = hashlib.md5(to_bytes(error['traceback'])).hexdigest()
            if hash in delete_hashes:
                os.unlink(fullpath)
            else:
                try:
                    hash2error[hash]['count'] += 1
                except KeyError:
                    error_lines = error['traceback'].split("\n")
                    last_line = error_lines[-2] if len(error_lines) > 1 else 'unknown'
                    error_causer = os.path.split(error['layer'])[1]
                    hash2error[hash] = dict(count=1, pickel=error,
                                            causer=error_causer,
                                            last_line=last_line,
                                            hash=hash, ticket=fn)
        # Most frequent errors first.
        decorated = [(x['count'], x) for x in hash2error.values()]
        decorated.sort(key=operator.itemgetter(0), reverse=True)
        return dict(errors=[x[1] for x in decorated], app=app, method=method, db_ready=db_ready)
    elif method == 'dbnew':
        errors_path = apath('%s/errors' % app, r=request)
        tk_db, tk_table = get_ticket_storage(app)
        delete_hashes = []
        for item in request.vars:
            if item[:7] == 'delete_':
                delete_hashes.append(item[7:])
        hash2error = dict()
        for fn in tk_db(tk_table.id > 0).select():
            try:
                error = pickle.loads(fn.ticket_data)
                hash = hashlib.md5(error['traceback']).hexdigest()
                if hash in delete_hashes:
                    tk_db(tk_table.id == fn.id).delete()
                    tk_db.commit()
                else:
                    try:
                        hash2error[hash]['count'] += 1
                    except KeyError:
                        error_lines = error['traceback'].split("\n")
                        last_line = error_lines[-2]
                        error_causer = os.path.split(error['layer'])[1]
                        hash2error[hash] = dict(count=1,
                                                pickel=error, causer=error_causer,
                                                last_line=last_line, hash=hash,
                                                ticket=fn.ticket_id)
            except AttributeError as e:
                # Unreadable/legacy row: drop it.
                tk_db(tk_table.id == fn.id).delete()
                tk_db.commit()
        decorated = [(x['count'], x) for x in hash2error.values()]
        decorated.sort(key=operator.itemgetter(0), reverse=True)
        return dict(errors=[x[1] for x in decorated], app=app,
                    method=method, db_ready=db_ready)
    elif method == 'dbold':
        tk_db, tk_table = get_ticket_storage(app)
        for item in request.vars:
            if item[:7] == 'delete_':
                tk_db(tk_table.ticket_id == item[7:]).delete()
                tk_db.commit()
        tickets_ = tk_db(tk_table.id > 0).select(tk_table.ticket_id,
                                                 tk_table.created_datetime,
                                                 orderby=~tk_table.created_datetime)
        tickets = [row.ticket_id for row in tickets_]
        times = dict([(row.ticket_id, row.created_datetime) for
                      row in tickets_])
        return dict(app=app, tickets=tickets, method=method,
                    times=times, db_ready=db_ready)
    else:
        for item in request.vars:
            # delete_all rows doesn't contain any ticket
            # Remove anything else as requested
            if item[:7] == 'delete_' and (not item == "delete_all}"):
                os.unlink(apath('%s/errors/%s' % (app, item[7:]), r=request))
        # Newest tickets first, by file mtime.
        func = lambda p: os.stat(apath('%s/errors/%s' %
                                       (app, p), r=request)).st_mtime
        tickets = sorted(
            listdir(apath('%s/errors/' % app, r=request), '^\w.*'),
            key=func,
            reverse=True)
        return dict(app=app, tickets=tickets, method=method, db_ready=db_ready)
def get_ticket_storage(app):
    """Return the (db, table) pair used to store tickets for *app*.

    The DAL connection string is read from the app's
    private/ticket_storage.txt; on GAE the Datastore is used as a
    fallback when that file is missing.  Returns False when no storage
    is configured at all.
    """
    storage_cfg = os.path.join(apath('%s/private' % app, r=request),
                               'ticket_storage.txt')
    if os.path.exists(storage_cfg):
        db_string = safe_read(storage_cfg)
        db_string = db_string.strip().replace('\r', '').replace('\n', '')
    elif is_gae:
        # use Datastore as fallback if there is no ticket_file
        db_string = "google:datastore"
    else:
        return False
    tablename = 'web2py_ticket' + '_' + app
    ticketsdb = DAL(db_string,
                    folder=apath('%s/databases' % app, r=request),
                    auto_import=True)
    if not ticketsdb.get(tablename):
        # first use for this app: create the tickets table
        ticketsdb.define_table(
            tablename,
            Field('ticket_id', length=100),
            Field('ticket_data', 'text'),
            Field('created_datetime', 'datetime'),
        )
    return ticketsdb, ticketsdb.get(tablename)
def make_link(path):
    """ Create a link from a path """
    candidate = path.replace('\\', '/')
    # only absolute paths to existing files can become edit links
    if not (os.path.isabs(candidate) and os.path.isfile(candidate)):
        return ''
    folder, filename = os.path.split(candidate)
    base, ext = os.path.splitext(filename)
    app = get_app()
    editable = {'controllers': '.py', 'models': '.py', 'views': '.html'}
    for section in editable.keys():
        in_section = folder.endswith("%s/%s" % (app, section))
        if ext.lower() == editable[section] and in_section:
            return to_native(A('"' + candidate + '"',
                               _href=URL(r=request,
                                         f='edit/%s/%s/%s' % (app, section, filename))).xml())
    return ''
def make_links(traceback):
    """ Make links using the given traceback """
    pieces = traceback.split('"')
    # Making the short circuit compatible with <= python2.4
    out = (len(pieces) != 0) and pieces[0] or ''
    idx = 1
    # quoted spans alternate with plain text; linkify every odd piece
    while idx < len(pieces):
        linked = make_link(pieces[idx])
        if linked == '':
            out += '"' + pieces[idx]
        else:
            out += linked
        if idx + 1 < len(pieces):
            out += pieces[idx + 1]
        # consume the quoted piece and the following plain piece together
        idx += 2
    return out
class TRACEBACK(object):
    """Wrap a traceback string, turning quoted file paths into edit links."""
    def __init__(self, text):
        """Render *text* via the CODE helper and linkify quoted paths."""
        self.s = make_links(CODE(text).xml())
    def xml(self):
        """Return the pre-rendered html string."""
        return self.s
def ticket():
    """ Ticket handler """
    # expects request.args == [app, ticket_id]
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    app = get_app()
    ticket = request.args[1]
    err = RestrictedError()
    err.load(request, app, ticket)
    return dict(app=app,
                ticket=ticket,
                output=err.output,
                traceback=(err.traceback and TRACEBACK(err.traceback)),
                snapshot=err.snapshot,
                code=err.code,
                layer=err.layer,
                myversion=request.env.web2py_version)
def ticketdb():
    """ Ticket handler """
    # expects request.args == [app, ticket_id]; reads the ticket from
    # the app's configured ticket database rather than the filesystem
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    app = get_app()
    ticket = request.args[1]
    err = RestrictedError()
    request.tickets_db = get_ticket_storage(app)[0]
    err.load(request, app, ticket)
    response.view = 'default/ticket.html'
    return dict(app=app,
                ticket=ticket,
                output=err.output,
                traceback=(err.traceback and TRACEBACK(err.traceback)),
                snapshot=err.snapshot,
                code=err.code,
                layer=err.layer,
                myversion=request.env.web2py_version)
def error():
    """ Generate a ticket (for testing) """
    # deliberately blow up so admins can exercise the ticketing machinery
    message = 'admin ticket generator at your service'
    raise RuntimeError(message)
def update_languages():
    """ Update available languages """
    current_app = get_app()
    update_all_languages(apath(current_app, r=request))
    session.flash = T('Language files (static strings) updated')
    # back to the app's design page, languages section
    redirect(URL('design', args=current_app, anchor='languages'))
def user():
    """Auth action: login/registration form in multi-user mode."""
    if not MULTI_USER_MODE:
        return dict(form=T("Disabled"))
    # first registered user needs no approval (bootstrap the admin)
    if not db(db.auth_user).count():
        auth.settings.registration_requires_approval = False
    return dict(form=auth())
def reload_routes():
    """ Reload routes.py """
    # rebuild the URL rewrite maps from routes.py, then return to the site page
    gluon.rewrite.load()
    redirect(URL('site'))
def manage_students():
    """Grid for managing student accounts (managers in multi-user mode only).

    NOTE: returns locals() — the view relies on the local names below,
    so do not rename them.
    """
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    # let managers edit the registration key (approve/block accounts)
    db.auth_user.registration_key.writable = True
    grid = SQLFORM.grid(db.auth_user)
    return locals()
def bulk_register():
    """Register many students at once from a newline-separated email list.

    Managers only (multi-user mode).  NOTE: returns locals() — the view
    relies on the local names below, so do not rename them.
    """
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    form = SQLFORM.factory(Field('emails', 'text'))
    if form.process().accepted:
        emails = [x.strip() for x in form.vars.emails.split('\n') if x.strip()]
        n = 0
        for email in emails:
            # skip addresses that already have an account
            if not db.auth_user(email=email):
                n += db.auth_user.insert(email=email) and 1 or 0
        session.flash = T('%s students registered', n)
        redirect(URL('site'))
    return locals()
# Begin experimental stuff need fixes:
# 1) should run in its own process - cannot os.chdir
# 2) should not prompt user at console
# 3) should give option to force commit and not require manual merge
def git_pull():
    """ Git Pull handler """
    app = get_app()
    if not have_git:
        session.flash = GIT_MISSING
        redirect(URL('site'))
    dialog = FORM.confirm(T('Pull'),
                          {T('Cancel'): URL('site')})
    if dialog.accepted:
        try:
            repo = git.Repo(os.path.join(apath(r=request), app))
            remote = repo.remotes.origin
            remote.fetch()
            remote.pull()
            session.flash = T("Application updated via git pull")
        except git.CheckoutError:
            session.flash = T("Pull failed, certain files could not be checked out. Check logs for details.")
        except git.UnmergedEntriesError:
            session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
        except git.GitCommandError:
            session.flash = T(
                "Pull failed, git exited abnormally. See logs for details.")
        except AssertionError:
            session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
        # every outcome above ends back at the site page
        redirect(URL('site'))
    elif 'cancel' in request.vars:
        redirect(URL('site'))
    return dict(app=app, dialog=dialog)
def git_push():
    """ Git Push handler """
    app = get_app()
    if not have_git:
        session.flash = GIT_MISSING
        redirect(URL('site'))
    form = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY()))
    form.element('input[type=submit]')['_value'] = T('Push')
    form.add_button(T('Cancel'), URL('site'))
    form.process()
    if form.accepted:
        try:
            repo = git.Repo(os.path.join(apath(r=request), app))
            staged = repo.index
            # stage everything under the application folder
            staged.add([apath(r=request) + app + '/*'])
            staged.commit(form.vars.changelog)
            repo.remotes.origin.push()
            session.flash = T("Git repo updated with latest application changes.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.")
            redirect(URL('site'))
    return dict(app=app, form=form)
def plugins():
    """List available plugins from web2pyslices.com.

    The downloaded list is cached in the session; a failed download is
    best-effort (flash message, empty list, retried on the next request).
    """
    app = request.args(0)
    from gluon.serializers import loads_json
    if not session.plugins:
        try:
            rawlist = urlopen("http://www.web2pyslices.com/" +
                "public/api.json/action/list/content/Package?package" +
                "_type=plugin&search_index=false").read()
            session.plugins = loads_json(rawlist)
        except Exception:
            # was a bare except: — narrow it so SystemExit/KeyboardInterrupt
            # are not swallowed; network/JSON failures stay best-effort
            response.flash = T('Unable to download the list of plugins')
            session.plugins = []
    # guard the lookup: after a failed download session.plugins is [],
    # and [] ["results"] would raise TypeError
    results = session.plugins["results"] if session.plugins else []
    return dict(plugins=results, app=request.args(0))
def install_plugin():
    """Install a plugin into *app* from an http(s) URL.

    The source scheme is restricted to http/https, and the target
    filename is sanitized before being handed to plugin_install.
    """
    app = request.args(0)
    source = request.vars.source
    plugin = request.vars.plugin
    if not (source and app):
        raise HTTP(500, T("Invalid request"))
    # make sure no XSS attacks in source
    if not source.lower().split('://')[0] in ('http', 'https'):
        raise HTTP(500, T("Invalid request"))
    form = SQLFORM.factory()
    if form.process().accepted:
        # get w2p plugin
        if "web2py.plugin." in source:
            # cleanpath() the URL-derived segment so a crafted source URL
            # cannot inject path separators into the filename (CWE-22),
            # matching the sanitization already applied in the else branch
            filename = "web2py.plugin.%s.w2p" % \
                cleanpath(source.split("web2py.plugin.")[-1].split(".w2p")[0])
        else:
            filename = "web2py.plugin.%s.w2p" % cleanpath(plugin)
        if plugin_install(app, urlopen(source),
                          request, filename):
            session.flash = T('New plugin installed: %s', filename)
        else:
            session.flash = \
                T('unable to install plugin "%s"', filename)
        redirect(URL(f="plugins", args=[app, ]))
    return dict(form=form, app=app, plugin=plugin, source=source)
| open_redirect | {
"code": [
"from gluon.tools import Config",
" send = request.vars.send"
],
"line_no": [
15,
121
]
} | {
"code": [
"from gluon.tools import Config, prevent_open_redirect",
" send = prevent_open_redirect(request.vars.send)"
],
"line_no": [
15,
121
]
} |
VAR_0 = True
VAR_1 = 1000
if VAR_0:
if is_mobile:
VAR_43.view = VAR_43.view.replace('default/', 'default.mobile/')
VAR_43.menu = []
import .re
from gluon.admin import *
from gluon.fileutils import .abspath, read_file, write_file
from gluon.utils import web2py_uuid
from gluon.tools import Config
from gluon.compileapp import .find_exposed_functions
from glob import glob
from gluon._compat import .iteritems, PY2, pickle, xrange, urlopen, to_bytes, StringIO, to_native, reload
import gluon.rewrite
import .shutil
import platform
try:
import git
if git.__version__ < '0.3.1':
raise ImportError("Your VAR_25 of git is %VAR_140. Upgrade to 0.3.1 or better." % git.__version__)
VAR_19 = True
except ImportError as VAR_114:
VAR_19 = False
VAR_117 = 'Requires gitpython module, but not VAR_212 or incompatible VAR_25: %s' % VAR_114
from gluon.languages import (read_possible_languages, read_dict, write_dict,
read_plural_dict, write_plural_dict)
if DEMO_MODE and request.function in ['change_password', 'pack',
'pack_custom', 'pack_plugin', 'upgrade_web2py', 'uninstall',
'cleanup', 'compile_app', 'remove_compiled_app', 'delete',
'delete_plugin', 'create_file', 'upload_file', 'update_languages',
'reload_routes', 'git_push', 'git_pull', 'install_plugin']:
session.flash = T('disabled in demo mode')
redirect(URL('site'))
if is_gae and request.function in ('edit', 'edit_language',
'edit_plurals', 'update_languages', 'create_file', 'install_plugin'):
session.flash = T('disabled in GAE mode')
redirect(URL('site'))
if not is_manager() and request.function in ['change_password', 'upgrade_web2py']:
session.flash = T('disabled in multi FUNC_50 mode')
redirect(URL('site'))
if FILTER_APPS and request.args(0) and not request.args(0) in FILTER_APPS:
session.flash = T('disabled in demo mode')
redirect(URL('site'))
if not session.token:
session.token = web2py_uuid()
def FUNC_0(VAR_2):
return len([VAR_69 for VAR_69 in VAR_2.split('\n') if VAR_69.strip() and not VAR_69.startswith('#')])
def FUNC_1(VAR_3, VAR_4='EDIT', VAR_5=None, VAR_6=0):
VAR_20 = os.path.join(apath(VAR_3, VAR_122=request), 'progress.log')
VAR_21 = str(request.now)[:19]
if not os.path.exists(VAR_20):
FUNC_2(VAR_20, 'w').write('[%VAR_140] START\n' % VAR_21)
if VAR_5:
FUNC_2(VAR_20, 'a').write(
'[%VAR_140] %VAR_140 %VAR_140: %s\n' % (VAR_21, VAR_4, VAR_5, VAR_6))
def FUNC_2(VAR_7, VAR_8):
if (DEMO_MODE or is_gae) and ('w' in VAR_8 or 'a' in VAR_8):
class CLASS_2:
def FUNC_64(self, VAR_2):
pass
def FUNC_65(self):
pass
return CLASS_2()
if PY2 or 'b' in VAR_8:
return open(VAR_7, VAR_8)
else:
return open(VAR_7, VAR_8, encoding="utf8")
def FUNC_3(VAR_7, VAR_8='r'):
VAR_22 = FUNC_2(VAR_7, VAR_8)
try:
return VAR_22.read()
finally:
VAR_22.close()
def FUNC_4(VAR_7, VAR_9, VAR_8='w'):
VAR_22 = FUNC_2(VAR_7, VAR_8)
try:
VAR_22.write(VAR_9)
finally:
VAR_22.close()
def FUNC_5(VAR_10=None):
VAR_3 = VAR_10 or request.args(0)
if (VAR_3 and os.path.exists(apath(VAR_3, VAR_122=request)) and
(not MULTI_USER_MODE or is_manager() or
db(db.app.name == VAR_3)(db.app.owner == auth.user.id).count())):
return VAR_3
session.flash = T('App does not exist or you are not authorized')
redirect(URL('site'))
def VAR_197():
VAR_23 = request.vars.send
if DEMO_MODE:
session.authorized = True
session.last_time = t0
if not VAR_23:
send = URL('site')
if session.authorized:
redirect(VAR_23)
elif failed_login_count() >= allowed_number_of_attempts:
time.sleep(2 ** allowed_number_of_attempts)
raise HTTP(403)
elif request.vars.password:
if verify_password(request.vars.password[:1024]):
session.authorized = True
login_record(True)
if CHECK_VERSION:
session.check_version = True
else:
session.check_version = False
session.last_time = t0
if isinstance(VAR_23, list): # ## why does this happen?
VAR_23 = str(VAR_23[0])
redirect(VAR_23)
else:
VAR_210 = login_record(False)
if VAR_210 >= allowed_number_of_attempts:
VAR_43.flash = \
T('admin disabled because too many invalid login attempts')
elif VAR_210 == allowed_number_of_attempts - 1:
VAR_43.flash = \
T('You have one more login attempt before you are locked out')
else:
VAR_43.flash = T('invalid password.')
return dict(VAR_23=send)
def FUNC_7():
session.forget()
session._unlock(VAR_43)
VAR_24, VAR_25 = check_new_version(request.env.web2py_version,
WEB2PY_VERSION_URL)
if VAR_24 in (-1, -2):
return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)
elif not VAR_24:
return A(T('web2py is up to date'), _href=WEB2PY_URL)
elif platform.system().lower() in ('windows', 'win32', 'win64') and os.path.exists("web2py.exe"):
return SPAN('You should upgrade to %s' % VAR_25.split('(')[0])
else:
return sp_button(URL('upgrade_web2py'), T('upgrade VAR_21 to %s') % VAR_25.split('(')[0])
def FUNC_8():
session.authorized = None
if MULTI_USER_MODE:
redirect(URL('user/logout'))
redirect(URL('index'))
def FUNC_9():
if session.pam_user:
session.flash = T(
'PAM authenticated FUNC_50, cannot change password here')
redirect(URL('site'))
VAR_26 = SQLFORM.factory(Field('current_admin_password', 'password'),
Field('new_admin_password',
'password', requires=IS_STRONG()),
Field('new_admin_password_again', 'password'),
VAR_144="span4 well")
if VAR_26.accepts(request.vars):
if not verify_password(request.vars.current_admin_password):
VAR_26.errors.current_admin_password = T('invalid password')
elif VAR_26.vars.new_admin_password != VAR_26.vars.new_admin_password_again:
VAR_26.errors.new_admin_password_again = T('no match')
else:
VAR_15 = abspath('parameters_%VAR_140.py' % request.env.server_port)
FUNC_4(VAR_15, 'password="%s"' % CRYPT()(
request.vars.new_admin_password)[0])
session.flash = T('password changed')
redirect(URL('site'))
return dict(VAR_26=form)
def FUNC_10():
VAR_27 = request.env.web2py_version
VAR_28 = 'file' in request.vars or 'appurl' in request.vars
class CLASS_1(object):
def __call__(self, VAR_9):
if not re.compile('^\w+$').match(VAR_9):
return (VAR_9, T('Invalid application name'))
if not request.vars.overwrite and \
os.path.exists(os.path.join(apath(VAR_122=request), VAR_9)):
return (VAR_9, T('Application exists already'))
return (VAR_9, None)
VAR_29 = CLASS_1()
VAR_30 = SQLFORM.factory(Field('name', requires=VAR_29),
table_name='appcreate')
VAR_31 = SQLFORM.factory(Field('name', requires=VAR_29),
Field('file', 'upload', uploadfield=False),
Field('url'),
Field('overwrite', 'boolean'),
table_name='appupdate')
VAR_30.process()
VAR_31.process()
if DEMO_MODE:
pass
elif VAR_30.accepted:
VAR_174 = cleanpath(VAR_30.vars.name)
VAR_175, VAR_125 = app_create(VAR_174, request, info=True)
if VAR_175:
if MULTI_USER_MODE:
db.app.insert(VAR_10=VAR_174, owner=auth.user.id)
FUNC_1(VAR_174)
session.flash = T('new application "%s" created', VAR_174)
gluon.rewrite.load()
redirect(URL('design', VAR_98=VAR_174))
else:
session.flash = \
DIV(T('unable to create application "%s"', VAR_174),
PRE(VAR_125))
redirect(URL(VAR_122=request))
elif VAR_31.accepted:
if (VAR_31.vars.url or '').endswith('.git'):
if not VAR_19:
session.flash = VAR_117
redirect(URL(VAR_122=request))
VAR_211 = os.path.join(apath(VAR_122=request), VAR_31.vars.name)
try:
VAR_217 = git.Repo.clone_from(VAR_31.vars.url, VAR_211)
session.flash = T('new application "%s" imported',
VAR_31.vars.name)
gluon.rewrite.load()
except git.GitCommandError as err:
session.flash = T('Invalid git repository specified.')
redirect(URL(VAR_122=request))
elif VAR_31.vars.url:
try:
VAR_221 = urlopen(VAR_31.vars.url)
if VAR_221.code == 404:
raise Exception("404 VAR_16 not found")
except Exception as VAR_114:
session.flash = \
DIV(T('Unable to download VAR_3 because:'), PRE(repr(VAR_114)))
redirect(URL(VAR_122=request))
VAR_120 = VAR_31.vars.url
elif VAR_31.accepted and VAR_31.vars.file:
VAR_120 = request.vars.file.filename
VAR_221 = request.vars.file.file
else:
session.flash = 'No VAR_16 uploaded and no URL specified'
redirect(URL(VAR_122=request))
if VAR_221:
VAR_174 = cleanpath(VAR_31.vars.name)
VAR_212 = app_install(VAR_174, VAR_221,
request, VAR_120,
overwrite=VAR_31.vars.overwrite)
if VAR_221 and VAR_212:
VAR_155 = 'application %(VAR_174)VAR_140 VAR_212 with md5sum: %(digest)s'
if MULTI_USER_MODE:
db.app.insert(VAR_10=VAR_174, owner=auth.user.id)
FUNC_1(VAR_174)
session.flash = T(VAR_155, dict(VAR_174=appname,
digest=md5_hash(VAR_212)))
gluon.rewrite.load()
else:
VAR_155 = 'unable to install application "%(VAR_174)s"'
session.flash = T(VAR_155, dict(VAR_174=VAR_31.vars.name))
redirect(URL(VAR_122=request))
VAR_32 = re.compile('^\w+$')
if is_manager():
VAR_33 = [VAR_7 for VAR_7 in os.listdir(apath(VAR_122=request)) if VAR_32.match(VAR_7) and
VAR_7 != '__pycache__']
else:
VAR_33 = [VAR_7.name for VAR_7 in db(db.app.owner == auth.user_id).select()]
if FILTER_APPS:
VAR_33 = [VAR_7 for VAR_7 in VAR_33 if VAR_7 in FILTER_APPS]
VAR_33 = sorted(VAR_33, VAR_143=lambda VAR_7: a.upper())
VAR_34 = platform.python_version()
return dict(VAR_3=None, VAR_33=apps, VAR_27=myversion, VAR_34=myplatform,
VAR_30=form_create, VAR_31=form_update)
def FUNC_11(VAR_3):
import .datetime
VAR_20 = os.path.join(apath(VAR_3, VAR_122=request), 'progress.log')
VAR_32 = re.compile('\[(.*?)\][^\:]+\:\VAR_140+(\-?\VAR_68+)')
if not os.path.exists(VAR_20):
return []
VAR_35 = VAR_32.findall(open(VAR_20, 'r').read())
VAR_36, VAR_37 = [], 0
for VAR_156 in VAR_35:
if not VAR_156:
continue
VAR_118 = -(request.now - datetime.datetime.strptime(VAR_156[0],
'%Y-%VAR_156-%VAR_68 %H:%M:%S')).days
VAR_37 += int(VAR_156[1])
VAR_36.append([VAR_118, VAR_37])
return VAR_36
def FUNC_12():
VAR_3 = FUNC_5()
try:
if len(request.args) == 1:
VAR_120 = 'web2py.app.%VAR_140.w2p' % VAR_3
VAR_5 = app_pack(VAR_3, request, raise_ex=True)
else:
VAR_120 = 'web2py.app.%VAR_140.compiled.w2p' % VAR_3
VAR_5 = app_pack_compiled(VAR_3, request, raise_ex=True)
except Exception as VAR_114:
VAR_176 = VAR_114
VAR_5 = None
if VAR_5:
VAR_43.headers['Content-Type'] = 'application/w2p'
VAR_119 = 'attachment; VAR_5=%s' % VAR_120
VAR_43.headers['Content-Disposition'] = VAR_119
return FUNC_3(VAR_5, 'rb')
else:
session.flash = T('internal VAR_125: %s', VAR_176)
redirect(URL('site'))
def FUNC_13():
VAR_3 = FUNC_5()
if len(request.args) == 2:
VAR_120 = 'web2py.plugin.%VAR_140.w2p' % request.args[1]
VAR_5 = plugin_pack(VAR_3, request.args[1], request)
if VAR_5:
VAR_43.headers['Content-Type'] = 'application/w2p'
VAR_119 = 'attachment; VAR_5=%s' % VAR_120
VAR_43.headers['Content-Disposition'] = VAR_119
return FUNC_3(VAR_5, 'rb')
else:
session.flash = T('internal error')
redirect(URL('plugin', VAR_98=request.args))
def FUNC_14(VAR_3, VAR_11, VAR_12=None):
import .urllib
import .zipfile
VAR_38 = 'http://www.web2py.com/examples/static/VAR_40.zip'
VAR_39 = StringIO()
VAR_39.write(urlopen(VAR_38).read())
VAR_40 = zipfile.ZipFile(VAR_39, VAR_4='a')
VAR_41 = u'# -*- coding: utf-8 -*-\nrouters = dict(BASE=dict(default_application="%s"))' % VAR_3
VAR_40.writestr('web2py/VAR_41.py', VAR_41.encode('utf-8'))
VAR_42 = os.path.dirname(VAR_11)
for VAR_5 in VAR_12:
VAR_120 = os.path.join(VAR_11, VAR_5)
VAR_121 = os.path.join('web2py/applications', VAR_3, VAR_5)
VAR_40.write(VAR_120, VAR_121)
VAR_40.close()
VAR_43.headers['Content-Type'] = 'application/zip'
VAR_43.headers['Content-Disposition'] = 'attachment; VAR_5=web2py.app.%VAR_140.zip' % VAR_3
VAR_39.seek(0)
return VAR_43.stream(VAR_39)
def FUNC_15():
VAR_3 = FUNC_5()
VAR_11 = apath(VAR_3, VAR_122=request)
def FUNC_58(VAR_44):
return [VAR_221 for VAR_221 in VAR_44 if not (
VAR_221[:1] in '#' or VAR_221.endswith('~') or VAR_221.endswith('.bak'))]
VAR_45 = {}
for (VAR_122, VAR_68, VAR_221) in os.walk(VAR_11):
VAR_45[VAR_122] = {'folders': FUNC_58(VAR_68), 'files': FUNC_58(VAR_221)}
if request.post_vars.file:
VAR_123 = set(os.path.relpath(os.path.join(VAR_122, VAR_221), VAR_11) for VAR_122 in VAR_45 for VAR_221 in VAR_45[VAR_122]['files'])
VAR_45 = request.post_vars.file
VAR_45 = [files] if not isinstance(VAR_45, list) else VAR_45
VAR_45 = [VAR_16 for VAR_16 in VAR_45 if VAR_16 in VAR_123]
if request.post_vars.doexe is None:
VAR_120 = 'web2py.app.%VAR_140.w2p' % VAR_3
try:
VAR_5 = app_pack(VAR_3, request, raise_ex=True, VAR_12=VAR_45)
except Exception as VAR_114:
VAR_5 = None
if VAR_5:
VAR_43.headers['Content-Type'] = 'application/w2p'
VAR_119 = 'attachment; VAR_5=%s' % VAR_120
VAR_43.headers['Content-Disposition'] = VAR_119
return FUNC_3(VAR_5, 'rb')
else:
session.flash = T('internal VAR_125: %s', VAR_114)
redirect(URL(VAR_98=request.args))
else:
return FUNC_14(VAR_3, VAR_11, VAR_45)
return locals()
def FUNC_16():
VAR_46 = FORM.confirm(T('Upgrade'),
{T('Cancel'): URL('site')})
if VAR_46.accepted:
(VAR_124, VAR_125) = upgrade(request)
if VAR_124:
session.flash = T('web2py upgraded; please restart it')
else:
session.flash = T('unable to upgrade because "%s"', VAR_125)
redirect(URL('site'))
return dict(VAR_46=dialog)
def FUNC_17():
VAR_3 = FUNC_5()
VAR_46 = FORM.confirm(T('Uninstall'),
{T('Cancel'): URL('site')})
VAR_46['_id'] = 'confirm_form'
VAR_46['_class'] = 'well'
for VAR_126 in VAR_46.components:
VAR_126['_class'] = 'btn'
if VAR_46.accepted:
if MULTI_USER_MODE:
if is_manager() and db(db.app.name == VAR_3).delete():
pass
elif db(db.app.name == VAR_3)(db.app.owner == auth.user.id).delete():
pass
else:
session.flash = T('no permission to FUNC_17 "%s"', VAR_3)
redirect(URL('site'))
try:
VAR_5 = app_pack(VAR_3, request, raise_ex=True)
except:
session.flash = T('unable to FUNC_17 "%s"', VAR_3)
else:
if app_uninstall(VAR_3, request):
session.flash = T('application "%s" uninstalled', VAR_3)
else:
session.flash = T('unable to FUNC_17 "%s"', VAR_3)
redirect(URL('site'))
return dict(VAR_3=VAR_3, VAR_46=dialog)
def FUNC_18():
VAR_3 = FUNC_5()
VAR_47 = app_cleanup(VAR_3, request)
if not VAR_47:
session.flash = T("some VAR_45 could not be removed")
else:
session.flash = T('cache, FUNC_42 and sessions cleaned')
redirect(URL('site'))
def FUNC_19():
VAR_3 = FUNC_5()
VAR_48 = app_compile(VAR_3, request,
skip_failed_views=(request.args(1) == 'skip_failed_views'))
if not VAR_48:
session.flash = T('application compiled')
elif isinstance(VAR_48, list):
session.flash = DIV(*[T('application compiled'), BR(), BR(),
T('WARNING: The following VAR_84 could not be compiled:'), BR()] +
[CAT(BR(), VAR_182) for VAR_182 in VAR_48] +
[BR(), BR(), T('DO NOT use the "Pack compiled" feature.')])
else:
session.flash = DIV(T('Cannot compile: there are FUNC_42 in your VAR_3:'),
CODE(VAR_48))
redirect(URL('site'))
def FUNC_20():
VAR_3 = FUNC_5()
remove_compiled_application(apath(VAR_3, VAR_122=request))
session.flash = T('compiled application removed')
redirect(URL('site'))
def FUNC_21():
VAR_3 = FUNC_5()
VAR_5 = '/'.join(request.args)
VAR_49 = request.vars.sender
if isinstance(VAR_49, list): # ## fix VAR_7 problem with Vista
VAR_49 = sender[0]
VAR_46 = FORM.confirm(T('Delete'),
{T('Cancel'): URL(VAR_49, VAR_157=request.vars.id)})
if VAR_46.accepted:
try:
VAR_177 = apath(VAR_5, VAR_122=request)
VAR_160 = FUNC_0(open(VAR_177, 'r').read())
os.unlink(VAR_177)
FUNC_1(VAR_3, 'DELETE', VAR_5, VAR_6=-VAR_160)
session.flash = T('file "%(VAR_5)s" deleted',
dict(VAR_5=filename))
except Exception:
session.flash = T('unable to FUNC_21 VAR_16 "%(VAR_5)s"',
dict(VAR_5=filename))
redirect(URL(VAR_49, VAR_157=request.vars.id2))
return dict(VAR_46=dialog, VAR_5=filename)
def FUNC_22():
if not URL.verify(request, hmac_key=session.hmac_key): raise HTTP(401)
VAR_3 = FUNC_5()
VAR_5 = os.path.join(apath(VAR_3, VAR_122=request), 'DISABLED')
if is_gae:
return SPAN(T('Not supported'), _style='color:yellow')
elif os.path.exists(VAR_5):
os.unlink(VAR_5)
return SPAN(T('Disable'), _style='color:green')
else:
if PY2:
FUNC_2(VAR_5, 'wb').write('disabled: True\ntime-disabled: %s' % request.now)
else:
VAR_200 = 'disabled: True\ntime-disabled: %s' % request.now
FUNC_2(VAR_5, 'wb').write(VAR_200.encode('utf-8'))
return SPAN(T('Enable'), _style='color:red')
def FUNC_23():
VAR_3 = FUNC_5(request.vars.app)
VAR_5 = '/'.join(request.args)
if request.vars.app:
VAR_15 = abspath(VAR_5)
else:
VAR_15 = apath(VAR_5, VAR_122=request)
try:
VAR_2 = FUNC_3(VAR_15).replace('\r', '')
except IOError:
session.flash = T('file does not exist')
redirect(URL('site'))
VAR_50 = VAR_5[filename.rfind('.') + 1:].lower()
return dict(VAR_3=app,
VAR_5=filename,
VAR_2=data,
VAR_50=extension)
def FUNC_24():
VAR_3 = FUNC_5()
if len(request.args) > 1:
VAR_16 = request.args[1]
else:
VAR_16 = '.*\.py'
VAR_51 = listdir(
apath('%VAR_140/VAR_51/' % VAR_3, VAR_122=request), VAR_16 + '$')
return dict(VAR_3=VAR_3, VAR_51=controllers)
def FUNC_25():
return ''
def FUNC_26():
VAR_52 = request.vars.keywords or ''
VAR_3 = FUNC_5()
def FUNC_59(VAR_5, VAR_52):
VAR_5 = os.path.join(apath(VAR_3, VAR_122=request), VAR_5)
if VAR_52 in read_file(VAR_5, 'r'):
return True
return False
VAR_15 = apath(request.args[0], VAR_122=request)
VAR_53 = glob(os.path.join(VAR_15, '*/*.py'))
VAR_54 = glob(os.path.join(VAR_15, '*/*.html'))
VAR_55 = glob(os.path.join(VAR_15, '*/*/*.html'))
VAR_45 = [x[len(VAR_15) + 1:].replace(
'\\', '/') for x in VAR_53 + VAR_54 + VAR_55 if FUNC_59(x, VAR_52)]
return VAR_43.json(dict(VAR_45=files, message=T.M('Searching: **%VAR_140** %%{VAR_16}', len(VAR_45))))
def FUNC_27():
VAR_3 = FUNC_5(request.vars.app)
VAR_56 = apath(VAR_3, VAR_122=request)
VAR_57 = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true', 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false', 'linenumbers': 'true', 'highlightline': 'true'}
VAR_58 = Config(os.path.join(request.folder, 'settings.cfg'),
section='editor', default_values={})
VAR_57.update(VAR_58.read())
if not(request.ajax) and not(is_mobile):
VAR_43.title = T('Editing %s') % VAR_3
return VAR_43.render('default/FUNC_27.html', dict(VAR_3=VAR_3, editor_settings=VAR_57))
if 'settings' in request.vars:
if request.post_vars: # save new VAR_57
if PY2:
VAR_201 = request.post_vars.items()
else:
VAR_201 = list(request.post_vars.items())
VAR_201 += [(opt, 'false') for opt in VAR_57 if opt not in request.post_vars]
if VAR_58.save(VAR_201):
VAR_43.headers["web2py-VAR_126-flash"] = T('Preferences saved correctly')
else:
VAR_43.headers["web2py-VAR_126-flash"] = T('Preferences saved on session only')
VAR_43.headers["web2py-VAR_126-command"] = "update_editor(%VAR_140);$('a[href=#editor_settings] button.close').click();" % VAR_43.json(VAR_58.read())
return
else:
VAR_178 = {'realfilename': 'settings', 'filename': 'settings', 'id': 'editor_settings', 'force': False}
VAR_178['plain_html'] = VAR_43.render('default/editor_settings.html', {'editor_settings': VAR_57})
return VAR_43.json(VAR_178)
""" File FUNC_27 handler """
VAR_3 = FUNC_5(request.vars.app)
VAR_5 = '/'.join(request.args)
VAR_59 = request.args[-1]
if request.vars.app:
VAR_15 = abspath(VAR_5)
else:
VAR_15 = apath(VAR_5, VAR_122=request)
if VAR_5[-3:] == '.py':
VAR_127 = 'python'
elif VAR_5[-5:] == '.html':
VAR_127 = 'html'
elif VAR_5[-5:] == '.load':
VAR_127 = 'html'
elif VAR_5[-4:] == '.css':
VAR_127 = 'css'
elif VAR_5[-3:] == '.js':
VAR_127 = 'javascript'
else:
VAR_127 = 'html'
if ('revert' in request.vars) and os.path.exists(VAR_15 + '.bak'):
try:
VAR_2 = FUNC_3(VAR_15 + '.bak')
VAR_179 = FUNC_3(VAR_15)
except IOError:
session.flash = T('Invalid action')
if 'from_ajax' in request.vars:
return VAR_43.json({'error': str(T('Invalid action'))})
else:
redirect(URL('site'))
FUNC_4(VAR_15, VAR_2)
VAR_128 = md5_hash(VAR_2)
VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
FUNC_4(VAR_15 + '.bak', VAR_179)
VAR_43.flash = T('file "%s" of %VAR_140 restored', (VAR_5, VAR_129))
else:
try:
VAR_2 = FUNC_3(VAR_15)
except IOError:
session.flash = T('Invalid action')
if 'from_ajax' in request.vars:
return VAR_43.json({'error': str(T('Invalid action'))})
else:
redirect(URL('site'))
VAR_130 = FUNC_0(VAR_2)
VAR_128 = md5_hash(VAR_2)
VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
if request.vars.file_hash and request.vars.file_hash != VAR_128:
session.flash = T('file changed on disk')
VAR_2 = request.vars.data.replace('\VAR_122\n', '\n').strip() + '\n'
FUNC_4(VAR_15 + '.1', VAR_2)
if 'from_ajax' in request.vars:
return VAR_43.json({'error': str(T('file changed on disk')),
'redirect': URL('resolve',
VAR_98=request.args)})
else:
redirect(URL('resolve', VAR_98=request.args))
elif request.vars.data:
FUNC_4(VAR_15 + '.bak', VAR_2)
VAR_2 = request.vars.data.replace('\VAR_122\n', '\n').strip() + '\n'
FUNC_4(VAR_15, VAR_2)
VAR_202 = FUNC_0(VAR_2)
FUNC_1(
VAR_3, 'EDIT', VAR_5, VAR_6=VAR_202 - VAR_130)
VAR_128 = md5_hash(VAR_2)
VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
VAR_43.flash = T('file saved on %s', VAR_129)
VAR_60 = (request.vars.data or request.vars.revert)
VAR_61 = None
if VAR_127 == 'python' and request.vars.data:
import _ast
try:
VAR_180 = request.vars.data.rstrip().replace('\VAR_122\n', '\n') + '\n'
compile(VAR_180, VAR_15, "exec", _ast.PyCF_ONLY_AST)
except Exception as VAR_114:
VAR_203 = sum([len(VAR_69) + 1 for l, VAR_69
in enumerate(request.vars.data.split("\n"))
if l < VAR_114.lineno - 1])
if VAR_114.text and VAR_114.offset:
VAR_213 = VAR_114.offset - (len(VAR_114.text) - len(
VAR_114.text.splitlines()[-1]))
else:
VAR_213 = 0
VAR_61 = {'start': VAR_203, 'end': VAR_203 +
VAR_213 + 1, 'lineno': VAR_114.lineno, 'offset': VAR_213}
try:
VAR_214 = VAR_114.__class__.__name__
except:
VAR_214 = 'unknown exception!'
VAR_43.flash = DIV(T('failed to compile VAR_16 because:'), BR(),
B(VAR_214), ' ' + T('at VAR_69 %s', VAR_114.lineno),
VAR_213 and ' ' +
T('at char %s', VAR_213) or '',
PRE(repr(VAR_114)))
if VAR_60 and request.args[1] == 'modules':
try:
VAR_181 = '.'.join(request.args[2:])[:-3]
exec('import .applications.%VAR_140.modules.%s' % (
request.args[0], VAR_181))
reload(sys.modules['applications.%VAR_140.modules.%s'
% (request.args[0], VAR_181)])
except Exception as VAR_114:
VAR_43.flash = DIV(
T('failed to reload module because:'), PRE(repr(VAR_114)))
VAR_62 = None
VAR_63 = None
VAR_64 = None
if VAR_127 == 'html' and len(request.args) >= 3:
VAR_131 = os.path.join(request.args[0], 'controllers',
request.args[2] + '.py')
if os.path.exists(apath(VAR_131, VAR_122=request)):
VAR_62 = URL('edit', VAR_98=[VAR_131.replace(os.sep, "/")])
VAR_182 = request.args[3].replace('.html', '')
VAR_64 = URL(request.args[0], request.args[2], VAR_182)
elif VAR_127 == 'python' and request.args[1] == 'controllers':
VAR_3 = FUNC_5()
VAR_183 = os.path.splitext(request.args[2])[0]
VAR_184 = os.path.join(VAR_3, 'views', VAR_183)
VAR_185 = apath(VAR_184, VAR_122=request)
VAR_186 = []
if os.path.exists(VAR_185):
if os.path.isdir(VAR_185):
VAR_186 = glob(os.path.join(VAR_185, '*.html'))
elif os.path.exists(VAR_185 + '.html'):
VAR_186.append(VAR_185 + '.html')
if len(VAR_186):
VAR_63 = []
for v in sorted(VAR_186):
VAR_215 = os.path.split(v)[-1]
VAR_216 = "/".join([VAR_184.replace(os.sep, "/"), VAR_215])
VAR_63.append(A(VAR_215.split(".")[0],
VAR_144="editor_filelink",
_href=URL('edit', VAR_98=[VAR_216])))
if len(request.args) > 2 and request.args[1] == 'controllers':
VAR_132 = (request.args[2])[:-3]
try:
VAR_83 = find_exposed_functions(VAR_2)
VAR_83 = functions and sorted(VAR_83) or []
except SyntaxError as err:
VAR_83 = ['SyntaxError:Line:%d' % err.lineno]
else:
(VAR_132, VAR_83) = (None, None)
if 'from_ajax' in request.vars:
return VAR_43.json({'file_hash': VAR_128, 'saved_on': VAR_129, 'functions': VAR_83, 'controller': VAR_132, 'application': request.args[0], 'highlight': VAR_61})
else:
VAR_133 = dict(VAR_3=request.args[0],
VAR_160=request.vars.lineno or 1,
editor_settings=VAR_57,
VAR_5=filename,
VAR_59=realfilename,
VAR_127=filetype,
VAR_2=data,
VAR_62=edit_controller,
VAR_128=file_hash,
VAR_129=saved_on,
VAR_132=controller,
VAR_83=functions,
VAR_64=view_link,
VAR_63=editviewlinks,
id=IS_SLUG()(VAR_5)[0],
force=True if (request.vars.restore or
request.vars.revert) else False)
VAR_134 = VAR_43.render('default/edit_js.html', VAR_133)
file_details['plain_html'] = VAR_134
if is_mobile:
return VAR_43.render('default.mobile/FUNC_27.html',
VAR_133, editor_settings=VAR_57)
else:
return VAR_43.json(VAR_133)
def FUNC_28():
    """Scan an application's Python sources for '# todo ...' comments.

    Reads the app name from request.vars.app and walks models, controllers,
    modules and private, returning {'todo': [...], 'app': app} where each
    entry records the file, directory and matched lines.
    """
    VAR_3 = request.vars.app or ''
    # was '%(VAR_3)s' with key 'app' -- the format key must match the dict
    VAR_56 = apath('%(app)s' % {'app': VAR_3}, VAR_122=request)
    VAR_65 = ['models', 'controllers', 'modules', 'private']

    def FUNC_38(VAR_3, VAR_13, VAR_14='.*\.py$'):
        # List matching files under <app>/<dir>/, '/'-separated, excluding
        # editor backup files.  (was '%(VAR_3)VAR_140/...' -- mangled '%s')
        VAR_45 = sorted(listdir(apath('%(app)s/%(dir)s/' % {'app': VAR_3, 'dir': VAR_13}, VAR_122=request), VAR_14))
        VAR_45 = [x.replace(os.path.sep, '/') for x in VAR_45 if not x.endswith('.bak')]
        return VAR_45

    # was '#\VAR_140*(todo)+\VAR_140+(.*)' -- the '\s' escapes were mangled
    VAR_66 = '#\s*(todo)+\s+(.*)'
    VAR_32 = re.compile(VAR_66, re.IGNORECASE)
    VAR_67 = []
    for VAR_68 in VAR_65:
        for VAR_221 in FUNC_38(VAR_3, VAR_68):
            VAR_35 = []
            VAR_5 = apath(os.path.join(VAR_3, VAR_68, VAR_221), VAR_122=request)
            with FUNC_2(VAR_5, 'r') as f_s:
                VAR_204 = f_s.read()
            for VAR_156 in VAR_32.finditer(VAR_204):
                VAR_203 = VAR_156.start()
                # line number = newlines before the match start, 1-based
                VAR_160 = VAR_204.count('\n', 0, VAR_203) + 1
                VAR_35.append({'text': VAR_156.group(0), 'lineno': VAR_160})
            if len(VAR_35) != 0:
                VAR_67.append({'filename': VAR_221, 'matches': VAR_35, 'dir': VAR_68})
    return {'todo': VAR_67, 'app': VAR_3}
def FUNC_29():
    """Persist an editor session (name -> comma-joined file list) to the
    [editor_sessions] section of the app's settings.cfg and render the
    sessions view."""
    VAR_58 = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor_sessions', default_values={})
    VAR_57 = VAR_58.read()
    if request.vars.session_name and request.vars.files:
        VAR_135 = request.vars.session_name
        VAR_45 = request.vars.files
        VAR_57.update({VAR_135: ','.join(VAR_45)})
        # NOTE(review): the header name 'web2py-VAR_126-flash' looks mangled
        # (web2py's component flash header is 'web2py-component-flash') --
        # confirm before relying on client-side handling.
        if VAR_58.save(VAR_57.items()):
            VAR_43.headers["web2py-VAR_126-flash"] = T('Session saved correctly')
        else:
            # save() failed -> the session name survives only in memory
            VAR_43.headers["web2py-VAR_126-flash"] = T('Session saved on session only')
    return VAR_43.render('default/FUNC_29.html', {'editor_sessions': VAR_57})
def FUNC_30():
    """Show/merge the diff between a file and its '.1' backup copy.

    GET renders a checkbox-per-line diff table; POST writes back unchanged
    lines plus the lines whose checkboxes were ticked, then redirects to
    the editor.
    """
    VAR_5 = '/'.join(request.args)
    VAR_15 = apath(VAR_5, VAR_122=request)
    VAR_7 = FUNC_3(VAR_15).split('\n')
    try:
        VAR_8 = FUNC_3(VAR_15 + '.1').split('\n')
    except IOError:
        session.flash = 'Other VAR_16, no longer there'
        redirect(URL('edit', VAR_98=request.args))
    VAR_68 = difflib.ndiff(VAR_7, VAR_8)

    def FUNC_60(VAR_69):
        # Render the leading whitespace of a diff line as &nbsp; for HTML.
        VAR_136 = ''
        for (VAR_145, VAR_48) in enumerate(VAR_69):
            if VAR_48 == ' ':
                VAR_136 += '&nbsp;'
            elif VAR_48 == ' \t':
                # NOTE(review): a single char can never equal ' \t'; this
                # branch is unreachable -- kept to preserve behaviour.
                VAR_136 += '&nbsp;'
            elif VAR_145 == 0 and VAR_48 == '?':
                pass
            else:
                break
        return XML(VAR_136)

    def FUNC_61(VAR_70):
        # CSS class for a diff line based on its ' '/'+'/'-' marker.
        VAR_137 = {' ': 'normal', '+': 'plus', '-': 'minus'}
        return VAR_137[VAR_70[0]]

    if request.vars:
        VAR_48 = '\n'.join([VAR_70[2:].rstrip() for (VAR_111, VAR_70) in enumerate(VAR_68) if VAR_70[0]
                            == ' ' or 'line%i' % VAR_111 in request.vars])
        FUNC_4(VAR_15, VAR_48)
        session.flash = 'files merged'
        redirect(URL('edit', VAR_98=request.args))
    else:
        # One checkbox per changed line; additions are pre-checked.
        VAR_138 = lambda VAR_197, VAR_70: not VAR_70[:1] in ['+', '-'] and "" \
            or INPUT(_type='checkbox',
                     _name='line%i' % VAR_197,
                     VAR_9=VAR_70[0] == '+')
        VAR_139 = TABLE(*[TR(TD(VAR_138(VAR_111, VAR_70)),
                             TD(VAR_70[0]),
                             TD(FUNC_60(VAR_70[2:]),
                                TT(VAR_70[2:].rstrip())),
                             VAR_144=FUNC_61(VAR_70))
                          for (VAR_111, VAR_70) in enumerate(VAR_68) if VAR_70[0] != '?'])
        # was dict(VAR_139=diff, VAR_5=filename): 'diff'/'filename' are
        # undefined here -- return the local variables instead
        return dict(VAR_139=VAR_139, VAR_5=VAR_5)
def FUNC_31():
    """Edit a language translation file as a key -> translation form."""
    VAR_3 = FUNC_5()
    VAR_5 = '/'.join(request.args)
    VAR_43.title = request.args[-1]
    VAR_71 = read_dict(apath(VAR_5, VAR_122=request))
    if '__corrupted__' in VAR_71:
        VAR_26 = SPAN(VAR_71['__corrupted__'], VAR_144='error')
        # was VAR_5=filename, VAR_26=form: undefined names
        return dict(VAR_5=VAR_5, VAR_26=VAR_26)
    # was sorted(..., VAR_143=...): sorted's keyword argument is 'key'
    VAR_72 = sorted(VAR_71.keys(), key=lambda x: to_native(x).lower())
    VAR_73 = []
    # was rows.append(...): 'rows' is undefined -- the list is VAR_73
    VAR_73.append(H2(T('Original/Translation')))
    for VAR_143 in VAR_72:
        VAR_10 = md5_hash(VAR_143)
        VAR_140 = VAR_71[VAR_143]
        # was key.partition(...): 'key' is undefined -- split the loop key
        (VAR_141, VAR_142, VAR_143) = VAR_143.partition('\x01')
        if VAR_142:
            VAR_141 = SPAN(VAR_141 + ': ', VAR_144='tm_ftag')
            VAR_145 = VAR_143
        else:
            (VAR_145, VAR_141) = (VAR_141, '')
        VAR_144 = 'untranslated' if VAR_145 == VAR_140 else 'translated'
        # Short strings as one-line inputs, long ones as textareas.
        # (was VAR_144=_class: '_class' is undefined -- use VAR_144)
        if len(VAR_140) <= 40:
            VAR_187 = INPUT(_type='text', _name=VAR_10, VAR_9=VAR_140,
                            _size=70, VAR_144=VAR_144)
        else:
            VAR_187 = TEXTAREA(_name=VAR_10, VAR_9=VAR_140, _cols=70,
                               _rows=5, VAR_144=VAR_144)
        VAR_145 = (VAR_140 != VAR_145) and VAR_145 or B(VAR_145)
        VAR_146 = DIV(LABEL(VAR_141, VAR_145, _style="font-weight:normal;"),
                      CAT(VAR_187, '\n', TAG.BUTTON(
                          T('delete'),
                          _onclick='return delkey("%s")' % VAR_10,
                          VAR_144='btn')), _id=VAR_10, VAR_144='span6 well well-small')
        VAR_73.append(DIV(VAR_146, VAR_144="row-fluid"))
    VAR_73.append(DIV(INPUT(_type='submit', _value=T('update'), VAR_144="btn btn-primary"), VAR_144='controls'))
    VAR_26 = FORM(*VAR_73)
    if VAR_26.accepts(request.vars, keepvalues=True):
        VAR_147 = dict()
        for VAR_143 in VAR_72:
            VAR_10 = md5_hash(VAR_143)
            # chr(127) marks entries deleted client-side; skip them
            if VAR_26.vars[VAR_10] == chr(127):
                continue
            VAR_147[VAR_143] = VAR_26.vars[VAR_10]
        write_dict(apath(VAR_5, VAR_122=request), VAR_147)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(VAR_122=request, VAR_98=request.args))
    # was VAR_5=filename, VAR_26=form: undefined names
    return dict(VAR_3=request.args[0], VAR_5=VAR_5, VAR_26=VAR_26)
def FUNC_32():
    """Edit the plural-forms dictionary file of a language."""
    VAR_3 = FUNC_5()
    VAR_5 = '/'.join(request.args)
    VAR_74 = read_plural_dict(
        apath(VAR_5, VAR_122=request))  # plural forms dictionary
    VAR_75 = int(request.vars.nplurals) - 1  # plural forms quantity
    VAR_76 = xrange(VAR_75)
    if '__corrupted__' in VAR_74:
        VAR_26 = SPAN(VAR_74['__corrupted__'], VAR_144='error')
        # was VAR_5=filename, VAR_26=form: undefined names
        return dict(VAR_5=VAR_5, VAR_26=VAR_26)
    # NOTE(review): cmp/unicode are Python-2 only -- this sort call predates
    # the Python-3 port and will fail on Python 3; confirm intended runtime.
    VAR_72 = sorted(VAR_74.keys(), lambda x, y: cmp(
        unicode(x, 'utf-8').lower(), unicode(y, 'utf-8').lower()))
    VAR_77 = []
    for VAR_143 in VAR_72:
        VAR_10 = md5_hash(VAR_143)
        VAR_148 = VAR_74[VAR_143]
        if len(VAR_148) < VAR_75:
            # pad missing plural forms with None
            VAR_148.extend(None for VAR_111 in xrange(VAR_75 - len(VAR_148)))
        VAR_149 = DIV(CAT(LABEL(T("Singular Form")), B(VAR_143,
                                                       VAR_144='fake-input')))
        VAR_150 = [SPAN(LABEL(T("Plural Form #%s", VAR_173 + 1)), INPUT(_type='text', _name=VAR_10 + '_' + str(VAR_173), VAR_9=VAR_148[VAR_173], _size=20), VAR_144='span6') for VAR_173 in VAR_76]
        VAR_151 = DIV(CAT(*VAR_150))
        VAR_152 = DIV(CAT(LABEL(XML('&nbsp;')), TAG.BUTTON(T('delete'), _onclick='return delkey("%s")' % VAR_10, VAR_144='btn'), VAR_144='span6'))
        VAR_153 = DIV(DIV(VAR_149, '\n', VAR_151, '\n', VAR_152, VAR_144='well well-small'), _id=VAR_10, VAR_144='row-fluid tab_row')
        VAR_77.append(VAR_153)
    VAR_77.append(DIV(TAG['button'](T('update'), _type='submit',
                                    VAR_144='btn btn-primary'),
                      VAR_144='controls'))
    VAR_78 = DIV(*VAR_77, **dict(VAR_144="row-fluid"))
    VAR_26 = FORM(VAR_78)
    if VAR_26.accepts(request.vars, keepvalues=True):
        VAR_154 = dict()
        for VAR_143 in VAR_72:
            VAR_10 = md5_hash(VAR_143)
            # chr(127) marks entries deleted client-side; skip them
            if VAR_26.vars[VAR_10 + '_0'] == chr(127):
                continue
            VAR_154[VAR_143] = [VAR_26.vars[VAR_10 + '_' + str(VAR_173)]
                                for VAR_173 in VAR_76]
        write_plural_dict(apath(VAR_5, VAR_122=request), VAR_154)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(VAR_122=request, VAR_98=request.args, VAR_17=dict(
            VAR_75=request.vars.nplurals)))
    # was VAR_5=filename, VAR_26=form: undefined names
    return dict(VAR_3=request.args[0], VAR_5=VAR_5, VAR_26=VAR_26)
def VAR_79():
    """Render the application's ABOUT and LICENSE files as MARKMIN."""
    VAR_3 = FUNC_5()
    # was '%VAR_140/ABOUT' etc.: the '%s' format specifiers were mangled
    VAR_79 = FUNC_3(apath('%s/ABOUT' % VAR_3, VAR_122=request))
    VAR_80 = FUNC_3(apath('%s/LICENSE' % VAR_3, VAR_122=request))
    return dict(VAR_3=VAR_3, VAR_79=MARKMIN(VAR_79), VAR_80=MARKMIN(VAR_80), VAR_6=FUNC_11(VAR_3))
def FUNC_34():
    """Design page: inventory an application's components.

    Handles plugin upload, refuses compiled apps, then lists models (with
    table defines), controllers (with exposed functions), views (with
    extend/include info), modules, private files, static files, languages
    and the crontab, separating out plugin-owned files into VAR_94.
    """
    VAR_3 = FUNC_5()
    if not VAR_43.flash and VAR_3 == request.application:
        VAR_155 = T('ATTENTION: you cannot FUNC_27 the running application!')
        VAR_43.flash = VAR_155
    # CSRF guard: any POST must carry the session token.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    if request.vars.pluginfile is not None and not isinstance(request.vars.pluginfile, str):
        VAR_5 = os.path.basename(request.vars.pluginfile.filename)
        if plugin_install(VAR_3, request.vars.pluginfile.file,
                          request, VAR_5):
            session.flash = T('new VAR_96 installed')
            redirect(URL('design', VAR_98=VAR_3))
        else:
            session.flash = \
                T('unable to install VAR_96 "%s"', VAR_5)
            redirect(URL(VAR_122=request, VAR_98=VAR_3))
    elif isinstance(request.vars.pluginfile, str):
        session.flash = T('plugin not specified')
        redirect(URL(VAR_122=request, VAR_98=VAR_3))
    # was '%VAR_140/...' throughout: the '%s' specifiers were mangled, and
    # directory literals restored from the web2py app layout
    if os.path.exists(apath('%s/compiled' % VAR_3, VAR_122=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # models (was assigned to undefined 'models' instead of VAR_81)
    VAR_81 = listdir(apath('%s/models/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_81 = [x.replace('\\', '/') for x in VAR_81]
    VAR_82 = {}
    for VAR_156 in VAR_81:
        VAR_2 = FUNC_3(apath('%s/models/%s' % (VAR_3, VAR_156), VAR_122=request))
        VAR_82[VAR_156] = re.findall(REGEX_DEFINE_TABLE, VAR_2, re.MULTILINE)
        VAR_82[VAR_156].sort()
    # controllers (was assigned to undefined 'controllers')
    VAR_51 = sorted(
        listdir(apath('%s/controllers/' % VAR_3, VAR_122=request), '.*\.py$'))
    VAR_51 = [x.replace('\\', '/') for x in VAR_51]
    VAR_83 = {}
    for VAR_48 in VAR_51:
        VAR_2 = FUNC_3(apath('%s/controllers/%s' % (VAR_3, VAR_48), VAR_122=request))
        try:
            VAR_95 = find_exposed_functions(VAR_2)
            VAR_83[VAR_48] = VAR_95 and sorted(VAR_95) or []
        except SyntaxError as err:
            VAR_83[VAR_48] = ['SyntaxError:Line:%d' % err.lineno]
    VAR_84 = sorted(
        listdir(apath('%s/views/' % VAR_3, VAR_122=request), '[\w/\-]+(\.\w+)+$'))
    VAR_84 = [x.replace('\\', '/') for x in VAR_84 if not x.endswith('.bak')]
    VAR_85 = {}
    VAR_86 = {}
    for VAR_48 in VAR_84:
        VAR_2 = FUNC_3(apath('%s/views/%s' % (VAR_3, VAR_48), VAR_122=request))
        VAR_95 = re.findall(REGEX_EXTEND, VAR_2, re.MULTILINE)
        if VAR_95:
            VAR_85[VAR_48] = VAR_95[0][1]
        VAR_95 = re.findall(REGEX_INCLUDE, VAR_2)
        VAR_86[VAR_48] = [VAR_111[1] for VAR_111 in VAR_95]
    # modules (was 'modules = ...' / 'VAR_87 = modules = ...' dangling names)
    VAR_87 = listdir(apath('%s/modules/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_87 = [x.replace('\\', '/') for x in VAR_87]
    VAR_87.sort()
    VAR_88 = listdir(apath('%s/private/' % VAR_3, VAR_122=request), '[^\.#].*')
    VAR_88 = [x.replace('\\', '/') for x in VAR_88]
    VAR_88.sort()
    VAR_89 = listdir(apath('%s/static/' % VAR_3, VAR_122=request), '[^\.#].*',
                     maxnum=VAR_1)
    VAR_89 = [x.replace(os.path.sep, '/') for x in VAR_89]
    VAR_89.sort()
    VAR_90 = os.path.join(apath(VAR_3, VAR_122=request), 'languages')
    VAR_91 = dict([(VAR_188, info) for VAR_188, info
                   in iteritems(read_possible_languages(VAR_90))
                   if info[2] != 0])  # info[2] is langfile_mtime:
    VAR_92 = apath('%s/cron' % VAR_3, VAR_122=request)
    VAR_93 = apath('%s/cron/crontab' % VAR_3, VAR_122=request)
    if not is_gae:
        if not os.path.exists(VAR_92):
            os.mkdir(VAR_92)
        if not os.path.exists(VAR_93):
            FUNC_4(VAR_93, '#crontab')
    VAR_94 = []

    def FUNC_62(VAR_95, VAR_94):
        # Collect plugin names into VAR_94 (in place) and return only the
        # non-plugin files.  (was 'FUNC_56 += ...' etc.: undefined name)
        VAR_94 += [VAR_70[7:].split('/')[0].split(
            '.')[0] for VAR_70 in VAR_95 if VAR_70.startswith('plugin_')]
        VAR_94[:] = list(set(VAR_94))
        VAR_94.sort()
        return [VAR_70 for VAR_70 in VAR_95 if not VAR_70.startswith('plugin_')]

    # was dict(VAR_3=app, VAR_82=defines, ...): the RHS names were undefined
    return dict(VAR_3=VAR_3,
                VAR_81=FUNC_62(VAR_81, VAR_94),
                VAR_82=VAR_82,
                VAR_51=FUNC_62(VAR_51, VAR_94),
                VAR_83=VAR_83,
                VAR_84=FUNC_62(VAR_84, VAR_94),
                VAR_87=FUNC_62(VAR_87, VAR_94),
                VAR_85=VAR_85,
                VAR_86=VAR_86,
                VAR_88=FUNC_62(VAR_88, VAR_94),
                VAR_89=FUNC_62(VAR_89, VAR_94),
                VAR_91=VAR_91,
                VAR_93=VAR_93,
                VAR_94=VAR_94)
def FUNC_35():
    """Confirm-and-delete every file belonging to a plugin (plugin_<name>.*
    and plugin_<name>/ across all component directories)."""
    VAR_3 = request.args(0)
    VAR_96 = request.args(1)
    VAR_97 = 'plugin_' + VAR_96
    VAR_46 = FORM.confirm(
        T('Delete'),
        {T('Cancel'): URL('design', VAR_98=VAR_3)})
    if VAR_46.accepted:
        try:
            for VAR_168 in ['models', 'views', 'controllers', 'static', 'modules', 'private']:
                VAR_15 = os.path.join(apath(VAR_3, VAR_122=request), VAR_168)
                for VAR_70 in os.listdir(VAR_15):
                    # matches both files (plugin_x.py) and dirs (plugin_x/)
                    if VAR_70.rsplit('.', 1)[0] == VAR_97:
                        VAR_5 = os.path.join(VAR_15, VAR_70)
                        if os.path.isdir(VAR_5):
                            shutil.rmtree(VAR_5)
                        else:
                            os.unlink(VAR_5)
            # was dict(VAR_96=FUNC_36): 'FUNC_36' is undefined here
            session.flash = T('plugin "%(VAR_96)s" deleted',
                              dict(VAR_96=VAR_96))
        except Exception:
            session.flash = T('unable to FUNC_21 VAR_16 VAR_96 "%(VAR_96)s"',
                              dict(VAR_96=VAR_96))
        redirect(URL('design', VAR_98=request.args(0), VAR_157=request.vars.id2))
    # was VAR_46=dialog, VAR_96=FUNC_36: undefined names
    return dict(VAR_46=VAR_46, VAR_96=VAR_96)
def VAR_96():
    """Design page restricted to the files of a single plugin."""
    VAR_3 = FUNC_5()
    VAR_96 = request.args(1)
    if not VAR_43.flash and VAR_3 == request.application:
        VAR_155 = T('ATTENTION: you cannot FUNC_27 the running application!')
        VAR_43.flash = VAR_155
    # was '%VAR_140/...' throughout: the '%s' specifiers were mangled, and
    # directory literals restored from the web2py app layout
    if os.path.exists(apath('%s/compiled' % VAR_3, VAR_122=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # models (was assigned to undefined 'models' instead of VAR_81)
    VAR_81 = listdir(apath('%s/models/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_81 = [x.replace('\\', '/') for x in VAR_81]
    VAR_82 = {}
    for VAR_156 in VAR_81:
        VAR_2 = FUNC_3(apath('%s/models/%s' % (VAR_3, VAR_156), VAR_122=request))
        VAR_82[VAR_156] = re.findall(REGEX_DEFINE_TABLE, VAR_2, re.MULTILINE)
        VAR_82[VAR_156].sort()
    # controllers (was assigned to undefined 'controllers')
    VAR_51 = sorted(
        listdir(apath('%s/controllers/' % VAR_3, VAR_122=request), '.*\.py$'))
    VAR_51 = [x.replace('\\', '/') for x in VAR_51]
    VAR_83 = {}
    for VAR_48 in VAR_51:
        VAR_2 = FUNC_3(apath('%s/controllers/%s' % (VAR_3, VAR_48), VAR_122=request))
        try:
            VAR_95 = find_exposed_functions(VAR_2)
            VAR_83[VAR_48] = VAR_95 and sorted(VAR_95) or []
        except SyntaxError as err:
            VAR_83[VAR_48] = ['SyntaxError:Line:%d' % err.lineno]
    VAR_84 = sorted(
        listdir(apath('%s/views/' % VAR_3, VAR_122=request), '[\w/\-]+\.\w+$'))
    VAR_84 = [x.replace('\\', '/') for x in VAR_84]
    VAR_85 = {}
    VAR_86 = {}
    for VAR_48 in VAR_84:
        VAR_2 = FUNC_3(apath('%s/views/%s' % (VAR_3, VAR_48), VAR_122=request))
        VAR_95 = re.findall(REGEX_EXTEND, VAR_2, re.MULTILINE)
        if VAR_95:
            VAR_85[VAR_48] = VAR_95[0][1]
        VAR_95 = re.findall(REGEX_INCLUDE, VAR_2)
        VAR_86[VAR_48] = [VAR_111[1] for VAR_111 in VAR_95]
    # modules (was 'modules = ...' dangling names)
    VAR_87 = listdir(apath('%s/modules/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_87 = [x.replace('\\', '/') for x in VAR_87]
    VAR_87.sort()
    VAR_88 = listdir(apath('%s/private/' % VAR_3, VAR_122=request), '[^\.#].*')
    VAR_88 = [x.replace('\\', '/') for x in VAR_88]
    VAR_88.sort()
    VAR_89 = listdir(apath('%s/static/' % VAR_3, VAR_122=request), '[^\.#].*',
                     maxnum=VAR_1)
    VAR_89 = [x.replace(os.path.sep, '/') for x in VAR_89]
    VAR_89.sort()
    VAR_91 = sorted([VAR_188 + '.py' for VAR_188, info in
                     iteritems(T.get_possible_languages_info())
                     if info[2] != 0])  # info[2] is langfile_mtime:
    # was 'crontab = ...' then use of undefined VAR_93
    VAR_93 = apath('%s/cron/crontab' % VAR_3, VAR_122=request)
    if not os.path.exists(VAR_93):
        FUNC_4(VAR_93, '#crontab')

    def FUNC_62(VAR_95):
        # keep only files belonging to this plugin
        VAR_32 = re.compile('^plugin_' + VAR_96 + '(/.*|\..*)?$')
        return [VAR_70 for VAR_70 in VAR_95 if VAR_70 and VAR_32.match(VAR_70)]

    # was dict(VAR_3=app, VAR_82=defines, ...): the RHS names were undefined
    return dict(VAR_3=VAR_3,
                VAR_81=FUNC_62(VAR_81),
                VAR_82=VAR_82,
                VAR_51=FUNC_62(VAR_51),
                VAR_83=VAR_83,
                VAR_84=FUNC_62(VAR_84),
                VAR_87=FUNC_62(VAR_87),
                VAR_85=VAR_85,
                VAR_86=VAR_86,
                VAR_88=FUNC_62(VAR_88),
                VAR_89=FUNC_62(VAR_89),
                VAR_91=VAR_91,
                VAR_93=VAR_93)
def FUNC_37():
    """Create a new file (model/controller/view/module/language/...) in an app.

    The destination directory is inferred from request.vars.location; a
    type-appropriate skeleton is written and the browser is redirected to
    the editor.  Directory string literals below were mangled (e.g.
    '/VAR_81/') and have been restored from the [-N:] slice lengths, which
    match the standard web2py layout exactly.
    """
    # CSRF guard: any POST must carry the session token.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        VAR_157 = '#' + request.vars.id if request.vars.id else ''
        if request.vars.app:
            VAR_3 = FUNC_5(request.vars.app)
            VAR_15 = abspath(request.vars.location)
        else:
            if request.vars.dir:
                request.vars.location += request.vars.dir + '/'
            VAR_3 = FUNC_5(VAR_10=request.vars.location.split('/')[0])
            VAR_15 = apath(request.vars.location, VAR_122=request)
        # Sanitize the filename: anything but word chars, dot, slash and
        # dash becomes '_' (this also blocks '..' path traversal).
        VAR_5 = re.sub('[^\w./-]+', '_', request.vars.filename)
        if VAR_15[-7:] == '/rules/':
            if len(VAR_5) == 0:
                raise SyntaxError
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            VAR_188 = re.match('^plural_rules-(.*)\.py$', VAR_5).group(1)
            VAR_189 = read_possible_languages(apath(VAR_3, VAR_122=request))[VAR_188]
            # NOTE(review): this template lost its %(...)s placeholders to
            # mangling; the trailing % dict(...) is currently a no-op.
            VAR_112 = dedent("""
            VAR_75=2 # for example, English language has 2 VAR_148:
            get_plural_id = lambda VAR_173: int(VAR_173 != 1)
            construct_plural_form = lambda word, plural_id: word
            """)[1:] % dict(VAR_188=VAR_189[0], langname=VAR_189[1])
        elif VAR_15[-11:] == '/languages/':
            if len(VAR_5) == 0:
                raise SyntaxError
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            VAR_15 = os.path.join(apath(VAR_3, VAR_122=request), 'languages', VAR_5)
            if not os.path.exists(VAR_15):
                FUNC_4(VAR_15, '')
            findT(apath(VAR_3, VAR_122=request), VAR_5[:-3])
            # was dict(VAR_5=filename): 'filename' is undefined
            session.flash = T('language VAR_16 "%(VAR_5)s" VAR_175/updated',
                              dict(VAR_5=VAR_5))
            redirect(request.vars.sender + VAR_157)
        elif VAR_15[-8:] == '/models/':
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            if len(VAR_5) == 3:
                raise SyntaxError
            VAR_112 = '# -*- coding: utf-8 -*-\n'
        elif VAR_15[-13:] == '/controllers/':
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            if len(VAR_5) == 3:
                raise SyntaxError
            # was '\VAR_173# %VAR_140\n...' and 'text % (...)': restored
            # the '%s' specifiers and the undefined 'text' name
            VAR_112 = '# -*- coding: utf-8 -*-\n# %s\ndef VAR_197(): return dict(message="hello from %s")'
            VAR_112 = VAR_112 % (T('try something like'), VAR_5)
        elif VAR_15[-7:] == '/views/':
            if request.vars.plugin and not VAR_5.startswith('plugin_%s/' % request.vars.plugin):
                VAR_5 = 'plugin_%s/%s' % (request.vars.plugin, VAR_5)
            if VAR_5.find('.') < 0:
                VAR_5 += '.html'
            VAR_50 = VAR_5.split('.')[-1].lower()
            if len(VAR_5) == 5:
                raise SyntaxError
            VAR_155 = T(
                'This is the %(VAR_5)s template', dict(VAR_5=VAR_5))
            if VAR_50 == 'html':
                VAR_112 = dedent("""
                {{VAR_85 'layout.html'}}
                <h1>%s</h1>
                {{=BEAUTIFY(VAR_43._vars)}}""" % VAR_155)[1:]
            else:
                # fall back to the app's generic.<ext> view when present
                VAR_222 = os.path.join(VAR_15, 'generic.' + VAR_50)
                if os.path.exists(VAR_222):
                    VAR_112 = read_file(VAR_222)
                else:
                    VAR_112 = ''
        elif VAR_15[-9:] == '/modules/':
            if request.vars.plugin and not VAR_5.startswith('plugin_%s/' % request.vars.plugin):
                VAR_5 = 'plugin_%s/%s' % (request.vars.plugin, VAR_5)
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            if len(VAR_5) == 3:
                raise SyntaxError
            VAR_112 = dedent("""
            from gluon import *\n""")[1:]
        elif (VAR_15[-8:] == '/static/') or (VAR_15[-9:] == '/private/'):
            if (request.vars.plugin and
                    not VAR_5.startswith('plugin_%s/' % request.vars.plugin)):
                VAR_5 = 'plugin_%s/%s' % (request.vars.plugin, VAR_5)
            VAR_112 = ''
        else:
            redirect(request.vars.sender + VAR_157)
        VAR_158 = os.path.join(VAR_15, VAR_5)
        VAR_159 = os.path.dirname(VAR_158)
        if not os.path.exists(VAR_159):
            os.makedirs(VAR_159)
        if os.path.exists(VAR_158):
            raise SyntaxError
        FUNC_4(VAR_158, VAR_112)
        FUNC_1(VAR_3, 'CREATE', VAR_5)
        if request.vars.dir:
            VAR_110 = T('file "%(VAR_5)s" created',
                        dict(VAR_5=VAR_158[len(VAR_15):]))
        else:
            session.flash = T('file "%(VAR_5)s" created',
                              dict(VAR_5=VAR_158[len(VAR_15):]))
        VAR_17 = {}
        if request.vars.id:
            VAR_17['id'] = request.vars.id
        if request.vars.app:
            VAR_17['app'] = request.vars.app
        # was VAR_17=vars: 'vars' is undefined here
        redirect(URL('edit',
                     VAR_98=[os.path.join(request.vars.location, VAR_5)], VAR_17=VAR_17))
    except Exception as VAR_114:
        # redirect() raises HTTP, which lands here too; only flag real errors
        if not isinstance(VAR_114, HTTP):
            session.flash = T('cannot create file')
        if request.vars.dir:
            # ajax flavour: return component commands instead of redirecting
            VAR_43.flash = VAR_110
            VAR_43.headers['web2py-VAR_126-content'] = 'append'
            # was "%VAR_140 %VAR_140 %s": mangled '%s %s %s'
            VAR_43.headers['web2py-VAR_126-command'] = "%s %s %s" % (
                "$.web2py.invalidate('#files_menu');",
                "load_file('%s');" % URL('edit', VAR_98=[VAR_3, request.vars.dir, VAR_5]),
                "$.web2py.enableElement($('#VAR_26 form').find($.web2py.formInputClickSelector));")
            return ''
        else:
            redirect(request.vars.sender + VAR_157)
def FUNC_38(VAR_3, VAR_13, VAR_14='.*\.py$'):
    """Return the sorted files under <app>/<dir>/ matching VAR_14, with '/'
    separators and editor backup (.bak) files excluded."""
    # was '%(VAR_3)VAR_140/%(VAR_13)VAR_140/': mangled '%(app)s/%(dir)s/'
    VAR_45 = sorted(
        listdir(apath('%(app)s/%(dir)s/' % {'app': VAR_3, 'dir': VAR_13}, VAR_122=request), VAR_14))
    VAR_45 = [x.replace('\\', '/') for x in VAR_45 if not x.endswith('.bak')]
    return VAR_45
def FUNC_39(VAR_15, VAR_16, VAR_17={}, VAR_3=None):
    """Build an editor link (A element) for a file.

    NOTE: the mutable default VAR_17={} is kept for interface compatibility;
    it is never mutated here.
    """
    VAR_98 = (VAR_15, VAR_16) if 'app' in VAR_17 else (VAR_3, VAR_15, VAR_16)
    # was VAR_98=args, VAR_17=vars: both names are undefined here
    VAR_99 = URL('edit', VAR_98=VAR_98, VAR_17=VAR_17)
    return A(VAR_16, VAR_144='editor_filelink', _href=VAR_99, _style='word-wrap: nowrap;')
def FUNC_40():
    """Build the collapsible per-directory file menu for the ajax editor."""
    VAR_3 = request.vars.app or 'welcome'
    VAR_65 = [{'name': 'models', 'reg': '.*\.py$'},
              {'name': 'controllers', 'reg': '.*\.py$'},
              {'name': 'views', 'reg': '[\w/\-]+(\.\w+)+$'},
              {'name': 'modules', 'reg': '.*\.py$'},
              {'name': 'static', 'reg': '[^\.#].*'},
              {'name': 'private', 'reg': '.*\.py$'}]
    VAR_100 = []
    for VAR_13 in VAR_65:
        VAR_100.append(TAG[''](LI(VAR_13['name'], VAR_144="nav-header component", _onclick="collapse('" + VAR_13['name'] + "_files');"),
                               LI(UL(*[LI(FUNC_39(VAR_13['name'], VAR_221, dict(id=VAR_13['name'] + VAR_221.replace('.', '__')), VAR_3), _style="overflow:hidden", _id=VAR_13['name'] + "__" + VAR_221.replace('.', '__'))
                                       for VAR_221 in FUNC_38(VAR_3, VAR_13['name'], VAR_14=VAR_13['reg'])],
                                     VAR_144="nav nav-list small-font"),
                                  _id=VAR_13['name'] + '_files', _style="display: none;")))
    # was dict(VAR_100=result_files): 'result_files' is undefined
    return dict(VAR_100=VAR_100)
def FUNC_41():
    """Handle a file upload into an application directory.

    Derives the target name (forcing the extension expected by the target
    directory), writes the file, logs the upload, and flashes the outcome.
    Directory literals were mangled and have been restored from the [-N:]
    slice lengths, which match the standard web2py layout.
    """
    # CSRF guard: any POST must carry the session token.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        VAR_5 = None
        VAR_3 = FUNC_5(VAR_10=request.vars.location.split('/')[0])
        VAR_15 = apath(request.vars.location, VAR_122=request)
        if request.vars.filename:
            # sanitize the provided name (also blocks path traversal)
            VAR_5 = re.sub('[^\w\./]+', '_', request.vars.filename)
        else:
            VAR_5 = os.path.split(request.vars.file.filename)[-1]
        if VAR_15[-8:] == '/models/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        if VAR_15[-9:] == '/modules/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        if VAR_15[-13:] == '/controllers/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        if VAR_15[-7:] == '/views/' and not VAR_5[-5:] == '.html':
            VAR_5 += '.html'
        if VAR_15[-11:] == '/languages/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        VAR_5 = os.path.join(VAR_15, VAR_5)
        VAR_159 = os.path.dirname(VAR_5)
        if not os.path.exists(VAR_159):
            os.makedirs(VAR_159)
        VAR_2 = request.vars.file.file.read()
        VAR_160 = FUNC_0(VAR_2)
        FUNC_4(VAR_5, VAR_2, 'wb')
        FUNC_1(VAR_3, 'UPLOAD', VAR_5, VAR_160)
        # was dict(VAR_5=filename[...]): 'filename' is undefined
        session.flash = T('file "%(VAR_5)s" uploaded',
                          dict(VAR_5=VAR_5[len(VAR_15):]))
    except Exception:
        if VAR_5:
            VAR_68 = dict(VAR_5=VAR_5[len(VAR_15):])
        else:
            VAR_68 = dict(VAR_5='unknown')
        session.flash = T('cannot upload VAR_16 "%(VAR_5)s"', VAR_68)
    redirect(request.vars.sender)
def FUNC_42():
    """List error tickets for an app, from disk or the ticket database.

    request.args(1) selects the backend: 'new'/'dbnew' group tickets by
    traceback hash (and honour 'delete_<hash>' request vars), 'dbold' and
    the default branch list raw tickets.
    """
    import operator
    import os
    import hashlib  # was 'import .hashlib' (invalid syntax)
    VAR_3 = FUNC_5()
    if is_gae:
        VAR_161 = 'dbold' if ('old' in
                              (request.args(1) or '')) else 'dbnew'
    else:
        VAR_161 = request.args(1) or 'new'
    VAR_101 = {}
    # was db_ready['status']: 'db_ready' is undefined -- the dict is VAR_101
    VAR_101['status'] = FUNC_43(VAR_3)
    VAR_101['errmessage'] = T(
        "No ticket_storage.txt found under /private folder")
    VAR_101['errlink'] = "http://web2py.com/books/default/chapter/29/13#Collecting-tickets"
    if VAR_161 == 'new':
        # was '%VAR_140/errors': mangled '%s' specifier
        VAR_162 = apath('%s/errors' % VAR_3, VAR_122=request)
        VAR_163 = []
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_':
                VAR_163.append(VAR_70[7:])
        VAR_164 = dict()
        # was '^[VAR_7-fA-F0-9.\-]+$': mangled hex-filename pattern
        for fn in listdir(VAR_162, '^[a-fA-F0-9.\-]+$'):
            VAR_190 = os.path.join(VAR_162, fn)
            if not os.path.isfile(VAR_190):
                continue
            try:
                VAR_205 = FUNC_2(VAR_190, 'rb')
                try:
                    VAR_125 = pickle.load(VAR_205)
                finally:
                    VAR_205.close()
            except IOError:
                continue
            except EOFError:
                continue
            VAR_191 = hashlib.md5(to_bytes(VAR_125['traceback'])).hexdigest()
            if VAR_191 in VAR_163:
                os.unlink(VAR_190)
            else:
                try:
                    VAR_164[VAR_191]['count'] += 1
                except KeyError:
                    VAR_218 = VAR_125['traceback'].split("\n")
                    VAR_219 = VAR_218[-2] if len(VAR_218) > 1 else 'unknown'
                    VAR_220 = os.path.split(VAR_125['layer'])[1]
                    # was VAR_219=last_line, VAR_191=hash: undefined names
                    VAR_164[VAR_191] = dict(count=1, pickel=VAR_125,
                                            causer=VAR_220,
                                            VAR_219=VAR_219,
                                            VAR_191=VAR_191, VAR_113=fn)
        VAR_165 = [(x['count'], x) for x in VAR_164.values()]
        # was sort(VAR_143=...): list.sort's keyword argument is 'key'
        VAR_165.sort(key=operator.itemgetter(0), reverse=True)
        return dict(FUNC_42=[x[1] for x in VAR_165], VAR_3=VAR_3, VAR_161=VAR_161, VAR_101=VAR_101)
    elif VAR_161 == 'dbnew':
        VAR_162 = apath('%s/errors' % VAR_3, VAR_122=request)
        VAR_192, VAR_193 = FUNC_43(VAR_3)
        VAR_163 = []
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_':
                VAR_163.append(VAR_70[7:])
        VAR_164 = dict()
        for fn in VAR_192(VAR_193.id > 0).select():
            try:
                VAR_125 = pickle.loads(fn.ticket_data)
                VAR_191 = hashlib.md5(VAR_125['traceback']).hexdigest()
                if VAR_191 in VAR_163:
                    VAR_192(VAR_193.id == fn.id).delete()
                    VAR_192.commit()
                else:
                    try:
                        VAR_164[VAR_191]['count'] += 1
                    except KeyError:
                        VAR_218 = VAR_125['traceback'].split("\n")
                        VAR_219 = VAR_218[-2]
                        VAR_220 = os.path.split(VAR_125['layer'])[1]
                        VAR_164[VAR_191] = dict(count=1,
                                                pickel=VAR_125, causer=VAR_220,
                                                VAR_219=VAR_219, VAR_191=VAR_191,
                                                VAR_113=fn.ticket_id)
            except AttributeError as VAR_114:
                # unreadable row: drop it
                VAR_192(VAR_193.id == fn.id).delete()
                VAR_192.commit()
        VAR_165 = [(x['count'], x) for x in VAR_164.values()]
        VAR_165.sort(key=operator.itemgetter(0), reverse=True)
        # was VAR_3=app, VAR_161=method, VAR_101=db_ready: undefined names
        return dict(FUNC_42=[x[1] for x in VAR_165], VAR_3=VAR_3,
                    VAR_161=VAR_161, VAR_101=VAR_101)
    elif VAR_161 == 'dbold':
        VAR_192, VAR_193 = FUNC_43(VAR_3)
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_':
                VAR_192(VAR_193.ticket_id == VAR_70[7:]).delete()
                VAR_192.commit()
        VAR_206 = VAR_192(VAR_193.id > 0).select(VAR_193.ticket_id,
                                                 VAR_193.created_datetime,
                                                 orderby=~VAR_193.created_datetime)
        VAR_207 = [row.ticket_id for row in VAR_206]
        VAR_208 = dict([(row.ticket_id, row.created_datetime) for
                        row in VAR_206])
        # was VAR_207=tickets, VAR_208=times: undefined names
        return dict(VAR_3=VAR_3, VAR_207=VAR_207, VAR_161=VAR_161,
                    VAR_208=VAR_208, VAR_101=VAR_101)
    else:
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_' and (not VAR_70 == "delete_all}"):
                os.unlink(apath('%s/errors/%s' % (VAR_3, VAR_70[7:]), VAR_122=request))
        # sort tickets by file modification time, newest first
        VAR_209 = lambda p: os.stat(apath('%s/errors/%s' %
                                          (VAR_3, p), VAR_122=request)).st_mtime
        VAR_207 = sorted(
            listdir(apath('%s/errors/' % VAR_3, VAR_122=request), '^\w.*'),
            key=VAR_209,
            reverse=True)
        return dict(VAR_3=VAR_3, VAR_207=VAR_207, VAR_161=VAR_161, VAR_101=VAR_101)
def FUNC_43(VAR_3):
    """Open the ticket-storage database for an app.

    Reads the connection string from private/ticket_storage.txt (or uses
    the datastore on GAE).  Returns (db, table), defining the per-app
    ticket table on first use, or False when no ticket storage is
    configured.
    """
    # was '%VAR_140/private': mangled '%s' specifier
    VAR_102 = apath('%s/private' % VAR_3, VAR_122=request)
    VAR_103 = os.path.join(VAR_102, 'ticket_storage.txt')
    if os.path.exists(VAR_103):
        VAR_166 = FUNC_3(VAR_103)
        # was db_string.strip()...: 'db_string' is undefined
        VAR_166 = VAR_166.strip().replace('\r', '').replace('\n', '')
    elif is_gae:
        VAR_166 = "google:datastore"
    else:
        return False
    VAR_104 = 'web2py_ticket'
    VAR_105 = VAR_104 + '_' + VAR_3
    VAR_106 = apath('%s/databases' % VAR_3, VAR_122=request)
    VAR_107 = DAL(VAR_166, VAR_168=VAR_106, auto_import=True)
    if not VAR_107.get(VAR_105):
        VAR_167 = VAR_107.define_table(
            VAR_105,
            Field('ticket_id', length=100),
            Field('ticket_data', 'text'),
            Field('created_datetime', 'datetime'),
        )
    return VAR_107, VAR_107.get(VAR_105)
def FUNC_44(VAR_15):
    """If VAR_15 is an absolute path to an editable app file (controller,
    model or view), return an HTML link to the admin editor, else ''."""
    VAR_108 = VAR_15.replace('\\', '/')
    if os.path.isabs(VAR_108) and os.path.isfile(VAR_108):
        (VAR_168, VAR_5) = os.path.split(VAR_108)
        (VAR_11, VAR_169) = os.path.splitext(VAR_5)
        VAR_3 = FUNC_5()
        VAR_170 = {'controllers': '.py', 'models': '.py', 'views': '.html'}
        for VAR_143 in VAR_170.keys():
            # was "%VAR_140/%s" / 'edit/%VAR_140/...': mangled '%s'
            VAR_194 = VAR_168.endswith("%s/%s" % (VAR_3, VAR_143))
            if VAR_169.lower() == VAR_170[VAR_143] and VAR_194:
                return to_native(A('"' + VAR_108 + '"',
                                   _href=URL(VAR_122=request,
                                             VAR_221='edit/%s/%s/%s' % (VAR_3, VAR_143, VAR_5))).xml())
    return ''
def FUNC_45(VAR_18):
    """Replace double-quoted path segments in a traceback string with
    editor links (via FUNC_44), leaving everything else untouched."""
    pieces = VAR_18.split('"')
    out = pieces[0] if pieces else ''
    idx = 1
    total = len(pieces)
    while idx < total:
        link = FUNC_44(pieces[idx])
        if not link:
            # not an editable path: restore the quote verbatim
            out += '"' + pieces[idx]
        else:
            out += link
            # the text following the closing quote rides along
            if idx + 1 < total:
                out += pieces[idx + 1]
            idx += 1
        idx += 1
    return out
class CLASS_0(object):
    """Wraps a code/traceback string: renders it via CODE(...).xml() with
    quoted file paths converted into admin editor links (FUNC_45)."""

    def __init__(self, VAR_112):
        # render once at construction; FUNC_63 returns the cached markup
        self.s = FUNC_45(CODE(VAR_112).xml())

    def FUNC_63(self):
        # return the pre-rendered, linkified HTML
        return self.s
def VAR_113():
    """Display a single error ticket stored on disk."""
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    VAR_3 = FUNC_5()
    VAR_27 = request.env.web2py_version
    VAR_113 = request.args[1]
    VAR_114 = RestrictedError()
    VAR_114.load(request, VAR_3, VAR_113)
    # was VAR_3=app, VAR_113=FUNC_46, VAR_27=myversion: undefined names
    return dict(VAR_3=VAR_3,
                VAR_113=VAR_113,
                VAR_67=VAR_114.output,
                VAR_18=(VAR_114.traceback and CLASS_0(VAR_114.traceback)),
                snapshot=VAR_114.snapshot,
                VAR_180=VAR_114.code,
                layer=VAR_114.layer,
                VAR_27=VAR_27)
def FUNC_47():
    """Display a single error ticket stored in the ticket database."""
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    VAR_3 = FUNC_5()
    VAR_27 = request.env.web2py_version
    VAR_113 = request.args[1]
    VAR_114 = RestrictedError()
    # point the loader at the DB backend before loading the ticket
    request.tickets_db = FUNC_43(VAR_3)[0]
    VAR_114.load(request, VAR_3, VAR_113)
    VAR_43.view = 'default/VAR_113.html'
    # was VAR_3=app, VAR_113=FUNC_46, VAR_27=myversion: undefined names
    return dict(VAR_3=VAR_3,
                VAR_113=VAR_113,
                VAR_67=VAR_114.output,
                VAR_18=(VAR_114.traceback and CLASS_0(VAR_114.traceback)),
                snapshot=VAR_114.snapshot,
                VAR_180=VAR_114.code,
                layer=VAR_114.layer,
                VAR_27=VAR_27)
def VAR_125():
    """Deliberately raise an error so admins can test ticket generation."""
    failure = RuntimeError('admin VAR_113 generator at your service')
    raise failure
def FUNC_49():
    """Regenerate all static language files for the selected application,
    then return to its design page at the 'languages' anchor."""
    VAR_3 = FUNC_5()
    update_all_languages(apath(VAR_3, VAR_122=request))
    session.flash = T('Language VAR_45 (static VAR_71) updated')
    redirect(URL('design', VAR_98=VAR_3, VAR_157='languages'))
def FUNC_50():
    """Auth endpoint: in multi-user mode render the auth() form, auto-
    approving the very first registered user; otherwise report disabled."""
    if MULTI_USER_MODE:
        # first user ever gets approved automatically
        if not db(db.auth_user).count():
            auth.settings.registration_requires_approval = False
        return dict(VAR_26=auth())
    else:
        return dict(VAR_26=T("Disabled"))
def FUNC_51():
    """Reload the URL routing tables and return to the site page."""
    gluon.rewrite.load()
    redirect(URL('site'))
def FUNC_52():
    """Grid for managing registered users (multi-user mode, managers only)."""
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    # allow editing the registration key (approval state) from the grid
    db.auth_user.registration_key.writable = True
    VAR_115 = SQLFORM.grid(db.auth_user)
    return locals()
def FUNC_53():
    """Bulk-register users from a newline-separated list of email addresses
    (multi-user mode, managers only)."""
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    VAR_26 = SQLFORM.factory(Field('emails', 'text'))
    if VAR_26.process().accepted:
        VAR_172 = [x.strip() for x in VAR_26.vars.emails.split('\n') if x.strip()]
        VAR_173 = 0
        for email in VAR_172:
            # insert only addresses not already registered
            if not db.auth_user(email=email):
                VAR_173 += db.auth_user.insert(email=email) and 1 or 0
        # was '%VAR_140 students registered': mangled '%s' specifier
        session.flash = T('%s students registered', VAR_173)
        redirect(URL('site'))
    return locals()
def FUNC_54():
    """Pull the latest changes for the app from its git origin remote,
    with a confirmation dialog and per-failure-mode flash messages."""
    VAR_3 = FUNC_5()
    if not VAR_19:
        # gitpython missing or too old
        session.flash = VAR_117
        redirect(URL('site'))
    VAR_46 = FORM.confirm(T('Pull'),
                          {T('Cancel'): URL('site')})
    if VAR_46.accepted:
        try:
            VAR_195 = git.Repo(os.path.join(apath(VAR_122=request), VAR_3))
            VAR_196 = VAR_195.remotes.origin
            VAR_196.fetch()
            VAR_196.pull()
            session.flash = T("Application updated via git pull")
            redirect(URL('site'))
        except git.CheckoutError:
            session.flash = T("Pull failed, certain VAR_45 could not be checked VAR_39. Check logs for VAR_178.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Pull is not possible because you have unmerged VAR_45. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))
        except git.GitCommandError:
            session.flash = T(
                "Pull failed, git exited abnormally. See logs for VAR_178.")
            redirect(URL('site'))
        except AssertionError:
            session.flash = T("Pull is not possible because you have unmerged VAR_45. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))
    elif 'cancel' in request.vars:
        redirect(URL('site'))
    # was VAR_46=dialog: 'dialog' is undefined here
    return dict(VAR_3=VAR_3, VAR_46=VAR_46)
def FUNC_55():
    """Commit all app files with the supplied changelog and push to origin."""
    VAR_3 = FUNC_5()
    if not VAR_19:
        # gitpython missing or too old
        session.flash = VAR_117
        redirect(URL('site'))
    VAR_26 = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY()))
    VAR_26.element('input[type=submit]')['_value'] = T('Push')
    VAR_26.add_button(T('Cancel'), URL('site'))
    VAR_26.process()
    if VAR_26.accepted:
        try:
            VAR_195 = git.Repo(os.path.join(apath(VAR_122=request), VAR_3))
            VAR_197 = VAR_195.index
            VAR_197.add([apath(VAR_122=request) + VAR_3 + '/*'])
            VAR_198 = VAR_197.commit(VAR_26.vars.changelog)
            VAR_196 = VAR_195.remotes.origin
            VAR_196.push()
            session.flash = T(
                "Git VAR_195 updated with latest application changes.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.")
            redirect(URL('site'))
    # was VAR_26=form: 'form' is undefined here
    return dict(VAR_3=VAR_3, VAR_26=VAR_26)
def VAR_94():
    """List available plugins from web2pyslices.com, cached in the session."""
    VAR_3 = request.args(0)
    from gluon.serializers import loads_json
    if not session.plugins:
        try:
            # NOTE(review): 'package_type=VAR_96' in this URL looks mangled
            # (probably 'package_type=plugin') -- confirm against the API.
            VAR_199 = urlopen("http://www.web2pyslices.com/" +
                              "public/api.json/action/list/content/Package?package" +
                              "_type=VAR_96&search_index=false").read()
            session.plugins = loads_json(VAR_199)
        except:
            # network failure: flash and fall back to an empty list
            VAR_43.flash = T('Unable to download the list of plugins')
            session.plugins = []
    return dict(VAR_94=session.plugins["results"], VAR_3=request.args(0))
def FUNC_57():
    """Install a plugin from an http(s) source URL.

    Rejects non-http(s) schemes (prevents file:// and other URL handlers
    from being abused), then downloads and installs the .w2p package.
    """
    VAR_3 = request.args(0)
    VAR_116 = request.vars.source
    VAR_96 = request.vars.plugin
    if not (VAR_116 and VAR_3):
        raise HTTP(500, T("Invalid request"))
    # scheme whitelist: only http/https sources may be fetched
    if not VAR_116.lower().split('://')[0] in ('http', 'https'):
        raise HTTP(500, T("Invalid request"))
    VAR_26 = SQLFORM.factory()
    VAR_110 = None
    if VAR_26.process().accepted:
        # was "web2py.plugin.%VAR_140.w2p": mangled '%s' specifier
        if "web2py.plugin." in VAR_116:
            VAR_5 = "web2py.plugin.%s.w2p" % \
                VAR_116.split("web2py.plugin.")[-1].split(".w2p")[0]
        else:
            VAR_5 = "web2py.plugin.%s.w2p" % cleanpath(VAR_96)
        if plugin_install(VAR_3, urlopen(VAR_116),
                          request, VAR_5):
            session.flash = T('New VAR_96 VAR_212: %s', VAR_5)
        else:
            session.flash = \
                T('unable to install VAR_96 "%s"', VAR_5)
        redirect(URL(VAR_221="plugins", VAR_98=[VAR_3, ]))
    # was VAR_96=FUNC_36, VAR_116=source: undefined names
    return dict(VAR_26=VAR_26, VAR_3=VAR_3, VAR_96=VAR_96, VAR_116=VAR_116)
# ---------------------------------------------------------------------------
# Module initialisation: constants, imports and per-request access guards.
# Repairs: removed a stray '|' line and fixed syntactically invalid dotted
# imports ('import .re', 'from ... import .abspath', ...) and a broken
# '%VAR_140' format specifier in the gitpython version error message.
# ---------------------------------------------------------------------------
VAR_0 = True
VAR_1 = 1000
if VAR_0:
    if is_mobile:
        VAR_43.view = VAR_43.view.replace('default/', 'default.mobile/')
        VAR_43.menu = []
import re
from gluon.admin import *
from gluon.fileutils import abspath, read_file, write_file
from gluon.utils import web2py_uuid
from gluon.tools import Config, prevent_open_redirect
from gluon.compileapp import find_exposed_functions
from glob import glob
from gluon._compat import iteritems, PY2, pickle, xrange, urlopen, to_bytes, StringIO, to_native, reload
import gluon.rewrite
import shutil
import platform
try:
    import git
    if git.__version__ < '0.3.1':
        # was '%VAR_140': mangled '%s' specifier
        raise ImportError("Your VAR_25 of git is %s. Upgrade to 0.3.1 or better." % git.__version__)
    VAR_19 = True
except ImportError as VAR_114:
    VAR_19 = False
    VAR_117 = 'Requires gitpython module, but not VAR_212 or incompatible VAR_25: %s' % VAR_114
from gluon.languages import (read_possible_languages, read_dict, write_dict,
                             read_plural_dict, write_plural_dict)
# Per-request guards: lock down destructive functions in demo/GAE/multi-user
# modes and filtered-app deployments.
if DEMO_MODE and request.function in ['change_password', 'pack',
        'pack_custom', 'pack_plugin', 'upgrade_web2py', 'uninstall',
        'cleanup', 'compile_app', 'remove_compiled_app', 'delete',
        'delete_plugin', 'create_file', 'upload_file', 'update_languages',
        'reload_routes', 'git_push', 'git_pull', 'install_plugin']:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))
if is_gae and request.function in ('edit', 'edit_language',
        'edit_plurals', 'update_languages', 'create_file', 'install_plugin'):
    session.flash = T('disabled in GAE mode')
    redirect(URL('site'))
if not is_manager() and request.function in ['change_password', 'upgrade_web2py']:
    session.flash = T('disabled in multi FUNC_50 mode')
    redirect(URL('site'))
if FILTER_APPS and request.args(0) and not request.args(0) in FILTER_APPS:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))
# CSRF token used by the state-changing actions above
if not session.token:
    session.token = web2py_uuid()
def FUNC_0(VAR_2):
    """Count the lines of VAR_2 that are neither blank nor '#' comments.

    A line is counted when it contains non-whitespace characters and does
    not start (at column 0) with a '#'.
    """
    total = 0
    for line in VAR_2.split('\n'):
        if line.strip() and not line.startswith('#'):
            total += 1
    return total
def FUNC_1(VAR_3, VAR_4='EDIT', VAR_5=None, VAR_6=0):
    """Append an entry (timestamp, mode, filename, line delta) to the app's progress.log."""
    VAR_20 = os.path.join(apath(VAR_3, VAR_122=request), 'progress.log')
    VAR_21 = str(request.now)[:19]
    if not os.path.exists(VAR_20):
        # First entry for this app: create the log with a START marker.
        FUNC_2(VAR_20, 'w').write('[%VAR_140] START\n' % VAR_21)
    if VAR_5:
        FUNC_2(VAR_20, 'a').write(
            '[%VAR_140] %VAR_140 %VAR_140: %s\n' % (VAR_21, VAR_4, VAR_5, VAR_6))
def FUNC_2(VAR_7, VAR_8):
    """Open a file, but return a write-discarding dummy in demo/GAE mode.

    In DEMO_MODE or on GAE any write/append open returns a no-op object so
    the filesystem is never modified. Text files are opened as UTF-8 on Py3.
    """
    if (DEMO_MODE or is_gae) and ('w' in VAR_8 or 'a' in VAR_8):
        class CLASS_2:
            # Quacks like a writable file but discards everything.
            def FUNC_64(self, VAR_2):
                pass

            def FUNC_65(self):
                pass
        return CLASS_2()
    if PY2 or 'b' in VAR_8:
        return open(VAR_7, VAR_8)
    else:
        return open(VAR_7, VAR_8, encoding="utf8")
def FUNC_3(VAR_7, VAR_8='r'):
    """Return the full contents of VAR_7, opened through FUNC_2."""
    handle = FUNC_2(VAR_7, VAR_8)
    try:
        contents = handle.read()
    finally:
        # Always release the handle, even when read() raises.
        handle.close()
    return contents
def FUNC_4(VAR_7, VAR_9, VAR_8='w'):
    """Write VAR_9 to VAR_7, opening the file through FUNC_2."""
    handle = FUNC_2(VAR_7, VAR_8)
    try:
        handle.write(VAR_9)
    finally:
        # Close unconditionally so a failed write cannot leak the handle.
        handle.close()
def FUNC_5(VAR_10=None):
    """Return the requested app name after checking it exists and the user may access it.

    In multi-user mode only managers or the app's owner pass; otherwise the
    user is redirected to the site page with a flash message.
    """
    VAR_3 = VAR_10 or request.args(0)
    if (VAR_3 and os.path.exists(apath(VAR_3, VAR_122=request)) and
        (not MULTI_USER_MODE or is_manager() or
         db(db.app.name == VAR_3)(db.app.owner == auth.user.id).count())):
        return VAR_3
    session.flash = T('App does not exist or you are not authorized')
    redirect(URL('site'))
def VAR_197():
    """Admin login action: verify the password and redirect on success.

    Applies brute-force throttling (exponential sleep, lockout after too many
    failures) and sanitizes the post-login redirect target.
    """
    # prevent_open_redirect guards the ?send= target against external URLs.
    VAR_23 = prevent_open_redirect(request.vars.send)
    if DEMO_MODE:
        session.authorized = True
        session.last_time = t0
    if not VAR_23:
        send = URL('site')
    if session.authorized:
        redirect(VAR_23)
    elif failed_login_count() >= allowed_number_of_attempts:
        # Throttle brute forcing: exponential delay, then refuse.
        time.sleep(2 ** allowed_number_of_attempts)
        raise HTTP(403)
    elif request.vars.password:
        # Cap password length to bound hashing cost.
        if verify_password(request.vars.password[:1024]):
            session.authorized = True
            login_record(True)
            if CHECK_VERSION:
                session.check_version = True
            else:
                session.check_version = False
            session.last_time = t0
            if isinstance(VAR_23, list):  # ## why does this happen?
                VAR_23 = str(VAR_23[0])
            redirect(VAR_23)
        else:
            VAR_210 = login_record(False)
            if VAR_210 >= allowed_number_of_attempts:
                VAR_43.flash = \
                    T('admin disabled because too many invalid login attempts')
            elif VAR_210 == allowed_number_of_attempts - 1:
                VAR_43.flash = \
                    T('You have one more login attempt before you are locked out')
            else:
                VAR_43.flash = T('invalid password.')
    return dict(VAR_23=send)
def FUNC_7():
    """Check for a newer web2py release and return an upgrade link/label widget."""
    session.forget()
    session._unlock(VAR_43)
    VAR_24, VAR_25 = check_new_version(request.env.web2py_version,
                                       WEB2PY_VERSION_URL)
    if VAR_24 in (-1, -2):
        # Version check failed (network or parse problem).
        return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)
    elif not VAR_24:
        return A(T('web2py is up to date'), _href=WEB2PY_URL)
    elif platform.system().lower() in ('windows', 'win32', 'win64') and os.path.exists("web2py.exe"):
        # Binary Windows distribution cannot self-upgrade: only advise.
        return SPAN('You should upgrade to %s' % VAR_25.split('(')[0])
    else:
        return sp_button(URL('upgrade_web2py'), T('upgrade VAR_21 to %s') % VAR_25.split('(')[0])
def FUNC_8():
    """Log the admin out and redirect (to the auth logout in multi-user mode)."""
    session.authorized = None
    if MULTI_USER_MODE:
        redirect(URL('user/logout'))
    redirect(URL('index'))
def FUNC_9():
    """Change the admin password after validating the current one.

    Not available for PAM-authenticated users; the new hash is written to the
    per-port parameters file.
    """
    if session.pam_user:
        session.flash = T(
            'PAM authenticated FUNC_50, cannot change password here')
        redirect(URL('site'))
    VAR_26 = SQLFORM.factory(Field('current_admin_password', 'password'),
                             Field('new_admin_password',
                                   'password', requires=IS_STRONG()),
                             Field('new_admin_password_again', 'password'),
                             VAR_144="span4 well")
    if VAR_26.accepts(request.vars):
        if not verify_password(request.vars.current_admin_password):
            VAR_26.errors.current_admin_password = T('invalid password')
        elif VAR_26.vars.new_admin_password != VAR_26.vars.new_admin_password_again:
            VAR_26.errors.new_admin_password_again = T('no match')
        else:
            # Persist the CRYPT hash into parameters_<port>.py.
            VAR_15 = abspath('parameters_%VAR_140.py' % request.env.server_port)
            FUNC_4(VAR_15, 'password="%s"' % CRYPT()(
                request.vars.new_admin_password)[0])
            session.flash = T('password changed')
            redirect(URL('site'))
    # NOTE(review): 'form' is presumably VAR_26 (obfuscation artifact).
    return dict(VAR_26=form)
def FUNC_10():
    """Site page: list installed apps and handle app create/upload/import forms."""
    VAR_27 = request.env.web2py_version
    VAR_28 = 'file' in request.vars or 'appurl' in request.vars

    class CLASS_1(object):
        # Validator: app name must be alphanumeric/underscore and, unless
        # overwrite is requested, must not already exist on disk.
        def __call__(self, VAR_9):
            if not re.compile('^\w+$').match(VAR_9):
                return (VAR_9, T('Invalid application name'))
            if not request.vars.overwrite and \
                    os.path.exists(os.path.join(apath(VAR_122=request), VAR_9)):
                return (VAR_9, T('Application exists already'))
            return (VAR_9, None)
    VAR_29 = CLASS_1()
    VAR_30 = SQLFORM.factory(Field('name', requires=VAR_29),
                             table_name='appcreate')
    VAR_31 = SQLFORM.factory(Field('name', requires=VAR_29),
                             Field('file', 'upload', uploadfield=False),
                             Field('url'),
                             Field('overwrite', 'boolean'),
                             table_name='appupdate')
    VAR_30.process()
    VAR_31.process()
    if DEMO_MODE:
        pass
    elif VAR_30.accepted:
        # Create a fresh scaffold application.
        VAR_174 = cleanpath(VAR_30.vars.name)
        VAR_175, VAR_125 = app_create(VAR_174, request, info=True)
        if VAR_175:
            if MULTI_USER_MODE:
                db.app.insert(VAR_10=VAR_174, owner=auth.user.id)
            FUNC_1(VAR_174)
            session.flash = T('new application "%s" created', VAR_174)
            gluon.rewrite.load()
            redirect(URL('design', VAR_98=VAR_174))
        else:
            session.flash = \
                DIV(T('unable to create application "%s"', VAR_174),
                    PRE(VAR_125))
        redirect(URL(VAR_122=request))
    elif VAR_31.accepted:
        # Install from a git URL, a plain URL, or an uploaded file.
        if (VAR_31.vars.url or '').endswith('.git'):
            if not VAR_19:
                session.flash = VAR_117
                redirect(URL(VAR_122=request))
            VAR_211 = os.path.join(apath(VAR_122=request), VAR_31.vars.name)
            try:
                VAR_217 = git.Repo.clone_from(VAR_31.vars.url, VAR_211)
                session.flash = T('new application "%s" imported',
                                  VAR_31.vars.name)
                gluon.rewrite.load()
            except git.GitCommandError as err:
                session.flash = T('Invalid git repository specified.')
            redirect(URL(VAR_122=request))
        elif VAR_31.vars.url:
            try:
                VAR_221 = urlopen(VAR_31.vars.url)
                if VAR_221.code == 404:
                    raise Exception("404 VAR_16 not found")
            except Exception as VAR_114:
                session.flash = \
                    DIV(T('Unable to download VAR_3 because:'), PRE(repr(VAR_114)))
                redirect(URL(VAR_122=request))
            VAR_120 = VAR_31.vars.url
        elif VAR_31.accepted and VAR_31.vars.file:
            VAR_120 = request.vars.file.filename
            VAR_221 = request.vars.file.file
        else:
            session.flash = 'No VAR_16 uploaded and no URL specified'
            redirect(URL(VAR_122=request))
        if VAR_221:
            VAR_174 = cleanpath(VAR_31.vars.name)
            VAR_212 = app_install(VAR_174, VAR_221,
                                  request, VAR_120,
                                  overwrite=VAR_31.vars.overwrite)
        if VAR_221 and VAR_212:
            VAR_155 = 'application %(VAR_174)VAR_140 VAR_212 with md5sum: %(digest)s'
            if MULTI_USER_MODE:
                db.app.insert(VAR_10=VAR_174, owner=auth.user.id)
            FUNC_1(VAR_174)
            # NOTE(review): 'appname' is presumably VAR_174 (obfuscation artifact).
            session.flash = T(VAR_155, dict(VAR_174=appname,
                                            digest=md5_hash(VAR_212)))
            gluon.rewrite.load()
        else:
            VAR_155 = 'unable to install application "%(VAR_174)s"'
            session.flash = T(VAR_155, dict(VAR_174=VAR_31.vars.name))
        redirect(URL(VAR_122=request))
    VAR_32 = re.compile('^\w+$')
    if is_manager():
        VAR_33 = [VAR_7 for VAR_7 in os.listdir(apath(VAR_122=request)) if VAR_32.match(VAR_7) and
                  VAR_7 != '__pycache__']
    else:
        VAR_33 = [VAR_7.name for VAR_7 in db(db.app.owner == auth.user_id).select()]
    if FILTER_APPS:
        VAR_33 = [VAR_7 for VAR_7 in VAR_33 if VAR_7 in FILTER_APPS]
    # NOTE(review): the lambda references 'a', and the return references
    # 'apps'/'myversion'/'myplatform'/'form_create'/'form_update' — these look
    # like obfuscation-broken aliases of VAR_7/VAR_33/VAR_27/VAR_34/VAR_30/VAR_31.
    VAR_33 = sorted(VAR_33, VAR_143=lambda VAR_7: a.upper())
    VAR_34 = platform.python_version()
    return dict(VAR_3=None, VAR_33=apps, VAR_27=myversion, VAR_34=myplatform,
                VAR_30=form_create, VAR_31=form_update)
def FUNC_11(VAR_3):
    """Parse the app's progress.log into cumulative (days-ago, line-count) points."""
    import .datetime
    VAR_20 = os.path.join(apath(VAR_3, VAR_122=request), 'progress.log')
    # Each log line looks like '[timestamp] MODE file: delta'.
    VAR_32 = re.compile('\[(.*?)\][^\:]+\:\VAR_140+(\-?\VAR_68+)')
    if not os.path.exists(VAR_20):
        return []
    VAR_35 = VAR_32.findall(open(VAR_20, 'r').read())
    VAR_36, VAR_37 = [], 0
    for VAR_156 in VAR_35:
        if not VAR_156:
            continue
        # Negative age in days relative to now; running total of line deltas.
        VAR_118 = -(request.now - datetime.datetime.strptime(VAR_156[0],
                    '%Y-%VAR_156-%VAR_68 %H:%M:%S')).days
        VAR_37 += int(VAR_156[1])
        VAR_36.append([VAR_118, VAR_37])
    return VAR_36
def FUNC_12():
    """Pack an application into a .w2p archive and stream it as a download.

    With a second request arg the compiled variant is packed instead.
    """
    VAR_3 = FUNC_5()
    try:
        if len(request.args) == 1:
            VAR_120 = 'web2py.app.%VAR_140.w2p' % VAR_3
            VAR_5 = app_pack(VAR_3, request, raise_ex=True)
        else:
            VAR_120 = 'web2py.app.%VAR_140.compiled.w2p' % VAR_3
            VAR_5 = app_pack_compiled(VAR_3, request, raise_ex=True)
    except Exception as VAR_114:
        VAR_176 = VAR_114
        VAR_5 = None
    if VAR_5:
        # Serve the archive as an attachment.
        VAR_43.headers['Content-Type'] = 'application/w2p'
        VAR_119 = 'attachment; VAR_5=%s' % VAR_120
        VAR_43.headers['Content-Disposition'] = VAR_119
        return FUNC_3(VAR_5, 'rb')
    else:
        session.flash = T('internal VAR_125: %s', VAR_176)
        redirect(URL('site'))
def FUNC_13():
    """Pack a single plugin of an app into a .w2p archive and stream it."""
    VAR_3 = FUNC_5()
    if len(request.args) == 2:
        VAR_120 = 'web2py.plugin.%VAR_140.w2p' % request.args[1]
        VAR_5 = plugin_pack(VAR_3, request.args[1], request)
    if VAR_5:
        VAR_43.headers['Content-Type'] = 'application/w2p'
        VAR_119 = 'attachment; VAR_5=%s' % VAR_120
        VAR_43.headers['Content-Disposition'] = VAR_119
        return FUNC_3(VAR_5, 'rb')
    else:
        session.flash = T('internal error')
        redirect(URL('plugin', VAR_98=request.args))
def FUNC_14(VAR_3, VAR_11, VAR_12=None):
    """Bundle the app with a downloaded web2py Windows zip into one archive.

    Downloads the prebuilt zip, injects a routes.py that makes this app the
    default, adds the selected app files, and streams the result.
    """
    import .urllib
    import .zipfile
    VAR_38 = 'http://www.web2py.com/examples/static/VAR_40.zip'
    VAR_39 = StringIO()
    VAR_39.write(urlopen(VAR_38).read())
    VAR_40 = zipfile.ZipFile(VAR_39, VAR_4='a')
    # Make the packed app the default application of the bundled runtime.
    VAR_41 = u'# -*- coding: utf-8 -*-\nrouters = dict(BASE=dict(default_application="%s"))' % VAR_3
    VAR_40.writestr('web2py/VAR_41.py', VAR_41.encode('utf-8'))
    VAR_42 = os.path.dirname(VAR_11)
    for VAR_5 in VAR_12:
        VAR_120 = os.path.join(VAR_11, VAR_5)
        VAR_121 = os.path.join('web2py/applications', VAR_3, VAR_5)
        VAR_40.write(VAR_120, VAR_121)
    VAR_40.close()
    VAR_43.headers['Content-Type'] = 'application/zip'
    VAR_43.headers['Content-Disposition'] = 'attachment; VAR_5=web2py.app.%VAR_140.zip' % VAR_3
    VAR_39.seek(0)
    return VAR_43.stream(VAR_39)
def FUNC_15():
    """Pack a user-selected subset of an app's files (.w2p or exe bundle)."""
    VAR_3 = FUNC_5()
    VAR_11 = apath(VAR_3, VAR_122=request)

    def FUNC_58(VAR_44):
        # Drop hidden/backup entries from a directory listing.
        return [VAR_221 for VAR_221 in VAR_44 if not (
            VAR_221[:1] in '#' or VAR_221.endswith('~') or VAR_221.endswith('.bak'))]
    VAR_45 = {}
    for (VAR_122, VAR_68, VAR_221) in os.walk(VAR_11):
        VAR_45[VAR_122] = {'folders': FUNC_58(VAR_68), 'files': FUNC_58(VAR_221)}
    if request.post_vars.file:
        # Only accept files that really exist under the app folder.
        VAR_123 = set(os.path.relpath(os.path.join(VAR_122, VAR_221), VAR_11) for VAR_122 in VAR_45 for VAR_221 in VAR_45[VAR_122]['files'])
        VAR_45 = request.post_vars.file
        VAR_45 = [files] if not isinstance(VAR_45, list) else VAR_45
        VAR_45 = [VAR_16 for VAR_16 in VAR_45 if VAR_16 in VAR_123]
        if request.post_vars.doexe is None:
            VAR_120 = 'web2py.app.%VAR_140.w2p' % VAR_3
            try:
                VAR_5 = app_pack(VAR_3, request, raise_ex=True, VAR_12=VAR_45)
            except Exception as VAR_114:
                VAR_5 = None
            if VAR_5:
                VAR_43.headers['Content-Type'] = 'application/w2p'
                VAR_119 = 'attachment; VAR_5=%s' % VAR_120
                VAR_43.headers['Content-Disposition'] = VAR_119
                return FUNC_3(VAR_5, 'rb')
            else:
                session.flash = T('internal VAR_125: %s', VAR_114)
                redirect(URL(VAR_98=request.args))
        else:
            # Bundle with the Windows runtime instead of a plain .w2p.
            return FUNC_14(VAR_3, VAR_11, VAR_45)
    return locals()
def FUNC_16():
    """Confirm and perform a web2py framework upgrade."""
    VAR_46 = FORM.confirm(T('Upgrade'),
                          {T('Cancel'): URL('site')})
    if VAR_46.accepted:
        (VAR_124, VAR_125) = upgrade(request)
        if VAR_124:
            session.flash = T('web2py upgraded; please restart it')
        else:
            session.flash = T('unable to upgrade because "%s"', VAR_125)
        redirect(URL('site'))
    # NOTE(review): 'dialog' is presumably VAR_46 (obfuscation artifact).
    return dict(VAR_46=dialog)
def FUNC_17():
    """Confirm and uninstall an application (packing a backup first)."""
    VAR_3 = FUNC_5()
    VAR_46 = FORM.confirm(T('Uninstall'),
                          {T('Cancel'): URL('site')})
    VAR_46['_id'] = 'confirm_form'
    VAR_46['_class'] = 'well'
    for VAR_126 in VAR_46.components:
        VAR_126['_class'] = 'btn'
    if VAR_46.accepted:
        if MULTI_USER_MODE:
            # Only a manager or the owner may delete the app record.
            if is_manager() and db(db.app.name == VAR_3).delete():
                pass
            elif db(db.app.name == VAR_3)(db.app.owner == auth.user.id).delete():
                pass
            else:
                session.flash = T('no permission to FUNC_17 "%s"', VAR_3)
                redirect(URL('site'))
        try:
            # Pack a backup copy before removing anything.
            VAR_5 = app_pack(VAR_3, request, raise_ex=True)
        except:
            session.flash = T('unable to FUNC_17 "%s"', VAR_3)
        else:
            if app_uninstall(VAR_3, request):
                session.flash = T('application "%s" uninstalled', VAR_3)
            else:
                session.flash = T('unable to FUNC_17 "%s"', VAR_3)
        redirect(URL('site'))
    # NOTE(review): 'dialog' is presumably VAR_46 (obfuscation artifact).
    return dict(VAR_3=VAR_3, VAR_46=dialog)
def FUNC_18():
    """Clean an app's cache, errors and sessions, then return to the site page."""
    VAR_3 = FUNC_5()
    VAR_47 = app_cleanup(VAR_3, request)
    if not VAR_47:
        session.flash = T("some VAR_45 could not be removed")
    else:
        session.flash = T('cache, FUNC_42 and sessions cleaned')
    redirect(URL('site'))
def FUNC_19():
    """Bytecode-compile an application, reporting failed views or errors."""
    VAR_3 = FUNC_5()
    VAR_48 = app_compile(VAR_3, request,
                         skip_failed_views=(request.args(1) == 'skip_failed_views'))
    if not VAR_48:
        session.flash = T('application compiled')
    elif isinstance(VAR_48, list):
        # Compiled, but some views failed and were skipped.
        session.flash = DIV(*[T('application compiled'), BR(), BR(),
                              T('WARNING: The following VAR_84 could not be compiled:'), BR()] +
                            [CAT(BR(), VAR_182) for VAR_182 in VAR_48] +
                            [BR(), BR(), T('DO NOT use the "Pack compiled" feature.')])
    else:
        session.flash = DIV(T('Cannot compile: there are FUNC_42 in your VAR_3:'),
                            CODE(VAR_48))
    redirect(URL('site'))
def FUNC_20():
    """Remove the compiled bytecode of an application."""
    VAR_3 = FUNC_5()
    remove_compiled_application(apath(VAR_3, VAR_122=request))
    session.flash = T('compiled application removed')
    redirect(URL('site'))
def FUNC_21():
    """Confirm and delete a single file of an app, logging the removed line count."""
    VAR_3 = FUNC_5()
    VAR_5 = '/'.join(request.args)
    VAR_49 = request.vars.sender
    if isinstance(VAR_49, list):  # ## fix VAR_7 problem with Vista
        VAR_49 = sender[0]
    VAR_46 = FORM.confirm(T('Delete'),
                          {T('Cancel'): URL(VAR_49, VAR_157=request.vars.id)})
    if VAR_46.accepted:
        try:
            VAR_177 = apath(VAR_5, VAR_122=request)
            # Count lines before unlinking so progress.log records the delta.
            VAR_160 = FUNC_0(open(VAR_177, 'r').read())
            os.unlink(VAR_177)
            FUNC_1(VAR_3, 'DELETE', VAR_5, VAR_6=-VAR_160)
            session.flash = T('file "%(VAR_5)s" deleted',
                              dict(VAR_5=filename))
        except Exception:
            session.flash = T('unable to FUNC_21 VAR_16 "%(VAR_5)s"',
                              dict(VAR_5=filename))
        redirect(URL(VAR_49, VAR_157=request.vars.id2))
    # NOTE(review): 'dialog'/'filename' presumably alias VAR_46/VAR_5.
    return dict(VAR_46=dialog, VAR_5=filename)
def FUNC_22():
    """Toggle an app's DISABLED marker file; returns the new toggle label.

    The request must carry a valid HMAC signature (CSRF protection).
    """
    if not URL.verify(request, hmac_key=session.hmac_key): raise HTTP(401)
    VAR_3 = FUNC_5()
    VAR_5 = os.path.join(apath(VAR_3, VAR_122=request), 'DISABLED')
    if is_gae:
        return SPAN(T('Not supported'), _style='color:yellow')
    elif os.path.exists(VAR_5):
        # Marker present → app currently disabled; remove it to enable.
        os.unlink(VAR_5)
        return SPAN(T('Disable'), _style='color:green')
    else:
        if PY2:
            FUNC_2(VAR_5, 'wb').write('disabled: True\ntime-disabled: %s' % request.now)
        else:
            VAR_200 = 'disabled: True\ntime-disabled: %s' % request.now
            FUNC_2(VAR_5, 'wb').write(VAR_200.encode('utf-8'))
        return SPAN(T('Enable'), _style='color:red')
def FUNC_23():
    """Read-only view of a file's contents (peek)."""
    VAR_3 = FUNC_5(request.vars.app)
    VAR_5 = '/'.join(request.args)
    if request.vars.app:
        VAR_15 = abspath(VAR_5)
    else:
        VAR_15 = apath(VAR_5, VAR_122=request)
    try:
        VAR_2 = FUNC_3(VAR_15).replace('\r', '')
    except IOError:
        session.flash = T('file does not exist')
        redirect(URL('site'))
    # File extension drives syntax highlighting in the view.
    VAR_50 = VAR_5[filename.rfind('.') + 1:].lower()
    return dict(VAR_3=app,
                VAR_5=filename,
                VAR_2=data,
                VAR_50=extension)
def FUNC_24():
    """List an app's controller files (optionally filtered by a regex arg)."""
    VAR_3 = FUNC_5()
    if len(request.args) > 1:
        VAR_16 = request.args[1]
    else:
        VAR_16 = '.*\.py'
    VAR_51 = listdir(
        apath('%VAR_140/VAR_51/' % VAR_3, VAR_122=request), VAR_16 + '$')
    # NOTE(review): 'controllers' presumably aliases VAR_51 (obfuscation artifact).
    return dict(VAR_3=VAR_3, VAR_51=controllers)
def FUNC_25():
    """No-op action: always responds with an empty body."""
    return str()
def FUNC_26():
    """Search an app's .py and .html files for a keyword; returns matches as JSON."""
    VAR_52 = request.vars.keywords or ''
    VAR_3 = FUNC_5()

    def FUNC_59(VAR_5, VAR_52):
        # True when the keyword occurs anywhere in the file's text.
        VAR_5 = os.path.join(apath(VAR_3, VAR_122=request), VAR_5)
        if VAR_52 in read_file(VAR_5, 'r'):
            return True
        return False
    VAR_15 = apath(request.args[0], VAR_122=request)
    VAR_53 = glob(os.path.join(VAR_15, '*/*.py'))
    VAR_54 = glob(os.path.join(VAR_15, '*/*.html'))
    VAR_55 = glob(os.path.join(VAR_15, '*/*/*.html'))
    VAR_45 = [x[len(VAR_15) + 1:].replace(
        '\\', '/') for x in VAR_53 + VAR_54 + VAR_55 if FUNC_59(x, VAR_52)]
    return VAR_43.json(dict(VAR_45=files, message=T.M('Searching: **%VAR_140** %%{VAR_16}', len(VAR_45))))
def FUNC_27():
    """File editor action: render the editor, save/revert files, and report results.

    Handles editor preference load/save, file save with conflict detection
    (md5 hash compare), revert from .bak, Python syntax pre-check, module
    reload, and assembling view/controller cross-links for the UI.
    """
    VAR_3 = FUNC_5(request.vars.app)
    VAR_56 = apath(VAR_3, VAR_122=request)
    # Editor preference defaults, overridden by settings.cfg [editor] section.
    VAR_57 = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true', 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false', 'linenumbers': 'true', 'highlightline': 'true'}
    VAR_58 = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor', default_values={})
    VAR_57.update(VAR_58.read())
    if not(request.ajax) and not(is_mobile):
        # Full-page (non-ajax) load: render the editor shell only.
        VAR_43.title = T('Editing %s') % VAR_3
        return VAR_43.render('default/FUNC_27.html', dict(VAR_3=VAR_3, editor_settings=VAR_57))
    if 'settings' in request.vars:
        if request.post_vars:  # save new VAR_57
            if PY2:
                VAR_201 = request.post_vars.items()
            else:
                VAR_201 = list(request.post_vars.items())
            # Unchecked checkboxes are absent from post_vars; record them as 'false'.
            VAR_201 += [(opt, 'false') for opt in VAR_57 if opt not in request.post_vars]
            if VAR_58.save(VAR_201):
                VAR_43.headers["web2py-VAR_126-flash"] = T('Preferences saved correctly')
            else:
                VAR_43.headers["web2py-VAR_126-flash"] = T('Preferences saved on session only')
            VAR_43.headers["web2py-VAR_126-command"] = "update_editor(%VAR_140);$('a[href=#editor_settings] button.close').click();" % VAR_43.json(VAR_58.read())
            return
        else:
            VAR_178 = {'realfilename': 'settings', 'filename': 'settings', 'id': 'editor_settings', 'force': False}
            VAR_178['plain_html'] = VAR_43.render('default/editor_settings.html', {'editor_settings': VAR_57})
            return VAR_43.json(VAR_178)
    """ File FUNC_27 handler """
    VAR_3 = FUNC_5(request.vars.app)
    VAR_5 = '/'.join(request.args)
    VAR_59 = request.args[-1]
    if request.vars.app:
        VAR_15 = abspath(VAR_5)
    else:
        VAR_15 = apath(VAR_5, VAR_122=request)
    # Map the file extension to the editor's syntax mode.
    if VAR_5[-3:] == '.py':
        VAR_127 = 'python'
    elif VAR_5[-5:] == '.html':
        VAR_127 = 'html'
    elif VAR_5[-5:] == '.load':
        VAR_127 = 'html'
    elif VAR_5[-4:] == '.css':
        VAR_127 = 'css'
    elif VAR_5[-3:] == '.js':
        VAR_127 = 'javascript'
    else:
        VAR_127 = 'html'
    if ('revert' in request.vars) and os.path.exists(VAR_15 + '.bak'):
        # Revert: swap current contents with the .bak copy.
        try:
            VAR_2 = FUNC_3(VAR_15 + '.bak')
            VAR_179 = FUNC_3(VAR_15)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return VAR_43.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        FUNC_4(VAR_15, VAR_2)
        VAR_128 = md5_hash(VAR_2)
        VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
        FUNC_4(VAR_15 + '.bak', VAR_179)
        VAR_43.flash = T('file "%s" of %VAR_140 restored', (VAR_5, VAR_129))
    else:
        try:
            VAR_2 = FUNC_3(VAR_15)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return VAR_43.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        VAR_130 = FUNC_0(VAR_2)
        VAR_128 = md5_hash(VAR_2)
        VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
        if request.vars.file_hash and request.vars.file_hash != VAR_128:
            # Conflict: file changed on disk since the client loaded it.
            # Save the client's version as <file>.1 and send to the resolver.
            session.flash = T('file changed on disk')
            VAR_2 = request.vars.data.replace('\VAR_122\n', '\n').strip() + '\n'
            FUNC_4(VAR_15 + '.1', VAR_2)
            if 'from_ajax' in request.vars:
                return VAR_43.json({'error': str(T('file changed on disk')),
                                    'redirect': URL('resolve',
                                                    VAR_98=request.args)})
            else:
                redirect(URL('resolve', VAR_98=request.args))
        elif request.vars.data:
            # Normal save: keep a .bak of the previous contents, log line delta.
            FUNC_4(VAR_15 + '.bak', VAR_2)
            VAR_2 = request.vars.data.replace('\VAR_122\n', '\n').strip() + '\n'
            FUNC_4(VAR_15, VAR_2)
            VAR_202 = FUNC_0(VAR_2)
            FUNC_1(
                VAR_3, 'EDIT', VAR_5, VAR_6=VAR_202 - VAR_130)
            VAR_128 = md5_hash(VAR_2)
            VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
            VAR_43.flash = T('file saved on %s', VAR_129)
    VAR_60 = (request.vars.data or request.vars.revert)
    VAR_61 = None
    if VAR_127 == 'python' and request.vars.data:
        # Syntax pre-check of the saved Python source (AST compile only).
        import _ast
        try:
            VAR_180 = request.vars.data.rstrip().replace('\VAR_122\n', '\n') + '\n'
            compile(VAR_180, VAR_15, "exec", _ast.PyCF_ONLY_AST)
        except Exception as VAR_114:
            # Compute character offsets so the editor can highlight the error.
            VAR_203 = sum([len(VAR_69) + 1 for l, VAR_69
                           in enumerate(request.vars.data.split("\n"))
                           if l < VAR_114.lineno - 1])
            if VAR_114.text and VAR_114.offset:
                VAR_213 = VAR_114.offset - (len(VAR_114.text) - len(
                    VAR_114.text.splitlines()[-1]))
            else:
                VAR_213 = 0
            VAR_61 = {'start': VAR_203, 'end': VAR_203 +
                      VAR_213 + 1, 'lineno': VAR_114.lineno, 'offset': VAR_213}
            try:
                VAR_214 = VAR_114.__class__.__name__
            except:
                VAR_214 = 'unknown exception!'
            VAR_43.flash = DIV(T('failed to compile VAR_16 because:'), BR(),
                               B(VAR_214), ' ' + T('at VAR_69 %s', VAR_114.lineno),
                               VAR_213 and ' ' +
                               T('at char %s', VAR_213) or '',
                               PRE(repr(VAR_114)))
    if VAR_60 and request.args[1] == 'modules':
        # An edited module must be reloaded into the running interpreter.
        try:
            VAR_181 = '.'.join(request.args[2:])[:-3]
            exec('import .applications.%VAR_140.modules.%s' % (
                request.args[0], VAR_181))
            reload(sys.modules['applications.%VAR_140.modules.%s'
                               % (request.args[0], VAR_181)])
        except Exception as VAR_114:
            VAR_43.flash = DIV(
                T('failed to reload module because:'), PRE(repr(VAR_114)))
    VAR_62 = None
    VAR_63 = None
    VAR_64 = None
    if VAR_127 == 'html' and len(request.args) >= 3:
        # Editing a view: link back to its controller and to the live page.
        VAR_131 = os.path.join(request.args[0], 'controllers',
                               request.args[2] + '.py')
        if os.path.exists(apath(VAR_131, VAR_122=request)):
            VAR_62 = URL('edit', VAR_98=[VAR_131.replace(os.sep, "/")])
            VAR_182 = request.args[3].replace('.html', '')
            VAR_64 = URL(request.args[0], request.args[2], VAR_182)
    elif VAR_127 == 'python' and request.args[1] == 'controllers':
        # Editing a controller: collect links to its view files.
        VAR_3 = FUNC_5()
        VAR_183 = os.path.splitext(request.args[2])[0]
        VAR_184 = os.path.join(VAR_3, 'views', VAR_183)
        VAR_185 = apath(VAR_184, VAR_122=request)
        VAR_186 = []
        if os.path.exists(VAR_185):
            if os.path.isdir(VAR_185):
                VAR_186 = glob(os.path.join(VAR_185, '*.html'))
            elif os.path.exists(VAR_185 + '.html'):
                VAR_186.append(VAR_185 + '.html')
        if len(VAR_186):
            VAR_63 = []
            for v in sorted(VAR_186):
                VAR_215 = os.path.split(v)[-1]
                VAR_216 = "/".join([VAR_184.replace(os.sep, "/"), VAR_215])
                VAR_63.append(A(VAR_215.split(".")[0],
                                VAR_144="editor_filelink",
                                _href=URL('edit', VAR_98=[VAR_216])))
    if len(request.args) > 2 and request.args[1] == 'controllers':
        VAR_132 = (request.args[2])[:-3]
        try:
            VAR_83 = find_exposed_functions(VAR_2)
            # NOTE(review): 'functions' presumably aliases VAR_83.
            VAR_83 = functions and sorted(VAR_83) or []
        except SyntaxError as err:
            VAR_83 = ['SyntaxError:Line:%d' % err.lineno]
    else:
        (VAR_132, VAR_83) = (None, None)
    if 'from_ajax' in request.vars:
        return VAR_43.json({'file_hash': VAR_128, 'saved_on': VAR_129, 'functions': VAR_83, 'controller': VAR_132, 'application': request.args[0], 'highlight': VAR_61})
    else:
        # NOTE(review): several bare names below (filename, realfilename,
        # filetype, data, ...) look like obfuscation-broken aliases of the
        # corresponding VAR_* locals.
        VAR_133 = dict(VAR_3=request.args[0],
                       VAR_160=request.vars.lineno or 1,
                       editor_settings=VAR_57,
                       VAR_5=filename,
                       VAR_59=realfilename,
                       VAR_127=filetype,
                       VAR_2=data,
                       VAR_62=edit_controller,
                       VAR_128=file_hash,
                       VAR_129=saved_on,
                       VAR_132=controller,
                       VAR_83=functions,
                       VAR_64=view_link,
                       VAR_63=editviewlinks,
                       id=IS_SLUG()(VAR_5)[0],
                       force=True if (request.vars.restore or
                                      request.vars.revert) else False)
        VAR_134 = VAR_43.render('default/edit_js.html', VAR_133)
        file_details['plain_html'] = VAR_134
        if is_mobile:
            return VAR_43.render('default.mobile/FUNC_27.html',
                                 VAR_133, editor_settings=VAR_57)
        else:
            return VAR_43.json(VAR_133)
def FUNC_28():
    """Scan an app's models/controllers/modules/private for '# todo' comments."""
    VAR_3 = request.vars.app or ''
    VAR_56 = apath('%(VAR_3)s' % {'app': VAR_3}, VAR_122=request)
    VAR_65 = ['models', 'controllers', 'modules', 'private']

    def FUNC_38(VAR_3, VAR_13, VAR_14='.*\.py$'):
        # Sorted, backup-free, slash-normalized listing of one app subfolder.
        VAR_45 = sorted(listdir(apath('%(VAR_3)VAR_140/%(VAR_13)VAR_140/' % {'app': VAR_3, 'dir': VAR_13}, VAR_122=request), VAR_14))
        VAR_45 = [x.replace(os.path.sep, '/') for x in VAR_45 if not x.endswith('.bak')]
        return VAR_45
    VAR_66 = '#\VAR_140*(todo)+\VAR_140+(.*)'
    VAR_32 = re.compile(VAR_66, re.IGNORECASE)
    VAR_67 = []
    for VAR_68 in VAR_65:
        for VAR_221 in FUNC_38(VAR_3, VAR_68):
            VAR_35 = []
            VAR_5 = apath(os.path.join(VAR_3, VAR_68, VAR_221), VAR_122=request)
            with FUNC_2(VAR_5, 'r') as f_s:
                VAR_204 = f_s.read()
            for VAR_156 in VAR_32.finditer(VAR_204):
                VAR_203 = VAR_156.start()
                # Derive a 1-based line number from the match offset.
                VAR_160 = VAR_204.count('\n', 0, VAR_203) + 1
                VAR_35.append({'text': VAR_156.group(0), 'lineno': VAR_160})
            if len(VAR_35) != 0:
                VAR_67.append({'filename': VAR_221, 'matches': VAR_35, 'dir': VAR_68})
    return {'todo': VAR_67, 'app': VAR_3}
def FUNC_29():
    """Save/load named editor sessions (lists of open files) via settings.cfg."""
    VAR_58 = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor_sessions', default_values={})
    VAR_57 = VAR_58.read()
    if request.vars.session_name and request.vars.files:
        # Store the session as a comma-joined file list under its name.
        VAR_135 = request.vars.session_name
        VAR_45 = request.vars.files
        VAR_57.update({VAR_135: ','.join(VAR_45)})
        if VAR_58.save(VAR_57.items()):
            VAR_43.headers["web2py-VAR_126-flash"] = T('Session saved correctly')
        else:
            VAR_43.headers["web2py-VAR_126-flash"] = T('Session saved on session only')
    return VAR_43.render('default/FUNC_29.html', {'editor_sessions': VAR_57})
def FUNC_30():
    """Interactive merge of a file with its conflicting '.1' copy.

    Shows a line-by-line ndiff; on submit, keeps common lines plus the
    checked added/removed lines and writes the merged result back.
    """
    VAR_5 = '/'.join(request.args)
    VAR_15 = apath(VAR_5, VAR_122=request)
    VAR_7 = FUNC_3(VAR_15).split('\n')
    try:
        VAR_8 = FUNC_3(VAR_15 + '.1').split('\n')
    except IOError:
        session.flash = 'Other VAR_16, no longer there'
        redirect(URL('edit', VAR_98=request.args))
    VAR_68 = difflib.ndiff(VAR_7, VAR_8)

    def FUNC_60(VAR_69):
        # Render leading whitespace as &nbsp; so the diff aligns in HTML.
        VAR_136 = ''
        for (VAR_145, VAR_48) in enumerate(VAR_69):
            if VAR_48 == ' ':
                VAR_136 += '&nbsp;'
            elif VAR_48 == ' \t':
                VAR_136 += '&nbsp;'
            elif VAR_145 == 0 and VAR_48 == '?':
                pass
            else:
                break
        return XML(VAR_136)

    def FUNC_61(VAR_70):
        # CSS class per ndiff marker: unchanged/added/removed.
        VAR_137 = {' ': 'normal', '+': 'plus', '-': 'minus'}
        return VAR_137[VAR_70[0]]
    if request.vars:
        # Rebuild the file from common lines plus user-selected diff lines.
        VAR_48 = '\n'.join([VAR_70[2:].rstrip() for (VAR_111, VAR_70) in enumerate(VAR_68) if VAR_70[0]
                            == ' ' or 'line%i' % VAR_111 in request.vars])
        FUNC_4(VAR_15, VAR_48)
        session.flash = 'files merged'
        redirect(URL('edit', VAR_98=request.args))
    else:
        # Checkbox only for +/- lines; '+' lines pre-checked.
        VAR_138 = lambda VAR_197, VAR_70: not VAR_70[:1] in ['+', '-'] and "" \
            or INPUT(_type='checkbox',
                     _name='line%i' % VAR_197,
                     VAR_9=VAR_70[0] == '+')
        VAR_139 = TABLE(*[TR(TD(VAR_138(VAR_111, VAR_70)),
                             TD(VAR_70[0]),
                             TD(FUNC_60(VAR_70[2:]),
                                TT(VAR_70[2:].rstrip())),
                             VAR_144=FUNC_61(VAR_70))
                          for (VAR_111, VAR_70) in enumerate(VAR_68) if VAR_70[0] != '?'])
        # NOTE(review): 'diff'/'filename' presumably alias VAR_139/VAR_5.
        return dict(VAR_139=diff, VAR_5=filename)
def FUNC_31():
    """Edit a language translation file: build one input per key and save updates."""
    VAR_3 = FUNC_5()
    VAR_5 = '/'.join(request.args)
    VAR_43.title = request.args[-1]
    VAR_71 = read_dict(apath(VAR_5, VAR_122=request))
    if '__corrupted__' in VAR_71:
        # The dict file failed to parse; show the error instead of a form.
        VAR_26 = SPAN(VAR_71['__corrupted__'], VAR_144='error')
        return dict(VAR_5=filename, VAR_26=form)
    VAR_72 = sorted(VAR_71.keys(), VAR_143=lambda x: to_native(x).lower())
    VAR_73 = []
    rows.append(H2(T('Original/Translation')))
    for VAR_143 in VAR_72:
        # Inputs are keyed by the md5 of the translation key.
        VAR_10 = md5_hash(VAR_143)
        VAR_140 = VAR_71[VAR_143]
        # Keys may carry a '\x01'-separated prefix tag.
        (VAR_141, VAR_142, VAR_143) = key.partition('\x01')
        if VAR_142:
            VAR_141 = SPAN(VAR_141 + ': ', VAR_144='tm_ftag')
            VAR_145 = VAR_143
        else:
            (VAR_145, VAR_141) = (VAR_141, '')
        VAR_144 = 'untranslated' if VAR_145 == VAR_140 else 'translated'
        # Short strings get a text input; long ones a textarea.
        if len(VAR_140) <= 40:
            VAR_187 = INPUT(_type='text', _name=VAR_10, VAR_9=VAR_140,
                            _size=70, VAR_144=_class)
        else:
            VAR_187 = TEXTAREA(_name=VAR_10, VAR_9=VAR_140, _cols=70,
                               _rows=5, VAR_144=_class)
        VAR_145 = (VAR_140 != VAR_145) and VAR_145 or B(VAR_145)
        VAR_146 = DIV(LABEL(VAR_141, VAR_145, _style="font-weight:normal;"),
                      CAT(VAR_187, '\n', TAG.BUTTON(
                          T('delete'),
                          _onclick='return delkey("%s")' % VAR_10,
                          VAR_144='btn')), _id=VAR_10, VAR_144='span6 well well-small')
        VAR_73.append(DIV(VAR_146, VAR_144="row-fluid"))
    VAR_73.append(DIV(INPUT(_type='submit', _value=T('update'), VAR_144="btn btn-primary"), VAR_144='controls'))
    VAR_26 = FORM(*VAR_73)
    if VAR_26.accepts(request.vars, keepvalues=True):
        VAR_147 = dict()
        for VAR_143 in VAR_72:
            VAR_10 = md5_hash(VAR_143)
            # chr(127) marks entries deleted client-side; skip them.
            if VAR_26.vars[VAR_10] == chr(127):
                continue
            VAR_147[VAR_143] = VAR_26.vars[VAR_10]
        write_dict(apath(VAR_5, VAR_122=request), VAR_147)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(VAR_122=request, VAR_98=request.args))
    return dict(VAR_3=request.args[0], VAR_5=filename, VAR_26=form)
def FUNC_32():
    """Edit a plural-forms dictionary: one row per singular with N plural inputs."""
    VAR_3 = FUNC_5()
    VAR_5 = '/'.join(request.args)
    VAR_74 = read_plural_dict(
        apath(VAR_5, VAR_122=request))  # plural VAR_148 dictionary
    VAR_75 = int(request.vars.nplurals) - 1  # plural VAR_148 quantity
    VAR_76 = xrange(VAR_75)
    if '__corrupted__' in VAR_74:
        # The plural dict failed to parse; show the error instead of a form.
        VAR_26 = SPAN(VAR_74['__corrupted__'], VAR_144='error')
        return dict(VAR_5=filename, VAR_26=form)
    VAR_72 = sorted(VAR_74.keys(), lambda x, y: cmp(
        unicode(x, 'utf-8').lower(), unicode(y, 'utf-8').lower()))
    VAR_77 = []
    for VAR_143 in VAR_72:
        VAR_10 = md5_hash(VAR_143)
        VAR_148 = VAR_74[VAR_143]
        # Pad the form list so every key has nplurals-1 entries.
        if len(VAR_148) < VAR_75:
            VAR_148.extend(None for VAR_111 in xrange(VAR_75 - len(VAR_148)))
        VAR_149 = DIV(CAT(LABEL(T("Singular Form")), B(VAR_143,
                                                       VAR_144='fake-input')))
        VAR_150 = [SPAN(LABEL(T("Plural Form #%s", VAR_173 + 1)), INPUT(_type='text', _name=VAR_10 + '_' + str(VAR_173), VAR_9=VAR_148[VAR_173], _size=20), VAR_144='span6') for VAR_173 in VAR_76]
        VAR_151 = DIV(CAT(*VAR_150))
        VAR_152 = DIV(CAT(LABEL(XML('&nbsp;')), TAG.BUTTON(T('delete'), _onclick='return delkey("%s")' % VAR_10, VAR_144='btn'), VAR_144='span6'))
        VAR_153 = DIV(DIV(VAR_149, '\n', VAR_151, '\n', VAR_152, VAR_144='well well-small'), _id=VAR_10, VAR_144='row-fluid tab_row')
        VAR_77.append(VAR_153)
    VAR_77.append(DIV(TAG['button'](T('update'), _type='submit',
                                    VAR_144='btn btn-primary'),
                      VAR_144='controls'))
    VAR_78 = DIV(*VAR_77, **dict(VAR_144="row-fluid"))
    VAR_26 = FORM(VAR_78)
    if VAR_26.accepts(request.vars, keepvalues=True):
        VAR_154 = dict()
        for VAR_143 in VAR_72:
            VAR_10 = md5_hash(VAR_143)
            # chr(127) in the first input marks a client-side delete; skip.
            if VAR_26.vars[VAR_10 + '_0'] == chr(127):
                continue
            VAR_154[VAR_143] = [VAR_26.vars[VAR_10 + '_' + str(VAR_173)]
                                for VAR_173 in VAR_76]
        write_plural_dict(apath(VAR_5, VAR_122=request), VAR_154)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(VAR_122=request, VAR_98=request.args, VAR_17=dict(
            VAR_75=request.vars.nplurals)))
    return dict(VAR_3=request.args[0], VAR_5=filename, VAR_26=form)
def VAR_79():
    """About page: render the app's ABOUT and LICENSE files plus progress data."""
    VAR_3 = FUNC_5()
    VAR_79 = FUNC_3(apath('%VAR_140/ABOUT' % VAR_3, VAR_122=request))
    VAR_80 = FUNC_3(apath('%VAR_140/LICENSE' % VAR_3, VAR_122=request))
    # Both documents are MARKMIN-formatted text.
    return dict(VAR_3=VAR_3, VAR_79=MARKMIN(VAR_79), VAR_80=MARKMIN(VAR_80), VAR_6=FUNC_11(VAR_3))
def FUNC_34():
VAR_3 = FUNC_5()
if not VAR_43.flash and VAR_3 == request.application:
VAR_155 = T('ATTENTION: you cannot FUNC_27 the running application!')
VAR_43.flash = VAR_155
if request.vars and not request.vars.token == session.token:
redirect(URL('logout'))
if request.vars.pluginfile is not None and not isinstance(request.vars.pluginfile, str):
VAR_5 = os.path.basename(request.vars.pluginfile.filename)
if plugin_install(VAR_3, request.vars.pluginfile.file,
request, VAR_5):
session.flash = T('new VAR_96 installed')
redirect(URL('design', VAR_98=VAR_3))
else:
session.flash = \
T('unable to install VAR_96 "%s"', VAR_5)
redirect(URL(VAR_122=request, VAR_98=VAR_3))
elif isinstance(request.vars.pluginfile, str):
session.flash = T('plugin not specified')
redirect(URL(VAR_122=request, VAR_98=VAR_3))
if os.path.exists(apath('%VAR_140/compiled' % VAR_3, VAR_122=request)):
session.flash = \
T('application is compiled and cannot be designed')
redirect(URL('site'))
models = listdir(apath('%VAR_140/VAR_81/' % VAR_3, VAR_122=request), '.*\.py$')
VAR_81 = [x.replace('\\', '/') for x in VAR_81]
VAR_82 = {}
for VAR_156 in VAR_81:
VAR_2 = FUNC_3(apath('%VAR_140/VAR_81/%s' % (VAR_3, VAR_156), VAR_122=request))
VAR_82[VAR_156] = re.findall(REGEX_DEFINE_TABLE, VAR_2, re.MULTILINE)
VAR_82[VAR_156].sort()
controllers = sorted(
listdir(apath('%VAR_140/VAR_51/' % VAR_3, VAR_122=request), '.*\.py$'))
VAR_51 = [x.replace('\\', '/') for x in VAR_51]
VAR_83 = {}
for VAR_48 in VAR_51:
VAR_2 = FUNC_3(apath('%VAR_140/VAR_51/%s' % (VAR_3, VAR_48), VAR_122=request))
try:
VAR_95 = find_exposed_functions(VAR_2)
VAR_83[VAR_48] = VAR_95 and sorted(VAR_95) or []
except SyntaxError as err:
VAR_83[VAR_48] = ['SyntaxError:Line:%d' % err.lineno]
VAR_84 = sorted(
listdir(apath('%VAR_140/VAR_84/' % VAR_3, VAR_122=request), '[\w/\-]+(\.\w+)+$'))
VAR_84 = [x.replace('\\', '/') for x in VAR_84 if not x.endswith('.bak')]
VAR_85 = {}
VAR_86 = {}
for VAR_48 in VAR_84:
VAR_2 = FUNC_3(apath('%VAR_140/VAR_84/%s' % (VAR_3, VAR_48), VAR_122=request))
VAR_95 = re.findall(REGEX_EXTEND, VAR_2, re.MULTILINE)
if VAR_95:
VAR_85[VAR_48] = VAR_95[0][1]
VAR_95 = re.findall(REGEX_INCLUDE, VAR_2)
VAR_86[VAR_48] = [VAR_111[1] for VAR_111 in VAR_95]
modules = listdir(apath('%VAR_140/VAR_87/' % VAR_3, VAR_122=request), '.*\.py$')
VAR_87 = modules = [x.replace('\\', '/') for x in VAR_87]
modules.sort()
VAR_88 = listdir(apath('%VAR_140/private/' % VAR_3, VAR_122=request), '[^\.#].*')
VAR_88 = [x.replace('\\', '/') for x in VAR_88]
privates.sort()
VAR_89 = listdir(apath('%VAR_140/static/' % VAR_3, VAR_122=request), '[^\.#].*',
maxnum=VAR_1)
VAR_89 = [x.replace(os.path.sep, '/') for x in VAR_89]
statics.sort()
VAR_90 = os.path.join(apath(VAR_3, VAR_122=request), 'languages')
VAR_91 = dict([(VAR_188, info) for VAR_188, info
in iteritems(read_possible_languages(VAR_90))
if info[2] != 0]) # info[2] is langfile_mtime:
VAR_92 = apath('%VAR_140/cron' % VAR_3, VAR_122=request)
VAR_93 = apath('%VAR_140/cron/crontab' % VAR_3, VAR_122=request)
if not is_gae:
if not os.path.exists(VAR_92):
os.mkdir(VAR_92)
if not os.path.exists(VAR_93):
FUNC_4(VAR_93, '#crontab')
VAR_94 = []
def FUNC_62(VAR_95, VAR_94):
FUNC_56 += [VAR_70[7:].split('/')[0].split(
'.')[0] for VAR_70 in VAR_95 if VAR_70.startswith('plugin_')]
VAR_94[:] = list(set(VAR_94))
FUNC_56.sort()
return [VAR_70 for VAR_70 in VAR_95 if not VAR_70.startswith('plugin_')]
return dict(VAR_3=app,
VAR_81=FUNC_62(VAR_81, VAR_94),
VAR_82=defines,
VAR_51=FUNC_62(VAR_51, VAR_94),
VAR_83=functions,
VAR_84=FUNC_62(VAR_84, VAR_94),
VAR_87=FUNC_62(VAR_87, VAR_94),
VAR_85=extend,
VAR_86=include,
VAR_88=FUNC_62(VAR_88, VAR_94),
VAR_89=FUNC_62(VAR_89, VAR_94),
VAR_91=languages,
VAR_93=crontab,
VAR_94=FUNC_56)
def FUNC_35():
VAR_3 = request.args(0)
VAR_96 = request.args(1)
VAR_97 = 'plugin_' + VAR_96
VAR_46 = FORM.confirm(
T('Delete'),
{T('Cancel'): URL('design', VAR_98=VAR_3)})
if VAR_46.accepted:
try:
for VAR_168 in ['models', 'views', 'controllers', 'static', 'modules', 'private']:
VAR_15 = os.path.join(apath(VAR_3, VAR_122=request), VAR_168)
for VAR_70 in os.listdir(VAR_15):
if VAR_70.rsplit('.', 1)[0] == VAR_97:
VAR_5 = os.path.join(VAR_15, VAR_70)
if os.path.isdir(VAR_5):
shutil.rmtree(VAR_5)
else:
os.unlink(VAR_5)
session.flash = T('plugin "%(VAR_96)s" deleted',
dict(VAR_96=FUNC_36))
except Exception:
session.flash = T('unable to FUNC_21 VAR_16 VAR_96 "%(VAR_96)s"',
dict(VAR_96=FUNC_36))
redirect(URL('design', VAR_98=request.args(0), VAR_157=request.vars.id2))
return dict(VAR_46=dialog, VAR_96=FUNC_36)
def VAR_96():
VAR_3 = FUNC_5()
VAR_96 = request.args(1)
if not VAR_43.flash and VAR_3 == request.application:
VAR_155 = T('ATTENTION: you cannot FUNC_27 the running application!')
VAR_43.flash = VAR_155
if os.path.exists(apath('%VAR_140/compiled' % VAR_3, VAR_122=request)):
session.flash = \
T('application is compiled and cannot be designed')
redirect(URL('site'))
models = listdir(apath('%VAR_140/VAR_81/' % VAR_3, VAR_122=request), '.*\.py$')
VAR_81 = [x.replace('\\', '/') for x in VAR_81]
VAR_82 = {}
for VAR_156 in VAR_81:
VAR_2 = FUNC_3(apath('%VAR_140/VAR_81/%s' % (VAR_3, VAR_156), VAR_122=request))
VAR_82[VAR_156] = re.findall(REGEX_DEFINE_TABLE, VAR_2, re.MULTILINE)
VAR_82[VAR_156].sort()
controllers = sorted(
listdir(apath('%VAR_140/VAR_51/' % VAR_3, VAR_122=request), '.*\.py$'))
VAR_51 = [x.replace('\\', '/') for x in VAR_51]
VAR_83 = {}
for VAR_48 in VAR_51:
VAR_2 = FUNC_3(apath('%VAR_140/VAR_51/%s' % (VAR_3, VAR_48), VAR_122=request))
try:
VAR_95 = find_exposed_functions(VAR_2)
VAR_83[VAR_48] = VAR_95 and sorted(VAR_95) or []
except SyntaxError as err:
VAR_83[VAR_48] = ['SyntaxError:Line:%d' % err.lineno]
VAR_84 = sorted(
listdir(apath('%VAR_140/VAR_84/' % VAR_3, VAR_122=request), '[\w/\-]+\.\w+$'))
VAR_84 = [x.replace('\\', '/') for x in VAR_84]
VAR_85 = {}
VAR_86 = {}
for VAR_48 in VAR_84:
VAR_2 = FUNC_3(apath('%VAR_140/VAR_84/%s' % (VAR_3, VAR_48), VAR_122=request))
VAR_95 = re.findall(REGEX_EXTEND, VAR_2, re.MULTILINE)
if VAR_95:
VAR_85[VAR_48] = VAR_95[0][1]
VAR_95 = re.findall(REGEX_INCLUDE, VAR_2)
VAR_86[VAR_48] = [VAR_111[1] for VAR_111 in VAR_95]
modules = listdir(apath('%VAR_140/VAR_87/' % VAR_3, VAR_122=request), '.*\.py$')
VAR_87 = modules = [x.replace('\\', '/') for x in VAR_87]
modules.sort()
VAR_88 = listdir(apath('%VAR_140/private/' % VAR_3, VAR_122=request), '[^\.#].*')
VAR_88 = [x.replace('\\', '/') for x in VAR_88]
privates.sort()
VAR_89 = listdir(apath('%VAR_140/static/' % VAR_3, VAR_122=request), '[^\.#].*',
maxnum=VAR_1)
VAR_89 = [x.replace(os.path.sep, '/') for x in VAR_89]
statics.sort()
VAR_91 = sorted([VAR_188 + '.py' for VAR_188, info in
iteritems(T.get_possible_languages_info())
if info[2] != 0]) # info[2] is langfile_mtime:
crontab = apath('%VAR_140/cron/crontab' % VAR_3, VAR_122=request)
if not os.path.exists(VAR_93):
FUNC_4(VAR_93, '#crontab')
def FUNC_62(VAR_95):
VAR_32 = re.compile('^plugin_' + VAR_96 + '(/.*|\..*)?$')
return [VAR_70 for VAR_70 in VAR_95 if VAR_70 and VAR_32.match(VAR_70)]
return dict(VAR_3=app,
VAR_81=FUNC_62(VAR_81),
VAR_82=defines,
VAR_51=FUNC_62(VAR_51),
VAR_83=functions,
VAR_84=FUNC_62(VAR_84),
VAR_87=FUNC_62(VAR_87),
VAR_85=extend,
VAR_86=include,
VAR_88=FUNC_62(VAR_88),
VAR_89=FUNC_62(VAR_89),
VAR_91=languages,
VAR_93=crontab)
def FUNC_37():
if request.vars and not request.vars.token == session.token:
redirect(URL('logout'))
try:
VAR_157 = '#' + request.vars.id if request.vars.id else ''
if request.vars.app:
VAR_3 = FUNC_5(request.vars.app)
VAR_15 = abspath(request.vars.location)
else:
if request.vars.dir:
request.vars.location += request.vars.dir + '/'
VAR_3 = FUNC_5(VAR_10=request.vars.location.split('/')[0])
VAR_15 = apath(request.vars.location, VAR_122=request)
VAR_5 = re.sub('[^\w./-]+', '_', request.vars.filename)
if VAR_15[-7:] == '/rules/':
if len(VAR_5) == 0:
raise SyntaxError
if not VAR_5[-3:] == '.py':
VAR_5 += '.py'
VAR_188 = re.match('^plural_rules-(.*)\.py$', VAR_5).group(1)
VAR_189 = read_possible_languages(apath(VAR_3, VAR_122=request))[VAR_188]
VAR_112 = dedent("""
VAR_75=2 # for example, English language has 2 VAR_148:
get_plural_id = lambda VAR_173: int(VAR_173 != 1)
construct_plural_form = lambda word, plural_id: word
""")[1:] % dict(VAR_188=VAR_189[0], langname=VAR_189[1])
elif VAR_15[-11:] == '/VAR_91/':
if len(VAR_5) == 0:
raise SyntaxError
if not VAR_5[-3:] == '.py':
VAR_5 += '.py'
VAR_15 = os.path.join(apath(VAR_3, VAR_122=request), 'languages', VAR_5)
if not os.path.exists(VAR_15):
FUNC_4(VAR_15, '')
findT(apath(VAR_3, VAR_122=request), VAR_5[:-3])
session.flash = T('language VAR_16 "%(VAR_5)s" VAR_175/updated',
dict(VAR_5=filename))
redirect(request.vars.sender + VAR_157)
elif VAR_15[-8:] == '/VAR_81/':
if not VAR_5[-3:] == '.py':
VAR_5 += '.py'
if len(VAR_5) == 3:
raise SyntaxError
VAR_112 = '# -*- coding: utf-8 -*-\n'
elif VAR_15[-13:] == '/VAR_51/':
if not VAR_5[-3:] == '.py':
VAR_5 += '.py'
if len(VAR_5) == 3:
raise SyntaxError
VAR_112 = '# -*- coding: utf-8 -*-\VAR_173# %VAR_140\ndef VAR_197(): return dict(message="hello from %s")'
VAR_112 = text % (T('try something like'), VAR_5)
elif VAR_15[-7:] == '/VAR_84/':
if request.vars.plugin and not VAR_5.startswith('plugin_%VAR_140/' % request.vars.plugin):
VAR_5 = 'plugin_%VAR_140/%s' % (request.vars.plugin, VAR_5)
if VAR_5.find('.') < 0:
VAR_5 += '.html'
VAR_50 = VAR_5.split('.')[-1].lower()
if len(VAR_5) == 5:
raise SyntaxError
VAR_155 = T(
'This is the %(VAR_5)VAR_140 template', dict(VAR_5=filename))
if VAR_50 == 'html':
VAR_112 = dedent("""
{{VAR_85 'layout.html'}}
<h1>%VAR_140</h1>
{{=BEAUTIFY(VAR_43._vars)}}""" % VAR_155)[1:]
else:
VAR_222 = os.path.join(VAR_15, 'generic.' + VAR_50)
if os.path.exists(VAR_222):
VAR_112 = read_file(VAR_222)
else:
VAR_112 = ''
elif VAR_15[-9:] == '/VAR_87/':
if request.vars.plugin and not VAR_5.startswith('plugin_%VAR_140/' % request.vars.plugin):
VAR_5 = 'plugin_%VAR_140/%s' % (request.vars.plugin, VAR_5)
if not VAR_5[-3:] == '.py':
VAR_5 += '.py'
if len(VAR_5) == 3:
raise SyntaxError
VAR_112 = dedent("""
from gluon import *\n""")[1:]
elif (VAR_15[-8:] == '/static/') or (VAR_15[-9:] == '/private/'):
if (request.vars.plugin and
not VAR_5.startswith('plugin_%VAR_140/' % request.vars.plugin)):
VAR_5 = 'plugin_%VAR_140/%s' % (request.vars.plugin, VAR_5)
VAR_112 = ''
else:
redirect(request.vars.sender + VAR_157)
VAR_158 = os.path.join(VAR_15, VAR_5)
VAR_159 = os.path.dirname(VAR_158)
if not os.path.exists(VAR_159):
os.makedirs(VAR_159)
if os.path.exists(VAR_158):
raise SyntaxError
FUNC_4(VAR_158, VAR_112)
FUNC_1(VAR_3, 'CREATE', VAR_5)
if request.vars.dir:
VAR_110 = T('file "%(VAR_5)s" created',
dict(VAR_5=VAR_158[len(VAR_15):]))
else:
session.flash = T('file "%(VAR_5)s" created',
dict(VAR_5=VAR_158[len(VAR_15):]))
VAR_17 = {}
if request.vars.id:
VAR_17['id'] = request.vars.id
if request.vars.app:
VAR_17['app'] = request.vars.app
redirect(URL('edit',
VAR_98=[os.path.join(request.vars.location, VAR_5)], VAR_17=vars))
except Exception as VAR_114:
if not isinstance(VAR_114, HTTP):
session.flash = T('cannot create file')
if request.vars.dir:
VAR_43.flash = VAR_110
VAR_43.headers['web2py-VAR_126-content'] = 'append'
VAR_43.headers['web2py-VAR_126-command'] = "%VAR_140 %VAR_140 %s" % (
"$.web2py.invalidate('#files_menu');",
"load_file('%s');" % URL('edit', VAR_98=[VAR_3, request.vars.dir, VAR_5]),
"$.web2py.enableElement($('#VAR_26 form').find($.web2py.formInputClickSelector));")
return ''
else:
redirect(request.vars.sender + VAR_157)
def FUNC_38(VAR_3, VAR_13, VAR_14='.*\.py$'):
VAR_45 = sorted(
listdir(apath('%(VAR_3)VAR_140/%(VAR_13)VAR_140/' % {'app': VAR_3, 'dir': VAR_13}, VAR_122=request), VAR_14))
VAR_45 = [x.replace('\\', '/') for x in VAR_45 if not x.endswith('.bak')]
return VAR_45
def FUNC_39(VAR_15, VAR_16, VAR_17={}, VAR_3=None):
VAR_98 = (VAR_15, VAR_16) if 'app' in VAR_17 else (VAR_3, VAR_15, VAR_16)
VAR_99 = URL('edit', VAR_98=args, VAR_17=vars)
return A(VAR_16, VAR_144='editor_filelink', _href=VAR_99, _style='word-wrap: nowrap;')
def FUNC_40():
VAR_3 = request.vars.app or 'welcome'
VAR_65 = [{'name': 'models', 'reg': '.*\.py$'},
{'name': 'controllers', 'reg': '.*\.py$'},
{'name': 'views', 'reg': '[\w/\-]+(\.\w+)+$'},
{'name': 'modules', 'reg': '.*\.py$'},
{'name': 'static', 'reg': '[^\.#].*'},
{'name': 'private', 'reg': '.*\.py$'}]
VAR_100 = []
for VAR_13 in VAR_65:
VAR_100.append(TAG[''](LI(VAR_13['name'], VAR_144="nav-header component", _onclick="collapse('" + VAR_13['name'] + "_files');"),
LI(UL(*[LI(FUNC_39(VAR_13['name'], VAR_221, dict(id=VAR_13['name'] + VAR_221.replace('.', '__')), VAR_3), _style="overflow:hidden", _id=VAR_13['name'] + "__" + VAR_221.replace('.', '__'))
for VAR_221 in FUNC_38(VAR_3, VAR_13['name'], VAR_14=VAR_13['reg'])],
VAR_144="nav nav-list small-font"),
_id=VAR_13['name'] + '_files', _style="display: none;")))
return dict(VAR_100=result_files)
def FUNC_41():
if request.vars and not request.vars.token == session.token:
redirect(URL('logout'))
try:
VAR_5 = None
VAR_3 = FUNC_5(VAR_10=request.vars.location.split('/')[0])
VAR_15 = apath(request.vars.location, VAR_122=request)
if request.vars.filename:
VAR_5 = re.sub('[^\w\./]+', '_', request.vars.filename)
else:
VAR_5 = os.path.split(request.vars.file.filename)[-1]
if VAR_15[-8:] == '/VAR_81/' and not VAR_5[-3:] == '.py':
VAR_5 += '.py'
if VAR_15[-9:] == '/VAR_87/' and not VAR_5[-3:] == '.py':
VAR_5 += '.py'
if VAR_15[-13:] == '/VAR_51/' and not VAR_5[-3:] == '.py':
VAR_5 += '.py'
if VAR_15[-7:] == '/VAR_84/' and not VAR_5[-5:] == '.html':
VAR_5 += '.html'
if VAR_15[-11:] == '/VAR_91/' and not VAR_5[-3:] == '.py':
VAR_5 += '.py'
VAR_5 = os.path.join(VAR_15, VAR_5)
VAR_159 = os.path.dirname(VAR_5)
if not os.path.exists(VAR_159):
os.makedirs(VAR_159)
VAR_2 = request.vars.file.file.read()
VAR_160 = FUNC_0(VAR_2)
FUNC_4(VAR_5, VAR_2, 'wb')
FUNC_1(VAR_3, 'UPLOAD', VAR_5, VAR_160)
session.flash = T('file "%(VAR_5)s" uploaded',
dict(VAR_5=filename[len(VAR_15):]))
except Exception:
if VAR_5:
VAR_68 = dict(VAR_5=filename[len(VAR_15):])
else:
VAR_68 = dict(VAR_5='unknown')
session.flash = T('cannot upload VAR_16 "%(VAR_5)s"', VAR_68)
redirect(request.vars.sender)
def FUNC_42():
import operator
import os
import .hashlib
VAR_3 = FUNC_5()
if is_gae:
VAR_161 = 'dbold' if ('old' in
(request.args(1) or '')) else 'dbnew'
else:
VAR_161 = request.args(1) or 'new'
VAR_101 = {}
db_ready['status'] = FUNC_43(VAR_3)
VAR_101['errmessage'] = T(
"No ticket_storage.txt found under /private folder")
VAR_101['errlink'] = "http://web2py.com/books/default/chapter/29/13#Collecting-tickets"
if VAR_161 == 'new':
VAR_162 = apath('%VAR_140/errors' % VAR_3, VAR_122=request)
VAR_163 = []
for VAR_70 in request.vars:
if VAR_70[:7] == 'delete_':
VAR_163.append(VAR_70[7:])
VAR_164 = dict()
for fn in listdir(VAR_162, '^[VAR_7-fA-F0-9.\-]+$'):
VAR_190 = os.path.join(VAR_162, fn)
if not os.path.isfile(VAR_190):
continue
try:
VAR_205 = FUNC_2(VAR_190, 'rb')
try:
VAR_125 = pickle.load(VAR_205)
finally:
VAR_205.close()
except IOError:
continue
except EOFError:
continue
VAR_191 = hashlib.md5(to_bytes(VAR_125['traceback'])).hexdigest()
if VAR_191 in VAR_163:
os.unlink(VAR_190)
else:
try:
VAR_164[VAR_191]['count'] += 1
except KeyError:
VAR_218 = VAR_125['traceback'].split("\n")
VAR_219 = VAR_218[-2] if len(VAR_218) > 1 else 'unknown'
VAR_220 = os.path.split(VAR_125['layer'])[1]
VAR_164[VAR_191] = dict(count=1, pickel=VAR_125,
causer=VAR_220,
VAR_219=last_line,
VAR_191=hash, VAR_113=fn)
VAR_165 = [(x['count'], x) for x in VAR_164.values()]
VAR_165.sort(VAR_143=operator.itemgetter(0), reverse=True)
return dict(FUNC_42=[x[1] for x in VAR_165], VAR_3=VAR_3, VAR_161=method, VAR_101=db_ready)
elif VAR_161 == 'dbnew':
VAR_162 = apath('%VAR_140/errors' % VAR_3, VAR_122=request)
VAR_192, VAR_193 = FUNC_43(VAR_3)
VAR_163 = []
for VAR_70 in request.vars:
if VAR_70[:7] == 'delete_':
VAR_163.append(VAR_70[7:])
VAR_164 = dict()
for fn in VAR_192(VAR_193.id > 0).select():
try:
VAR_125 = pickle.loads(fn.ticket_data)
VAR_191 = hashlib.md5(VAR_125['traceback']).hexdigest()
if VAR_191 in VAR_163:
VAR_192(VAR_193.id == fn.id).delete()
VAR_192.commit()
else:
try:
VAR_164[VAR_191]['count'] += 1
except KeyError:
VAR_218 = VAR_125['traceback'].split("\n")
VAR_219 = VAR_218[-2]
VAR_220 = os.path.split(VAR_125['layer'])[1]
VAR_164[VAR_191] = dict(count=1,
pickel=VAR_125, causer=VAR_220,
VAR_219=last_line, VAR_191=hash,
VAR_113=fn.ticket_id)
except AttributeError as VAR_114:
VAR_192(VAR_193.id == fn.id).delete()
VAR_192.commit()
VAR_165 = [(x['count'], x) for x in VAR_164.values()]
VAR_165.sort(VAR_143=operator.itemgetter(0), reverse=True)
return dict(FUNC_42=[x[1] for x in VAR_165], VAR_3=app,
VAR_161=method, VAR_101=db_ready)
elif VAR_161 == 'dbold':
VAR_192, VAR_193 = FUNC_43(VAR_3)
for VAR_70 in request.vars:
if VAR_70[:7] == 'delete_':
VAR_192(VAR_193.ticket_id == VAR_70[7:]).delete()
VAR_192.commit()
VAR_206 = VAR_192(VAR_193.id > 0).select(VAR_193.ticket_id,
VAR_193.created_datetime,
orderby=~VAR_193.created_datetime)
VAR_207 = [row.ticket_id for row in VAR_206]
VAR_208 = dict([(row.ticket_id, row.created_datetime) for
row in VAR_206])
return dict(VAR_3=VAR_3, VAR_207=tickets, VAR_161=method,
VAR_208=times, VAR_101=db_ready)
else:
for VAR_70 in request.vars:
if VAR_70[:7] == 'delete_' and (not VAR_70 == "delete_all}"):
os.unlink(apath('%VAR_140/FUNC_42/%s' % (VAR_3, VAR_70[7:]), VAR_122=request))
VAR_209 = lambda p: os.stat(apath('%VAR_140/FUNC_42/%s' %
(VAR_3, p), VAR_122=request)).st_mtime
VAR_207 = sorted(
listdir(apath('%VAR_140/FUNC_42/' % VAR_3, VAR_122=request), '^\w.*'),
VAR_143=VAR_209,
reverse=True)
return dict(VAR_3=VAR_3, VAR_207=tickets, VAR_161=method, VAR_101=db_ready)
def FUNC_43(VAR_3):
VAR_102 = apath('%VAR_140/private' % VAR_3, VAR_122=request)
VAR_103 = os.path.join(VAR_102, 'ticket_storage.txt')
if os.path.exists(VAR_103):
VAR_166 = FUNC_3(VAR_103)
VAR_166 = db_string.strip().replace('\r', '').replace('\n', '')
elif is_gae:
VAR_166 = "google:datastore"
else:
return False
VAR_104 = 'web2py_ticket'
VAR_105 = VAR_104 + '_' + VAR_3
VAR_106 = apath('%VAR_140/databases' % VAR_3, VAR_122=request)
VAR_107 = DAL(VAR_166, VAR_168=VAR_106, auto_import=True)
if not VAR_107.get(VAR_105):
VAR_167 = VAR_107.define_table(
VAR_105,
Field('ticket_id', length=100),
Field('ticket_data', 'text'),
Field('created_datetime', 'datetime'),
)
return VAR_107, VAR_107.get(VAR_105)
def FUNC_44(VAR_15):
VAR_108 = VAR_15.replace('\\', '/')
if os.path.isabs(VAR_108) and os.path.isfile(VAR_108):
(VAR_168, VAR_5) = os.path.split(VAR_108)
(VAR_11, VAR_169) = os.path.splitext(VAR_5)
VAR_3 = FUNC_5()
VAR_170 = {'controllers': '.py', 'models': '.py', 'views': '.html'}
for VAR_143 in VAR_170.keys():
VAR_194 = VAR_168.endswith("%VAR_140/%s" % (VAR_3, VAR_143))
if VAR_169.lower() == VAR_170[VAR_143] and VAR_194:
return to_native(A('"' + VAR_108 + '"',
_href=URL(VAR_122=request,
VAR_221='edit/%VAR_140/%VAR_140/%s' % (VAR_3, VAR_143, VAR_5))).xml())
return ''
def FUNC_45(VAR_18):
VAR_109 = VAR_18.split('"')
VAR_110 = (len(VAR_109) != 0) and VAR_109[0] or ''
VAR_111 = 1
while VAR_111 < len(VAR_109):
VAR_171 = FUNC_44(VAR_109[VAR_111])
if VAR_171 == '':
VAR_110 += '"' + VAR_109[VAR_111]
else:
VAR_110 += VAR_171
if VAR_111 + 1 < len(VAR_109):
VAR_110 += VAR_109[VAR_111 + 1]
VAR_111 = VAR_111 + 1
VAR_111 = VAR_111 + 1
return VAR_110
class CLASS_0(object):
def __init__(self, VAR_112):
self.s = FUNC_45(CODE(VAR_112).xml())
def FUNC_63(self):
return self.s
def VAR_113():
if len(request.args) != 2:
session.flash = T('invalid ticket')
redirect(URL('site'))
VAR_3 = FUNC_5()
VAR_27 = request.env.web2py_version
VAR_113 = request.args[1]
VAR_114 = RestrictedError()
VAR_114.load(request, VAR_3, VAR_113)
return dict(VAR_3=app,
VAR_113=FUNC_46,
VAR_67=VAR_114.output,
VAR_18=(VAR_114.traceback and CLASS_0(VAR_114.traceback)),
snapshot=VAR_114.snapshot,
VAR_180=VAR_114.code,
layer=VAR_114.layer,
VAR_27=myversion)
def FUNC_47():
if len(request.args) != 2:
session.flash = T('invalid ticket')
redirect(URL('site'))
VAR_3 = FUNC_5()
VAR_27 = request.env.web2py_version
VAR_113 = request.args[1]
VAR_114 = RestrictedError()
request.tickets_db = FUNC_43(VAR_3)[0]
VAR_114.load(request, VAR_3, VAR_113)
VAR_43.view = 'default/VAR_113.html'
return dict(VAR_3=app,
VAR_113=FUNC_46,
VAR_67=VAR_114.output,
VAR_18=(VAR_114.traceback and CLASS_0(VAR_114.traceback)),
snapshot=VAR_114.snapshot,
VAR_180=VAR_114.code,
layer=VAR_114.layer,
VAR_27=myversion)
def VAR_125():
raise RuntimeError('admin VAR_113 generator at your service')
def FUNC_49():
VAR_3 = FUNC_5()
update_all_languages(apath(VAR_3, VAR_122=request))
session.flash = T('Language VAR_45 (static VAR_71) updated')
redirect(URL('design', VAR_98=VAR_3, VAR_157='languages'))
def FUNC_50():
if MULTI_USER_MODE:
if not db(db.auth_user).count():
auth.settings.registration_requires_approval = False
return dict(VAR_26=auth())
else:
return dict(VAR_26=T("Disabled"))
def FUNC_51():
gluon.rewrite.load()
redirect(URL('site'))
def FUNC_52():
if not (MULTI_USER_MODE and is_manager()):
session.flash = T('Not Authorized')
redirect(URL('site'))
db.auth_user.registration_key.writable = True
VAR_115 = SQLFORM.grid(db.auth_user)
return locals()
def FUNC_53():
if not (MULTI_USER_MODE and is_manager()):
session.flash = T('Not Authorized')
redirect(URL('site'))
VAR_26 = SQLFORM.factory(Field('emails', 'text'))
if VAR_26.process().accepted:
VAR_172 = [x.strip() for x in VAR_26.vars.emails.split('\n') if x.strip()]
VAR_173 = 0
for email in VAR_172:
if not db.auth_user(email=email):
VAR_173 += db.auth_user.insert(email=email) and 1 or 0
session.flash = T('%VAR_140 students registered', VAR_173)
redirect(URL('site'))
return locals()
def FUNC_54():
VAR_3 = FUNC_5()
if not VAR_19:
session.flash = VAR_117
redirect(URL('site'))
VAR_46 = FORM.confirm(T('Pull'),
{T('Cancel'): URL('site')})
if VAR_46.accepted:
try:
VAR_195 = git.Repo(os.path.join(apath(VAR_122=request), VAR_3))
VAR_196 = VAR_195.remotes.origin
VAR_196.fetch()
VAR_196.pull()
session.flash = T("Application updated via git pull")
redirect(URL('site'))
except git.CheckoutError:
session.flash = T("Pull failed, certain VAR_45 could not be checked VAR_39. Check logs for VAR_178.")
redirect(URL('site'))
except git.UnmergedEntriesError:
session.flash = T("Pull is not possible because you have unmerged VAR_45. Fix them up in the work tree, and then try again.")
redirect(URL('site'))
except git.GitCommandError:
session.flash = T(
"Pull failed, git exited abnormally. See logs for VAR_178.")
redirect(URL('site'))
except AssertionError:
session.flash = T("Pull is not possible because you have unmerged VAR_45. Fix them up in the work tree, and then try again.")
redirect(URL('site'))
elif 'cancel' in request.vars:
redirect(URL('site'))
return dict(VAR_3=VAR_3, VAR_46=dialog)
def FUNC_55():
VAR_3 = FUNC_5()
if not VAR_19:
session.flash = VAR_117
redirect(URL('site'))
VAR_26 = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY()))
VAR_26.element('input[type=submit]')['_value'] = T('Push')
VAR_26.add_button(T('Cancel'), URL('site'))
VAR_26.process()
if VAR_26.accepted:
try:
VAR_195 = git.Repo(os.path.join(apath(VAR_122=request), VAR_3))
VAR_197 = VAR_195.index
VAR_197.add([apath(VAR_122=request) + VAR_3 + '/*'])
VAR_198 = VAR_197.commit(VAR_26.vars.changelog)
VAR_196 = VAR_195.remotes.origin
VAR_196.push()
session.flash = T(
"Git VAR_195 updated with latest application changes.")
redirect(URL('site'))
except git.UnmergedEntriesError:
session.flash = T("Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.")
redirect(URL('site'))
return dict(VAR_3=VAR_3, VAR_26=form)
def VAR_94():
VAR_3 = request.args(0)
from gluon.serializers import loads_json
if not session.plugins:
try:
VAR_199 = urlopen("http://www.web2pyslices.com/" +
"public/api.json/action/list/content/Package?package" +
"_type=VAR_96&search_index=false").read()
session.plugins = loads_json(VAR_199)
except:
VAR_43.flash = T('Unable to download the list of plugins')
session.plugins = []
return dict(VAR_94=session.plugins["results"], VAR_3=request.args(0))
def FUNC_57():
VAR_3 = request.args(0)
VAR_116 = request.vars.source
VAR_96 = request.vars.plugin
if not (VAR_116 and VAR_3):
raise HTTP(500, T("Invalid request"))
if not VAR_116.lower().split('://')[0] in ('http','https'):
raise HTTP(500, T("Invalid request"))
VAR_26 = SQLFORM.factory()
VAR_110 = None
if VAR_26.process().accepted:
if "web2py.plugin." in VAR_116:
VAR_5 = "web2py.plugin.%VAR_140.w2p" % \
VAR_116.split("web2py.plugin.")[-1].split(".w2p")[0]
else:
VAR_5 = "web2py.plugin.%VAR_140.w2p" % cleanpath(VAR_96)
if plugin_install(VAR_3, urlopen(VAR_116),
request, VAR_5):
session.flash = T('New VAR_96 VAR_212: %s', VAR_5)
else:
session.flash = \
T('unable to install VAR_96 "%s"', VAR_5)
redirect(URL(VAR_221="plugins", VAR_98=[VAR_3, ]))
return dict(VAR_26=form, VAR_3=VAR_3, VAR_96=FUNC_36, VAR_116=source)
| [
1,
2,
5,
10,
22,
31,
34,
35,
43,
48,
52,
56,
57,
60,
61,
64,
65,
74,
75,
79,
82,
90,
91,
98,
99,
106,
107,
116,
117,
120,
136,
141,
145,
158,
159,
162,
165,
168,
177,
178,
185,
186,
188,
210,
211,
214,
216,
217,
219,
221,
229,
240,
243,
245,
260,
275,
277,
287,
291,
295,
313,
315,
321,
324,
329,
330,
347,
348,
351,
362,
371,
372,
386,
387,
391,
396,
399,
410,
411,
415,
422,
428,
445,
447,
448,
460,
461,
464,
471,
492,
493,
501,
503,
504,
520,
521,
528,
529,
535,
538,
541,
555,
572,
573,
587,
589,
594,
595,
603,
606,
608,
609,
612,
613,
617,
630,
631,
634,
641,
643,
646,
647,
654,
666,
668,
676,
689,
690,
701,
716,
720,
741,
743,
744,
752,
773,
783,
795,
796,
815,
825,
853,
854,
861,
866,
869,
883,
885,
886,
891,
900,
902,
903,
907,
909,
917,
919,
922,
923,
934,
936,
940,
942,
950,
955,
962,
964,
965,
972,
976,
980,
990,
992,
999,
1000,
1002,
1008,
1023,
1024,
1033,
1035,
1038,
1045,
1055,
1060,
1075,
1076,
1080,
1084,
1085,
1089,
1093,
1096,
1110,
1111,
1112,
1117,
1118,
1126,
1127,
1139,
1140,
1149,
1152,
1155,
1156,
1160,
1161,
1165,
1166,
1171,
1172,
1177,
1178,
1179,
1187,
1189,
1196,
1211,
1212,
1218,
1222,
1241,
1242,
1247,
1251,
1252,
1253,
1258,
1259,
1267,
1268,
1280,
1281,
1292,
1295,
1296,
1300,
1301,
1305,
1306,
1311,
1312,
1316,
1317,
1318,
1322,
1326,
1340,
1341,
1358,
1366,
1367,
1368,
1369,
1371,
1372,
1373,
1374,
1375,
1377,
1378,
1379,
1380,
1381,
1384,
1386,
1394,
1399,
1401,
1404,
1407,
1409,
1411,
1414,
1417,
1420,
1424,
1428,
1431,
1445,
1449,
1452,
1455,
1457,
1458,
1460,
1466,
1469,
1472,
1475,
1478,
1494,
1498,
1509,
1510,
1516,
1517,
1522,
1523,
1540,
1541,
1550,
1555,
1558,
1561,
1564,
1567,
1570,
1573,
1576,
1589,
1591,
1592,
1598,
1610,
1613,
1618,
1620,
1635,
1637,
1651,
1654,
1656,
1660,
1665,
1667,
1672,
1690,
1695,
1710,
1713,
1714,
1723,
1725,
1726,
1734,
1750,
1751,
1755,
1760,
1769,
1770,
1773,
1775,
1776,
1778,
1780,
1783,
1788,
1792,
1794,
1796,
1797,
1800,
1803,
1805,
1808,
1810,
1811,
1814,
1818,
1824,
1833,
1834,
1837,
1841,
1857,
1858,
1862,
1863,
1866,
1871,
1872,
1880,
1881,
1886,
1887,
1895,
1896,
1911,
1912,
1913,
1914,
1915,
1916,
1917,
1934,
1951,
1952,
1978,
1979,
1993,
1994,
2001,
2007,
2021,
119,
161,
180,
213,
523,
531,
575,
597,
633,
856,
857,
905,
906,
967,
1026,
1078,
1087,
1214,
1244,
1343,
1543,
1594,
1753,
1772,
1799,
1813,
1836,
1860,
1865,
1883,
1919,
1954,
921,
938,
1802,
1807
] | [
1,
2,
5,
10,
22,
31,
34,
35,
43,
48,
52,
56,
57,
60,
61,
64,
65,
74,
75,
79,
82,
90,
91,
98,
99,
106,
107,
116,
117,
120,
136,
141,
145,
158,
159,
162,
165,
168,
177,
178,
185,
186,
188,
210,
211,
214,
216,
217,
219,
221,
229,
240,
243,
245,
260,
275,
277,
287,
291,
295,
313,
315,
321,
324,
329,
330,
347,
348,
351,
362,
371,
372,
386,
387,
391,
396,
399,
410,
411,
415,
422,
428,
445,
447,
448,
460,
461,
464,
471,
492,
493,
501,
503,
504,
520,
521,
528,
529,
535,
538,
541,
555,
572,
573,
587,
589,
594,
595,
603,
606,
608,
609,
612,
613,
617,
630,
631,
634,
641,
643,
646,
647,
654,
666,
668,
676,
689,
690,
701,
716,
720,
741,
743,
744,
752,
773,
783,
795,
796,
815,
825,
853,
854,
861,
866,
869,
883,
885,
886,
891,
900,
902,
903,
907,
909,
917,
919,
922,
923,
934,
936,
940,
942,
950,
955,
962,
964,
965,
972,
976,
980,
990,
992,
999,
1000,
1002,
1008,
1023,
1024,
1033,
1035,
1038,
1045,
1055,
1060,
1075,
1076,
1080,
1084,
1085,
1089,
1093,
1096,
1110,
1111,
1112,
1117,
1118,
1126,
1127,
1139,
1140,
1149,
1152,
1155,
1156,
1160,
1161,
1165,
1166,
1171,
1172,
1177,
1178,
1179,
1187,
1189,
1196,
1211,
1212,
1218,
1222,
1241,
1242,
1247,
1251,
1252,
1253,
1258,
1259,
1267,
1268,
1280,
1281,
1292,
1295,
1296,
1300,
1301,
1305,
1306,
1311,
1312,
1316,
1317,
1318,
1322,
1326,
1340,
1341,
1358,
1366,
1367,
1368,
1369,
1371,
1372,
1373,
1374,
1375,
1377,
1378,
1379,
1380,
1381,
1384,
1386,
1394,
1399,
1401,
1404,
1407,
1409,
1411,
1414,
1417,
1420,
1424,
1428,
1431,
1445,
1449,
1452,
1455,
1457,
1458,
1460,
1466,
1469,
1472,
1475,
1478,
1494,
1498,
1509,
1510,
1516,
1517,
1522,
1523,
1540,
1541,
1550,
1555,
1558,
1561,
1564,
1567,
1570,
1573,
1576,
1589,
1591,
1592,
1598,
1610,
1613,
1618,
1620,
1635,
1637,
1651,
1654,
1656,
1660,
1665,
1667,
1672,
1690,
1695,
1710,
1713,
1714,
1723,
1725,
1726,
1734,
1750,
1751,
1755,
1760,
1769,
1770,
1773,
1775,
1776,
1778,
1780,
1783,
1788,
1792,
1794,
1796,
1797,
1800,
1803,
1805,
1808,
1810,
1811,
1814,
1818,
1824,
1833,
1834,
1837,
1841,
1857,
1858,
1862,
1863,
1866,
1871,
1872,
1880,
1881,
1886,
1887,
1895,
1896,
1911,
1912,
1913,
1914,
1915,
1916,
1917,
1934,
1951,
1952,
1978,
1979,
1993,
1994,
2001,
2007,
2021,
119,
161,
180,
213,
523,
531,
575,
597,
633,
856,
857,
905,
906,
967,
1026,
1078,
1087,
1214,
1244,
1343,
1543,
1594,
1753,
1772,
1799,
1813,
1836,
1860,
1865,
1883,
1919,
1954,
921,
938,
1802,
1807
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django.http.response import HttpResponseRedirect
from shuup.utils.excs import Problem
from shuup.xtheme.editing import set_edit_mode
def handle_command(request, command):
"""
Internal dispatch function.
:param request: A request
:type request: django.http.HttpRequest
:param command: Command string
:type command: str
:return: A response
:rtype: django.http.HttpResponse
"""
path = request.POST.get("path") or request.META.get("HTTP_REFERER") or "/"
if command == "edit_on" or command == "edit_off":
set_edit_mode(request, command.endswith("_on"))
return HttpResponseRedirect(path)
def command_dispatch(request):
"""
Xtheme command dispatch view.
:param request: A request
:type request: django.http.HttpRequest
:return: A response
:rtype: django.http.HttpResponse
"""
command = request.POST.get("command")
if command:
response = handle_command(request, command)
if response:
return response
raise Problem("Error! Unknown command: `%r`" % command)
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django.http.response import HttpResponseRedirect
from django.utils.html import escape
from shuup.utils.excs import Problem
from shuup.xtheme.editing import set_edit_mode
def handle_command(request, command):
"""
Internal dispatch function.
:param request: A request
:type request: django.http.HttpRequest
:param command: Command string
:type command: str
:return: A response
:rtype: django.http.HttpResponse
"""
path = request.POST.get("path") or request.META.get("HTTP_REFERER") or "/"
if command == "edit_on" or command == "edit_off":
set_edit_mode(request, command.endswith("_on"))
return HttpResponseRedirect(path)
def command_dispatch(request):
"""
Xtheme command dispatch view.
:param request: A request
:type request: django.http.HttpRequest
:return: A response
:rtype: django.http.HttpResponse
"""
command = request.POST.get("command")
if command:
response = handle_command(request, command)
if response:
return response
raise Problem("Error! Unknown command: `%r`" % escape(command))
| xss | {
"code": [
" raise Problem(\"Error! Unknown command: `%r`\" % command)"
],
"line_no": [
45
]
} | {
"code": [
" raise Problem(\"Error! Unknown command: `%r`\" % escape(command))"
],
"line_no": [
46
]
} |
from django.http.response import HttpResponseRedirect
from shuup.utils.excs import Problem
from shuup.xtheme.editing import set_edit_mode
def FUNC_0(VAR_0, VAR_1):
VAR_2 = VAR_0.POST.get("path") or VAR_0.META.get("HTTP_REFERER") or "/"
if VAR_1 == "edit_on" or VAR_1 == "edit_off":
set_edit_mode(VAR_0, VAR_1.endswith("_on"))
return HttpResponseRedirect(VAR_2)
def FUNC_1(VAR_0):
VAR_1 = VAR_0.POST.get("command")
if VAR_1:
VAR_3 = FUNC_0(VAR_0, VAR_1)
if VAR_3:
return VAR_3
raise Problem("Error! Unknown VAR_1: `%r`" % VAR_1)
|
from django.http.response import HttpResponseRedirect
from django.utils.html import escape
from shuup.utils.excs import Problem
from shuup.xtheme.editing import set_edit_mode
def FUNC_0(VAR_0, VAR_1):
VAR_2 = VAR_0.POST.get("path") or VAR_0.META.get("HTTP_REFERER") or "/"
if VAR_1 == "edit_on" or VAR_1 == "edit_off":
set_edit_mode(VAR_0, VAR_1.endswith("_on"))
return HttpResponseRedirect(VAR_2)
def FUNC_1(VAR_0):
VAR_1 = VAR_0.POST.get("command")
if VAR_1:
VAR_3 = FUNC_0(VAR_0, VAR_1)
if VAR_3:
return VAR_3
raise Problem("Error! Unknown VAR_1: `%r`" % escape(VAR_1))
| [
1,
2,
3,
4,
5,
6,
7,
9,
12,
13,
17,
29,
30,
34,
46,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
32,
33,
34,
35,
36,
37,
38,
39
] | [
1,
2,
3,
4,
5,
6,
7,
10,
13,
14,
18,
30,
31,
35,
47,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
33,
34,
35,
36,
37,
38,
39,
40
] |
0CWE-22
| # -*- coding: utf-8 -*-
'''
A few checks to make sure the environment is sane
'''
from __future__ import absolute_import
# Original Author: Jeff Schroeder <jeffschroeder@computer.org>
# Import python libs
import os
import re
import sys
import stat
import errno
import socket
import logging
# Import third party libs
try:
import win32file
except ImportError:
import resource
# Import salt libs
from salt.log import is_console_configured
from salt.log.setup import LOG_LEVELS
from salt.exceptions import SaltClientError, SaltSystemExit, \
CommandExecutionError
import salt.defaults.exitcodes
import salt.utils
log = logging.getLogger(__name__)
def zmq_version():
'''
ZeroMQ python bindings >= 2.1.9 are required
'''
try:
import zmq
except Exception:
# Return True for local mode
return True
ver = zmq.__version__
# The last matched group can be None if the version
# is something like 3.1 and that will work properly
match = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?', ver)
# Fallthrough and hope for the best
if not match:
msg = "Using untested zmq python bindings version: '{0}'".format(ver)
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write("WARNING {0}\n".format(msg))
return True
major, minor, point = match.groups()
if major.isdigit():
major = int(major)
if minor.isdigit():
minor = int(minor)
# point very well could be None
if point and point.isdigit():
point = int(point)
if major == 2 and minor == 1:
# zmq 2.1dev could be built against a newer libzmq
if "dev" in ver and not point:
msg = 'Using dev zmq module, please report unexpected results'
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write("WARNING: {0}\n".format(msg))
return True
elif point and point >= 9:
return True
elif major > 2 or (major == 2 and minor > 1):
return True
# If all else fails, gracefully croak and warn the user
log.critical('ZeroMQ python bindings >= 2.1.9 are required')
if 'salt-master' in sys.argv[0]:
msg = ('The Salt Master is unstable using a ZeroMQ version '
'lower than 2.1.11 and requires this fix: http://lists.zeromq.'
'org/pipermail/zeromq-dev/2011-June/012094.html')
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write('CRITICAL {0}\n'.format(msg))
return False
def lookup_family(hostname):
'''
Lookup a hostname and determine its address family. The first address returned
will be AF_INET6 if the system is IPv6-enabled, and AF_INET otherwise.
'''
# If lookups fail, fall back to AF_INET sockets (and v4 addresses).
fallback = socket.AF_INET
try:
hostnames = socket.getaddrinfo(
hostname or None, None, socket.AF_UNSPEC, socket.SOCK_STREAM
)
if not hostnames:
return fallback
h = hostnames[0]
return h[0]
except socket.gaierror:
return fallback
def verify_socket(interface, pub_port, ret_port):
'''
Attempt to bind to the sockets to verify that they are available
'''
addr_family = lookup_family(interface)
for port in pub_port, ret_port:
sock = socket.socket(addr_family, socket.SOCK_STREAM)
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((interface, int(port)))
except Exception as exc:
msg = 'Unable to bind socket {0}:{1}'.format(interface, port)
if exc.args:
msg = '{0}, error: {1}'.format(msg, str(exc))
else:
msg = '{0}, this might not be a problem.'.format(msg)
msg += '; Is there another salt-master running?'
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write('WARNING: {0}\n'.format(msg))
return False
finally:
sock.close()
return True
def verify_files(files, user):
'''
Verify that the named files exist and are owned by the named user
'''
if salt.utils.is_windows():
return True
import pwd # after confirming not running Windows
try:
pwnam = pwd.getpwnam(user)
uid = pwnam[2]
except KeyError:
err = ('Failed to prepare the Salt environment for user '
'{0}. The user is not available.\n').format(user)
sys.stderr.write(err)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for fn_ in files:
dirname = os.path.dirname(fn_)
try:
if dirname:
try:
os.makedirs(dirname)
except OSError as err:
if err.errno != errno.EEXIST:
raise
if not os.path.isfile(fn_):
with salt.utils.fopen(fn_, 'w+') as fp_:
fp_.write('')
except IOError as err:
if os.path.isfile(dirname):
msg = 'Failed to create path {0}, is {1} a file?'.format(fn_, dirname)
raise SaltSystemExit(msg=msg)
if err.errno != errno.EACCES:
raise
msg = 'No permissions to access "{0}", are you running as the correct user?'.format(fn_)
raise SaltSystemExit(msg=msg)
except OSError as err:
msg = 'Failed to create path "{0}" - {1}'.format(fn_, err)
raise SaltSystemExit(msg=msg)
stats = os.stat(fn_)
if uid != stats.st_uid:
try:
os.chown(fn_, uid, -1)
except OSError:
pass
return True
def verify_env(dirs, user, permissive=False, pki_dir='', skip_extra=False):
'''
Verify that the named directories are in place and that the environment
can shake the salt
'''
if salt.utils.is_windows():
return win_verify_env(dirs, permissive, pki_dir, skip_extra)
import pwd # after confirming not running Windows
try:
pwnam = pwd.getpwnam(user)
uid = pwnam[2]
gid = pwnam[3]
groups = salt.utils.get_gid_list(user, include_default=False)
except KeyError:
err = ('Failed to prepare the Salt environment for user '
'{0}. The user is not available.\n').format(user)
sys.stderr.write(err)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for dir_ in dirs:
if not dir_:
continue
if not os.path.isdir(dir_):
try:
cumask = os.umask(18) # 077
os.makedirs(dir_)
# If starting the process as root, chown the new dirs
if os.getuid() == 0:
os.chown(dir_, uid, gid)
os.umask(cumask)
except OSError as err:
msg = 'Failed to create directory path "{0}" - {1}\n'
sys.stderr.write(msg.format(dir_, err))
sys.exit(err.errno)
mode = os.stat(dir_)
# If starting the process as root, chown the new dirs
if os.getuid() == 0:
fmode = os.stat(dir_)
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
# Allow the directory to be owned by any group root
# belongs to if we say it's ok to be permissive
pass
else:
# chown the file for the new user
os.chown(dir_, uid, gid)
for subdir in [a for a in os.listdir(dir_) if 'jobs' not in a]:
fsubdir = os.path.join(dir_, subdir)
if '{0}jobs'.format(os.path.sep) in fsubdir:
continue
for root, dirs, files in os.walk(fsubdir):
for name in files:
if name.startswith('.'):
continue
path = os.path.join(root, name)
try:
fmode = os.stat(path)
except (IOError, OSError):
pass
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
pass
else:
# chown the file for the new user
os.chown(path, uid, gid)
for name in dirs:
path = os.path.join(root, name)
fmode = os.stat(path)
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
pass
else:
# chown the file for the new user
os.chown(path, uid, gid)
# Allow the pki dir to be 700 or 750, but nothing else.
# This prevents other users from writing out keys, while
# allowing the use-case of 3rd-party software (like django)
# to read in what it needs to integrate.
#
# If the permissions aren't correct, default to the more secure 700.
# If acls are enabled, the pki_dir needs to remain readable, this
# is still secure because the private keys are still only readable
# by the user running the master
if dir_ == pki_dir:
smode = stat.S_IMODE(mode.st_mode)
if smode != 448 and smode != 488:
if os.access(dir_, os.W_OK):
os.chmod(dir_, 448)
else:
msg = 'Unable to securely set the permissions of "{0}".'
msg = msg.format(dir_)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
if skip_extra is False:
# Run the extra verification checks
zmq_version()
def check_user(user):
'''
Check user and assign process uid/gid.
'''
if salt.utils.is_windows():
return True
if user == salt.utils.get_user():
return True
import pwd # after confirming not running Windows
try:
pwuser = pwd.getpwnam(user)
try:
if hasattr(os, 'initgroups'):
os.initgroups(user, pwuser.pw_gid) # pylint: disable=minimum-python-version
else:
os.setgroups(salt.utils.get_gid_list(user, include_default=False))
os.setgid(pwuser.pw_gid)
os.setuid(pwuser.pw_uid)
# We could just reset the whole environment but let's just override
# the variables we can get from pwuser
if 'HOME' in os.environ:
os.environ['HOME'] = pwuser.pw_dir
if 'SHELL' in os.environ:
os.environ['SHELL'] = pwuser.pw_shell
for envvar in ('USER', 'LOGNAME'):
if envvar in os.environ:
os.environ[envvar] = pwuser.pw_name
except OSError:
msg = 'Salt configured to run as user "{0}" but unable to switch.'
msg = msg.format(user)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
return False
except KeyError:
msg = 'User not found: "{0}"'.format(user)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
return False
return True
def list_path_traversal(path):
'''
Returns a full list of directories leading up to, and including, a path.
So list_path_traversal('/path/to/salt') would return:
['/', '/path', '/path/to', '/path/to/salt']
in that order.
This routine has been tested on Windows systems as well.
list_path_traversal('c:\\path\\to\\salt') on Windows would return:
['c:\\', 'c:\\path', 'c:\\path\\to', 'c:\\path\\to\\salt']
'''
out = [path]
(head, tail) = os.path.split(path)
if tail == '':
# paths with trailing separators will return an empty string
out = [head]
(head, tail) = os.path.split(head)
while head != out[0]:
# loop until head is the same two consecutive times
out.insert(0, head)
(head, tail) = os.path.split(head)
return out
def check_path_traversal(path, user='root', skip_perm_errors=False):
'''
Walk from the root up to a directory and verify that the current
user has access to read each directory. This is used for making
sure a user can read all parent directories of the minion's key
before trying to go and generate a new key and raising an IOError
'''
for tpath in list_path_traversal(path):
if not os.access(tpath, os.R_OK):
msg = 'Could not access {0}.'.format(tpath)
if not os.path.exists(tpath):
msg += ' Path does not exist.'
else:
current_user = salt.utils.get_user()
# Make the error message more intelligent based on how
# the user invokes salt-call or whatever other script.
if user != current_user:
msg += ' Try running as user {0}.'.format(user)
else:
msg += ' Please give {0} read permissions.'.format(user)
# We don't need to bail on config file permission errors
# if the CLI
# process is run with the -a flag
if skip_perm_errors:
return
# Propagate this exception up so there isn't a sys.exit()
# in the middle of code that could be imported elsewhere.
raise SaltClientError(msg)
def check_max_open_files(opts):
'''
Check the number of max allowed open files and adjust if needed
'''
mof_c = opts.get('max_open_files', 100000)
if sys.platform.startswith('win'):
# Check the Windows API for more detail on this
# http://msdn.microsoft.com/en-us/library/xt874334(v=vs.71).aspx
# and the python binding http://timgolden.me.uk/pywin32-docs/win32file.html
mof_s = mof_h = win32file._getmaxstdio()
else:
mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE)
accepted_keys_dir = os.path.join(opts.get('pki_dir'), 'minions')
accepted_count = len(os.listdir(accepted_keys_dir))
log.debug(
'This salt-master instance has accepted {0} minion keys.'.format(
accepted_count
)
)
level = logging.INFO
if (accepted_count * 4) <= mof_s:
# We check for the soft value of max open files here because that's the
# value the user chose to raise to.
#
# The number of accepted keys multiplied by four(4) is lower than the
# soft value, everything should be OK
return
msg = (
'The number of accepted minion keys({0}) should be lower than 1/4 '
'of the max open files soft setting({1}). '.format(
accepted_count, mof_s
)
)
if accepted_count >= mof_s:
# This should never occur, it might have already crashed
msg += 'salt-master will crash pretty soon! '
level = logging.CRITICAL
elif (accepted_count * 2) >= mof_s:
# This is way too low, CRITICAL
level = logging.CRITICAL
elif (accepted_count * 3) >= mof_s:
level = logging.WARNING
# The accepted count is more than 3 time, WARN
elif (accepted_count * 4) >= mof_s:
level = logging.INFO
if mof_c < mof_h:
msg += ('According to the system\'s hard limit, there\'s still a '
'margin of {0} to raise the salt\'s max_open_files '
'setting. ').format(mof_h - mof_c)
msg += 'Please consider raising this value.'
log.log(level=level, msg=msg)
def clean_path(root, path, subdir=False):
'''
Accepts the root the path needs to be under and verifies that the path is
under said root. Pass in subdir=True if the path can result in a
subdirectory of the root instead of having to reside directly in the root
'''
if not os.path.isabs(root):
return ''
if not os.path.isabs(path):
path = os.path.join(root, path)
path = os.path.normpath(path)
if subdir:
if path.startswith(root):
return path
else:
if os.path.dirname(path) == os.path.normpath(root):
return path
return ''
def clean_id(id_):
'''
Returns if the passed id is clean.
'''
if re.search(r'\.\.\{sep}'.format(sep=os.sep), id_):
return False
return True
def valid_id(opts, id_):
'''
Returns if the passed id is valid
'''
try:
return bool(clean_path(opts['pki_dir'], id_)) and clean_id(id_)
except (AttributeError, KeyError, TypeError) as e:
return False
def safe_py_code(code):
'''
Check a string to see if it has any potentially unsafe routines which
could be executed via python, this routine is used to improve the
safety of modules suct as virtualenv
'''
bads = (
'import',
';',
'subprocess',
'eval',
'open',
'file',
'exec',
'input')
for bad in bads:
if code.count(bad):
return False
return True
def verify_log(opts):
'''
If an insecre logging configuration is found, show a warning
'''
level = LOG_LEVELS.get(str(opts.get('log_level')).lower(), logging.NOTSET)
if level < logging.INFO:
log.warning('Insecure logging configuration detected! Sensitive data may be logged.')
def win_verify_env(dirs, permissive=False, pki_dir='', skip_extra=False):
'''
Verify that the named directories are in place and that the environment
can shake the salt
'''
import salt.utils.win_functions
import salt.utils.win_dacl
# Get the root path directory where salt is installed
path = dirs[0]
while os.path.basename(path) not in ['salt', 'salt-tests-tmpdir']:
path, base = os.path.split(path)
# Create the root path directory if missing
if not os.path.isdir(path):
os.makedirs(path)
# Set permissions to the root path directory
current_user = salt.utils.win_functions.get_current_user()
if salt.utils.win_functions.is_admin(current_user):
try:
# Make the Administrators group owner
# Use the SID to be locale agnostic
salt.utils.win_dacl.set_owner(path, 'S-1-5-32-544')
except CommandExecutionError:
msg = 'Unable to securely set the owner of "{0}".'.format(path)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
if not permissive:
try:
# Get a clean dacl by not passing an obj_name
dacl = salt.utils.win_dacl.dacl()
# Add aces to the dacl, use the GUID (locale non-specific)
# Administrators Group
dacl.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
# System
dacl.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
# Owner
dacl.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
# Save the dacl to the object
dacl.save(path, True)
except CommandExecutionError:
msg = 'Unable to securely set the permissions of ' \
'"{0}".'.format(path)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
# Create the directories
for dir_ in dirs:
if not dir_:
continue
if not os.path.isdir(dir_):
try:
os.makedirs(dir_)
except OSError as err:
msg = 'Failed to create directory path "{0}" - {1}\n'
sys.stderr.write(msg.format(dir_, err))
sys.exit(err.errno)
# The PKI dir gets its own permissions
if dir_ == pki_dir:
try:
# Make Administrators group the owner
salt.utils.win_dacl.set_owner(path, 'S-1-5-32-544')
# Give Admins, System and Owner permissions
# Get a clean dacl by not passing an obj_name
dacl = salt.utils.win_dacl.dacl()
# Add aces to the dacl, use the GUID (locale non-specific)
# Administrators Group
dacl.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
# System
dacl.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
# Owner
dacl.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
# Save the dacl to the object
dacl.save(dir_, True)
except CommandExecutionError:
msg = 'Unable to securely set the permissions of "{0}".'
msg = msg.format(dir_)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
if skip_extra is False:
# Run the extra verification checks
zmq_version()
| # -*- coding: utf-8 -*-
'''
A few checks to make sure the environment is sane
'''
from __future__ import absolute_import
# Original Author: Jeff Schroeder <jeffschroeder@computer.org>
# Import python libs
import os
import re
import sys
import stat
import errno
import socket
import logging
# Import third party libs
try:
import win32file
except ImportError:
import resource
# Import salt libs
from salt.log import is_console_configured
from salt.log.setup import LOG_LEVELS
from salt.exceptions import SaltClientError, SaltSystemExit, \
CommandExecutionError
import salt.defaults.exitcodes
import salt.utils
log = logging.getLogger(__name__)
def zmq_version():
'''
ZeroMQ python bindings >= 2.1.9 are required
'''
try:
import zmq
except Exception:
# Return True for local mode
return True
ver = zmq.__version__
# The last matched group can be None if the version
# is something like 3.1 and that will work properly
match = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?', ver)
# Fallthrough and hope for the best
if not match:
msg = "Using untested zmq python bindings version: '{0}'".format(ver)
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write("WARNING {0}\n".format(msg))
return True
major, minor, point = match.groups()
if major.isdigit():
major = int(major)
if minor.isdigit():
minor = int(minor)
# point very well could be None
if point and point.isdigit():
point = int(point)
if major == 2 and minor == 1:
# zmq 2.1dev could be built against a newer libzmq
if "dev" in ver and not point:
msg = 'Using dev zmq module, please report unexpected results'
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write("WARNING: {0}\n".format(msg))
return True
elif point and point >= 9:
return True
elif major > 2 or (major == 2 and minor > 1):
return True
# If all else fails, gracefully croak and warn the user
log.critical('ZeroMQ python bindings >= 2.1.9 are required')
if 'salt-master' in sys.argv[0]:
msg = ('The Salt Master is unstable using a ZeroMQ version '
'lower than 2.1.11 and requires this fix: http://lists.zeromq.'
'org/pipermail/zeromq-dev/2011-June/012094.html')
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write('CRITICAL {0}\n'.format(msg))
return False
def lookup_family(hostname):
'''
Lookup a hostname and determine its address family. The first address returned
will be AF_INET6 if the system is IPv6-enabled, and AF_INET otherwise.
'''
# If lookups fail, fall back to AF_INET sockets (and v4 addresses).
fallback = socket.AF_INET
try:
hostnames = socket.getaddrinfo(
hostname or None, None, socket.AF_UNSPEC, socket.SOCK_STREAM
)
if not hostnames:
return fallback
h = hostnames[0]
return h[0]
except socket.gaierror:
return fallback
def verify_socket(interface, pub_port, ret_port):
'''
Attempt to bind to the sockets to verify that they are available
'''
addr_family = lookup_family(interface)
for port in pub_port, ret_port:
sock = socket.socket(addr_family, socket.SOCK_STREAM)
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((interface, int(port)))
except Exception as exc:
msg = 'Unable to bind socket {0}:{1}'.format(interface, port)
if exc.args:
msg = '{0}, error: {1}'.format(msg, str(exc))
else:
msg = '{0}, this might not be a problem.'.format(msg)
msg += '; Is there another salt-master running?'
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write('WARNING: {0}\n'.format(msg))
return False
finally:
sock.close()
return True
def verify_files(files, user):
'''
Verify that the named files exist and are owned by the named user
'''
if salt.utils.is_windows():
return True
import pwd # after confirming not running Windows
try:
pwnam = pwd.getpwnam(user)
uid = pwnam[2]
except KeyError:
err = ('Failed to prepare the Salt environment for user '
'{0}. The user is not available.\n').format(user)
sys.stderr.write(err)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for fn_ in files:
dirname = os.path.dirname(fn_)
try:
if dirname:
try:
os.makedirs(dirname)
except OSError as err:
if err.errno != errno.EEXIST:
raise
if not os.path.isfile(fn_):
with salt.utils.fopen(fn_, 'w+') as fp_:
fp_.write('')
except IOError as err:
if os.path.isfile(dirname):
msg = 'Failed to create path {0}, is {1} a file?'.format(fn_, dirname)
raise SaltSystemExit(msg=msg)
if err.errno != errno.EACCES:
raise
msg = 'No permissions to access "{0}", are you running as the correct user?'.format(fn_)
raise SaltSystemExit(msg=msg)
except OSError as err:
msg = 'Failed to create path "{0}" - {1}'.format(fn_, err)
raise SaltSystemExit(msg=msg)
stats = os.stat(fn_)
if uid != stats.st_uid:
try:
os.chown(fn_, uid, -1)
except OSError:
pass
return True
def verify_env(dirs, user, permissive=False, pki_dir='', skip_extra=False):
'''
Verify that the named directories are in place and that the environment
can shake the salt
'''
if salt.utils.is_windows():
return win_verify_env(dirs, permissive, pki_dir, skip_extra)
import pwd # after confirming not running Windows
try:
pwnam = pwd.getpwnam(user)
uid = pwnam[2]
gid = pwnam[3]
groups = salt.utils.get_gid_list(user, include_default=False)
except KeyError:
err = ('Failed to prepare the Salt environment for user '
'{0}. The user is not available.\n').format(user)
sys.stderr.write(err)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for dir_ in dirs:
if not dir_:
continue
if not os.path.isdir(dir_):
try:
cumask = os.umask(18) # 077
os.makedirs(dir_)
# If starting the process as root, chown the new dirs
if os.getuid() == 0:
os.chown(dir_, uid, gid)
os.umask(cumask)
except OSError as err:
msg = 'Failed to create directory path "{0}" - {1}\n'
sys.stderr.write(msg.format(dir_, err))
sys.exit(err.errno)
mode = os.stat(dir_)
# If starting the process as root, chown the new dirs
if os.getuid() == 0:
fmode = os.stat(dir_)
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
# Allow the directory to be owned by any group root
# belongs to if we say it's ok to be permissive
pass
else:
# chown the file for the new user
os.chown(dir_, uid, gid)
for subdir in [a for a in os.listdir(dir_) if 'jobs' not in a]:
fsubdir = os.path.join(dir_, subdir)
if '{0}jobs'.format(os.path.sep) in fsubdir:
continue
for root, dirs, files in os.walk(fsubdir):
for name in files:
if name.startswith('.'):
continue
path = os.path.join(root, name)
try:
fmode = os.stat(path)
except (IOError, OSError):
pass
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
pass
else:
# chown the file for the new user
os.chown(path, uid, gid)
for name in dirs:
path = os.path.join(root, name)
fmode = os.stat(path)
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
pass
else:
# chown the file for the new user
os.chown(path, uid, gid)
# Allow the pki dir to be 700 or 750, but nothing else.
# This prevents other users from writing out keys, while
# allowing the use-case of 3rd-party software (like django)
# to read in what it needs to integrate.
#
# If the permissions aren't correct, default to the more secure 700.
# If acls are enabled, the pki_dir needs to remain readable, this
# is still secure because the private keys are still only readable
# by the user running the master
if dir_ == pki_dir:
smode = stat.S_IMODE(mode.st_mode)
if smode != 448 and smode != 488:
if os.access(dir_, os.W_OK):
os.chmod(dir_, 448)
else:
msg = 'Unable to securely set the permissions of "{0}".'
msg = msg.format(dir_)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
if skip_extra is False:
# Run the extra verification checks
zmq_version()
def check_user(user):
'''
Check user and assign process uid/gid.
'''
if salt.utils.is_windows():
return True
if user == salt.utils.get_user():
return True
import pwd # after confirming not running Windows
try:
pwuser = pwd.getpwnam(user)
try:
if hasattr(os, 'initgroups'):
os.initgroups(user, pwuser.pw_gid) # pylint: disable=minimum-python-version
else:
os.setgroups(salt.utils.get_gid_list(user, include_default=False))
os.setgid(pwuser.pw_gid)
os.setuid(pwuser.pw_uid)
# We could just reset the whole environment but let's just override
# the variables we can get from pwuser
if 'HOME' in os.environ:
os.environ['HOME'] = pwuser.pw_dir
if 'SHELL' in os.environ:
os.environ['SHELL'] = pwuser.pw_shell
for envvar in ('USER', 'LOGNAME'):
if envvar in os.environ:
os.environ[envvar] = pwuser.pw_name
except OSError:
msg = 'Salt configured to run as user "{0}" but unable to switch.'
msg = msg.format(user)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
return False
except KeyError:
msg = 'User not found: "{0}"'.format(user)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
return False
return True
def list_path_traversal(path):
'''
Returns a full list of directories leading up to, and including, a path.
So list_path_traversal('/path/to/salt') would return:
['/', '/path', '/path/to', '/path/to/salt']
in that order.
This routine has been tested on Windows systems as well.
list_path_traversal('c:\\path\\to\\salt') on Windows would return:
['c:\\', 'c:\\path', 'c:\\path\\to', 'c:\\path\\to\\salt']
'''
out = [path]
(head, tail) = os.path.split(path)
if tail == '':
# paths with trailing separators will return an empty string
out = [head]
(head, tail) = os.path.split(head)
while head != out[0]:
# loop until head is the same two consecutive times
out.insert(0, head)
(head, tail) = os.path.split(head)
return out
def check_path_traversal(path, user='root', skip_perm_errors=False):
'''
Walk from the root up to a directory and verify that the current
user has access to read each directory. This is used for making
sure a user can read all parent directories of the minion's key
before trying to go and generate a new key and raising an IOError
'''
for tpath in list_path_traversal(path):
if not os.access(tpath, os.R_OK):
msg = 'Could not access {0}.'.format(tpath)
if not os.path.exists(tpath):
msg += ' Path does not exist.'
else:
current_user = salt.utils.get_user()
# Make the error message more intelligent based on how
# the user invokes salt-call or whatever other script.
if user != current_user:
msg += ' Try running as user {0}.'.format(user)
else:
msg += ' Please give {0} read permissions.'.format(user)
# We don't need to bail on config file permission errors
# if the CLI
# process is run with the -a flag
if skip_perm_errors:
return
# Propagate this exception up so there isn't a sys.exit()
# in the middle of code that could be imported elsewhere.
raise SaltClientError(msg)
def check_max_open_files(opts):
'''
Check the number of max allowed open files and adjust if needed
'''
mof_c = opts.get('max_open_files', 100000)
if sys.platform.startswith('win'):
# Check the Windows API for more detail on this
# http://msdn.microsoft.com/en-us/library/xt874334(v=vs.71).aspx
# and the python binding http://timgolden.me.uk/pywin32-docs/win32file.html
mof_s = mof_h = win32file._getmaxstdio()
else:
mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE)
accepted_keys_dir = os.path.join(opts.get('pki_dir'), 'minions')
accepted_count = len(os.listdir(accepted_keys_dir))
log.debug(
'This salt-master instance has accepted {0} minion keys.'.format(
accepted_count
)
)
level = logging.INFO
if (accepted_count * 4) <= mof_s:
# We check for the soft value of max open files here because that's the
# value the user chose to raise to.
#
# The number of accepted keys multiplied by four(4) is lower than the
# soft value, everything should be OK
return
msg = (
'The number of accepted minion keys({0}) should be lower than 1/4 '
'of the max open files soft setting({1}). '.format(
accepted_count, mof_s
)
)
if accepted_count >= mof_s:
# This should never occur, it might have already crashed
msg += 'salt-master will crash pretty soon! '
level = logging.CRITICAL
elif (accepted_count * 2) >= mof_s:
# This is way too low, CRITICAL
level = logging.CRITICAL
elif (accepted_count * 3) >= mof_s:
level = logging.WARNING
# The accepted count is more than 3 time, WARN
elif (accepted_count * 4) >= mof_s:
level = logging.INFO
if mof_c < mof_h:
msg += ('According to the system\'s hard limit, there\'s still a '
'margin of {0} to raise the salt\'s max_open_files '
'setting. ').format(mof_h - mof_c)
msg += 'Please consider raising this value.'
log.log(level=level, msg=msg)
def clean_path(root, path, subdir=False):
'''
Accepts the root the path needs to be under and verifies that the path is
under said root. Pass in subdir=True if the path can result in a
subdirectory of the root instead of having to reside directly in the root
'''
if not os.path.isabs(root):
return ''
if not os.path.isabs(path):
path = os.path.join(root, path)
path = os.path.normpath(path)
if subdir:
if path.startswith(root):
return path
else:
if os.path.dirname(path) == os.path.normpath(root):
return path
return ''
def valid_id(opts, id_):
'''
Returns if the passed id is valid
'''
try:
if any(x in id_ for x in ('/', '\\', '\0')):
return False
return bool(clean_path(opts['pki_dir'], id_))
except (AttributeError, KeyError, TypeError):
return False
def safe_py_code(code):
    '''
    Check a string to see if it has any potentially unsafe routines which
    could be executed via python, this routine is used to improve the
    safety of modules suct as virtualenv
    '''
    # Substring blacklist: a single occurrence marks the code as unsafe.
    unsafe_tokens = (
        'import',
        ';',
        'subprocess',
        'eval',
        'open',
        'file',
        'exec',
        'input')
    return not any(token in code for token in unsafe_tokens)
def verify_log(opts):
    '''
    Warn when the configured log level is verbose enough (below INFO) to
    leak sensitive data into the logs.
    '''
    configured = str(opts.get('log_level')).lower()
    if LOG_LEVELS.get(configured, logging.NOTSET) < logging.INFO:
        log.warning('Insecure logging configuration detected! Sensitive data may be logged.')
def win_verify_env(dirs, permissive=False, pki_dir='', skip_extra=False):
    '''
    Verify that the named directories are in place and that the environment
    can shake the salt

    dirs       -- list of directory paths to create and secure
    permissive -- if True, do not tighten the DACL on the salt root path
    pki_dir    -- directory that receives its own restrictive permissions
    skip_extra -- if True, skip the ZeroMQ version verification
    '''
    import salt.utils.win_functions
    import salt.utils.win_dacl

    # Get the root path directory where salt is installed
    path = dirs[0]
    # NOTE(review): if no component of the path is named 'salt' or
    # 'salt-tests-tmpdir' this loop never terminates once os.path.split
    # reaches the drive root — confirm callers always pass salt-rooted paths.
    while os.path.basename(path) not in ['salt', 'salt-tests-tmpdir']:
        path, base = os.path.split(path)

    # Create the root path directory if missing
    if not os.path.isdir(path):
        os.makedirs(path)

    # Set permissions to the root path directory
    current_user = salt.utils.win_functions.get_current_user()
    if salt.utils.win_functions.is_admin(current_user):
        try:
            # Make the Administrators group owner
            # Use the SID to be locale agnostic
            salt.utils.win_dacl.set_owner(path, 'S-1-5-32-544')

        except CommandExecutionError:
            msg = 'Unable to securely set the owner of "{0}".'.format(path)
            if is_console_configured():
                log.critical(msg)
            else:
                sys.stderr.write("CRITICAL: {0}\n".format(msg))

        if not permissive:
            try:
                # Get a clean dacl by not passing an obj_name
                dacl = salt.utils.win_dacl.dacl()

                # Add aces to the dacl, use the GUID (locale non-specific)
                # Administrators Group
                dacl.add_ace('S-1-5-32-544', 'grant', 'full_control',
                             'this_folder_subfolders_files')
                # System
                dacl.add_ace('S-1-5-18', 'grant', 'full_control',
                             'this_folder_subfolders_files')
                # Owner
                dacl.add_ace('S-1-3-4', 'grant', 'full_control',
                             'this_folder_subfolders_files')

                # Save the dacl to the object
                dacl.save(path, True)

            except CommandExecutionError:
                msg = 'Unable to securely set the permissions of ' \
                      '"{0}".'.format(path)
                if is_console_configured():
                    log.critical(msg)
                else:
                    sys.stderr.write("CRITICAL: {0}\n".format(msg))

    # Create the directories
    for dir_ in dirs:
        if not dir_:
            continue
        if not os.path.isdir(dir_):
            try:
                os.makedirs(dir_)
            except OSError as err:
                msg = 'Failed to create directory path "{0}" - {1}\n'
                sys.stderr.write(msg.format(dir_, err))
                sys.exit(err.errno)

        # The PKI dir gets its own permissions
        if dir_ == pki_dir:
            try:
                # Make Administrators group the owner of the PKI dir itself.
                # BUGFIX: previously this set the owner on the salt root
                # ``path`` instead of ``dir_``, leaving the PKI directory's
                # ownership untouched.
                salt.utils.win_dacl.set_owner(dir_, 'S-1-5-32-544')

                # Give Admins, System and Owner permissions
                # Get a clean dacl by not passing an obj_name
                dacl = salt.utils.win_dacl.dacl()

                # Add aces to the dacl, use the GUID (locale non-specific)
                # Administrators Group
                dacl.add_ace('S-1-5-32-544', 'grant', 'full_control',
                             'this_folder_subfolders_files')
                # System
                dacl.add_ace('S-1-5-18', 'grant', 'full_control',
                             'this_folder_subfolders_files')
                # Owner
                dacl.add_ace('S-1-3-4', 'grant', 'full_control',
                             'this_folder_subfolders_files')

                # Save the dacl to the object
                dacl.save(dir_, True)

            except CommandExecutionError:
                msg = 'Unable to securely set the permissions of "{0}".'
                msg = msg.format(dir_)
                if is_console_configured():
                    log.critical(msg)
                else:
                    sys.stderr.write("CRITICAL: {0}\n".format(msg))

    if skip_extra is False:
        # Run the extra verification checks
        zmq_version()
| path_disclosure | {
"code": [
"def clean_id(id_):",
" Returns if the passed id is clean.",
" if re.search(r'\\.\\.\\{sep}'.format(sep=os.sep), id_):",
" return False",
" return True",
" return bool(clean_path(opts['pki_dir'], id_)) and clean_id(id_)",
" except (AttributeError, KeyError, TypeError) as e:"
],
"line_no": [
483,
485,
487,
488,
489,
497,
498
]
} | {
"code": [
" if any(x in id_ for x in ('/', '\\\\', '\\0')):",
" return False",
" except (AttributeError, KeyError, TypeError):"
],
"line_no": [
488,
489,
491
]
} |
from __future__ import absolute_import
import .os
import re
import sys
import stat
import .errno
import .socket
import .logging
try:
import win32file
except ImportError:
import resource
from salt.log import is_console_configured
from salt.log.setup import LOG_LEVELS
from salt.exceptions import SaltClientError, SaltSystemExit, \
CommandExecutionError
import salt.defaults.exitcodes
import salt.utils
VAR_0 = logging.getLogger(__name__)
def FUNC_0():
try:
import zmq
except Exception:
return True
VAR_18 = zmq.__version__
VAR_19 = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?', VAR_18)
if not VAR_19:
VAR_32 = "Using untested zmq python bindings version: '{0}'".format(VAR_18)
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write("WARNING {0}\n".format(VAR_32))
return True
VAR_20, VAR_21, VAR_22 = VAR_19.groups()
if VAR_20.isdigit():
VAR_20 = int(VAR_20)
if VAR_21.isdigit():
VAR_21 = int(VAR_21)
if VAR_22 and VAR_22.isdigit():
VAR_22 = int(VAR_22)
if VAR_20 == 2 and VAR_21 == 1:
if "dev" in VAR_18 and not VAR_22:
VAR_32 = 'Using dev zmq module, please report unexpected results'
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write("WARNING: {0}\n".format(VAR_32))
return True
elif VAR_22 and VAR_22 >= 9:
return True
elif VAR_20 > 2 or (VAR_20 == 2 and VAR_21 > 1):
return True
VAR_0.critical('ZeroMQ python bindings >= 2.1.9 are required')
if 'salt-master' in sys.argv[0]:
VAR_32 = ('The Salt Master is unstable using a ZeroMQ version '
'lower than 2.1.11 and requires this fix: http://lists.zeromq.'
'org/pipermail/zeromq-dev/2011-June/012094.html')
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write('CRITICAL {0}\n'.format(VAR_32))
return False
def FUNC_1(VAR_1):
VAR_23 = socket.AF_INET
try:
VAR_35 = socket.getaddrinfo(
VAR_1 or None, None, socket.AF_UNSPEC, socket.SOCK_STREAM
)
if not VAR_35:
return VAR_23
VAR_36 = VAR_35[0]
return VAR_36[0]
except socket.gaierror:
return VAR_23
def FUNC_2(VAR_2, VAR_3, VAR_4):
VAR_24 = FUNC_1(VAR_2)
for port in VAR_3, VAR_4:
VAR_37 = socket.socket(VAR_24, socket.SOCK_STREAM)
try:
VAR_37.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
VAR_37.bind((VAR_2, int(port)))
except Exception as exc:
VAR_32 = 'Unable to bind socket {0}:{1}'.format(VAR_2, port)
if exc.args:
VAR_32 = '{0}, error: {1}'.format(VAR_32, str(exc))
else:
VAR_32 = '{0}, this might not be a problem.'.format(VAR_32)
msg += '; Is there another salt-master running?'
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write('WARNING: {0}\n'.format(VAR_32))
return False
finally:
VAR_37.close()
return True
def FUNC_3(VAR_5, VAR_6):
if salt.utils.is_windows():
return True
import pwd # after confirming not running Windows
try:
VAR_38 = pwd.getpwnam(VAR_6)
VAR_39 = VAR_38[2]
except KeyError:
VAR_49 = ('Failed to prepare the Salt environment for VAR_6 '
'{0}. The VAR_6 is not available.\n').format(VAR_6)
sys.stderr.write(VAR_49)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for fn_ in VAR_5:
VAR_40 = VAR_54.path.dirname(fn_)
try:
if VAR_40:
try:
VAR_54.makedirs(VAR_40)
except OSError as VAR_49:
if VAR_49.errno != errno.EEXIST:
raise
if not VAR_54.path.isfile(fn_):
with salt.utils.fopen(fn_, 'w+') as fp_:
fp_.write('')
except IOError as VAR_49:
if VAR_54.path.isfile(VAR_40):
VAR_32 = 'Failed to create VAR_11 {0}, is {1} a file?'.format(fn_, VAR_40)
raise SaltSystemExit(VAR_32=msg)
if VAR_49.errno != errno.EACCES:
raise
VAR_32 = 'No permissions to access "{0}", are you running as the correct VAR_6?'.format(fn_)
raise SaltSystemExit(VAR_32=msg)
except OSError as VAR_49:
VAR_32 = 'Failed to create VAR_11 "{0}" - {1}'.format(fn_, VAR_49)
raise SaltSystemExit(VAR_32=msg)
VAR_41 = VAR_54.stat(fn_)
if VAR_39 != VAR_41.st_uid:
try:
VAR_54.chown(fn_, VAR_39, -1)
except OSError:
pass
return True
def FUNC_4(VAR_7, VAR_6, VAR_8=False, VAR_9='', VAR_10=False):
if salt.utils.is_windows():
return FUNC_14(VAR_7, VAR_8, VAR_9, VAR_10)
import pwd # after confirming not running Windows
try:
VAR_38 = pwd.getpwnam(VAR_6)
VAR_39 = VAR_38[2]
VAR_42 = VAR_38[3]
VAR_43 = salt.utils.get_gid_list(VAR_6, include_default=False)
except KeyError:
VAR_49 = ('Failed to prepare the Salt environment for VAR_6 '
'{0}. The VAR_6 is not available.\n').format(VAR_6)
sys.stderr.write(VAR_49)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for dir_ in VAR_7:
if not dir_:
continue
if not VAR_54.path.isdir(dir_):
try:
VAR_52 = VAR_54.umask(18) # 077
VAR_54.makedirs(dir_)
if VAR_54.getuid() == 0:
VAR_54.chown(dir_, VAR_39, VAR_42)
VAR_54.umask(VAR_52)
except OSError as VAR_49:
VAR_32 = 'Failed to create directory VAR_11 "{0}" - {1}\n'
sys.stderr.write(VAR_32.format(dir_, VAR_49))
sys.exit(VAR_49.errno)
VAR_44 = VAR_54.stat(dir_)
if VAR_54.getuid() == 0:
VAR_50 = VAR_54.stat(dir_)
if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
if VAR_8 and VAR_50.st_gid in VAR_43:
pass
else:
VAR_54.chown(dir_, VAR_39, VAR_42)
for VAR_15 in [a for a in VAR_54.listdir(dir_) if 'jobs' not in a]:
VAR_53 = VAR_54.path.join(dir_, VAR_15)
if '{0}jobs'.format(VAR_54.path.sep) in VAR_53:
continue
for VAR_14, VAR_7, VAR_5 in VAR_54.walk(VAR_53):
for name in VAR_5:
if name.startswith('.'):
continue
VAR_11 = VAR_54.path.join(VAR_14, name)
try:
VAR_50 = VAR_54.stat(VAR_11)
except (IOError, OSError):
pass
if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
if VAR_8 and VAR_50.st_gid in VAR_43:
pass
else:
VAR_54.chown(VAR_11, VAR_39, VAR_42)
for name in VAR_7:
VAR_11 = VAR_54.path.join(VAR_14, name)
VAR_50 = VAR_54.stat(VAR_11)
if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
if VAR_8 and VAR_50.st_gid in VAR_43:
pass
else:
VAR_54.chown(VAR_11, VAR_39, VAR_42)
if dir_ == VAR_9:
VAR_51 = stat.S_IMODE(VAR_44.st_mode)
if VAR_51 != 448 and VAR_51 != 488:
if VAR_54.access(dir_, VAR_54.W_OK):
VAR_54.chmod(dir_, 448)
else:
VAR_32 = 'Unable to securely set the permissions of "{0}".'
VAR_32 = msg.format(dir_)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
if VAR_10 is False:
FUNC_0()
def FUNC_5(VAR_6):
if salt.utils.is_windows():
return True
if VAR_6 == salt.utils.get_user():
return True
import pwd # after confirming not running Windows
try:
VAR_45 = pwd.getpwnam(VAR_6)
try:
if hasattr(VAR_54, 'initgroups'):
VAR_54.initgroups(VAR_6, VAR_45.pw_gid) # pylint: disable=minimum-python-version
else:
VAR_54.setgroups(salt.utils.get_gid_list(VAR_6, include_default=False))
VAR_54.setgid(VAR_45.pw_gid)
VAR_54.setuid(VAR_45.pw_uid)
if 'HOME' in VAR_54.environ:
VAR_54.environ['HOME'] = VAR_45.pw_dir
if 'SHELL' in VAR_54.environ:
VAR_54.environ['SHELL'] = VAR_45.pw_shell
for VAR_56 in ('USER', 'LOGNAME'):
if VAR_56 in VAR_54.environ:
VAR_54.environ[VAR_56] = VAR_45.pw_name
except OSError:
VAR_32 = 'Salt configured to run as VAR_6 "{0}" but unable to switch.'
VAR_32 = msg.format(VAR_6)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
return False
except KeyError:
VAR_32 = 'User not found: "{0}"'.format(VAR_6)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
return False
return True
def FUNC_6(VAR_11):
VAR_25 = [VAR_11]
(VAR_26, VAR_27) = VAR_54.path.split(VAR_11)
if VAR_27 == '':
VAR_25 = [VAR_26]
(head, VAR_27) = VAR_54.path.split(VAR_26)
while VAR_26 != VAR_25[0]:
VAR_25.insert(0, VAR_26)
(head, VAR_27) = VAR_54.path.split(VAR_26)
return VAR_25
def FUNC_7(VAR_11, VAR_6='root', VAR_12=False):
for tpath in FUNC_6(VAR_11):
if not VAR_54.access(tpath, VAR_54.R_OK):
VAR_32 = 'Could not access {0}.'.format(tpath)
if not VAR_54.path.exists(tpath):
VAR_32 += ' Path does not exist.'
else:
VAR_34 = salt.utils.get_user()
if VAR_6 != VAR_34:
VAR_32 += ' Try running as VAR_6 {0}.'.format(VAR_6)
else:
VAR_32 += ' Please give {0} read permissions.'.format(VAR_6)
if VAR_12:
return
raise SaltClientError(VAR_32)
def FUNC_8(VAR_13):
VAR_28 = VAR_13.get('max_open_files', 100000)
if sys.platform.startswith('win'):
VAR_46 = VAR_47 = win32file._getmaxstdio()
else:
VAR_46, VAR_47 = resource.getrlimit(resource.RLIMIT_NOFILE)
VAR_29 = VAR_54.path.join(VAR_13.get('pki_dir'), 'minions')
VAR_30 = len(VAR_54.listdir(VAR_29))
VAR_0.debug(
'This salt-master instance has accepted {0} minion keys.'.format(
VAR_30
)
)
VAR_31 = logging.INFO
if (VAR_30 * 4) <= VAR_46:
return
VAR_32 = (
'The number of accepted minion keys({0}) should be lower than 1/4 '
'of the max open VAR_5 soft setting({1}). '.format(
VAR_30, VAR_46
)
)
if VAR_30 >= VAR_46:
VAR_32 += 'salt-master will crash pretty soon! '
VAR_31 = logging.CRITICAL
elif (VAR_30 * 2) >= VAR_46:
VAR_31 = logging.CRITICAL
elif (VAR_30 * 3) >= VAR_46:
VAR_31 = logging.WARNING
elif (VAR_30 * 4) >= VAR_46:
VAR_31 = logging.INFO
if VAR_28 < VAR_47:
VAR_32 += ('According to the system\'s hard limit, there\'s still a '
'margin of {0} to raise the salt\'s max_open_files '
'setting. ').format(VAR_47 - VAR_28)
VAR_32 += 'Please consider raising this value.'
VAR_0.log(VAR_31=level, VAR_32=msg)
def FUNC_9(VAR_14, VAR_11, VAR_15=False):
if not VAR_54.path.isabs(VAR_14):
return ''
if not VAR_54.path.isabs(VAR_11):
VAR_11 = VAR_54.path.join(VAR_14, VAR_11)
VAR_11 = VAR_54.path.normpath(VAR_11)
if VAR_15:
if VAR_11.startswith(VAR_14):
return VAR_11
else:
if VAR_54.path.dirname(VAR_11) == VAR_54.path.normpath(VAR_14):
return VAR_11
return ''
def FUNC_10(VAR_16):
if re.search(r'\.\.\{sep}'.format(sep=VAR_54.sep), VAR_16):
return False
return True
def FUNC_11(VAR_13, VAR_16):
try:
return bool(FUNC_9(VAR_13['pki_dir'], VAR_16)) and FUNC_10(VAR_16)
except (AttributeError, KeyError, TypeError) as e:
return False
def FUNC_12(VAR_17):
VAR_33 = (
'import',
';',
'subprocess',
'eval',
'open',
'file',
'exec',
'input')
for bad in VAR_33:
if VAR_17.count(bad):
return False
return True
def FUNC_13(VAR_13):
VAR_31 = LOG_LEVELS.get(str(VAR_13.get('log_level')).lower(), logging.NOTSET)
if VAR_31 < logging.INFO:
VAR_0.warning('Insecure logging configuration detected! Sensitive data may be logged.')
def FUNC_14(VAR_7, VAR_8=False, VAR_9='', VAR_10=False):
import salt.utils.win_functions
import salt.utils.win_dacl
VAR_11 = VAR_7[0]
while VAR_54.path.basename(VAR_11) not in ['salt', 'salt-tests-tmpdir']:
VAR_11, VAR_48 = VAR_54.path.split(VAR_11)
if not VAR_54.path.isdir(VAR_11):
VAR_54.makedirs(VAR_11)
VAR_34 = salt.utils.win_functions.get_current_user()
if salt.utils.win_functions.is_admin(VAR_34):
try:
salt.utils.win_dacl.set_owner(VAR_11, 'S-1-5-32-544')
except CommandExecutionError:
VAR_32 = 'Unable to securely set the owner of "{0}".'.format(VAR_11)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
if not VAR_8:
try:
VAR_55 = salt.utils.win_dacl.dacl()
VAR_55.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.save(VAR_11, True)
except CommandExecutionError:
VAR_32 = 'Unable to securely set the permissions of ' \
'"{0}".'.format(VAR_11)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
for dir_ in VAR_7:
if not dir_:
continue
if not VAR_54.path.isdir(dir_):
try:
VAR_54.makedirs(dir_)
except OSError as VAR_49:
VAR_32 = 'Failed to create directory VAR_11 "{0}" - {1}\n'
sys.stderr.write(VAR_32.format(dir_, VAR_49))
sys.exit(VAR_49.errno)
if dir_ == VAR_9:
try:
salt.utils.win_dacl.set_owner(VAR_11, 'S-1-5-32-544')
VAR_55 = salt.utils.win_dacl.dacl()
VAR_55.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.save(dir_, True)
except CommandExecutionError:
VAR_32 = 'Unable to securely set the permissions of "{0}".'
VAR_32 = msg.format(dir_)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
if VAR_10 is False:
FUNC_0()
|
from __future__ import absolute_import
import .os
import re
import sys
import stat
import .errno
import .socket
import .logging
try:
import win32file
except ImportError:
import resource
from salt.log import is_console_configured
from salt.log.setup import LOG_LEVELS
from salt.exceptions import SaltClientError, SaltSystemExit, \
CommandExecutionError
import salt.defaults.exitcodes
import salt.utils
VAR_0 = logging.getLogger(__name__)
def FUNC_0():
try:
import zmq
except Exception:
return True
VAR_18 = zmq.__version__
VAR_19 = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?', VAR_18)
if not VAR_19:
VAR_32 = "Using untested zmq python bindings version: '{0}'".format(VAR_18)
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write("WARNING {0}\n".format(VAR_32))
return True
VAR_20, VAR_21, VAR_22 = VAR_19.groups()
if VAR_20.isdigit():
VAR_20 = int(VAR_20)
if VAR_21.isdigit():
VAR_21 = int(VAR_21)
if VAR_22 and VAR_22.isdigit():
VAR_22 = int(VAR_22)
if VAR_20 == 2 and VAR_21 == 1:
if "dev" in VAR_18 and not VAR_22:
VAR_32 = 'Using dev zmq module, please report unexpected results'
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write("WARNING: {0}\n".format(VAR_32))
return True
elif VAR_22 and VAR_22 >= 9:
return True
elif VAR_20 > 2 or (VAR_20 == 2 and VAR_21 > 1):
return True
VAR_0.critical('ZeroMQ python bindings >= 2.1.9 are required')
if 'salt-master' in sys.argv[0]:
VAR_32 = ('The Salt Master is unstable using a ZeroMQ version '
'lower than 2.1.11 and requires this fix: http://lists.zeromq.'
'org/pipermail/zeromq-dev/2011-June/012094.html')
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write('CRITICAL {0}\n'.format(VAR_32))
return False
def FUNC_1(VAR_1):
VAR_23 = socket.AF_INET
try:
VAR_35 = socket.getaddrinfo(
VAR_1 or None, None, socket.AF_UNSPEC, socket.SOCK_STREAM
)
if not VAR_35:
return VAR_23
VAR_36 = VAR_35[0]
return VAR_36[0]
except socket.gaierror:
return VAR_23
def FUNC_2(VAR_2, VAR_3, VAR_4):
VAR_24 = FUNC_1(VAR_2)
for port in VAR_3, VAR_4:
VAR_37 = socket.socket(VAR_24, socket.SOCK_STREAM)
try:
VAR_37.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
VAR_37.bind((VAR_2, int(port)))
except Exception as exc:
VAR_32 = 'Unable to bind socket {0}:{1}'.format(VAR_2, port)
if exc.args:
VAR_32 = '{0}, error: {1}'.format(VAR_32, str(exc))
else:
VAR_32 = '{0}, this might not be a problem.'.format(VAR_32)
msg += '; Is there another salt-master running?'
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write('WARNING: {0}\n'.format(VAR_32))
return False
finally:
VAR_37.close()
return True
def FUNC_3(VAR_5, VAR_6):
if salt.utils.is_windows():
return True
import pwd # after confirming not running Windows
try:
VAR_38 = pwd.getpwnam(VAR_6)
VAR_39 = VAR_38[2]
except KeyError:
VAR_49 = ('Failed to prepare the Salt environment for VAR_6 '
'{0}. The VAR_6 is not available.\n').format(VAR_6)
sys.stderr.write(VAR_49)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for fn_ in VAR_5:
VAR_40 = VAR_54.path.dirname(fn_)
try:
if VAR_40:
try:
VAR_54.makedirs(VAR_40)
except OSError as VAR_49:
if VAR_49.errno != errno.EEXIST:
raise
if not VAR_54.path.isfile(fn_):
with salt.utils.fopen(fn_, 'w+') as fp_:
fp_.write('')
except IOError as VAR_49:
if VAR_54.path.isfile(VAR_40):
VAR_32 = 'Failed to create VAR_11 {0}, is {1} a file?'.format(fn_, VAR_40)
raise SaltSystemExit(VAR_32=msg)
if VAR_49.errno != errno.EACCES:
raise
VAR_32 = 'No permissions to access "{0}", are you running as the correct VAR_6?'.format(fn_)
raise SaltSystemExit(VAR_32=msg)
except OSError as VAR_49:
VAR_32 = 'Failed to create VAR_11 "{0}" - {1}'.format(fn_, VAR_49)
raise SaltSystemExit(VAR_32=msg)
VAR_41 = VAR_54.stat(fn_)
if VAR_39 != VAR_41.st_uid:
try:
VAR_54.chown(fn_, VAR_39, -1)
except OSError:
pass
return True
def FUNC_4(VAR_7, VAR_6, VAR_8=False, VAR_9='', VAR_10=False):
if salt.utils.is_windows():
return FUNC_13(VAR_7, VAR_8, VAR_9, VAR_10)
import pwd # after confirming not running Windows
try:
VAR_38 = pwd.getpwnam(VAR_6)
VAR_39 = VAR_38[2]
VAR_42 = VAR_38[3]
VAR_43 = salt.utils.get_gid_list(VAR_6, include_default=False)
except KeyError:
VAR_49 = ('Failed to prepare the Salt environment for VAR_6 '
'{0}. The VAR_6 is not available.\n').format(VAR_6)
sys.stderr.write(VAR_49)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for dir_ in VAR_7:
if not dir_:
continue
if not VAR_54.path.isdir(dir_):
try:
VAR_52 = VAR_54.umask(18) # 077
VAR_54.makedirs(dir_)
if VAR_54.getuid() == 0:
VAR_54.chown(dir_, VAR_39, VAR_42)
VAR_54.umask(VAR_52)
except OSError as VAR_49:
VAR_32 = 'Failed to create directory VAR_11 "{0}" - {1}\n'
sys.stderr.write(VAR_32.format(dir_, VAR_49))
sys.exit(VAR_49.errno)
VAR_44 = VAR_54.stat(dir_)
if VAR_54.getuid() == 0:
VAR_50 = VAR_54.stat(dir_)
if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
if VAR_8 and VAR_50.st_gid in VAR_43:
pass
else:
VAR_54.chown(dir_, VAR_39, VAR_42)
for VAR_15 in [a for a in VAR_54.listdir(dir_) if 'jobs' not in a]:
VAR_53 = VAR_54.path.join(dir_, VAR_15)
if '{0}jobs'.format(VAR_54.path.sep) in VAR_53:
continue
for VAR_14, VAR_7, VAR_5 in VAR_54.walk(VAR_53):
for name in VAR_5:
if name.startswith('.'):
continue
VAR_11 = VAR_54.path.join(VAR_14, name)
try:
VAR_50 = VAR_54.stat(VAR_11)
except (IOError, OSError):
pass
if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
if VAR_8 and VAR_50.st_gid in VAR_43:
pass
else:
VAR_54.chown(VAR_11, VAR_39, VAR_42)
for name in VAR_7:
VAR_11 = VAR_54.path.join(VAR_14, name)
VAR_50 = VAR_54.stat(VAR_11)
if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
if VAR_8 and VAR_50.st_gid in VAR_43:
pass
else:
VAR_54.chown(VAR_11, VAR_39, VAR_42)
if dir_ == VAR_9:
VAR_51 = stat.S_IMODE(VAR_44.st_mode)
if VAR_51 != 448 and VAR_51 != 488:
if VAR_54.access(dir_, VAR_54.W_OK):
VAR_54.chmod(dir_, 448)
else:
VAR_32 = 'Unable to securely set the permissions of "{0}".'
VAR_32 = msg.format(dir_)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
if VAR_10 is False:
FUNC_0()
def FUNC_5(VAR_6):
if salt.utils.is_windows():
return True
if VAR_6 == salt.utils.get_user():
return True
import pwd # after confirming not running Windows
try:
VAR_45 = pwd.getpwnam(VAR_6)
try:
if hasattr(VAR_54, 'initgroups'):
VAR_54.initgroups(VAR_6, VAR_45.pw_gid) # pylint: disable=minimum-python-version
else:
VAR_54.setgroups(salt.utils.get_gid_list(VAR_6, include_default=False))
VAR_54.setgid(VAR_45.pw_gid)
VAR_54.setuid(VAR_45.pw_uid)
if 'HOME' in VAR_54.environ:
VAR_54.environ['HOME'] = VAR_45.pw_dir
if 'SHELL' in VAR_54.environ:
VAR_54.environ['SHELL'] = VAR_45.pw_shell
for VAR_56 in ('USER', 'LOGNAME'):
if VAR_56 in VAR_54.environ:
VAR_54.environ[VAR_56] = VAR_45.pw_name
except OSError:
VAR_32 = 'Salt configured to run as VAR_6 "{0}" but unable to switch.'
VAR_32 = msg.format(VAR_6)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
return False
except KeyError:
VAR_32 = 'User not found: "{0}"'.format(VAR_6)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
return False
return True
def FUNC_6(VAR_11):
VAR_25 = [VAR_11]
(VAR_26, VAR_27) = VAR_54.path.split(VAR_11)
if VAR_27 == '':
VAR_25 = [VAR_26]
(head, VAR_27) = VAR_54.path.split(VAR_26)
while VAR_26 != VAR_25[0]:
VAR_25.insert(0, VAR_26)
(head, VAR_27) = VAR_54.path.split(VAR_26)
return VAR_25
def FUNC_7(VAR_11, VAR_6='root', VAR_12=False):
for tpath in FUNC_6(VAR_11):
if not VAR_54.access(tpath, VAR_54.R_OK):
VAR_32 = 'Could not access {0}.'.format(tpath)
if not VAR_54.path.exists(tpath):
VAR_32 += ' Path does not exist.'
else:
VAR_34 = salt.utils.get_user()
if VAR_6 != VAR_34:
VAR_32 += ' Try running as VAR_6 {0}.'.format(VAR_6)
else:
VAR_32 += ' Please give {0} read permissions.'.format(VAR_6)
if VAR_12:
return
raise SaltClientError(VAR_32)
def FUNC_8(VAR_13):
VAR_28 = VAR_13.get('max_open_files', 100000)
if sys.platform.startswith('win'):
VAR_46 = VAR_47 = win32file._getmaxstdio()
else:
VAR_46, VAR_47 = resource.getrlimit(resource.RLIMIT_NOFILE)
VAR_29 = VAR_54.path.join(VAR_13.get('pki_dir'), 'minions')
VAR_30 = len(VAR_54.listdir(VAR_29))
VAR_0.debug(
'This salt-master instance has accepted {0} minion keys.'.format(
VAR_30
)
)
VAR_31 = logging.INFO
if (VAR_30 * 4) <= VAR_46:
return
VAR_32 = (
'The number of accepted minion keys({0}) should be lower than 1/4 '
'of the max open VAR_5 soft setting({1}). '.format(
VAR_30, VAR_46
)
)
if VAR_30 >= VAR_46:
VAR_32 += 'salt-master will crash pretty soon! '
VAR_31 = logging.CRITICAL
elif (VAR_30 * 2) >= VAR_46:
VAR_31 = logging.CRITICAL
elif (VAR_30 * 3) >= VAR_46:
VAR_31 = logging.WARNING
elif (VAR_30 * 4) >= VAR_46:
VAR_31 = logging.INFO
if VAR_28 < VAR_47:
VAR_32 += ('According to the system\'s hard limit, there\'s still a '
'margin of {0} to raise the salt\'s max_open_files '
'setting. ').format(VAR_47 - VAR_28)
VAR_32 += 'Please consider raising this value.'
VAR_0.log(VAR_31=level, VAR_32=msg)
def FUNC_9(VAR_14, VAR_11, VAR_15=False):
if not VAR_54.path.isabs(VAR_14):
return ''
if not VAR_54.path.isabs(VAR_11):
VAR_11 = VAR_54.path.join(VAR_14, VAR_11)
VAR_11 = VAR_54.path.normpath(VAR_11)
if VAR_15:
if VAR_11.startswith(VAR_14):
return VAR_11
else:
if VAR_54.path.dirname(VAR_11) == VAR_54.path.normpath(VAR_14):
return VAR_11
return ''
def FUNC_10(VAR_13, VAR_16):
try:
if any(x in VAR_16 for x in ('/', '\\', '\0')):
return False
return bool(FUNC_9(VAR_13['pki_dir'], VAR_16))
except (AttributeError, KeyError, TypeError):
return False
def FUNC_11(VAR_17):
VAR_33 = (
'import',
';',
'subprocess',
'eval',
'open',
'file',
'exec',
'input')
for bad in VAR_33:
if VAR_17.count(bad):
return False
return True
def FUNC_12(VAR_13):
VAR_31 = LOG_LEVELS.get(str(VAR_13.get('log_level')).lower(), logging.NOTSET)
if VAR_31 < logging.INFO:
VAR_0.warning('Insecure logging configuration detected! Sensitive data may be logged.')
def FUNC_13(VAR_7, VAR_8=False, VAR_9='', VAR_10=False):
import salt.utils.win_functions
import salt.utils.win_dacl
VAR_11 = VAR_7[0]
while VAR_54.path.basename(VAR_11) not in ['salt', 'salt-tests-tmpdir']:
VAR_11, VAR_48 = VAR_54.path.split(VAR_11)
if not VAR_54.path.isdir(VAR_11):
VAR_54.makedirs(VAR_11)
VAR_34 = salt.utils.win_functions.get_current_user()
if salt.utils.win_functions.is_admin(VAR_34):
try:
salt.utils.win_dacl.set_owner(VAR_11, 'S-1-5-32-544')
except CommandExecutionError:
VAR_32 = 'Unable to securely set the owner of "{0}".'.format(VAR_11)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
if not VAR_8:
try:
VAR_55 = salt.utils.win_dacl.dacl()
VAR_55.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.save(VAR_11, True)
except CommandExecutionError:
VAR_32 = 'Unable to securely set the permissions of ' \
'"{0}".'.format(VAR_11)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
for dir_ in VAR_7:
if not dir_:
continue
if not VAR_54.path.isdir(dir_):
try:
VAR_54.makedirs(dir_)
except OSError as VAR_49:
VAR_32 = 'Failed to create directory VAR_11 "{0}" - {1}\n'
sys.stderr.write(VAR_32.format(dir_, VAR_49))
sys.exit(VAR_49.errno)
if dir_ == VAR_9:
try:
salt.utils.win_dacl.set_owner(VAR_11, 'S-1-5-32-544')
VAR_55 = salt.utils.win_dacl.dacl()
VAR_55.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.save(dir_, True)
except CommandExecutionError:
VAR_32 = 'Unable to securely set the permissions of "{0}".'
VAR_32 = msg.format(dir_)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
if VAR_10 is False:
FUNC_0()
| [
1,
6,
7,
8,
9,
17,
18,
23,
24,
31,
33,
34,
42,
45,
46,
48,
49,
57,
59,
64,
65,
68,
70,
82,
83,
94,
95,
101,
113,
114,
119,
140,
142,
143,
159,
172,
181,
185,
193,
194,
208,
221,
229,
231,
236,
237,
240,
259,
268,
270,
271,
272,
273,
274,
275,
276,
277,
278,
291,
293,
295,
296,
315,
316,
317,
320,
323,
327,
344,
345,
349,
353,
361,
365,
369,
370,
385,
386,
391,
392,
393,
394,
397,
398,
400,
401,
408,
409,
410,
414,
417,
423,
425,
427,
428,
429,
430,
431,
433,
440,
442,
446,
450,
453,
458,
461,
462,
481,
482,
490,
491,
500,
501,
521,
522,
528,
531,
532,
540,
541,
545,
546,
549,
550,
554,
555,
557,
564,
567,
569,
570,
571,
574,
577,
580,
581,
583,
591,
592,
603,
604,
607,
609,
610,
611,
613,
614,
615,
618,
621,
624,
625,
627,
635,
637,
639,
2,
3,
4,
36,
37,
38,
97,
98,
99,
100,
116,
117,
118,
145,
146,
147,
196,
197,
198,
199,
298,
299,
300,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
372,
373,
374,
375,
376,
377,
403,
404,
405,
464,
465,
466,
467,
468,
484,
485,
486,
493,
494,
495,
503,
504,
505,
506,
507,
524,
525,
526,
534,
535,
536,
537
] | [
1,
6,
7,
8,
9,
17,
18,
23,
24,
31,
33,
34,
42,
45,
46,
48,
49,
57,
59,
64,
65,
68,
70,
82,
83,
94,
95,
101,
113,
114,
119,
140,
142,
143,
159,
172,
181,
185,
193,
194,
208,
221,
229,
231,
236,
237,
240,
259,
268,
270,
271,
272,
273,
274,
275,
276,
277,
278,
291,
293,
295,
296,
315,
316,
317,
320,
323,
327,
344,
345,
349,
353,
361,
365,
369,
370,
385,
386,
391,
392,
393,
394,
397,
398,
400,
401,
408,
409,
410,
414,
417,
423,
425,
427,
428,
429,
430,
431,
433,
440,
442,
446,
450,
453,
458,
461,
462,
481,
482,
493,
494,
514,
515,
521,
524,
525,
533,
534,
538,
539,
542,
543,
547,
548,
550,
557,
560,
562,
563,
564,
567,
570,
573,
574,
576,
584,
585,
596,
597,
600,
602,
603,
604,
606,
607,
608,
611,
614,
617,
618,
620,
628,
630,
632,
2,
3,
4,
36,
37,
38,
97,
98,
99,
100,
116,
117,
118,
145,
146,
147,
196,
197,
198,
199,
298,
299,
300,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
372,
373,
374,
375,
376,
377,
403,
404,
405,
464,
465,
466,
467,
468,
484,
485,
486,
496,
497,
498,
499,
500,
517,
518,
519,
527,
528,
529,
530
] |